Merge branch 'master' into os1-rc

Isaac Visintainer 2020-02-19 16:04:21 -08:00
commit 00a9eb9eab
194 changed files with 31535 additions and 1841 deletions

4
.gitignore vendored
View File

@ -9,6 +9,9 @@ tags
TAGS
cross/
release/
.stack-work
\#*\#
s/*
**/.DS_Store
**/dist
**/node_modules
@ -16,4 +19,5 @@ release/
**/*.swp
**/*.swo
**/*-min.js
.stack-work
pkg/interface/link-webext/web-ext-artifacts

3
.ignore Normal file
View File

@ -0,0 +1,3 @@
.stack-work
./pkg/hs-vere/.stack-work
./pkg/hs-urbit/.stack-work

View File

@ -23,3 +23,4 @@ Matilde Park <matilde@park.computer> <matilde@tlon.io>
pilfer-pandex <pilfer-pandex@users.noreply.github.com> <47340789+pilfer-pandex@users.noreply.github.com>
Robert <robert@tlon.io>
Ted Blackman <ted@tlon.io> <ted@3scan.com>
pkova <pyry.kovanen@gmail.com> <pkova@users.noreply.github.com>

84
.stylish-haskell.yaml Normal file
View File

@ -0,0 +1,84 @@
steps:
# Convert some ASCII sequences to their Unicode equivalents. This is disabled
# by default.
# - unicode_syntax:
# # In order to make this work, we also need to insert the UnicodeSyntax
# # language pragma. If this flag is set to true, we insert it when it's
# # not already present. You may want to disable it if you configure
# # language extensions using some other method than pragmas. Default:
# # true.
# add_language_pragma: true
- simple_align:
cases: true
top_level_patterns: true
records: true
# Import cleanup
- imports:
align: group
list_align: after_alias
pad_module_names: true
long_list_align: inline
empty_list_align: inherit
list_padding: 4
separate_lists: false
space_surround: false
- language_pragmas:
style: vertical
align: true
remove_redundant: true
- tabs:
spaces: 4
- trailing_whitespace: {}
# squash: {}
columns: 80
newline: lf
language_extensions:
- ApplicativeDo
- BangPatterns
- BlockArguments
- DataKinds
- DefaultSignatures
- DeriveAnyClass
- DeriveDataTypeable
- DeriveFoldable
- DeriveGeneric
- DeriveTraversable
- DerivingStrategies
- EmptyDataDecls
- FlexibleContexts
- FlexibleInstances
- FunctionalDependencies
- GADTs
- GeneralizedNewtypeDeriving
- LambdaCase
- MagicHash
- MultiParamTypeClasses
- NamedFieldPuns
- NoImplicitPrelude
- NumericUnderscores
- OverloadedStrings
- PartialTypeSignatures
- PatternSynonyms
- QuasiQuotes
- Rank2Types
- RankNTypes
- RecordWildCards
- ScopedTypeVariables
- StandaloneDeriving
- TemplateHaskell
- TupleSections
- TypeApplications
- TypeFamilies
- TypeOperators
- UnboxedTuples
- UnicodeSyntax
- ViewPatterns

View File

@ -1,20 +1,45 @@
language: nix
nix: 2.1.3
jobs:
include:
- os: linux
language: nix
nix: 2.1.3
env: STACK_YAML=pkg/hs/stack.yaml
before_install:
- git lfs pull
- sh/travis-install-stack
install:
- nix-env -iA cachix -f https://cachix.org/api/v1/install
install:
- nix-env -iA cachix -f https://cachix.org/api/v1/install
- stack --no-terminal --install-ghc build urbit-king --only-dependencies
before_install:
- git lfs pull
script:
- cachix use urbit2
- ./sh/cachix
- make build
- make release
- sh/release-king-linux64-dynamic
- sh/ci-tests
script:
- cachix use urbit2
- ./sh/cachix || true
- os: osx
language: generic
sudo: required
env: STACK_YAML=pkg/hs/stack.yaml
- make
- make release
before_install:
- sh/travis-install-stack
- sh/ci-tests
install:
- stack --no-terminal --install-ghc build urbit-king --only-dependencies
script:
- sh/release-king-darwin-dynamic
cache:
directories:
- $HOME/.ghc
- $HOME/.cabal
- $HOME/.stack
- $TRAVIS_BUILD_DIR/.stack-work
deploy:
- skip_cleanup: true

21
LICENSE.txt Normal file
View File

@ -0,0 +1,21 @@
The MIT License (MIT)
Copyright (c) 2015 Urbit
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

View File

@ -5,9 +5,9 @@
Here lies an informal guide for making hotfix releases and deploying them to
the network.
Take [this recent PR][1], as an example. This constituted a great hotfix.
It's a single commit, targeting a problem that existed on the network at the
time. Here's it should be released and deployed OTA.
Take [this PR][1] as an example. This constituted a great hotfix. It's a
single commit, targeting a problem that existed on the network at the time.
Here's how it should be released and deployed OTA.
[1]: https://github.com/urbit/urbit/pull/2025
@ -16,14 +16,9 @@ time. Here's it should be released and deployed OTA.
Unless it's very trivial, it should probably have a single "credible looking"
review from somebody else on it.
You can just merge the PR in GitHub. As I, `~nidsut-tomdun`, am a l33t
h4x0r, I use a custom merge commit format, gotten by:
```
git merge --no-ff --signoff --log BRANCH
```
with the commit message:
You should avoid merging the PR in GitHub directly. Instead, use the
`sh/merge-with-custom-msg` script -- it will produce a merge commit with a
message along the lines of:
```
Merge branch FOO (#PR_NUM)
@ -32,21 +27,29 @@ Merge branch FOO (#PR_NUM)
bar: ...
baz: ...
Signed-off-by: Jared Tobin <jared@tlon.io>
Signed-off-by: SIGNER <signer@example.com>
```
All this extra wankery is hardly required, but IMO it's nice to have the
commit log information in the merge commit, which GitHub's "Merge PR" button
doesn't do (at least by default).
We do this as it's nice to have the commit log information in the merge commit,
which GitHub's "Merge PR" button doesn't do (at least by default).
`sh/merge-with-custom-msg` performs some useful last-minute urbit-specific
checks, as well.
The script at `sh/merge-with-custom-message` can be used to make this simple(r)
to do. I use `git mu` as an alias for it, locally.
You might want to alias `sh/merge-with-custom-msg` locally, to make it easier
to use. My .git/config contains the following, for example:
```
[alias]
mu = !sh/merge-with-custom-msg
```
so that I can type e.g. `git mu origin/foo 1337`.
### Apply the changes to this era's release branch
This corresponds to the 'vx.y' part of the most recent 'urbit vx.y.z' release.
At the time of writing, we're on v0.10 (and I'll use this branch as a running
example):
For now, the release branch corresponds to the `vx.y` part of the most recent
Vere release (i.e., `urbit vx.y.z`). At the time of writing, we're on v0.10
(and I'll use this branch as a running example):
If the branch doesn't yet exist, just create it via:
@ -55,8 +58,8 @@ git checkout -b v0.10 master
```
If you can get away with merging master to v0.10 without pulling in any
superfluous commits, feel free to do that. Otherwise, you'll want to cherry
pick the commits like so:
superfluous or non-OTA-able commits, feel free to do that. Otherwise, you'll
want to cherry pick the commits like so:
```
git cherry-pick -x TARGET_COMMITS
@ -65,12 +68,43 @@ git cherry-pick -x TARGET_COMMITS
Use the `-x` flag to `git-cherry-pick`, because this will indicate in the
commit message where the things originally came from.
A useful technique is to cherry-pick merge commits on master directly. Take
the following commit, for example:
```
commit 769996d09
Merge: 171fcbd26 8428f0ab1
Author: Jared Tobin <jared@tlon.io>
Date: Sun Feb 2 19:11:04 2020 +0400
Merge branch 'liam-fitzgerald/langserver-doc-autocomplete' (#2204)
* liam-fitzgerald/langserver-doc-autocomplete:
language-server: magic-spoon hover, autocomplete
language-server: build ford prelude
language-server: dynamically compute subject
language-server: revive rune/symbol completion
language-server: add completion JSON parsers
Signed-off-by: Jared Tobin <jared@tlon.io>
```
Rather than cherry-picking the individual commits, one could just use the
following while on the release branch:
```
git cherry-pick -x -m 1 769996d09
```
You can check the man page for `git-cherry-pick(1)` for the details.
Create Landscape or alternative pill builds, if or as appropriate (i.e., if
anything in Landscape changed -- don't trust the compiled JS/CSS that's
anything in Landscape changed -- don't trust any compiled JS/CSS that's
included in the commit).
You may also want to create a brass pill, in particular, as it's convenient for
tooling to be able to boot directly from a given release.
You should always create a solid pill, in particular, as it's convenient for
tooling to be able to boot directly from a given release. If you're making a
Vere release, just play it safe and update all the pills.
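A minimal sketch of what building a solid pill can look like, from the dojo of
a fakezod booted off the release in question (the `%my-release` label is a
placeholder, and the exact generator invocation has varied between releases, so
double-check it against the CI scripts):
```
|label %home %my-release
.my-release/pill +solid /==/my-release/sys, =dub &
```
The pill then lands under the pier at `.urb/put/my-release.pill`.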
### Tag the resulting commit
@ -106,8 +140,7 @@ You can get the "contributions" section by the shortlog between the
last release and this release:
```
git log --pretty=short --no-merges \
LAST_RELEASE..v0.10 | git shortlog
git log --pretty=short LAST_RELEASE.. | git shortlog
```
I originally tried to curate this list somewhat, but now just paste it
@ -121,7 +154,7 @@ If the commit descriptions are too poor to easily do this, then again, yell at
your fellow contributors to make them better in the future.
If it's *not* a trivial hotfix, you should probably make any number of release
candidate tags (e.g. `arvo.yyyy.mm.dd.rc-1`, `arvo.yyyy.mm.dd.rc-2`, ..), test
candidate tags (e.g. `arvo.yyyy.mm.dd.rc1`, `arvo.yyyy.mm.dd.rc2`, ..), test
them, and after you confirm one of them is good, tag the release as
`arvo.yyyy.mm.dd`.
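For instance, assuming signed, annotated tags (the tag annotation is what gets
pasted into the announcement later), a candidate might be cut and pushed like
so, with the tag name and branch as placeholders:
```
git tag -s arvo.yyyy.mm.dd.rc1 v0.10
git push origin arvo.yyyy.mm.dd.rc1
```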
@ -150,13 +183,17 @@ Contributions:
The same spiel re: release candidates applies here.
You should probably avoid putting both Arvo and Vere changes into Vere
releases.
Do not include implicit Arvo changes in Vere releases. This used to be done,
historically, but shouldn't be any longer. If there are Arvo and Vere changes
to be released, make two releases.
### Deploy the update
For Arvo updates, this means copying the files into ~zod's %base desk. For
consistency, I download the release tarball and then rsync the files in:
For Arvo updates, this means copying the files into ~zod's %base desk. The
changes will be synced to /~zod/kids and then propagated through other galaxies
and stars to the rest of the network.
For consistency, I download the release tarball and then rsync the files in:
```
$ wget https://github.com/urbit/urbit/archive/arvo.yyyy.mm.dd.tar.gz
@ -166,13 +203,13 @@ $ rsync -zr --delete urbit-arvo.yyyy.mm.dd/pkg/arvo/ zod/base
$ herb zod -p hood -d "+hood/commit %base"
```
For Vere updates, this means shutting down each desired ship, installing the
new binary, and restarting the pier with it.
For Vere updates, this means simply shutting down each desired ship, installing
the new binary, and restarting the pier with it.
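As a rough sketch -- the binary and pier paths here are placeholders, and the
graceful shutdown is just `^d` at the ship's console:
```
$ cp path/to/new/urbit /usr/local/bin/urbit
$ urbit path/to/pier
```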
### Announce the update
Post an announcement to urbit-dev. The tag annotation, basically, is fine here
-- I usually add the %base hash (for Arvo releases) and the release binary URLs
(for Vere releaes). Check the urbit-dev archives for examples of these
(for Vere releases). Check the urbit-dev archives for examples of these
announcements.

2
pkg/arvo/.gitattributes vendored Normal file
View File

@ -0,0 +1,2 @@
.gitattributes export-ignore
tests export-ignore

3
pkg/arvo/.gitignore vendored
View File

@ -1,3 +0,0 @@
/sec/**/*.atom
*.swp
*.swo

1
pkg/arvo/.ignore Normal file
View File

@ -0,0 +1 @@
app/*/js/*

View File

@ -1,80 +0,0 @@
language: node_js
node_js:
- 4
before_install:
# pwd: ~/urbit/arvo
- cd .travis/
- bash check-trailing-whitespace.sh
- cd ../../
# pwd: ~/urbit
# building vere directly in lieu of a working debian package script
- wget https://github.com/ninja-build/ninja/releases/download/v1.8.2/ninja-linux.zip
- unzip ninja-linux.zip
- sudo mv ninja /usr/bin/
install:
# pwd: ~/urbit
- pip3 install --user -I meson==0.44.1
- git clone https://github.com/urbit/urbit
- cd ./urbit
# pwd: ~/urbit/urbit
- git checkout $(cat ../arvo/.travis/pin-vere-commit.txt)
- ./scripts/bootstrap
- ./scripts/build
- sudo ninja -C build install
- cd ../arvo
# pwd: ~/urbit/arvo
before_script:
- cd .travis
# pwd: ~/urbit/arvo/.travis
- npm install
- bash get-or-build-pill.sh
# https://github.com/travis-ci/travis-ci/issues/2570
script:
- ulimit -c unlimited -S
- npm run -s test; bash print-core-backtrace.sh $?
before_deploy: "[ -d piers ] || { mkdir piers && tar cvzSf piers/zod-$TRAVIS_COMMIT.tgz zod/; }"
addons:
apt:
packages:
- python3
- python3-pip
- libgmp3-dev
- libsigsegv-dev
- openssl
- libssl-dev
- libncurses5-dev
- gcc
- libcurl4-gnutls-dev
- unzip
- gdb
deploy:
- skip_cleanup: true
provider: gcs
access_key_id: GOOGTADOPP55X5ZTH3IKAXQW
secret_access_key:
secure: lALZvAW22oBMCXafvDOkqKDkdP0K8bGKlSb6uhh54z+2bJu49+5vrfxgA9YLcExGiz8uFttzNYhEoAQEjb96DPHAHvH2iJrwieKltrWM4hLkGuSHVSCBIIm+Qe4BVRSVJPQ1rtO1ausNr0XuzO6BVnKY7NCrz8la2XNjm5+miQdtrJUnrfy2JsM/c/Bkwjj3Tc4op9Ne+7Xzc9DI6LB97XiJx5PgeOx1WeZi9IKQ3IhPBHBzBpBrJ4lWxb4PFvDUqNzSk1wuMGy/sH73IFhGcz3CZRZYbeICDdwmHcUnkdPxG6+RLH+YLhSxx175R+HdaARRQvRANxvY9KNJ11NKmV3Rs9q7fZgWZbrptuB0CDMhfZ/Aiz9tgHGV0UVhYHb8n614fDIKzpXwIy5DPjCKpxPoZRVzABQcdzPTvxnZtZDbarsfdfq0vh9xXNPLGuFYZQnZ6iEpv17qp/2TbeCBSMKIxwIG3LQTwr0a4wKL1T/YIZm6oiN6NycHhMHaczQIRANKw9e7oqbgnXu/WnqHIxyTY2CCvzVOgipRmKKa7jz7CcSoP883XZ9o7WAOnfJY+T4ofpdkzHn1ElNXPjDPpX7CUkowNFH4DZk2Ljwe0CgxPOF6ygnsNrqqs4XoNQaBnHGXMq20Upg6OK9MBmZibtlX9STCeSAt4WudekpEOPU=
bucket: ci-piers.urbit.org
local-dir: piers/
acl: public-read
on:
repo: urbit/arvo
all_branches: true
- skip_cleanup: true
provider: gcs
access_key_id: GOOGTADOPP55X5ZTH3IKAXQW
secret_access_key:
secure: lALZvAW22oBMCXafvDOkqKDkdP0K8bGKlSb6uhh54z+2bJu49+5vrfxgA9YLcExGiz8uFttzNYhEoAQEjb96DPHAHvH2iJrwieKltrWM4hLkGuSHVSCBIIm+Qe4BVRSVJPQ1rtO1ausNr0XuzO6BVnKY7NCrz8la2XNjm5+miQdtrJUnrfy2JsM/c/Bkwjj3Tc4op9Ne+7Xzc9DI6LB97XiJx5PgeOx1WeZi9IKQ3IhPBHBzBpBrJ4lWxb4PFvDUqNzSk1wuMGy/sH73IFhGcz3CZRZYbeICDdwmHcUnkdPxG6+RLH+YLhSxx175R+HdaARRQvRANxvY9KNJ11NKmV3Rs9q7fZgWZbrptuB0CDMhfZ/Aiz9tgHGV0UVhYHb8n614fDIKzpXwIy5DPjCKpxPoZRVzABQcdzPTvxnZtZDbarsfdfq0vh9xXNPLGuFYZQnZ6iEpv17qp/2TbeCBSMKIxwIG3LQTwr0a4wKL1T/YIZm6oiN6NycHhMHaczQIRANKw9e7oqbgnXu/WnqHIxyTY2CCvzVOgipRmKKa7jz7CcSoP883XZ9o7WAOnfJY+T4ofpdkzHn1ElNXPjDPpX7CUkowNFH4DZk2Ljwe0CgxPOF6ygnsNrqqs4XoNQaBnHGXMq20Upg6OK9MBmZibtlX9STCeSAt4WudekpEOPU=
bucket: bootstrap.urbit.org
local-dir: built-pill/
acl: public-read
on:
condition: "-d built-pill/"
repo: urbit/arvo
all_branches: true

View File

@ -1,2 +0,0 @@
# Don't show in diffs or auto-merge
package-lock.json binary

View File

@ -1 +0,0 @@
node_modules/

View File

@ -1,11 +0,0 @@
#!/bin/bash
whitespace=$(find .. -path ../.git -prune -o \
-type f -exec egrep -l " +$" {} \;);
if [ ! -z $whitespace ]
then
echo 'found trailing whitespace in:';
echo $whitespace;
exit 1;
fi

View File

@ -1,64 +0,0 @@
#!/bin/bash
set -euo pipefail
set -x
# add urbit-runner to $PATH
PATH=./node_modules/.bin/:$PATH
# XX use -s instead of hash pill
HASH=$(git -C .. log -1 HEAD --format=%H -- sys/)
export PILL_NAME="git-${HASH:0:10}"
if [ ! ${PILL_FORCE:-} ]; then
: Trying pill for commit
wget https://bootstrap.urbit.org/$PILL_NAME.pill -O urbit.pill && exit 0
fi
# if wget failed
if [ ${TRAVIS_COMMIT:-} ] && [ $TRAVIS_COMMIT != $HASH ]; then
: Directory sys/ not modified in commit $TRAVIS_COMMIT
: FIXME ignoring, as current sys/ commits are unlikely to contain the pill-build code
:
# : For auto-build please tag and push $HASH
# exit 1
fi
mkdir prev
{
: Pilling: trying pinned fakezod
wget -i pin-parent-pill-pier.url -O - | tar xvz -C prev/ &&
: Downloaded prev/zod &&
urbit-runner -S prev/zod <<' .'
|autoload |
|mount %
.
[ $? = 0 ] && cp -r ../sys prev/zod/home/ &&
cp ../gen/solid.hoon prev/zod/home/gen/ &&
cp ../lib/pill.hoon prev/zod/home/lib/
} || {
: Pilling: Parent-pill pier not available, trying preceding pill commit
HASH2=$(git -C .. log -2 $HASH --format=%H -- sys/ | tail -1)
PILL_NAME2="git-${HASH2:0:10}"
wget https://bootstrap.urbit.org/$PILL_NAME2.pill -O prev/urbit.pill &&
([ -d prev/zod ] && rm -r prev/zod || true) &&
urbit-runner -A .. -B prev/urbit.pill -cSF zod prev/zod <<' .'
%booted-prev-zod
.
} || {
: Pilling: Out of ideas
exit 1
}
: Pier created, soliding actual pill
urbit-runner -S prev/zod <<.
|label %home %$PILL_NAME
.urbit/pill +solid /==/$PILL_NAME/sys, =dub &
.
cp prev/zod/.urb/put/urbit.pill urbit.pill
mkdir built-pill; cp urbit.pill built-pill/$PILL_NAME.pill
:
: Created $PILL_NAME.pill, to be uploaded if tests pass
:

View File

@ -1,753 +0,0 @@
{
"name": "arvo-tests",
"version": "1.0.0",
"lockfileVersion": 1,
"requires": true,
"dependencies": {
"ajv": {
"version": "5.5.2",
"resolved": "https://registry.npmjs.org/ajv/-/ajv-5.5.2.tgz",
"integrity": "sha1-c7Xuyj+rZT49P5Qis0GtQiBdyWU=",
"requires": {
"co": "^4.6.0",
"fast-deep-equal": "^1.0.0",
"fast-json-stable-stringify": "^2.0.0",
"json-schema-traverse": "^0.3.0"
}
},
"array-differ": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/array-differ/-/array-differ-1.0.0.tgz",
"integrity": "sha1-7/UuN1gknTO+QCuLuOVkuytdQDE="
},
"array-union": {
"version": "1.0.2",
"resolved": "https://registry.npmjs.org/array-union/-/array-union-1.0.2.tgz",
"integrity": "sha1-mjRBDk9OPaI96jdb5b5w8kd47Dk=",
"requires": {
"array-uniq": "^1.0.1"
}
},
"array-uniq": {
"version": "1.0.3",
"resolved": "https://registry.npmjs.org/array-uniq/-/array-uniq-1.0.3.tgz",
"integrity": "sha1-r2rId6Jcx/dOBYiUdThY39sk/bY="
},
"arrify": {
"version": "1.0.1",
"resolved": "https://registry.npmjs.org/arrify/-/arrify-1.0.1.tgz",
"integrity": "sha1-iYUI2iIm84DfkEcoRWhJwVAaSw0="
},
"asap": {
"version": "2.0.6",
"resolved": "https://registry.npmjs.org/asap/-/asap-2.0.6.tgz",
"integrity": "sha1-5QNHYR1+aQlDIIu9r+vLwvuGbUY="
},
"asn1": {
"version": "0.2.4",
"resolved": "https://registry.npmjs.org/asn1/-/asn1-0.2.4.tgz",
"integrity": "sha512-jxwzQpLQjSmWXgwaCZE9Nz+glAG01yF1QnWgbhGwHI5A6FRIEY6IVqtHhIepHqI7/kyEyQEagBC5mBEFlIYvdg==",
"requires": {
"safer-buffer": "~2.1.0"
}
},
"assert-plus": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/assert-plus/-/assert-plus-1.0.0.tgz",
"integrity": "sha1-8S4PPF13sLHN2RRpQuTpbB5N1SU="
},
"asynckit": {
"version": "0.4.0",
"resolved": "https://registry.npmjs.org/asynckit/-/asynckit-0.4.0.tgz",
"integrity": "sha1-x57Zf380y48robyXkLzDZkdLS3k="
},
"aws-sign2": {
"version": "0.7.0",
"resolved": "https://registry.npmjs.org/aws-sign2/-/aws-sign2-0.7.0.tgz",
"integrity": "sha1-tG6JCTSpWR8tL2+G1+ap8bP+dqg="
},
"aws4": {
"version": "1.8.0",
"resolved": "https://registry.npmjs.org/aws4/-/aws4-1.8.0.tgz",
"integrity": "sha512-ReZxvNHIOv88FlT7rxcXIIC0fPt4KZqZbOlivyWtXLt8ESx84zd3kMC6iK5jVeS2qt+g7ftS7ye4fi06X5rtRQ=="
},
"balanced-match": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.0.tgz",
"integrity": "sha1-ibTRmasr7kneFk6gK4nORi1xt2c="
},
"bcrypt-pbkdf": {
"version": "1.0.2",
"resolved": "https://registry.npmjs.org/bcrypt-pbkdf/-/bcrypt-pbkdf-1.0.2.tgz",
"integrity": "sha1-pDAdOJtqQ/m2f/PKEaP2Y342Dp4=",
"requires": {
"tweetnacl": "^0.14.3"
}
},
"bluebird": {
"version": "2.11.0",
"resolved": "http://registry.npmjs.org/bluebird/-/bluebird-2.11.0.tgz",
"integrity": "sha1-U0uQM8AiyVecVro7Plpcqvu2UOE="
},
"brace-expansion": {
"version": "1.1.11",
"resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz",
"integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==",
"requires": {
"balanced-match": "^1.0.0",
"concat-map": "0.0.1"
}
},
"caseless": {
"version": "0.12.0",
"resolved": "https://registry.npmjs.org/caseless/-/caseless-0.12.0.tgz",
"integrity": "sha1-G2gcIf+EAzyCZUMJBolCDRhxUdw="
},
"co": {
"version": "4.6.0",
"resolved": "https://registry.npmjs.org/co/-/co-4.6.0.tgz",
"integrity": "sha1-bqa989hTrlTMuOR7+gvz+QMfsYQ="
},
"colors": {
"version": "1.3.2",
"resolved": "https://registry.npmjs.org/colors/-/colors-1.3.2.tgz",
"integrity": "sha512-rhP0JSBGYvpcNQj4s5AdShMeE5ahMop96cTeDl/v9qQQm2fYClE2QXZRi8wLzc+GmXSxdIqqbOIAhyObEXDbfQ=="
},
"combined-stream": {
"version": "1.0.7",
"resolved": "https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.7.tgz",
"integrity": "sha512-brWl9y6vOB1xYPZcpZde3N9zDByXTosAeMDo4p1wzo6UMOX4vumB+TP1RZ76sfE6Md68Q0NJSrE/gbezd4Ul+w==",
"requires": {
"delayed-stream": "~1.0.0"
}
},
"concat-map": {
"version": "0.0.1",
"resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz",
"integrity": "sha1-2Klr13/Wjfd5OnMDajug1UBdR3s="
},
"core-js": {
"version": "2.5.7",
"resolved": "https://registry.npmjs.org/core-js/-/core-js-2.5.7.tgz",
"integrity": "sha512-RszJCAxg/PP6uzXVXL6BsxSXx/B05oJAQ2vkJRjyjrEcNVycaqOmNb5OTxZPE3xa5gwZduqza6L9JOCenh/Ecw=="
},
"core-util-is": {
"version": "1.0.2",
"resolved": "https://registry.npmjs.org/core-util-is/-/core-util-is-1.0.2.tgz",
"integrity": "sha1-tf1UIgqivFq1eqtxQMlAdUUDwac="
},
"dashdash": {
"version": "1.14.1",
"resolved": "https://registry.npmjs.org/dashdash/-/dashdash-1.14.1.tgz",
"integrity": "sha1-hTz6D3y+L+1d4gMmuN1YEDX24vA=",
"requires": {
"assert-plus": "^1.0.0"
}
},
"del": {
"version": "2.2.2",
"resolved": "https://registry.npmjs.org/del/-/del-2.2.2.tgz",
"integrity": "sha1-wSyYHQZ4RshLyvhiz/kw2Qf/0ag=",
"requires": {
"globby": "^5.0.0",
"is-path-cwd": "^1.0.0",
"is-path-in-cwd": "^1.0.0",
"object-assign": "^4.0.1",
"pify": "^2.0.0",
"pinkie-promise": "^2.0.0",
"rimraf": "^2.2.8"
}
},
"delayed-stream": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-1.0.0.tgz",
"integrity": "sha1-3zrhmayt+31ECqrgsp4icrJOxhk="
},
"ecc-jsbn": {
"version": "0.1.2",
"resolved": "https://registry.npmjs.org/ecc-jsbn/-/ecc-jsbn-0.1.2.tgz",
"integrity": "sha1-OoOpBOVDUyh4dMVkt1SThoSamMk=",
"requires": {
"jsbn": "~0.1.0",
"safer-buffer": "^2.1.0"
}
},
"emitter-mixin": {
"version": "0.0.3",
"resolved": "https://registry.npmjs.org/emitter-mixin/-/emitter-mixin-0.0.3.tgz",
"integrity": "sha1-WUjLKG8uSO3DslGnz8H3iDOW1lw="
},
"errno": {
"version": "0.1.7",
"resolved": "https://registry.npmjs.org/errno/-/errno-0.1.7.tgz",
"integrity": "sha512-MfrRBDWzIWifgq6tJj60gkAwtLNb6sQPlcFrSOflcP1aFmmruKQ2wRnze/8V6kgyz7H3FF8Npzv78mZ7XLLflg==",
"requires": {
"prr": "~1.0.1"
}
},
"escape-string-regexp": {
"version": "1.0.5",
"resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz",
"integrity": "sha1-G2HAViGQqN/2rjuyzwIAyhMLhtQ="
},
"extend": {
"version": "1.2.1",
"resolved": "https://registry.npmjs.org/extend/-/extend-1.2.1.tgz",
"integrity": "sha1-oPX9bPyDpf5J72mNYOyKYk3UV2w="
},
"extsprintf": {
"version": "1.3.0",
"resolved": "https://registry.npmjs.org/extsprintf/-/extsprintf-1.3.0.tgz",
"integrity": "sha1-lpGEQOMEGnpBT4xS48V06zw+HgU="
},
"fast-deep-equal": {
"version": "1.1.0",
"resolved": "http://registry.npmjs.org/fast-deep-equal/-/fast-deep-equal-1.1.0.tgz",
"integrity": "sha1-wFNHeBfIa1HaqFPIHgWbcz0CNhQ="
},
"fast-json-stable-stringify": {
"version": "2.0.0",
"resolved": "https://registry.npmjs.org/fast-json-stable-stringify/-/fast-json-stable-stringify-2.0.0.tgz",
"integrity": "sha1-1RQsDK7msRifh9OnYREGT4bIu/I="
},
"forever-agent": {
"version": "0.6.1",
"resolved": "https://registry.npmjs.org/forever-agent/-/forever-agent-0.6.1.tgz",
"integrity": "sha1-+8cfDEGt6zf5bFd60e1C2P2sypE="
},
"form-data": {
"version": "2.3.3",
"resolved": "https://registry.npmjs.org/form-data/-/form-data-2.3.3.tgz",
"integrity": "sha512-1lLKB2Mu3aGP1Q/2eCOx0fNbRMe7XdwktwOruhfqqd0rIJWwN4Dh+E3hrPSlDCXnSR7UtZ1N38rVXm+6+MEhJQ==",
"requires": {
"asynckit": "^0.4.0",
"combined-stream": "^1.0.6",
"mime-types": "^2.1.12"
}
},
"fs.realpath": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz",
"integrity": "sha1-FQStJSMVjKpA20onh8sBQRmU6k8="
},
"getpass": {
"version": "0.1.7",
"resolved": "https://registry.npmjs.org/getpass/-/getpass-0.1.7.tgz",
"integrity": "sha1-Xv+OPmhNVprkyysSgmBOi6YhSfo=",
"requires": {
"assert-plus": "^1.0.0"
}
},
"glob": {
"version": "7.1.3",
"resolved": "https://registry.npmjs.org/glob/-/glob-7.1.3.tgz",
"integrity": "sha512-vcfuiIxogLV4DlGBHIUOwI0IbrJ8HWPc4MU7HzviGeNho/UJDfi6B5p3sHeWIQ0KGIU0Jpxi5ZHxemQfLkkAwQ==",
"requires": {
"fs.realpath": "^1.0.0",
"inflight": "^1.0.4",
"inherits": "2",
"minimatch": "^3.0.4",
"once": "^1.3.0",
"path-is-absolute": "^1.0.0"
}
},
"globby": {
"version": "5.0.0",
"resolved": "https://registry.npmjs.org/globby/-/globby-5.0.0.tgz",
"integrity": "sha1-69hGZ8oNuzMLmbz8aOrCvFQ3Dg0=",
"requires": {
"array-union": "^1.0.1",
"arrify": "^1.0.0",
"glob": "^7.0.3",
"object-assign": "^4.0.1",
"pify": "^2.0.0",
"pinkie-promise": "^2.0.0"
}
},
"graceful-fs": {
"version": "4.1.15",
"resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.1.15.tgz",
"integrity": "sha512-6uHUhOPEBgQ24HM+r6b/QwWfZq+yiFcipKFrOFiBEnWdy5sdzYoi+pJeQaPI5qOLRFqWmAXUPQNsielzdLoecA=="
},
"har-schema": {
"version": "2.0.0",
"resolved": "https://registry.npmjs.org/har-schema/-/har-schema-2.0.0.tgz",
"integrity": "sha1-qUwiJOvKwEeCoNkDVSHyRzW37JI="
},
"har-validator": {
"version": "5.1.0",
"resolved": "https://registry.npmjs.org/har-validator/-/har-validator-5.1.0.tgz",
"integrity": "sha512-+qnmNjI4OfH2ipQ9VQOw23bBd/ibtfbVdK2fYbY4acTDqKTW/YDp9McimZdDbG8iV9fZizUqQMD5xvriB146TA==",
"requires": {
"ajv": "^5.3.0",
"har-schema": "^2.0.0"
}
},
"hoek": {
"version": "4.2.1",
"resolved": "http://registry.npmjs.org/hoek/-/hoek-4.2.1.tgz",
"integrity": "sha512-QLg82fGkfnJ/4iy1xZ81/9SIJiq1NGFUMGs6ParyjBZr6jW2Ufj/snDqTHixNlHdPNwN2RLVD0Pi3igeK9+JfA=="
},
"http-signature": {
"version": "1.2.0",
"resolved": "https://registry.npmjs.org/http-signature/-/http-signature-1.2.0.tgz",
"integrity": "sha1-muzZJRFHcvPZW2WmCruPfBj7rOE=",
"requires": {
"assert-plus": "^1.0.0",
"jsprim": "^1.2.2",
"sshpk": "^1.7.0"
}
},
"inflight": {
"version": "1.0.6",
"resolved": "https://registry.npmjs.org/inflight/-/inflight-1.0.6.tgz",
"integrity": "sha1-Sb1jMdfQLQwJvJEKEHW6gWW1bfk=",
"requires": {
"once": "^1.3.0",
"wrappy": "1"
}
},
"inherits": {
"version": "2.0.3",
"resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.3.tgz",
"integrity": "sha1-Yzwsg+PaQqUC9SRmAiSA9CCCYd4="
},
"is-path-cwd": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/is-path-cwd/-/is-path-cwd-1.0.0.tgz",
"integrity": "sha1-0iXsIxMuie3Tj9p2dHLmLmXxEG0="
},
"is-path-in-cwd": {
"version": "1.0.1",
"resolved": "https://registry.npmjs.org/is-path-in-cwd/-/is-path-in-cwd-1.0.1.tgz",
"integrity": "sha512-FjV1RTW48E7CWM7eE/J2NJvAEEVektecDBVBE5Hh3nM1Jd0kvhHtX68Pr3xsDf857xt3Y4AkwVULK1Vku62aaQ==",
"requires": {
"is-path-inside": "^1.0.0"
}
},
"is-path-inside": {
"version": "1.0.1",
"resolved": "https://registry.npmjs.org/is-path-inside/-/is-path-inside-1.0.1.tgz",
"integrity": "sha1-jvW33lBDej/cprToZe96pVy0gDY=",
"requires": {
"path-is-inside": "^1.0.1"
}
},
"is-typedarray": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/is-typedarray/-/is-typedarray-1.0.0.tgz",
"integrity": "sha1-5HnICFjfDBsR3dppQPlgEfzaSpo="
},
"isemail": {
"version": "2.2.1",
"resolved": "http://registry.npmjs.org/isemail/-/isemail-2.2.1.tgz",
"integrity": "sha1-A1PT2aYpUQgMJiwqoKQrjqjp4qY="
},
"isstream": {
"version": "0.1.2",
"resolved": "https://registry.npmjs.org/isstream/-/isstream-0.1.2.tgz",
"integrity": "sha1-R+Y/evVa+m+S4VAOaQ64uFKcCZo="
},
"items": {
"version": "2.1.1",
"resolved": "https://registry.npmjs.org/items/-/items-2.1.1.tgz",
"integrity": "sha1-i9FtnIOxlSneWuoyGsqtp4NkoZg="
},
"joi": {
"version": "9.2.0",
"resolved": "http://registry.npmjs.org/joi/-/joi-9.2.0.tgz",
"integrity": "sha1-M4WseQGSEwy+Iw6ALsAskhW7/to=",
"requires": {
"hoek": "4.x.x",
"isemail": "2.x.x",
"items": "2.x.x",
"moment": "2.x.x",
"topo": "2.x.x"
}
},
"jsbn": {
"version": "0.1.1",
"resolved": "https://registry.npmjs.org/jsbn/-/jsbn-0.1.1.tgz",
"integrity": "sha1-peZUwuWi3rXyAdls77yoDA7y9RM="
},
"json-schema": {
"version": "0.2.3",
"resolved": "https://registry.npmjs.org/json-schema/-/json-schema-0.2.3.tgz",
"integrity": "sha1-tIDIkuWaLwWVTOcnvT8qTogvnhM="
},
"json-schema-traverse": {
"version": "0.3.1",
"resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-0.3.1.tgz",
"integrity": "sha1-NJptRMU6Ud6JtAgFxdXlm0F9M0A="
},
"json-stringify-safe": {
"version": "5.0.1",
"resolved": "https://registry.npmjs.org/json-stringify-safe/-/json-stringify-safe-5.0.1.tgz",
"integrity": "sha1-Epai1Y/UXxmg9s4B1lcB4sc1tus="
},
"jsprim": {
"version": "1.4.1",
"resolved": "https://registry.npmjs.org/jsprim/-/jsprim-1.4.1.tgz",
"integrity": "sha1-MT5mvB5cwG5Di8G3SZwuXFastqI=",
"requires": {
"assert-plus": "1.0.0",
"extsprintf": "1.3.0",
"json-schema": "0.2.3",
"verror": "1.10.0"
}
},
"junk": {
"version": "1.0.3",
"resolved": "https://registry.npmjs.org/junk/-/junk-1.0.3.tgz",
"integrity": "sha1-h75jSIZJy9ym9Tqzm+yczSNH9ZI="
},
"maximatch": {
"version": "0.1.0",
"resolved": "https://registry.npmjs.org/maximatch/-/maximatch-0.1.0.tgz",
"integrity": "sha1-hs2NawTJ8wfAWmuUGZBtA2D7E6I=",
"requires": {
"array-differ": "^1.0.0",
"array-union": "^1.0.1",
"arrify": "^1.0.0",
"minimatch": "^3.0.0"
}
},
"mime-db": {
"version": "1.37.0",
"resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.37.0.tgz",
"integrity": "sha512-R3C4db6bgQhlIhPU48fUtdVmKnflq+hRdad7IyKhtFj06VPNVdk2RhiYL3UjQIlso8L+YxAtFkobT0VK+S/ybg=="
},
"mime-types": {
"version": "2.1.21",
"resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.21.tgz",
"integrity": "sha512-3iL6DbwpyLzjR3xHSFNFeb9Nz/M8WDkX33t1GFQnFOllWk8pOrh/LSrB5OXlnlW5P9LH73X6loW/eogc+F5lJg==",
"requires": {
"mime-db": "~1.37.0"
}
},
"minimatch": {
"version": "3.0.4",
"resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.0.4.tgz",
"integrity": "sha512-yJHVQEhyqPLUTgt9B83PXu6W3rx4MvvHvSUvToogpwoGDOUQ+yDrR0HRot+yOCdCO7u4hX3pWft6kWBBcqh0UA==",
"requires": {
"brace-expansion": "^1.1.7"
}
},
"minimist": {
"version": "0.0.8",
"resolved": "http://registry.npmjs.org/minimist/-/minimist-0.0.8.tgz",
"integrity": "sha1-hX/Kv8M5fSYluCKCYuhqp6ARsF0="
},
"mkdirp": {
"version": "0.5.1",
"resolved": "http://registry.npmjs.org/mkdirp/-/mkdirp-0.5.1.tgz",
"integrity": "sha1-MAV0OOrGz3+MR2fzhkjWaX11yQM=",
"requires": {
"minimist": "0.0.8"
}
},
"moment": {
"version": "2.22.2",
"resolved": "https://registry.npmjs.org/moment/-/moment-2.22.2.tgz",
"integrity": "sha1-PCV/mDn8DpP/UxSWMiOeuQeD/2Y="
},
"nan": {
"version": "2.3.5",
"resolved": "http://registry.npmjs.org/nan/-/nan-2.3.5.tgz",
"integrity": "sha1-gioNwmYpDOTNOhIoLKPn42Rmigg="
},
"oauth-sign": {
"version": "0.9.0",
"resolved": "https://registry.npmjs.org/oauth-sign/-/oauth-sign-0.9.0.tgz",
"integrity": "sha512-fexhUFFPTGV8ybAtSIGbV6gOkSv8UtRbDBnAyLQw4QPKkgNlsH2ByPGtMUqdWkos6YCRmAqViwgZrJc/mRDzZQ=="
},
"object-assign": {
"version": "4.1.1",
"resolved": "https://registry.npmjs.org/object-assign/-/object-assign-4.1.1.tgz",
"integrity": "sha1-IQmtx5ZYh8/AXLvUQsrIv7s2CGM="
},
"once": {
"version": "1.4.0",
"resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz",
"integrity": "sha1-WDsap3WWHUsROsF9nFC6753Xa9E=",
"requires": {
"wrappy": "1"
}
},
"path-is-absolute": {
"version": "1.0.1",
"resolved": "https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.1.tgz",
"integrity": "sha1-F0uSaHNVNP+8es5r9TpanhtcX18="
},
"path-is-inside": {
"version": "1.0.2",
"resolved": "https://registry.npmjs.org/path-is-inside/-/path-is-inside-1.0.2.tgz",
"integrity": "sha1-NlQX3t5EQw0cEa9hAn+s8HS9/FM="
},
"performance-now": {
"version": "2.1.0",
"resolved": "https://registry.npmjs.org/performance-now/-/performance-now-2.1.0.tgz",
"integrity": "sha1-Ywn04OX6kT7BxpMHrjZLSzd8nns="
},
"pify": {
"version": "2.3.0",
"resolved": "http://registry.npmjs.org/pify/-/pify-2.3.0.tgz",
"integrity": "sha1-7RQaasBDqEnqWISY59yosVMw6Qw="
},
"pinkie": {
"version": "2.0.4",
"resolved": "https://registry.npmjs.org/pinkie/-/pinkie-2.0.4.tgz",
"integrity": "sha1-clVrgM+g1IqXToDnckjoDtT3+HA="
},
"pinkie-promise": {
"version": "2.0.1",
"resolved": "https://registry.npmjs.org/pinkie-promise/-/pinkie-promise-2.0.1.tgz",
"integrity": "sha1-ITXW36ejWMBprJsXh3YogihFD/o=",
"requires": {
"pinkie": "^2.0.0"
}
},
"promise": {
"version": "7.3.1",
"resolved": "https://registry.npmjs.org/promise/-/promise-7.3.1.tgz",
"integrity": "sha512-nolQXZ/4L+bP/UGlkfaIujX9BKxGwmQ9OT4mOt5yvy8iK1h3wqTEJCijzGANTCCl9nWjY41juyAn2K3Q1hLLTg==",
"requires": {
"asap": "~2.0.3"
}
},
"promise-streams": {
"version": "2.1.1",
"resolved": "https://registry.npmjs.org/promise-streams/-/promise-streams-2.1.1.tgz",
"integrity": "sha1-cwnx02mDMOp/rasZIvE5iSKayFo=",
"requires": {
"bluebird": "^2.10.2"
}
},
"prr": {
"version": "1.0.1",
"resolved": "https://registry.npmjs.org/prr/-/prr-1.0.1.tgz",
"integrity": "sha1-0/wRS6BplaRexok/SEzrHXj19HY="
},
"psl": {
"version": "1.1.29",
"resolved": "https://registry.npmjs.org/psl/-/psl-1.1.29.tgz",
"integrity": "sha512-AeUmQ0oLN02flVHXWh9sSJF7mcdFq0ppid/JkErufc3hGIV/AMa8Fo9VgDo/cT2jFdOWoFvHp90qqBH54W+gjQ=="
},
"pty.js": {
"version": "0.3.1",
"resolved": "https://registry.npmjs.org/pty.js/-/pty.js-0.3.1.tgz",
"integrity": "sha1-gfW+0zLW5eeraFaI0boDc0ENUbU=",
"requires": {
"extend": "~1.2.1",
"nan": "2.3.5"
}
},
"punycode": {
"version": "1.4.1",
"resolved": "https://registry.npmjs.org/punycode/-/punycode-1.4.1.tgz",
"integrity": "sha1-wNWmOycYgArY4esPpSachN1BhF4="
},
"qs": {
"version": "6.5.2",
"resolved": "https://registry.npmjs.org/qs/-/qs-6.5.2.tgz",
"integrity": "sha512-N5ZAX4/LxJmF+7wN74pUD6qAh9/wnvdQcjq9TZjevvXzSUo7bfmw91saqMjzGS2xq91/odN2dW/WOl7qQHNDGA=="
},
"recursive-copy": {
"version": "2.0.9",
"resolved": "https://registry.npmjs.org/recursive-copy/-/recursive-copy-2.0.9.tgz",
"integrity": "sha512-0AkHV+QtfS/1jW01z3m2t/TRTW56Fpc+xYbsoa/bqn8BCYPwmsaNjlYmUU/dyGg9w8MmGoUWihU5W+s+qjxvBQ==",
"requires": {
"del": "^2.2.0",
"emitter-mixin": "0.0.3",
"errno": "^0.1.2",
"graceful-fs": "^4.1.4",
"junk": "^1.0.1",
"maximatch": "^0.1.0",
"mkdirp": "^0.5.1",
"pify": "^2.3.0",
"promise": "^7.0.1",
"slash": "^1.0.0"
}
},
"request": {
"version": "2.88.0",
"resolved": "https://registry.npmjs.org/request/-/request-2.88.0.tgz",
"integrity": "sha512-NAqBSrijGLZdM0WZNsInLJpkJokL72XYjUpnB0iwsRgxh7dB6COrHnTBNwN0E+lHDAJzu7kLAkDeY08z2/A0hg==",
"requires": {
"aws-sign2": "~0.7.0",
"aws4": "^1.8.0",
"caseless": "~0.12.0",
"combined-stream": "~1.0.6",
"extend": "~3.0.2",
"forever-agent": "~0.6.1",
"form-data": "~2.3.2",
"har-validator": "~5.1.0",
"http-signature": "~1.2.0",
"is-typedarray": "~1.0.0",
"isstream": "~0.1.2",
"json-stringify-safe": "~5.0.1",
"mime-types": "~2.1.19",
"oauth-sign": "~0.9.0",
"performance-now": "^2.1.0",
"qs": "~6.5.2",
"safe-buffer": "^5.1.2",
"tough-cookie": "~2.4.3",
"tunnel-agent": "^0.6.0",
"uuid": "^3.3.2"
},
"dependencies": {
"extend": {
"version": "3.0.2",
"resolved": "https://registry.npmjs.org/extend/-/extend-3.0.2.tgz",
"integrity": "sha512-fjquC59cD7CyW6urNXK0FBufkZcoiGG80wTuPujX590cB5Ttln20E2UB4S/WARVqhXffZl2LNgS+gQdPIIim/g=="
}
}
},
"rimraf": {
"version": "2.6.2",
"resolved": "https://registry.npmjs.org/rimraf/-/rimraf-2.6.2.tgz",
"integrity": "sha512-lreewLK/BlghmxtfH36YYVg1i8IAce4TI7oao75I1g245+6BctqTVQiBP3YUJ9C6DQOXJmkYR9X9fCLtCOJc5w==",
"requires": {
"glob": "^7.0.5"
}
},
"rx": {
"version": "4.1.0",
"resolved": "https://registry.npmjs.org/rx/-/rx-4.1.0.tgz",
"integrity": "sha1-pfE/957zt0D+MKqAP7CfmIBdR4I="
},
"safe-buffer": {
"version": "5.1.2",
"resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.2.tgz",
"integrity": "sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g=="
},
"safer-buffer": {
"version": "2.1.2",
"resolved": "https://registry.npmjs.org/safer-buffer/-/safer-buffer-2.1.2.tgz",
"integrity": "sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg=="
},
"slash": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/slash/-/slash-1.0.0.tgz",
"integrity": "sha1-xB8vbDn8FtHNF61LXYlhFK5HDVU="
},
"split": {
"version": "1.0.1",
"resolved": "https://registry.npmjs.org/split/-/split-1.0.1.tgz",
"integrity": "sha512-mTyOoPbrivtXnwnIxZRFYRrPNtEFKlpB2fvjSnCQUiAA6qAZzqwna5envK4uk6OIeP17CsdF3rSBGYVBsU0Tkg==",
"requires": {
"through": "2"
}
},
"sshpk": {
"version": "1.15.2",
"resolved": "https://registry.npmjs.org/sshpk/-/sshpk-1.15.2.tgz",
"integrity": "sha512-Ra/OXQtuh0/enyl4ETZAfTaeksa6BXks5ZcjpSUNrjBr0DvrJKX+1fsKDPpT9TBXgHAFsa4510aNVgI8g/+SzA==",
"requires": {
"asn1": "~0.2.3",
"assert-plus": "^1.0.0",
"bcrypt-pbkdf": "^1.0.0",
"dashdash": "^1.12.0",
"ecc-jsbn": "~0.1.1",
"getpass": "^0.1.1",
"jsbn": "~0.1.0",
"safer-buffer": "^2.0.2",
"tweetnacl": "~0.14.0"
}
},
"stream-snitch": {
"version": "0.0.3",
"resolved": "https://registry.npmjs.org/stream-snitch/-/stream-snitch-0.0.3.tgz",
"integrity": "sha1-iXp48TonFPqESqd74VR3qJbYUqk="
},
"through": {
"version": "2.3.8",
"resolved": "http://registry.npmjs.org/through/-/through-2.3.8.tgz",
"integrity": "sha1-DdTJ/6q8NXlgsbckEV1+Doai4fU="
},
"topo": {
"version": "2.0.2",
"resolved": "http://registry.npmjs.org/topo/-/topo-2.0.2.tgz",
"integrity": "sha1-zVYVdSU5BXwNwEkaYhw7xvvh0YI=",
"requires": {
"hoek": "4.x.x"
}
},
"tough-cookie": {
"version": "2.4.3",
"resolved": "https://registry.npmjs.org/tough-cookie/-/tough-cookie-2.4.3.tgz",
"integrity": "sha512-Q5srk/4vDM54WJsJio3XNn6K2sCG+CQ8G5Wz6bZhRZoAe/+TxjWB/GlFAnYEbkYVlON9FMk/fE3h2RLpPXo4lQ==",
"requires": {
"psl": "^1.1.24",
"punycode": "^1.4.1"
}
},
"tunnel-agent": {
"version": "0.6.0",
"resolved": "https://registry.npmjs.org/tunnel-agent/-/tunnel-agent-0.6.0.tgz",
"integrity": "sha1-J6XeoGs2sEoKmWZ3SykIaPD8QP0=",
"requires": {
"safe-buffer": "^5.0.1"
}
},
"tweetnacl": {
"version": "0.14.5",
"resolved": "https://registry.npmjs.org/tweetnacl/-/tweetnacl-0.14.5.tgz",
"integrity": "sha1-WuaBd/GS1EViadEIr6k/+HQ/T2Q="
},
"urbit-runner": {
"version": "github:urbit/runner-js#ee2455015dc4ea243d0e0ec623975632c9249c4e",
"from": "github:urbit/runner-js#ee24550",
"requires": {
"colors": "^1.1.2",
"escape-string-regexp": "^1.0.5",
"once": "^1.4.0",
"promise-streams": "^2.1.1",
"pty.js": "^0.3.1",
"recursive-copy": "^2.0.7",
"split": "^1.0.1",
"stream-snitch": "0.0.3",
"wait-on": "^2.0.2"
}
},
"uuid": {
"version": "3.3.2",
"resolved": "https://registry.npmjs.org/uuid/-/uuid-3.3.2.tgz",
"integrity": "sha512-yXJmeNaw3DnnKAOKJE51sL/ZaYfWJRl1pK9dr19YFCu0ObS231AB1/LbqTKRAQ5kw8A90rA6fr4riOUpTZvQZA=="
},
"verror": {
"version": "1.10.0",
"resolved": "https://registry.npmjs.org/verror/-/verror-1.10.0.tgz",
"integrity": "sha1-OhBcoXBTr1XW4nDB+CiGguGNpAA=",
"requires": {
"assert-plus": "^1.0.0",
"core-util-is": "1.0.2",
"extsprintf": "^1.2.0"
}
},
"wait-on": {
"version": "2.1.2",
"resolved": "https://registry.npmjs.org/wait-on/-/wait-on-2.1.2.tgz",
"integrity": "sha512-Jm6pzZkbswtcRUXohxY1Ek5MrL16AwHj83drgW2FTQuglHuhZhVMyBLPIYG0rL1wvr5rdC1uzRuU/7Bc+B9Pwg==",
"requires": {
"core-js": "^2.4.1",
"joi": "^9.2.0",
"minimist": "^1.2.0",
"request": "^2.78.0",
"rx": "^4.1.0"
},
"dependencies": {
"minimist": {
"version": "1.2.0",
"resolved": "http://registry.npmjs.org/minimist/-/minimist-1.2.0.tgz",
"integrity": "sha1-o1AIsg9BOD7sH7kU9M1d95omQoQ="
}
}
},
"wrappy": {
"version": "1.0.2",
"resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz",
"integrity": "sha1-tSQ9jz7BqjXxNkYFvA0QNuMKtp8="
}
}
}

View File

@ -1,14 +0,0 @@
{
"name": "arvo-tests",
"version": "1.0.0",
"description": "Test harness for Urbit arvo distribution",
"scripts": {
"test": "node test.js"
},
"private": true,
"author": "~fyr",
"license": "MIT",
"dependencies": {
"urbit-runner": "github:urbit/runner-js#ee24550"
}
}

View File

@ -1 +0,0 @@
https://ci-piers.urbit.org/zod-d71780001aed3ba464d8b24f223f6bc597236718.tgz

View File

@ -1 +0,0 @@
d0401f0034e348ec1db498f2c7884194d99b6de4

View File

@ -1,17 +0,0 @@
#!/bin/bash
set -euo pipefail
set -x
RESULT=$1
if [[ ${RESULT} -eq 0 ]]; then
exit 0
else
for i in $(find ./ -maxdepth 1 -name 'core*' -print)
do
gdb urbit core* -ex "thread apply all bt" -ex "set pagination 0" -batch
done
fi
echo "build failed with status code $RESULT"
exit $RESULT

View File

@ -1,134 +0,0 @@
'use strict';
var fs = require('fs')
var runner = require('urbit-runner')
var Urbit = runner.Urbit;
var ERROR = runner.ERROR;
var actions = runner.actions
var args = ['-B', 'urbit.pill', '-A', '..', '-cSF', 'zod', 'zod'];
var urbit = new Urbit(args);
// XX upstream this into runner-js
//
function rePill(urb) {
return new Promise(function(resolve,reject){
fs.stat('./built-pill/', function(err, stat) {
if (err) return resolve()
fs.readdir('./built-pill/', function(err, files) {
if (err || (1 !== files.length)) {
return resolve()
}
var name = files[0].replace(/\.pill$/, '')
urb.note('re-soliding pill')
return urb.expect(/dojo> /)
.then(function(){
return urb.line('|label %home %' + name)
})
.then(function(){
return urb.expect(/dojo> /)
})
.then(function(){
return urb.line('.latest/pill +solid /==/' + name + '/sys')
})
.then(function(){
return urb.expectEcho("%resolid")
})
.then(function(){
return urb.resetListeners();
})
.then(function(){
var write = fs.createWriteStream('./built-pill/' + name + '.pill')
var read = fs.createReadStream('./zod/.urb/put/latest.pill')
read.on('error', function(err){
return reject(err)
})
write.on('error', function(err){
return reject(err)
})
write.on('finish', function(){
return resolve()
})
return read.pipe(write)
})
// XX find a better way to add this to the promise chain
//
.then(function(){
return barMass(urb);
})
.catch(function(err){
return reject(err)
});
})
})
})
}
// XX upstream this into runner-js
//
function barMass(urb) {
return urb.line("|mass")
.then(function(){
return urb.expectEcho("%ran-mass")
.then(function(){ return urb.resetListeners(); })
})
}
function aqua(urb) {
return urb.line("|start %ph")
.then(function(){
return urb.line(":ph|init");
})
.then(function(){
return urb.line(":aqua &pill +solid");
})
.then(function(){
urb.every(/TEST [^ ]* FAILED/, function(arg){
throw Error(arg);
});
return urb.line(":ph|run %hi");
})
.then(function(){
return urb.expectEcho("ALL TESTS SUCCEEDED")
.then(function(){ return urb.resetListeners(); })
})
}
Promise.resolve(urbit)
.then(actions.safeBoot)
.then(function(){
return barMass(urbit);
})
.then(actions.test)
.then(actions.testCores)
.then(actions.testRenderers)
.then(function(){
return barMass(urbit);
})
.then(function(){
return aqua(urbit);
})
.then(function(){
return rePill(urbit);
})
.then(function(){
return urbit.expect(/dojo> /);
})
.then(function(){
return urbit.exit(0);
})
.catch(function(err){
return urbit.waitSilent()
.then(function(){
urbit.warn('Test aborted:', err);
return urbit.exit(1);
});
});

View File

@ -12,7 +12,7 @@
/- *chat-store, *chat-view, *chat-hook,
*permission-store, *group-store, *invite-store,
sole-sur=sole
/+ sole-lib=sole, chat-eval, default-agent, verb,
/+ sole-lib=sole, chat-eval, default-agent, verb, dbug,
auto=language-server-complete
::
|%
@ -73,9 +73,11 @@
--
=| state
=* all-state -
::
%- agent:dbug
%+ verb |
^- agent:gall
=<
%+ verb |
^- agent:gall
|_ =bowl:gall
+* this .
talk-core +>

View File

@ -3,7 +3,7 @@
:: allow sending chat messages to foreign paths based on write perms
::
/- *permission-store, *chat-hook, *invite-store
/+ *chat-json, *chat-eval, default-agent, verb
/+ *chat-json, *chat-eval, default-agent, verb, dbug
|%
+$ card card:agent:gall
::
@ -31,6 +31,8 @@
--
=| state-zero
=* state -
::
%- agent:dbug
%+ verb |
^- agent:gall
=<

View File

@ -1,6 +1,6 @@
:: chat-store: data store that holds linear sequences of chat messages
::
/+ *chat-json, *chat-eval, default-agent
/+ *chat-json, *chat-eval, default-agent, verb, dbug
|%
+$ card card:agent:gall
+$ versioned-state
@ -21,6 +21,9 @@
::
=| state-zero
=* state -
::
%- agent:dbug
%+ verb |
^- agent:gall
=<
|_ =bowl:gall

View File

@ -7,7 +7,7 @@
*invite-store,
*permission-group-hook,
*chat-hook
/+ *server, *chat-json, default-agent
/+ *server, *chat-json, default-agent, verb, dbug
/= index
/^ octs
/; as-octs:mimes:html
@ -52,6 +52,8 @@
[%permission-group-hook-action permission-group-hook-action]
==
--
%- agent:dbug
%+ verb |
^- agent:gall
=<
|_ bol=bowl:gall

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

View File

@ -1,7 +1,7 @@
:: group-hook: allow syncing group data from foreign paths to local paths
::
/- *group-store, *group-hook
/+ default-agent
/+ default-agent, verb, dbug
|%
+$ card card:agent:gall
::
@ -19,6 +19,9 @@
::
=| state-zero
=* state -
::
%- agent:dbug
%+ verb |
^- agent:gall
=<
|_ =bowl:gall

View File

@ -1,7 +1,7 @@
:: group-store: data store for groups of ships
::
/- *group-store
/+ default-agent
/+ default-agent, verb, dbug
|%
+$ card card:agent:gall
::
@ -22,6 +22,9 @@
::
=| state-zero
=* state -
::
%- agent:dbug
%+ verb |
^- agent:gall
=<
|_ =bowl:gall

View File

@ -1,8 +1,10 @@
/- lsp-sur=language-server
/+ *server,
auto=language-server-complete,
lsp-parser=language-server-parser,
easy-print=language-server-easy-print,
rune-snippet=language-server-rune-snippet,
build=language-server-build,
default-agent
|%
+$ card card:agent:gall
@ -29,10 +31,20 @@
+$ position
[row=@ud col=@ud]
::
+$ all-state bufs=(map uri=@t buf=wall)
+$ state-zero
$: %0
bufs=(map uri=@t buf=wall)
builds=(map uri=@t =vase)
ford-diagnostics=(map uri=@t (list diagnostic:lsp-sur))
preludes=(map uri=@t type)
==
+$ versioned-state
$%
state-zero
==
--
^- agent:gall
=| all-state
=| state-zero
=* state -
=<
|_ =bowl:gall
@ -57,7 +69,7 @@
|= old-state=vase
^- (quip card _this)
~& > %lsp-upgrade
[~ this(state !<(all-state old-state))]
[~ this(state *state-zero)]
::
++ on-poke
^+ on-poke:*agent:gall
@ -65,26 +77,31 @@
^- (quip card _this)
=^ cards state
?+ mark (on-poke:def mark vase)
%handle-http-request
(handle-http-request:lsp !<([eyre-id=@ta inbound-request:eyre] vase))
%language-server-rpc-notification
(on-notification:lsp !<(all:notification:lsp-sur vase))
%language-server-rpc-request
(on-request:lsp !<(all:request:lsp-sur vase))
==
[cards this]
::
++ on-watch
|= =path
?: ?=([%primary ~] path)
`this
?. ?=([%http-response @ ~] path)
(on-watch:def path)
`this
++ on-leave on-leave:def
++ on-peek on-peek:def
++ on-peek on-peek:def
++ on-agent on-agent:def
++ on-arvo
^+ on-arvo:*agent:gall
|= [=wire =sign-arvo]
^- (quip card _this)
=^ cards state
?+ wire (on-arvo:def wire sign-arvo)
[%connect ~] ?>(?=(%bound +<.sign-arvo) `state)
?+ sign-arvo (on-arvo:def wire sign-arvo)
[%e %bound *] `state
[%f *] (handle-build:lsp wire +.sign-arvo)
==
[cards this]
::
@ -93,107 +110,116 @@
::
|_ bow=bowl:gall
::
++ parser
=, dejs:format
|^
%: ot
uri+so
:- %data
%- of
:~ sync+sync
completion+position
commit+ni
hover+position
==
~
==
::
++ sync
%- ar
%: ou
range+(uf ~ (pe ~ range))
'rangeLength'^(uf ~ (pe ~ ni))
text+(un so)
~
==
::
++ range
%: ot
start+position
end+position
~
==
::
++ position
%: ot
line+ni
character+ni
~
==
--
::
++ json-response
|= [eyre-id=@ta jon=json]
^- (list card)
(give-simple-payload:app eyre-id (json-response:gen (json-to-octs jon)))
::
:: +handle-http-request: received on a new connection established
++ give-rpc-notification
|= res=out:notification:lsp-sur
^- (list card)
:_ ~
[%give %fact ~[/primary] %language-server-rpc-notification !>(res)]
::
++ handle-http-request
|= [eyre-id=@ta =inbound-request:eyre]
++ on-notification
|= not=all:notification:lsp-sur
^- (quip card _state)
?> ?=(^ body.request.inbound-request)
=/ =lsp-req
%- parser
(need (de-json:html q.u.body.request.inbound-request))
=/ buf (~(gut by bufs) uri.lsp-req *wall)
=^ cards buf
?- +<.lsp-req
%sync (handle-sync buf eyre-id +>.lsp-req)
%completion (handle-completion buf eyre-id +>.lsp-req)
%commit (handle-commit buf eyre-id uri.lsp-req)
%hover (handle-hover buf eyre-id +>.lsp-req)
=^ cards state
?+ -.not [~ state]
%text-document--did-open (handle-did-open +.not)
%text-document--did-change (handle-did-change +.not)
%text-document--did-save (handle-did-save +.not)
%text-document--did-close (handle-did-close +.not)
%exit handle-exit
==
[cards state]
++ on-request
|= req=all:request:lsp-sur
^- (quip card _state)
=^ cards state
?+ -.req [~ state]
%text-document--hover (handle-hover req)
%text-document--completion (handle-completion req)
==
=. bufs
(~(put by bufs) uri.lsp-req buf)
[cards state]
::
++ regen-diagnostics
|= buf=wall
^- json
=/ t=tape
++ get-subject
|= uri=@t
^- type
(~(gut by preludes) uri -:!>(..zuse))
::
++ handle-completion
|= com=text-document--completion:request:lsp-sur
^- (quip card _state)
:_ state
%^ give-rpc-response %text-document--completion id.com
=/ buf=wall
(~(got by bufs) uri.com)
=/ txt=tape
(zing (join "\0a" buf))
=/ parse
(lily:auto t (lsp-parser *beam))
?: ?=(%| -.parse)
(format-diagnostic p.parse)
=, enjs:format
%- pairs
:~ good+b+&
=/ pos
(get-pos buf row.com col.com)
=/ rune (rune-snippet (swag [(safe-sub pos 2) 2] txt))
?^ rune rune
=/ tab-list
%^ tab-list-tape:auto
(~(gut by preludes) uri.com -:!>(..zuse))
pos txt
?: ?=(%| -.tab-list) ~
?~ p.tab-list ~
?~ u.p.tab-list ~
(turn u.p.tab-list make-completion-item)
::
++ make-completion-item
|= [name=term =type]
^- completion-item:lsp-sur
=/ doc
%- crip
;: weld
"`"
~(ram re ~(duck easy-print type))
"`"
==
[name 1 doc '' name 1]
::
++ give-rpc-response
|= res=all:response:lsp-sur
^- (list card)
:_ ~
[%give %fact ~[/primary] %language-server-rpc-response !>(res)]
::
++ handle-exit
^- (quip card _state)
~& > %lsp-shutdown
:_ *state-zero
%- zing
%+ turn
~(tap in ~(key by builds))
|= uri=@t
:+ [%pass /ford/[uri] %arvo %f %kill ~]
[%pass /ford/[uri]/deps %arvo %f %kill ~]
~
::
++ handle-did-close
|= [uri=@t version=(unit @)]
^- (quip card _state)
=. bufs
(~(del by bufs) uri)
=. ford-diagnostics
(~(del by ford-diagnostics) uri)
=. builds
(~(del by builds) uri)
:_ state
:~
[%pass /ford/[uri] %arvo %f %kill ~]
[%pass /ford/[uri]/deps %arvo %f %kill ~]
==
::
++ format-diagnostic
|= [row=@ col=@]
^- json
=, enjs:format
%- pairs
:~ good+b+|
:+ %diagnostics %a :_ ~
=/ loc (pairs line+(numb (dec row)) character+(numb col) ~)
%- pairs
:~ range+(pairs start+loc end+loc ~)
severity+n+'1'
message+s+'syntax error'
==
==
::
++ handle-commit
|= [buf=wall eyre-id=@ta uri=@t]
^- [(list card) wall]
:_ buf
=/ jon
(regen-diagnostics buf)
:_ (json-response eyre-id jon)
++ handle-did-save
|= [uri=@t version=(unit @)]
^- (quip card _state)
:_ state
:_ (give-rpc-notification (get-diagnostics uri))
:*
%pass
/commit
@ -204,33 +230,141 @@
!>([q.byk.bow |])
==
::
++ handle-did-change
|= [document=versioned-doc-id:lsp-sur changes=(list change:lsp-sur)]
^- (quip card _state)
=/ updated=wall
(sync-buf (~(got by bufs) uri.document) changes)
=. bufs
(~(put by bufs) uri.document updated)
`state
::
++ handle-build
|= [=path =gift:able:ford]
^- (quip card _state)
?. ?=([%made *] gift)
[~ state]
?. ?=([%complete *] result.gift)
[~ state]
=/ uri=@t
(snag 1 path)
=/ =build-result:ford
build-result.result.gift
?+ build-result [~ state]
::
[%success %plan *]
=. preludes
(~(put by preludes) uri -:vase.build-result)
[~ state]
::
[%success %core *]
=. builds
(~(put by builds) uri vase.build-result)
=. ford-diagnostics
(~(del by ford-diagnostics) uri)
:_ state
(give-rpc-notification (get-diagnostics uri))
::
[%error *]
=/ error-ranges=(list =range:lsp-sur)
(get-errors-from-tang:build uri message.build-result)
?~ error-ranges
[~ state]
=. ford-diagnostics
%+ ~(put by ford-diagnostics)
uri
[i.error-ranges 1 'Build Error']~
:_ state
(give-rpc-notification (get-diagnostics uri))
==
::
++ get-diagnostics
|= uri=@t
^- out:notification:lsp-sur
:+ %text-document--publish-diagnostics
uri
%+ weld
(~(gut by ford-diagnostics) uri ~)
(get-parser-diagnostics uri)
::
++ get-build-deps
|= [=path buf=wall]
^- schematic:ford
=/ parse=(like scaffold:ford)
%+ (lsp-parser [byk.bow path]) [1 1]
(zing (join "\0a" buf))
=/ =scaffold:ford
?~ q.parse *scaffold:ford
p.u.q.parse
:* %plan
[[our.bow %home] (flop path)]
*coin
scaffold(sources `(list hoon)`~[[%cnts ~[[%& 1]] ~]])
==
::
++ handle-did-open
|= item=text-document-item:lsp-sur
^- (quip card _state)
=/ buf=wall
(to-wall (trip text.item))
=. bufs
(~(put by bufs) uri.item buf)
=/ =path
(uri-to-path:build uri.item)
=/ =schematic:ford
[%core [our.bow %home] (flop path)]
=/ dep-schematic=schematic:ford
(get-build-deps path buf)
:_ state
%+ weld
(give-rpc-notification (get-diagnostics uri.item))
^- (list card)
:~
[%pass /ford/[uri.item] %arvo %f %build live=%.y schematic]
[%pass /ford/[uri.item]/deps %arvo %f %build live=%.y dep-schematic]
==
::
++ get-parser-diagnostics
|= uri=@t
^- (list diagnostic:lsp-sur)
=/ t=tape
(zing (join "\0a" `wall`(~(got by bufs) uri)))
=/ parse
(lily:auto t (lsp-parser *beam))
?. ?=(%| -.parse)
~
=/ loc=position:lsp-sur
[(dec -.p.parse) +.p.parse]
:_ ~
[[loc loc] 1 'Syntax Error']
::
++ handle-hover
|= [buf=wall eyre-id=@ta row=@ud col=@ud]
^- [(list card) wall]
|= hov=text-document--hover:request:lsp-sur
^- (quip card _state)
:_ state
%^ give-rpc-response %text-document--hover id.hov
=/ buf=wall
(~(got by bufs) uri.hov)
=/ txt
(zing (join "\0a" buf))
=+ (get-id:auto (get-pos buf row col) txt)
?~ id
[(json-response eyre-id *json) buf]
=/ match=(unit (option:auto type))
(search-exact:auto u.id (get-identifiers:auto -:!>(..zuse)))
?~ match
[(json-response eyre-id *json) buf]
=/ contents
%- crip
;: weld
"`"
~(ram re ~(duck easy-print detail.u.match))
"`"
==
:_ buf
%+ json-response eyre-id
%- pairs:enjs:format
[contents+s+contents ~]
=/ tab-list
%^ tab-list-tape:auto
(~(gut by preludes) uri.hov -:!>(..zuse))
(get-pos buf row.hov col.hov)
txt
?: ?=(%| -.tab-list) ~
?~ p.tab-list ~
?~ u.p.tab-list ~
:- ~
%- crip
;: weld
"`"
~(ram re ~(duck easy-print detail.i.u.p.tab-list))
"`"
==
::
++ handle-sync
|= [buf=wall eyre-id=@ta changes=(list change)]
:- (json-response eyre-id *json)
++ sync-buf
|= [buf=wall changes=(list change:lsp-sur)]
|- ^- wall
?~ changes
buf
@ -277,48 +411,4 @@
0
(sub a b)
::
++ handle-completion
|= [buf=wall eyre-id=@ta row=@ud col=@ud]
^- [(list card) wall]
=/ =tape (zing (join "\0a" buf))
=/ pos (get-pos buf row col)
:_ buf
:: Check if we're on a rune
::
=/ rune (swag [(safe-sub pos 2) 2] tape)
?: (~(has by runes:rune-snippet) rune)
(json-response eyre-id (rune-snippet rune))
:: Don't run on large files because it's slow
::
?: (gth (lent buf) 1.000)
=, enjs:format
(json-response eyre-id (pairs good+b+& result+~ ~))
::
=/ tl
(tab-list-tape:auto -:!>(..zuse) pos tape)
=, enjs:format
%+ json-response eyre-id
?: ?=(%| -.tl)
(format-diagnostic p.tl)
?~ p.tl
*json
%- pairs
:~ good+b+&
::
:- %result
%- pairs
:~ 'isIncomplete'^b+&
::
:- %items
:- %a
=/ lots (gth (lent u.p.tl) 10)
%- flop
%+ turn (scag 50 u.p.tl)
|= [=term =type]
?: lots
(frond label+s+term)
=/ detail (crip ~(ram re ~(duck easy-print type)))
(pairs label+s+term detail+s+detail ~)
==
==
--

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

View File

@ -5,7 +5,7 @@
:: configured for them as `access-control`.
::
/- *permission-hook
/+ *permission-json, default-agent
/+ *permission-json, default-agent, verb, dbug
::
|%
+$ state
@ -26,6 +26,8 @@
=| state-0
=* state -
::
%- agent:dbug
%+ verb |
^- agent:gall
=<
|_ =bowl:gall

View File

@ -1,7 +1,7 @@
:: permission-store: track black- and whitelists of ships
::
/- *permission-store
/+ default-agent
/+ default-agent, verb, dbug
::
|%
+$ card card:agent:gall
@ -17,6 +17,9 @@
--
=| state-zero
=* state -
::
%- agent:dbug
%+ verb |
^- agent:gall
=<
|_ =bowl:gall

View File

@ -41,7 +41,11 @@
?. ?=(%poking -.ship-state.u.s)
%- (slog leaf+"ping: strange state {<ship s>}" ~)
`state
=/ until (add ~m5 now)
:: NAT timeouts are often pretty short for UDP entries. 5
:: minutes is a common value. We use 30 seconds, which is fairly
:: aggressive, but should be safe.
::
=/ until (add ~s30 now)
=. ships.state
(~(put by ships.state) ship u.s(ship-state [%waiting until]))
:_ state

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

View File

@ -19,7 +19,7 @@
==
;body
;h1:"Ran generator"
:: ;p:"Executing on {<(scot %p our)>}."
;p:"Executing on {<(scot %p p.bek)>}."
;p:"The method was {<(trip method)>}."
;p:"The url was {<(trip url)>}."
==

View File

@ -0,0 +1,61 @@
/- *language-server
::
|%
++ parse-error
|= =tape
^- (unit [=path =range])
=/ parse-pair
%+ cook
|=([row=@ud col=@ud] [(dec row) col])
(ifix [lac rac] ;~((glue ace) dem dem))
=/ parse-path
%+ cook
|=(p=path (slag 3 p))
(ifix [net (jest '::')] (more net urs:ab))
=/ parse-full
;~(plug parse-path ;~(sfix ;~((glue dot) parse-pair parse-pair) ban))
(rust tape parse-full)
::
++ get-errors-from-tang
|= [uri=@t =tang]
^- (list range)
=/ =path
(uri-to-path uri)
%+ murn tang
|= =tank
^- (unit range)
?. ?=([%leaf *] tank)
~
=/ error
(parse-error p.tank)
?~ error
~
?: =(path path.u.error)
`range.u.error
~
::
++ uri-to-path
|= uri=@t
^- path
=/ pier-root=(set cord)
%- sy
['app' 'gen' 'lib' 'mar' 'ren' 'sur' 'sys' 'test' ~]
=/ path=(list cord)
(parse-uri uri)
|-
?< ?=(~ path)
?: (~(has in pier-root) i.path)
`^path`path
$(path t.path)
::
++ parse-uri
|= uri=@t
=- (fall - /fail)
%+ rush uri
%+ more
;~(pose (plus fas) dot)
%+ cook
crip
(star ;~(pose col hep alf))
::
--

View File

@ -4,7 +4,7 @@
=/ debug |
|%
+* option [item]
[=term detail=item]
[term=cord detail=item]
::
:: Like +rose except also produces line number
::
@ -73,7 +73,7 @@
|* [sid=term options=(list (option))]
=/ match
%+ skim options
|= [id=term *]
|= [id=cord *]
=(sid id)
?~ match
~
@ -82,17 +82,18 @@
:: Get all the identifiers that start with sid.
::
++ search-prefix
|* [sid=term ids=(list (option))]
|* [sid=cord ids=(list (option))]
^+ ids
%+ skim ids
|= [id=term *]
|= [id=cord *]
^- ?(%.y %.n)
=(sid (end 3 (met 3 sid) id))
::
:: Get the longest prefix of a list of identifiers.
::
++ longest-match
|= matches=(list (option))
^- term
^- cord
?~ matches
''
=/ n 1

View File

@ -116,7 +116,7 @@
::
{$face *}
=^ cox gid $(q.ham q.q.ham)
:_(gid [%palm [['/' ~] ~ ~ ~] [%leaf (trip p.q.ham)] cox ~])
:_(gid [%palm [['=' ~] ~ ~ ~] [%leaf (trip p.q.ham)] cox ~])
::
{$list *}
=^ cox gid $(q.ham q.q.ham)
@ -128,7 +128,7 @@
::
{$plot *}
=^ coz gid (many p.q.ham)
:_(gid [%rose [[' ' ~] ['{' ~] ['}' ~]] coz])
:_(gid [%rose [[' ' ~] ['[' ~] [']' ~]] coz])
::
{$pear *}
:_(gid [%leaf '$' ~(rend co [%$ p.q.ham q.q.ham])])

View File

@ -0,0 +1,301 @@
/- lsp=language-server
|%
::
++ util
|%
++ get-json-string
|= [jon=(map @t json) key=@t]
^- (unit cord)
=/ cord-jon=(unit json)
(~(get by jon) key)
?~ cord-jon
~
?> ?=([%s *] u.cord-jon)
`p.u.cord-jon
--
::
::
++ dejs
=, dejs:format
|%
++ request
|= jon=json
?> ?=([%o *] jon)
=/ method=cord
%- method
(trip (need (get-json-string:util p.jon 'method')))
=/ id=cord
(need (get-json-string:util p.jon 'id'))
=/ params=json
(~(got by p.jon) 'params')
^- all:request:lsp
|^
?+ method [%unknown jon]
%text-document--hover (text-document--hover params id)
%text-document--completion (text-document--completion params id)
==
::
++ text-document--hover
|= [params=json id=cord]
^- text-document--hover:request:lsp
:+ %text-document--hover
id
%. params
%: ot
position+position
'textDocument'^text-document-id
~
==
::
++ text-document--completion
|= [params=json id=cord]
:+ %text-document--completion id
%. params
%: ot
position+position
'textDocument'^text-document-id
~
==
--
::
++ notification
|= jon=json
?> ?=([%o *] jon)
=/ method=cord
%- method
(trip (need (get-json-string:util p.jon 'method')))
=/ params=json
(~(got by p.jon) 'params')
^- all:notification:lsp
|^
?+ method [%unknown jon]
%text-document--did-change
(text-document--did-change params)
%text-document--did-open
(text-document--did-open params)
%text-document--did-save
(text-document--did-save params)
%text-document--did-close
(text-document--did-close params)
==
::
++ text-document--did-save
|= jon=json
^- text-document--did-save:notification:lsp
?> ?=([%o *] jon)
=/ doc-id
(~(got by p.jon) 'textDocument')
:- %text-document--did-save
(text-document-id doc-id)
::
++ text-document--did-close
|= jon=json
^- text-document--did-close:notification:lsp
?> ?=([%o *] jon)
=/ doc-id
(~(got by p.jon) 'textDocument')
:- %text-document--did-close
(text-document-id doc-id)
::
++ text-document--did-change
|= jon=json
^- text-document--did-change:notification:lsp
:- %text-document--did-change
%. jon
%: ot
'textDocument'^text-document-id
'contentChanges'^text-document-changes
~
==
::
++ text-document--did-open
|= jon=json
^- text-document--did-open:notification:lsp
?> ?=([%o *] jon)
:- %text-document--did-open
(text-document-item (~(got by p.jon) 'textDocument'))
--
:: Utilities
::
++ text-document-item
|= jon=json
^- text-document-item:lsp
%. jon
%: ot
uri+so
version+(mu ni)
text+so
~
==
::
++ text-document-id
%: ou
uri+(un so)
version+(uf ~ (pe ~ ni))
~
==
::
++ text-document-changes
%- ar
%: ou
range+(uf ~ (pe ~ range))
'rangeLength'^(uf ~ (pe ~ ni))
text+(un so)
~
==
::
++ method
|= =tape
^- cord
%- crip %- zing
%+ join "--"
^- (list ^tape)
%+ turn
^- (list (list ^tape))
%+ scan
tape
%+ more
fas
;~ plug
(star low)
(star ;~(plug (cook |=(a=@ (add a 32)) hig) (star low)))
==
|= words=(list ^tape)
^- ^tape
(zing (join "-" words))
::
++ range
%: ot
start+position
end+position
~
==
::
++ position
%: ot
line+ni
character+ni
~
==
--
::
++ enjs
=, enjs:format
|%
++ text-document--publish-diagnostics
|= pub=text-document--publish-diagnostics:notification:lsp
^- json
%: pairs
uri+s+uri.pub
diagnostics+a+(turn diagnostics.pub diagnostic)
~
==
++ notification
|= notification=all:notification:lsp
^- json
=/ params=json
?+ -.notification !!
%text-document--publish-diagnostics
(text-document--publish-diagnostics notification)
==
~! -.notification
=/ method=cord (crip (unparse-method -.notification))
%: pairs
method+s+method
params+params
~
==
::
++ response
|= res=all:response:lsp
^- json
|^
?- -.res
%text-document--hover (text-document--hover res)
%text-document--completion (text-document--completion res)
==
::
++ wrap-in-id
|= [id=cord res=json]
%: pairs
id+s+id
result+res
~
==
++ text-document--hover
|= hov=text-document--hover:response:lsp
%+ wrap-in-id id.hov
%+ frond 'contents'
?~ contents.hov
~
s+u.contents.hov
::
++ text-document--completion
|= com=text-document--completion:response:lsp
%+ wrap-in-id id.com
[%a (turn completion.com completion-item)]
--
++ unparse-method
|= =cord
^- ^tape
%+ rash cord
%+ cook |=(l=(list ^tape) (zing (join "/" l)))
%+ more (jest '--')
%+ cook
|= tapes=(list ^tape)
^- ^tape
?~ tapes ~
%- zing
:- i.tapes
%+ turn t.tapes
|= t=^tape
^- ^tape
?~ t ~
[`@tD`(sub i.t 32) t.t]
%+ more
;~(less (jest '--') hep)
(star alf)
::
++ completion-item
|= com=completion-item:lsp
^- json
%: pairs
label+s+label.com
detail+s+detail.com
kind+(numb kind.com)
'documentation'^s+doc.com
'insertText'^s+insert-text.com
'insertTextFormat'^(numb insert-text-format.com)
~
==
::
++ position
|= =position:lsp
^- json
%: pairs
line+(numb row.position)
character+(numb col.position)
~
==
::
++ range
|= =range:lsp
^- json
%: pairs
start+(position start.range)
end+(position end.range)
~
==
::
++ diagnostic
|= diag=diagnostic:lsp
^- json
%: pairs
range+(range range.diag)
severity+(numb severity.diag)
message+s+message.diag
~
==
::
--
--

View File

@ -1,3 +1,5 @@
/- lsp-sur=language-server
/+ auto=language-server-complete
=>
|%
++ snippet
@ -11,37 +13,36 @@
==
::
++ runes
^~
%- malt
:~ :- "|$"
^- (list (option:auto tape))
:~ :- '|$'
"""
$\{1:sample}
$\{2:body}
"""
:- "|_"
:- '|_'
"""
$\{1:sample}
++ $\{2:arm}
$\{3:body}
--
"""
:- "|:"
:- '|:'
"""
$\{1:sample}
$\{2:body}
"""
:- "|%"
:- '|%'
"""
++ $\{1:arm}
$\{2:body}
--
"""
:- "|."
:- '|.'
"""
$\{1:body}
"""
:- "|^"
:- '|^'
"""
$\{1:body}
@ -50,492 +51,486 @@
$\{3:body}
--
"""
:- "|-"
:- '|-'
"""
$\{1:body}
"""
:- "|~"
:- '|~'
"""
$\{1:sample}
$\{2:body}
"""
:- "|*"
:- '|*'
"""
$\{1:sample}
$\{2:body}
"""
:- "|="
:- '|='
"""
$\{1:sample}
$\{2:body}
"""
:- "|@"
:- '|@'
"""
++ $\{1:arm}
$\{2:body}
--
"""
:- "|?"
:- '|?'
"""
$\{1:sample}
"""
::
:- ":_"
:- ':_'
"""
$\{1:tail}
$\{2:head}
"""
:- ":^"
:- ':^'
"""
$\{1:car}
$\{2:cadr}
$\{3:caddr}
$\{4:cddr}
"""
:- ":-"
:- ':-'
"""
$\{1:tail}
$\{2:head}
"""
:- ":+"
:- ':+'
"""
$\{1:car}
$\{2:cadr}
$\{3:cddr}
"""
:- ":~"
:- ':~'
"""
$\{1:item}
==
"""
:- ":*"
:- ':*'
"""
$\{1:item}
==
"""
::
:- "%_"
:- '%_'
"""
$\{1:target}
$\{2:wing} $\{3:new-value}
==
"""
:- "%."
:- '%.'
"""
$\{1:arg}
$\{2:gate}
"""
:- "%-"
:- '%-'
"""
$\{1:gate}
$\{2:arg}
"""
:- "%:"
:- '%:'
"""
$\{1:gate}
$\{2:args}
==
"""
:- "%*"
:- '%*'
"""
$\{1:target-wing} $\{2:from}
$\{3:wing} $\{4:new-value}
==
"""
:- "%^"
:- '%^'
"""
$\{1:gate}
$\{2:arg1}
$\{3:arg2}
$\{4:arg3}
"""
:- "%+"
:- '%+'
"""
$\{1:gate}
$\{2:arg1}
$\{3:arg2}
"""
:- "%~"
:- '%~'
"""
$\{1:arm}
$\{2:core}
$\{3:arg}
"""
:- "%="
:- '%='
"""
$\{1:target}
$\{2:wing} $\{3:new-value}
==
"""
::
:- ".^"
:- '.^'
"""
$\{1:mold}
$\{2:path}
"""
:- ".+"
:- '.+'
"""
$\{1:atom}
"""
:- ".*"
:- '.*'
"""
$\{1:subject}
$\{2:formula}
"""
:- ".="
:- '.='
"""
$\{1:a}
$\{2:b}
"""
:- ".?"
:- '.?'
"""
$\{1:noun}
"""
::
:- "^|"
:- '^|'
"""
$\{1:iron-core}
"""
:- "^%"
:- '^%'
"""
$\{1:body}
"""
:- "^."
:- '^.'
"""
$\{1:a}
$\{2:b}
"""
:- "^+"
:- '^+'
"""
$\{1:like}
$\{2:body}
"""
:- "^-"
:- '^-'
"""
$\{1:type}
$\{2:body}
"""
:- "^&"
:- '^&'
"""
$\{1:zinc-core}
"""
:- "^~"
:- '^~'
"""
$\{1:constant}
"""
:- "^="
:- '^='
"""
$\{1:face}
$\{2:body}
"""
:- "^?"
:- '^?'
"""
$\{1:lead-core}
"""
:- "^*"
:- '^*'
"""
$\{1:type}
"""
:- "^:"
:- '^:'
"""
$\{1:type}
"""
::
:- "~|"
:- '~|'
"""
$\{1:trace}
$\{2:body}
"""
:- "~_"
:- '~_'
"""
$\{1:tank}
$\{2:body}
"""
:- "~%"
:- '~%'
"""
$\{1:name}
$\{2:parent}
~
$\{3:body}
"""
:- "~/"
:- '~/'
"""
$\{1:name}
$\{2:body}
"""
:- "~<"
:- '~<'
"""
$\{1:hint}
$\{2:body}
"""
:- "~>"
:- '~>'
"""
$\{1:hint}
$\{2:body}
"""
:- "~$"
:- '~$'
"""
$\{1:name}
$\{2:body}
"""
:- "~+"
:- '~+'
"""
$\{1:body}
"""
:- "~&"
:- '~&'
"""
$\{1:printf}
$\{2:body}
"""
:- "~="
:- '~='
"""
$\{1:a}
$\{2:b}
"""
:- "~?"
:- '~?'
"""
$\{1:condition}
$\{2:printf}
$\{3:body}
"""
:- "~!"
:- '~!'
"""
$\{1:type}
$\{2:body}
"""
::
:- ";="
:- ';='
"""
$\{1:manx}
==
"""
:- ";:"
:- ';:'
"""
$\{1:gate}
$\{2:args}
==
"""
:- ";/"
:- ';/'
"""
$\{1:tape}
"""
:- ";<"
:- ';<'
"""
$\{1:type} bind:m $\{2:body1}
$\{3:body2}
"""
:- ";~"
:- ';~'
"""
$\{1:gate}
$\{2:args}
==
"""
:- ";;"
:- ';;'
"""
$\{1:type}
$\{2:body}
"""
::
:- "=|"
:- '=|'
"""
$\{1:type}
$\{2:body}
"""
:- "=:"
:- '=:'
"""
$\{1:wing} $\{2:value}
==
$\{3:body}
"""
:- "=/"
:- '=/'
"""
$\{1:face}
$\{2:value}
$\{3:body}
"""
:- "=;"
:- '=;'
"""
$\{1:face}
$\{2:body}
$\{3:value}
"""
:- "=."
:- '=.'
"""
$\{1:wing}
$\{2:value}
$\{3:body}
"""
:- "=?"
:- '=?'
"""
$\{1:wing} $\{2:condition}
$\{3:value}
$\{4:body}
"""
:- "=<"
:- '=<'
"""
$\{1:formula}
$\{2:subject}
"""
:- "=-"
:- '=-'
"""
$\{1:body}
$\{2:value}
"""
:- "=>"
:- '=>'
"""
$\{1:subject}
$\{2:formula}
"""
:- "=^"
:- '=^'
"""
$\{1:face} $\{2:wing}
$\{3:computation}
$\{4:body}
"""
:- "=+"
:- '=+'
"""
$\{1:value}
$\{2:body}
"""
:- "=~"
:- '=~'
"""
$\{1:body}
"""
:- "=*"
:- '=*'
"""
$\{1:alias} $\{2:value}
$\{3:body}
"""
:- "=,"
:- '=,'
"""
$\{1:alias}
$\{3:body}
"""
::
:- "?|"
:- '?|'
"""
$\{1:condition}
==
"""
:- "?-"
:- '?-'
"""
$\{1:case}
$\{2:type} $\{3:value}
==
"""
:- "?:"
:- '?:'
"""
$\{1:if}
$\{2:then}
$\{3:else}
"""
:- "?."
:- '?.'
"""
$\{1:if}
$\{2:else}
$\{3:then}
"""
:- "?^"
:- '?^'
"""
$\{1:value}
$\{2:if-cell}
$\{3:if-atom}
"""
:- "?<"
:- '?<'
"""
$\{1:assertion}
$\{2:body}
"""
:- "?>"
:- '?>'
"""
$\{1:assertion}
$\{2:body}
"""
:- "?+"
:- '?+'
"""
$\{1:case} $\{2:else}
$\{3:type} $\{4:value}
==
"""
:- "?&"
:- '?&'
"""
$\{1:condition}
==
"""
:- "?@"
:- '?@'
"""
$\{1:value}
$\{2:if-atom}
$\{3:if-cell}
"""
:- "?~"
:- '?~'
"""
$\{1:value}
$\{2:if-null}
$\{3:if-nonnull}
"""
:- "?#"
:- '?#'
"""
$\{1:skin}
$\{2:wing}
"""
:- "?="
:- '?='
"""
$\{1:type}
$\{2:wing}
"""
:- "?!"
:- '?!'
"""
$\{1:loobean}
"""
::
:- "!,"
:- '!,'
"""
*hoon
$\{1:ast}
"""
:- "!>"
:- '!>'
"""
$\{1:value}
"""
:- "!;"
:- '!;'
"""
$\{1:type}
$\{2:body}
"""
:- "!="
:- '!='
"""
$\{1:body}
"""
:- "!@"
:- '!@'
"""
$\{1:wing}
$\{2:if-exists}
$\{3:if-not-exists}
"""
:- "!?"
:- '!?'
"""
$\{1:version}
$\{2:body}
"""
:- "!!"
:- '!!'
""
==
--
|= rune=tape
=, enjs:format
^- json
%- pairs
:~ good+b+&
:- %result
%- pairs
:~ 'isIncomplete'^b+&
:- %items
:- %a :_ ~
~| [%unrecognized-rune rune]
%+ snippet
rune
(~(got by runes) rune)
== ==
^- (list completion-item:lsp-sur)
=? rune =(' ' (snag 0 rune))
(slag 1 rune)
~& rune
%+ turn (search-prefix:auto (crip rune) runes)
|= [name=cord snippet=tape]
^- completion-item:lsp-sur
[name 1 '' '' (crip snippet) 2]

View File

@ -0,0 +1,16 @@
/- *language-server
/+ lsp-json=language-server-json
|_ not=all:notification
++ grab
|%
++ noun not
++ json
|= jon=^json
(notification:dejs:lsp-json jon)
--
++ grow
|%
++ json
(notification:enjs:lsp-json not)
--
--

View File

@ -0,0 +1,11 @@
/- *language-server
/+ lsp-json=language-server-json
|_ req=all:request
++ grab
|%
++ noun req
++ json
|= jon=^json
(request:dejs:lsp-json jon)
--
--

View File

@ -0,0 +1,15 @@
/- *language-server
/+ lsp=language-server-json
|_ res=all:response
::
++ grow
|%
++ json (response:enjs:lsp res)
--
::
++ grab
|%
++ noun all:response
--
::
--

View File

@ -0,0 +1,114 @@
|%
::
+$ versioned-doc-id
[uri=@t version=(unit @)]
::
++ request
|%
+$ all
$%
text-document--hover
text-document--completion
unknown
==
+$ text-document--hover
[%text-document--hover id=cord position versioned-doc-id]
+$ text-document--completion
[%text-document--completion id=cord position versioned-doc-id]
+$ unknown
[%unknown json]
--
++ response
|%
+$ all
$%
text-document--hover
text-document--completion
==
+$ text-document--hover
[%text-document--hover id=cord contents=(unit @t)]
+$ text-document--completion
[%text-document--completion id=cord completion=(list completion-item)]
--
::
+$ completion-item
$:
label=cord
kind=@ud
detail=cord
doc=cord
insert-text=cord
insert-text-format=@ud
==
::
+$ diagnostic
[=range severity=@ud message=@t]
::
+$ position
[row=@ud col=@ud]
::
+$ text-document-item
[uri=@t version=(unit @) text=@t]
::
++ notification
|%
::
+$ in
$%
text-document--did-change
text-document--did-open
text-document--did-save
text-document--did-close
exit
unknown
==
::
+$ out
$%
text-document--publish-diagnostics
==
::
+$ all
$%
out
in
==
::
+$ text-document--did-change
[%text-document--did-change versioned-doc-id changes=(list change)]
::
+$ text-document--did-open
[%text-document--did-open text-document-item]
::
+$ text-document--did-save
[%text-document--did-save versioned-doc-id]
::
+$ text-document--did-close
[%text-document--did-close versioned-doc-id]
::
+$ exit
[%exit ~]
::
+$ unknown
[%unknown =json]
::
+$ text-document--publish-diagnostics
[%text-document--publish-diagnostics uri=@t diagnostics=(list diagnostic)]
::
--
::
+$ change
$: range=(unit range)
range-length=(unit @ud)
text=@t
==
::
+$ range
$: start=position
end=position
==
::
--

View File

@ -1345,7 +1345,7 @@
a(r c)
c(l a(r l.c))
::
++ rep :: replace by product
++ rep :: reduce to product
|* b/_=>(~ |=({* *} +<+))
|-
?~ a +<+.b
@ -1600,7 +1600,7 @@
a(r d)
d(l a(r l.d))
::
++ rep :: replace by product
++ rep :: reduce to product
|* b/_=>(~ |=({* *} +<+))
|-
?~ a +<+.b

View File

@ -918,11 +918,7 @@
++ call
|= [=duct type=* wrapped-task=(hobo task)]
::
=/ =task
?. ?=(%soft -.wrapped-task)
wrapped-task
~| our^%ames-fail-soft
;;(task p.wrapped-task)
=/ =task ((harden task) wrapped-task)
:: %born: set .unix-duct and start draining .queued-events
::
?: ?=(%born -.task)
@ -1056,11 +1052,7 @@
|= [=duct type=* wrapped-task=(hobo task)]
^- [(list move) _ames-gate]
::
=/ =task
?. ?=(%soft -.wrapped-task)
wrapped-task
~| %ames-bad-task^p.wrapped-task
;;(task p.wrapped-task)
=/ =task ((harden task) wrapped-task)
::
=/ event-core (per-event [our now eny scry-gate] duct ames-state)
::
@ -1894,6 +1886,20 @@
::
=/ =peer-state +.u.ship-state
::
:: XX routing hack to mimic old ames.
::
:: Before removing this, consider: moons when their planet is
:: behind a NAT; a planet receiving initial acknowledgment
:: from a star; a planet talking to another planet under
:: another galaxy.
::
?: ?| =(our ship)
?& !=(final-ship ship)
!=(%czar (clan:title ship))
==
==
(try-next-sponsor sponsor.peer-state)
::
?: =(our ship)
:: if forwarding, don't send to sponsor to avoid loops
::

View File

@ -251,10 +251,7 @@
==
^- [(list move) _behn-gate]
::
=/ =task:able
?. ?=(%soft -.wrapped-task)
wrapped-task
;;(task:able p.wrapped-task)
=/ =task:able ((harden task:able) wrapped-task)
::
=/ event-core (per-event [our now hen] state)
::

View File

@ -4068,11 +4068,7 @@
==
^- [(list move) _..^$]
::
=/ req=task:able
?. ?=(%soft -.wrapped-task)
wrapped-task
;;(task:able p.wrapped-task)
::
=/ req=task:able ((harden task:able) wrapped-task)
:: only one of these should be going at once, so queue
::
?: ?=(?(%info %merg %mont) -.req)

View File

@ -516,10 +516,7 @@
wrapped-task=(hobo task:able)
==
^+ [*(list move) ..^$]
=/ task=task:able
?. ?=(%soft -.wrapped-task)
wrapped-task
;;(task:able p.wrapped-task)
=/ task=task:able ((harden task:able) wrapped-task)
:: the boot event passes thru %dill for initial duct distribution
::
?: ?=(%boot -.task)

View File

@ -2045,11 +2045,7 @@
|= [=duct type=* wrapped-task=(hobo task:able)]
^- [(list move) _http-server-gate]
::
=/ task=task:able
?. ?=(%soft -.wrapped-task)
wrapped-task
~| [%p-wrapped-task p.wrapped-task]
;;(task:able p.wrapped-task)
=/ task=task:able ((harden task:able) wrapped-task)
:: %crud: notifies us of an event failure
::
?: ?=(%crud -.task)

View File

@ -838,7 +838,9 @@
|* [tracker=(request-tracker) request=*]
^- (list duct)
::
~(tap in waiting:(~(got by tracker) request))
?~ val=(~(get by tracker) request)
~
~(tap in waiting.u.val)
:: +put-request: associates a +duct with a request
::
++ put-request
@ -1538,7 +1540,7 @@
::
|- ^+ state
::
=/ client-status=build-status (~(got by builds.state) build)
=/ client-status=build-status (got-build build)
=/ subs=(list ^build) ~(tap in ~(key by subs.client-status))
::
|- ^+ state
@ -1586,14 +1588,14 @@
|= [=build =anchor]
^+ builds.state
::
=/ =build-status (~(got by builds.state) build)
=/ =build-status (got-build build)
=/ subs=(list ^build) ~(tap in ~(key by subs.build-status))
=/ client=^build build
::
|- ^+ builds.state
?~ subs builds.state
::
=/ sub-status=^build-status (~(got by builds.state) i.subs)
=/ sub-status=^build-status (got-build i.subs)
::
=. clients.sub-status
(~(del ju clients.sub-status) anchor client)
@ -1612,7 +1614,7 @@
|= =build
^+ state
::
=/ =build-status (~(got by builds.state) build)
=/ =build-status (got-build build)
=/ new-anchors
~(tap in (~(put in ~(key by clients.build-status)) [%duct duct]))
=/ subs ~(tap in ~(key by subs.build-status))
@ -1642,14 +1644,14 @@
|= [=anchor =build]
^+ builds.state
::
=/ =build-status (~(got by builds.state) build)
=/ =build-status (got-build build)
=/ subs=(list ^build) ~(tap in ~(key by subs.build-status))
=/ client=^build build
::
|- ^+ builds.state
?~ subs builds.state
::
=/ sub-status=^build-status (~(got by builds.state) i.subs)
=/ sub-status=^build-status (got-build i.subs)
::
=/ already-had-anchor=? (~(has by clients.sub-status) anchor)
::
@ -1687,9 +1689,7 @@
++ copy-node
^+ state
::
=/ old-build-status=build-status
~| old-client=(build-to-tape old-client)
(~(got by builds.state) old-client)
=/ old-build-status=build-status (got-build old-client)
::
=/ old-subs=(list build) ~(tap in ~(key by subs.old-build-status))
=/ new-subs=(list build) (turn old-subs |=(a=build a(date new-date)))
@ -1837,7 +1837,7 @@
=. state (add-build build)
:: ignore blocked builds
::
=/ =build-status (~(got by builds.state) build)
=/ =build-status (got-build build)
?: ?=(%blocked -.state.build-status)
=. state (add-anchors-to-build-subs build)
::
@ -1887,10 +1887,7 @@
?~ old-build
(add-build-to-next build)
::
=/ old-build-status=^build-status
~| [%missing-old-build (build-to-tape u.old-build)]
~| [%build-state (turn ~(tap in ~(key by builds.state)) build-to-tape)]
(~(got by builds.state) u.old-build)
=/ old-build-status=^build-status (got-build u.old-build)
:: selectively promote scry builds
::
:: We can only promote a scry if it's not forced and we ran the same
@ -1918,7 +1915,7 @@
?. ?=([~ %value *] old-build-record)
(add-build-to-next build)
::
=. old-build-status (~(got by builds.state) u.old-build)
=. old-build-status (got-build u.old-build)
::
=/ old-subs=(list ^build) ~(tap in ~(key by subs.old-build-status))
=/ new-subs=(list ^build)
@ -5482,6 +5479,13 @@
::
::+| utilities
::
:: +got-build: lookup :build in state, asserting presence
::
++ got-build
|= =build
^- build-status
~| [%ford-missing-build build=(build-to-tape build) duct=duct]
(~(got by builds.state) build)
:: +add-build: store a fresh, unstarted build in the state
::
++ add-build
@ -5554,9 +5558,7 @@
|= [=build update-func=$-(build-status build-status)]
^- [build-status builds=_builds.state]
::
=/ original=build-status
~| [%update-build (build-to-tape build)]
(~(got by builds.state) build)
=/ original=build-status (got-build build)
=/ mutant=build-status (update-func original)
::
[mutant (~(put by builds.state) build mutant)]
@ -5615,9 +5617,7 @@
|= =build
^+ [unblocked builds.state]
::
=/ =build-status
~| [%unblocking (build-to-tape build)]
(~(got by builds.state) build)
=/ =build-status (got-build build)
::
=/ clients=(list ^build) ~(tap in (~(get ju clients.build-status) [%duct duct]))
::
@ -5661,7 +5661,7 @@
::
=/ duct-status (~(got by ducts.state) duct)
::
=/ =build-status (~(got by builds.state) build)
=/ =build-status (got-build build)
?: (~(has in requesters.build-status) [%duct duct])
(on-root-build-complete build)
::
@ -5698,7 +5698,7 @@
::
res
::
=/ =build-status (~(got by builds.state) build)
=/ =build-status (got-build build)
=/ =duct-status (~(got by ducts.state) duct)
:: make sure we have something to send
::
@ -5807,7 +5807,7 @@
|= =build
^+ ..execute
::
=/ =build-status (~(got by builds.state) build)
=/ =build-status (got-build build)
::
=/ orphans=(list ^build)
%+ murn ~(tap by subs.build-status)
@ -5817,6 +5817,11 @@
?: verified.build-relation
~
`sub
:: dequeue orphans in case we were about to run them
::
=/ orphan-set (~(gas in *(set ^build)) orphans)
=. next-builds (~(dif in next-builds) orphan-set)
=. candidate-builds (~(dif in candidate-builds) orphan-set)
:: remove links to orphans in :build's +build-status
::
=^ build-status builds.state
@ -5914,9 +5919,7 @@
?: ?=(%pin -.schematic.build)
~
::
=/ subs
~| [%collect-live-resource (build-to-tape build)]
~(tap in ~(key by subs:(~(got by builds.state) build)))
=/ subs ~(tap in ~(key by subs:(got-build build)))
=| resources=(jug disc resource)
|-
?~ subs
@ -5942,7 +5945,7 @@
:: only recurse on blocked sub-builds
::
=/ subs=(list ^build)
%+ murn ~(tap by subs:(~(got by builds.state) build))
%+ murn ~(tap by subs:(got-build build))
|= [sub=^build =build-relation]
^- (unit ^build)
::
@ -6128,12 +6131,8 @@
++ call
|= [=duct type=* wrapped-task=(hobo task:able)]
^- [(list move) _ford-gate]
:: unwrap :task from :wrapped-task
::
=/ task=task:able
?. ?=(%soft -.wrapped-task)
wrapped-task
;;(task:able p.wrapped-task)
=/ task=task:able ((harden task:able) wrapped-task)
:: we wrap +per-event with a call that binds our event args
::
=* this-event (per-event [our duct now scry-gate] state.ax)
@ -6266,22 +6265,29 @@
++ take-rebuilds
^- [(list move) ford-state]
::
~| [%ford-take-rebuilds wire=wire duct=duct]
?> ?=([@tas %wris *] sign)
=* case-sign p.sign
=* care-paths-sign q.sign
=+ [ship desk date]=(raid:wired t.wire ~[%p %tas %da])
=/ disc [ship desk]
:: ignore spurious clay updates
::
:: Due to asynchronicity of Clay notifications, we might get a
:: subscription update on an already-canceled duct. This is
:: normal; no-op.
::
?~ duct-status=(~(get by ducts.state.ax) duct)
[~ state.ax]
::
=/ =subscription
~| [%ford-take-bad-clay-sub wire=wire duct=duct]
=/ =duct-status (~(got by ducts.state.ax) duct)
?> ?=(%live -.live.duct-status)
?> ?=(^ last-sent.live.duct-status)
?> ?=(^ subscription.u.last-sent.live.duct-status)
u.subscription.u.last-sent.live.duct-status
?> ?=(%live -.live.u.duct-status)
(need subscription:(need last-sent.live.u.duct-status))
::
=/ ducts=(list ^duct)
~| [%ford-take-missing-subscription subscription]
:: sanity check; there must be at least one duct per subscription
::
=- ?<(=(~ -) -)
(get-request-ducts pending-subscriptions.state.ax subscription)
::
=| moves=(list move)
@ -6299,13 +6305,12 @@
++ take-unblocks
^- [(list move) ford-state]
::
~| [%ford-take-unblocks wire=wire duct=duct]
?> ?=([@tas %writ *] sign)
=* riot-sign p.sign
:: scry-request: the +scry-request we had previously blocked on
::
=/ =scry-request
~| [%ford-take-bad-scry-request wire=wire duct=duct]
(need (path-to-scry-request t.wire))
=/ =scry-request (need (path-to-scry-request t.wire))
:: scry-result: parse a (unit cage) from :sign
::
:: If the result is `~`, the requested resource was not available.
@ -6314,9 +6319,9 @@
?~ riot-sign
~
`r.u.riot-sign
:: if spurious Clay response, :ducts will be empty, causing no-op
::
=/ ducts=(list ^duct)
~| [%ford-take-missing-scry-request scry-request]
(get-request-ducts pending-scrys.state.ax scry-request)
::
=| moves=(list move)

View File

@ -38,6 +38,7 @@
%watch-as
%poke
%leave
%missing
==
--
|%
@ -54,7 +55,7 @@
++ state
$: :: state version
::
%3
%4
:: agents by ship
::
=agents
@ -610,12 +611,16 @@
[%a %done *]
=^ remote-request outstanding.agents.state
?~ t.t.t.wire
=/ full-wire sys+wire
=/ stand
%+ ~(gut by outstanding.agents.state) [sys+wire hen]
*(qeu remote-request)
~| [sys+wire=wire hen=hen stand=stand outs=outstanding.agents.state]
%+ ~(gut by outstanding.agents.state) [full-wire hen]
:: default is do nothing; should only hit if cleared queue
:: in +load 3-to-4
::
(~(put to *(qeu remote-request)) %missing)
~| [full-wire=full-wire hen=hen stand=stand outs=outstanding.agents.state]
=^ rr stand ~(get to stand)
[rr (~(put by outstanding.agents.state) [wire hen] stand)]
[rr (~(put by outstanding.agents.state) [full-wire hen] stand)]
:: non-null case of wire is old, remove on next breach after
:: 2019/12
::
@ -631,6 +636,7 @@
%watch (mo-give %unto %watch-ack err)
%poke (mo-give %unto %poke-ack err)
%leave mo-core
%missing (mo-give:(mo-give %unto %watch-ack err) %unto %poke-ack err)
==
::
[%a %boon *]
@ -1476,12 +1482,7 @@
^- [(list move) _gall-payload]
::
~| [%gall-call-failed duct q.hic]
:: make sure our task is hard
::
=/ =task:able
?. ?=(%soft -.q.hic)
q.hic
;; task:able p.q.hic
=/ =task:able ((harden task:able) q.hic)
::
=/ initialised (mo-abed:mo duct)
?- -.task
@ -1581,16 +1582,32 @@
=? all-state ?=(%2 -.all-state)
(state-2-to-3 all-state)
::
?> ?=(%3 -.all-state)
=? all-state ?=(%3 -.all-state)
(state-3-to-4 all-state)
::
?> ?=(%4 -.all-state)
gall-payload(state all-state)
::
:: +all-state: upgrade path
::
++ all-state $%(state-0 state-1 state-2 ^state)
++ all-state $%(state-0 state-1 state-2 state-3 ^state)
::
++ state-3-to-4
|= =state-3
^- ^state
%= state-3
- %4
outstanding.agents ~
==
::
++ state-3
$: %3
=agents
==
::
++ state-2-to-3
|= =state-2
^- ^state
^- state-3
%= state-2
- %3
running.agents-2

View File

@ -306,11 +306,7 @@
|= [=duct type=* wrapped-task=(hobo task:able)]
^- [(list move) _light-gate]
::
=/ task=task:able
?. ?=(%soft -.wrapped-task)
wrapped-task
~| [%p-wrapped-task p.wrapped-task]
;;(task:able p.wrapped-task)
=/ task=task:able ((harden task:able) wrapped-task)
:: %crud: notifies us of an event failure
::
?: ?=(%crud -.task)

View File

@ -968,10 +968,7 @@
hic/(hypo (hobo task:able))
==
^- [(list move) _..^$]
=/ =task:able
?. ?=($soft -.q.hic)
q.hic
(task:able p.q.hic)
=/ =task:able ((harden task:able) q.hic)
=^ did lex
abet:(~(call of [our now eny] lex) hen task)
[did ..^$]

View File

@ -0,0 +1,239 @@
:: tests for lsp JSON parsing
/- lsp-sur=language-server
/+ *test, *language-server-json
=, enjs:format
|%
::
++ position
[5 3]
++ position-jon
^- json
:- %o
%: malt
['character' %n '3']
['line' %n '5']
~
==
::
++ range
[position position]
::
++ range-jon
^- json
:- %o
%: malt
['start' position-jon]
['end' position-jon]
~
==
::
++ change-jon
^- json
:- %o
%: malt
['text' `json`[%s `@t`'text']]
['rangeLength' [%n '3']]
['range' range-jon]
~
==
::
++ changes-jon
^- json
:- %a
^- (list json)
[change-jon ~]
::
++ text-document-item
^- text-document-item:lsp-sur
['file://' `1 'text']
::
++ text-document-item-jon
^- json
:- %o
%: malt
['uri' `json`[%s 'file://']]
['version' `json`[%n '1']]
['text' `json`[%s 'text']]
~
==
::
++ text-document-id
^- versioned-doc-id:lsp-sur
['file://' `1]
::
++ text-document-id-jon
^- json
:- %o
%: malt
['uri' `json`[%s 'file://']]
['version' `json`[%n '1']]
~
==
++ diagnostic
^- diagnostic:lsp-sur
[range 1 'Syntax Error']
::
++ diagnostic-jon
^- json
:- %o
%: malt
['range' range-jon]
['severity' `json`[%n '1']]
['message' `json`[%s 'Syntax Error']]
~
==
::
++ completion-item
^- completion-item:lsp-sur
['label' 1 'detail' 'doc' 'snippet' 1]
::
++ completion-item-jon
^- json
%: pairs
label+s+'label'
detail+s+'detail'
kind+n+'1'
documentation+s+'doc'
'insertText'^s+'snippet'
'insertTextFormat'^n+'1'
~
==
::
++ make-notification-jon
|= [method=@t params=json]
^- json
%: pairs
['method' `json`[%s method]]
params+params
~
==
++ make-request-jon
|= [id=@t method=@t params=json]
^- json
%: pairs
['id' `json`[%s id]]
['method' `json`[%s method]]
params+params
~
==
++ make-response-jon
|= [id=@t result=json]
%: pairs
id+s+id
result+result
~
==
::
:: Notifications
::
++ test-parse-did-change
%+ expect-eq
!> ^- all:notification:lsp-sur
[%text-document--did-change text-document-id [[~ [[5 3] [5 3]]] `3 'text']~]
!> %- notification:dejs
%+ make-notification-jon 'textDocument/didChange'
:- %o
%: malt
['contentChanges' changes-jon]
['textDocument' text-document-id-jon]
~
==
::
++ test-parse-did-save
%+ expect-eq
!> ^- all:notification:lsp-sur
[%text-document--did-save text-document-id]
!> %- notification:dejs
%+ make-notification-jon 'textDocument/didSave'
:- %o
%: malt
['textDocument' text-document-id-jon]
~
==
::
++ test-parse-did-close
%+ expect-eq
!> ^- all:notification:lsp-sur
[%text-document--did-close text-document-id]
!> %- notification:dejs
%+ make-notification-jon 'textDocument/didClose'
:- %o
%: malt
['textDocument' text-document-id-jon]
~
==
::
++ test-parse-did-open
%+ expect-eq
!> ^- all:notification:lsp-sur
[%text-document--did-open text-document-item]
!> %- notification:dejs
%+ make-notification-jon 'textDocument/didOpen'
:- %o
%: malt
['textDocument' text-document-item-jon]
~
==
::
:: Requests
::
++ test-parse-hover
%+ expect-eq
!> ^- all:request:lsp-sur
[%text-document--hover '3' position text-document-id]
!> %- request:dejs
^- json
%^ make-request-jon '3' 'textDocument/hover'
:- %o
%: malt
['position' position-jon]
['textDocument' text-document-id-jon]
~
==
++ test-parse-completion
%+ expect-eq
!> ^- all:request:lsp-sur
[%text-document--completion '3' position text-document-id]
!> %- request:dejs
^- json
%^ make-request-jon '3' 'textDocument/completion'
:- %o
%: malt
['position' position-jon]
['textDocument' text-document-id-jon]
~
==
:: to JSON
::
:: notifications
::
++ test-enjs-publish-diagnostics
%+ expect-eq
!> %- notification:enjs
[%text-document--publish-diagnostics 'file://' [diagnostic ~]]
!> ^- json
%+ make-notification-jon 'textDocument/publishDiagnostics'
:- %o
%: malt
['uri' `json`[%s 'file://']]
['diagnostics' `json`[%a [diagnostic-jon ~]]]
~
==
:: responses
++ test-enjs-hover
%+ expect-eq
!> %- response:enjs
[%text-document--hover '1' `'text']
!> ^- json
%+ make-response-jon '1'
%+ frond 'contents'
s+'text'
::
++ test-enjs-completion
%+ expect-eq
!> %- response:enjs
[%text-document--completion '1' ~[completion-item]]
!> ^- json
%+ make-response-jon '1'
[%a ~[completion-item-jon]]
--

2
pkg/herb/.gitattributes vendored Normal file
View File

@ -0,0 +1,2 @@
.gitattributes export-ignore
default.nix export-ignore

1
pkg/herb/.gitignore vendored
View File

@ -1 +0,0 @@
/result

52
pkg/hs/default.nix Normal file
View File

@ -0,0 +1,52 @@
# Run using:
#
# $(nix-build --no-link -A fullBuildScript)
{
stack2nix-output-path ? "custom-stack2nix-output.nix",
}:
let
cabalPackageName = "urbit-king";
compiler = "ghc865"; # matching stack.yaml
# Pin static-haskell-nix version.
static-haskell-nix =
if builtins.pathExists ../.in-static-haskell-nix
then toString ../. # for the case that we're in static-haskell-nix itself, so that CI always builds the latest version.
# Update this hash to use a different `static-haskell-nix` version:
else fetchTarball https://github.com/nh2/static-haskell-nix/archive/b402b38c3af2300e71caeebe51b5e4e1ae2e924c.tar.gz;
# Pin nixpkgs version
# By default to the one `static-haskell-nix` provides, but you may also give
# your own as long as it has the necessary patches, using e.g.
# pkgs = import (fetchTarball https://github.com/nh2/nixpkgs/archive/aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa123.tar.gz) {};
pkgs = import "${static-haskell-nix}/nixpkgs.nix";
stack2nix-script = import "${static-haskell-nix}/static-stack2nix-builder/stack2nix-script.nix" {
inherit pkgs;
stack-project-dir = toString ./.; # where stack.yaml is
hackageSnapshot = "2020-01-20T00:00:00Z"; # pins e.g. extra-deps without hashes or revisions
};
static-stack2nix-builder = import "${static-haskell-nix}/static-stack2nix-builder/default.nix" {
normalPkgs = pkgs;
inherit cabalPackageName compiler stack2nix-output-path;
# disableOptimization = true; # for compile speed
};
# Full invocation, including pinning `nix` version itself.
fullBuildScript = pkgs.writeScript "stack2nix-and-build-script.sh" ''
#!/usr/bin/env bash
set -eu -o pipefail
STACK2NIX_OUTPUT_PATH=$(${stack2nix-script})
export NIX_PATH=nixpkgs=${pkgs.path}
${pkgs.nix}/bin/nix-build --no-link -A static_package --argstr stack2nix-output-path "$STACK2NIX_OUTPUT_PATH" "$@"
'';
in
{
static_package = static-stack2nix-builder.static_package;
inherit fullBuildScript;
# For debugging:
inherit stack2nix-script;
inherit static-stack2nix-builder;
}

12
pkg/hs/lmdb-static/.gitignore vendored Normal file
View File

@ -0,0 +1,12 @@
dist
cabal-dev
*.o
*.hi
*.chi
*.chs.h
.virtualenv
.hsenv
.cabal-sandbox/
cabal.sandbox.config
cabal.config
*~

View File

@ -0,0 +1,24 @@
Copyright (c) 2014, David Barbour
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice, this
list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

View File

@ -0,0 +1,13 @@
This is a hack to avoid a dynamic dependency on lmdb:
a vendoring of `haskell-lmdb` and `lmdb`, modified to include
the C build of `lmdb` statically in `haskell-lmdb`.
```
haskell-lmdb:
repo: https://github.com/dmbarbour/haskell-lmdb.git
hash: 1e562429874919d445576c87cf118d7de5112b5b
lmdb:
repo: https://github.com/LMDB/lmdb.git
hash: c3e6b4209eed13af4a3670e5f04f42169c08e5c6
```

View File

@ -0,0 +1,3 @@
import Distribution.Simple
main = defaultMain

File diff suppressed because it is too large

11199
pkg/hs/lmdb-static/cbits/mdb.c Normal file

File diff suppressed because it is too large

View File

@ -0,0 +1,421 @@
/** @file midl.c
* @brief ldap bdb back-end ID List functions */
/* $OpenLDAP$ */
/* This work is part of OpenLDAP Software <http://www.openldap.org/>.
*
* Copyright 2000-2019 The OpenLDAP Foundation.
* Portions Copyright 2001-2018 Howard Chu, Symas Corp.
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted only as authorized by the OpenLDAP
* Public License.
*
* A copy of this license is available in the file LICENSE in the
* top-level directory of the distribution or, alternatively, at
* <http://www.OpenLDAP.org/license.html>.
*/
#include <limits.h>
#include <string.h>
#include <stdlib.h>
#include <errno.h>
#include <sys/types.h>
#include "midl.h"
/** @defgroup internal LMDB Internals
* @{
*/
/** @defgroup idls ID List Management
* @{
*/
#define CMP(x,y) ( (x) < (y) ? -1 : (x) > (y) )
unsigned mdb_midl_search( MDB_IDL ids, MDB_ID id )
{
/*
* binary search of id in ids
* if found, returns position of id
* if not found, returns first position greater than id
*/
unsigned base = 0;
unsigned cursor = 1;
int val = 0;
unsigned n = ids[0];
while( 0 < n ) {
unsigned pivot = n >> 1;
cursor = base + pivot + 1;
val = CMP( ids[cursor], id );
if( val < 0 ) {
n = pivot;
} else if ( val > 0 ) {
base = cursor;
n -= pivot + 1;
} else {
return cursor;
}
}
if( val > 0 ) {
++cursor;
}
return cursor;
}
#if 0 /* superseded by append/sort */
int mdb_midl_insert( MDB_IDL ids, MDB_ID id )
{
unsigned x, i;
x = mdb_midl_search( ids, id );
assert( x > 0 );
if( x < 1 ) {
/* internal error */
return -2;
}
if ( x <= ids[0] && ids[x] == id ) {
/* duplicate */
assert(0);
return -1;
}
if ( ++ids[0] >= MDB_IDL_DB_MAX ) {
/* no room */
--ids[0];
return -2;
} else {
/* insert id */
for (i=ids[0]; i>x; i--)
ids[i] = ids[i-1];
ids[x] = id;
}
return 0;
}
#endif
MDB_IDL mdb_midl_alloc(int num)
{
MDB_IDL ids = malloc((num+2) * sizeof(MDB_ID));
if (ids) {
*ids++ = num;
*ids = 0;
}
return ids;
}
void mdb_midl_free(MDB_IDL ids)
{
if (ids)
free(ids-1);
}
void mdb_midl_shrink( MDB_IDL *idp )
{
MDB_IDL ids = *idp;
if (*(--ids) > MDB_IDL_UM_MAX &&
(ids = realloc(ids, (MDB_IDL_UM_MAX+2) * sizeof(MDB_ID))))
{
*ids++ = MDB_IDL_UM_MAX;
*idp = ids;
}
}
static int mdb_midl_grow( MDB_IDL *idp, int num )
{
MDB_IDL idn = *idp-1;
/* grow it */
idn = realloc(idn, (*idn + num + 2) * sizeof(MDB_ID));
if (!idn)
return ENOMEM;
*idn++ += num;
*idp = idn;
return 0;
}
int mdb_midl_need( MDB_IDL *idp, unsigned num )
{
MDB_IDL ids = *idp;
num += ids[0];
if (num > ids[-1]) {
num = (num + num/4 + (256 + 2)) & -256;
if (!(ids = realloc(ids-1, num * sizeof(MDB_ID))))
return ENOMEM;
*ids++ = num - 2;
*idp = ids;
}
return 0;
}
int mdb_midl_append( MDB_IDL *idp, MDB_ID id )
{
MDB_IDL ids = *idp;
/* Too big? */
if (ids[0] >= ids[-1]) {
if (mdb_midl_grow(idp, MDB_IDL_UM_MAX))
return ENOMEM;
ids = *idp;
}
ids[0]++;
ids[ids[0]] = id;
return 0;
}
int mdb_midl_append_list( MDB_IDL *idp, MDB_IDL app )
{
MDB_IDL ids = *idp;
/* Too big? */
if (ids[0] + app[0] >= ids[-1]) {
if (mdb_midl_grow(idp, app[0]))
return ENOMEM;
ids = *idp;
}
memcpy(&ids[ids[0]+1], &app[1], app[0] * sizeof(MDB_ID));
ids[0] += app[0];
return 0;
}
int mdb_midl_append_range( MDB_IDL *idp, MDB_ID id, unsigned n )
{
MDB_ID *ids = *idp, len = ids[0];
/* Too big? */
if (len + n > ids[-1]) {
if (mdb_midl_grow(idp, n | MDB_IDL_UM_MAX))
return ENOMEM;
ids = *idp;
}
ids[0] = len + n;
ids += len;
while (n)
ids[n--] = id++;
return 0;
}
void mdb_midl_xmerge( MDB_IDL idl, MDB_IDL merge )
{
MDB_ID old_id, merge_id, i = merge[0], j = idl[0], k = i+j, total = k;
idl[0] = (MDB_ID)-1; /* delimiter for idl scan below */
old_id = idl[j];
while (i) {
merge_id = merge[i--];
for (; old_id < merge_id; old_id = idl[--j])
idl[k--] = old_id;
idl[k--] = merge_id;
}
idl[0] = total;
}
/* Quicksort + Insertion sort for small arrays */
#define SMALL 8
#define MIDL_SWAP(a,b) { itmp=(a); (a)=(b); (b)=itmp; }
void
mdb_midl_sort( MDB_IDL ids )
{
/* Max possible depth of int-indexed tree * 2 items/level */
int istack[sizeof(int)*CHAR_BIT * 2];
int i,j,k,l,ir,jstack;
MDB_ID a, itmp;
ir = (int)ids[0];
l = 1;
jstack = 0;
for(;;) {
if (ir - l < SMALL) { /* Insertion sort */
for (j=l+1;j<=ir;j++) {
a = ids[j];
for (i=j-1;i>=1;i--) {
if (ids[i] >= a) break;
ids[i+1] = ids[i];
}
ids[i+1] = a;
}
if (jstack == 0) break;
ir = istack[jstack--];
l = istack[jstack--];
} else {
k = (l + ir) >> 1; /* Choose median of left, center, right */
MIDL_SWAP(ids[k], ids[l+1]);
if (ids[l] < ids[ir]) {
MIDL_SWAP(ids[l], ids[ir]);
}
if (ids[l+1] < ids[ir]) {
MIDL_SWAP(ids[l+1], ids[ir]);
}
if (ids[l] < ids[l+1]) {
MIDL_SWAP(ids[l], ids[l+1]);
}
i = l+1;
j = ir;
a = ids[l+1];
for(;;) {
do i++; while(ids[i] > a);
do j--; while(ids[j] < a);
if (j < i) break;
MIDL_SWAP(ids[i],ids[j]);
}
ids[l+1] = ids[j];
ids[j] = a;
jstack += 2;
if (ir-i+1 >= j-l) {
istack[jstack] = ir;
istack[jstack-1] = i;
ir = j-1;
} else {
istack[jstack] = j-1;
istack[jstack-1] = l;
l = i;
}
}
}
}
unsigned mdb_mid2l_search( MDB_ID2L ids, MDB_ID id )
{
/*
* binary search of id in ids
* if found, returns position of id
* if not found, returns first position greater than id
*/
unsigned base = 0;
unsigned cursor = 1;
int val = 0;
unsigned n = (unsigned)ids[0].mid;
while( 0 < n ) {
unsigned pivot = n >> 1;
cursor = base + pivot + 1;
val = CMP( id, ids[cursor].mid );
if( val < 0 ) {
n = pivot;
} else if ( val > 0 ) {
base = cursor;
n -= pivot + 1;
} else {
return cursor;
}
}
if( val > 0 ) {
++cursor;
}
return cursor;
}
int mdb_mid2l_insert( MDB_ID2L ids, MDB_ID2 *id )
{
unsigned x, i;
x = mdb_mid2l_search( ids, id->mid );
if( x < 1 ) {
/* internal error */
return -2;
}
if ( x <= ids[0].mid && ids[x].mid == id->mid ) {
/* duplicate */
return -1;
}
if ( ids[0].mid >= MDB_IDL_UM_MAX ) {
/* too big */
return -2;
} else {
/* insert id */
ids[0].mid++;
for (i=(unsigned)ids[0].mid; i>x; i--)
ids[i] = ids[i-1];
ids[x] = *id;
}
return 0;
}
int mdb_mid2l_append( MDB_ID2L ids, MDB_ID2 *id )
{
/* Too big? */
if (ids[0].mid >= MDB_IDL_UM_MAX) {
return -2;
}
ids[0].mid++;
ids[ids[0].mid] = *id;
return 0;
}
#ifdef MDB_VL32
unsigned mdb_mid3l_search( MDB_ID3L ids, MDB_ID id )
{
/*
* binary search of id in ids
* if found, returns position of id
* if not found, returns first position greater than id
*/
unsigned base = 0;
unsigned cursor = 1;
int val = 0;
unsigned n = (unsigned)ids[0].mid;
while( 0 < n ) {
unsigned pivot = n >> 1;
cursor = base + pivot + 1;
val = CMP( id, ids[cursor].mid );
if( val < 0 ) {
n = pivot;
} else if ( val > 0 ) {
base = cursor;
n -= pivot + 1;
} else {
return cursor;
}
}
if( val > 0 ) {
++cursor;
}
return cursor;
}
int mdb_mid3l_insert( MDB_ID3L ids, MDB_ID3 *id )
{
unsigned x, i;
x = mdb_mid3l_search( ids, id->mid );
if( x < 1 ) {
/* internal error */
return -2;
}
if ( x <= ids[0].mid && ids[x].mid == id->mid ) {
/* duplicate */
return -1;
}
/* insert id */
ids[0].mid++;
for (i=(unsigned)ids[0].mid; i>x; i--)
ids[i] = ids[i-1];
ids[x] = *id;
return 0;
}
#endif /* MDB_VL32 */
/** @} */
/** @} */

View File

@ -0,0 +1,200 @@
/** @file midl.h
* @brief LMDB ID List header file.
*
* This file was originally part of back-bdb but has been
* modified for use in libmdb. Most of the macros defined
* in this file are unused, just left over from the original.
*
* This file is only used internally in libmdb and its definitions
* are not exposed publicly.
*/
/* $OpenLDAP$ */
/* This work is part of OpenLDAP Software <http://www.openldap.org/>.
*
* Copyright 2000-2019 The OpenLDAP Foundation.
* Portions Copyright 2001-2019 Howard Chu, Symas Corp.
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted only as authorized by the OpenLDAP
* Public License.
*
* A copy of this license is available in the file LICENSE in the
* top-level directory of the distribution or, alternatively, at
* <http://www.OpenLDAP.org/license.html>.
*/
#ifndef _MDB_MIDL_H_
#define _MDB_MIDL_H_
#include "lmdb.h"
#ifdef __cplusplus
extern "C" {
#endif
/** @defgroup internal LMDB Internals
* @{
*/
/** @defgroup idls ID List Management
* @{
*/
/** A generic unsigned ID number. These were entryIDs in back-bdb.
* Preferably it should have the same size as a pointer.
*/
typedef mdb_size_t MDB_ID;
/** An IDL is an ID List, a sorted array of IDs. The first
* element of the array is a counter for how many actual
* IDs are in the list. In the original back-bdb code, IDLs are
* sorted in ascending order. For libmdb IDLs are sorted in
* descending order.
*/
typedef MDB_ID *MDB_IDL;
/* IDL sizes - likely should be even bigger
* limiting factors: sizeof(ID), thread stack size
*/
#define MDB_IDL_LOGN 16 /* DB_SIZE is 2^16, UM_SIZE is 2^17 */
#define MDB_IDL_DB_SIZE (1<<MDB_IDL_LOGN)
#define MDB_IDL_UM_SIZE (1<<(MDB_IDL_LOGN+1))
#define MDB_IDL_DB_MAX (MDB_IDL_DB_SIZE-1)
#define MDB_IDL_UM_MAX (MDB_IDL_UM_SIZE-1)
#define MDB_IDL_SIZEOF(ids) (((ids)[0]+1) * sizeof(MDB_ID))
#define MDB_IDL_IS_ZERO(ids) ( (ids)[0] == 0 )
#define MDB_IDL_CPY( dst, src ) (memcpy( dst, src, MDB_IDL_SIZEOF( src ) ))
#define MDB_IDL_FIRST( ids ) ( (ids)[1] )
#define MDB_IDL_LAST( ids ) ( (ids)[(ids)[0]] )
/** Current max length of an #mdb_midl_alloc()ed IDL */
#define MDB_IDL_ALLOCLEN( ids ) ( (ids)[-1] )
/** Append ID to IDL. The IDL must be big enough. */
#define mdb_midl_xappend(idl, id) do { \
MDB_ID *xidl = (idl), xlen = ++(xidl[0]); \
xidl[xlen] = (id); \
} while (0)
/** Search for an ID in an IDL.
* @param[in] ids The IDL to search.
* @param[in] id The ID to search for.
* @return The index of the first ID greater than or equal to \b id.
*/
unsigned mdb_midl_search( MDB_IDL ids, MDB_ID id );
/** Allocate an IDL.
* Allocates memory for an IDL of the given size.
* @return IDL on success, NULL on failure.
*/
MDB_IDL mdb_midl_alloc(int num);
/** Free an IDL.
* @param[in] ids The IDL to free.
*/
void mdb_midl_free(MDB_IDL ids);
/** Shrink an IDL.
* Return the IDL to the default size if it has grown larger.
* @param[in,out] idp Address of the IDL to shrink.
*/
void mdb_midl_shrink(MDB_IDL *idp);
/** Make room for num additional elements in an IDL.
* @param[in,out] idp Address of the IDL.
* @param[in] num Number of elements to make room for.
* @return 0 on success, ENOMEM on failure.
*/
int mdb_midl_need(MDB_IDL *idp, unsigned num);
/** Append an ID onto an IDL.
* @param[in,out] idp Address of the IDL to append to.
* @param[in] id The ID to append.
* @return 0 on success, ENOMEM if the IDL is too large.
*/
int mdb_midl_append( MDB_IDL *idp, MDB_ID id );
/** Append an IDL onto an IDL.
* @param[in,out] idp Address of the IDL to append to.
* @param[in] app The IDL to append.
* @return 0 on success, ENOMEM if the IDL is too large.
*/
int mdb_midl_append_list( MDB_IDL *idp, MDB_IDL app );
/** Append an ID range onto an IDL.
* @param[in,out] idp Address of the IDL to append to.
* @param[in] id The lowest ID to append.
* @param[in] n Number of IDs to append.
* @return 0 on success, ENOMEM if the IDL is too large.
*/
int mdb_midl_append_range( MDB_IDL *idp, MDB_ID id, unsigned n );
/** Merge an IDL onto an IDL. The destination IDL must be big enough.
* @param[in] idl The IDL to merge into.
* @param[in] merge The IDL to merge.
*/
void mdb_midl_xmerge( MDB_IDL idl, MDB_IDL merge );
/** Sort an IDL.
* @param[in,out] ids The IDL to sort.
*/
void mdb_midl_sort( MDB_IDL ids );
/** An ID2 is an ID/pointer pair.
*/
typedef struct MDB_ID2 {
MDB_ID mid; /**< The ID */
void *mptr; /**< The pointer */
} MDB_ID2;
/** An ID2L is an ID2 List, a sorted array of ID2s.
* The first element's \b mid member is a count of how many actual
* elements are in the array. The \b mptr member of the first element is unused.
* The array is sorted in ascending order by \b mid.
*/
typedef MDB_ID2 *MDB_ID2L;
/** Search for an ID in an ID2L.
* @param[in] ids The ID2L to search.
* @param[in] id The ID to search for.
* @return The index of the first ID2 whose \b mid member is greater than or equal to \b id.
*/
unsigned mdb_mid2l_search( MDB_ID2L ids, MDB_ID id );
/** Insert an ID2 into a ID2L.
* @param[in,out] ids The ID2L to insert into.
* @param[in] id The ID2 to insert.
* @return 0 on success, -1 if the ID was already present in the ID2L.
*/
int mdb_mid2l_insert( MDB_ID2L ids, MDB_ID2 *id );
/** Append an ID2 into a ID2L.
* @param[in,out] ids The ID2L to append into.
* @param[in] id The ID2 to append.
* @return 0 on success, -2 if the ID2L is too big.
*/
int mdb_mid2l_append( MDB_ID2L ids, MDB_ID2 *id );
#ifdef MDB_VL32
typedef struct MDB_ID3 {
MDB_ID mid; /**< The ID */
void *mptr; /**< The pointer */
unsigned int mcnt; /**< Number of pages */
unsigned int mref; /**< Refcounter */
} MDB_ID3;
typedef MDB_ID3 *MDB_ID3L;
unsigned mdb_mid3l_search( MDB_ID3L ids, MDB_ID id );
int mdb_mid3l_insert( MDB_ID3L ids, MDB_ID3 *id );
#endif /* MDB_VL32 */
/** @} */
/** @} */
#ifdef __cplusplus
}
#endif
#endif /* _MDB_MIDL_H_ */
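The header above documents IDLs as counted, sorted arrays: element 0 holds the number of IDs, elements 1..n hold the IDs themselves, and the allocated capacity sits at index -1 (see MDB_IDL_ALLOCLEN). A minimal caller sketch using only the functions declared above; the IDs and size are made up for illustration, and a real caller would also check the ENOMEM returns:

```c
#include <stdio.h>
#include "midl.h"

int main(void)
{
    /* room for 8 IDs; ids[0] starts at 0 and tracks the count */
    MDB_IDL ids = mdb_midl_alloc(8);
    if (!ids)
        return 1;
    /* append grows the list automatically once the capacity at ids[-1] is hit */
    mdb_midl_append(&ids, 42);
    mdb_midl_append(&ids, 7);
    mdb_midl_append(&ids, 19);
    /* libmdb IDLs are kept in descending order */
    mdb_midl_sort(ids);
    for (unsigned i = 1; i <= ids[0]; i++)
        printf("%lu\n", (unsigned long)ids[i]);
    mdb_midl_free(ids);
    return 0;
}
```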

File diff suppressed because it is too large

View File

@ -0,0 +1,89 @@
Name: lmdb-static
Version: 0.2.5
Synopsis: Lightning MDB bindings
Category: Database
Description:
LMDB is a read-optimized Berkeley DB replacement developed by Symas
for the OpenLDAP project. LMDB has impressive performance characteristics
and a friendly BSD-style OpenLDAP license. See <http://symas.com/mdb/>.
.
This library has Haskell bindings to the LMDB library. You must install
the lmdb development files before installing this library,
e.g. `sudo apt-get install liblmdb-dev` works for Ubuntu 14.04.
.
For now, only a low level interface is provided, and the author is moving
on to use LMDB rather than further develop its bindings. If a higher level
API is desired, please consider contributing, or develop a separate package.
Author: David Barbour
Maintainer: dmbarbour@gmail.com
Homepage: http://github.com/dmbarbour/haskell-lmdb
Package-Url:
Copyright: (c) 2014 by David Barbour
License: BSD2
license-file: LICENSE
Stability: experimental
build-type: Simple
cabal-version: >= 1.16.0.3
Source-repository head
type: git
location: http://github.com/dmbarbour/haskell-lmdb.git
Library
hs-Source-Dirs: hsrc_lib
default-language: Haskell2010
Build-Depends: base (>= 4.6 && < 5), array
Build-Tools: hsc2hs
Exposed-Modules:
Database.LMDB.Raw
Include-dirs: cbits
Includes: lmdb.h midl.h
C-Sources: cbits/mdb.c cbits/midl.c
cc-options: -Wall -O2 -g -pthread -fPIC
ghc-options: -Wall -fprof-auto -fPIC
default-extensions: ApplicativeDo
, BangPatterns
, BlockArguments
, DataKinds
, DefaultSignatures
, DeriveAnyClass
, DeriveDataTypeable
, DeriveFoldable
, DeriveGeneric
, DeriveTraversable
, DerivingStrategies
, EmptyCase
, EmptyDataDecls
, FlexibleContexts
, FlexibleInstances
, FunctionalDependencies
, GADTs
, GeneralizedNewtypeDeriving
, LambdaCase
, MagicHash
, MultiParamTypeClasses
, NamedFieldPuns
, NoImplicitPrelude
, NumericUnderscores
, OverloadedStrings
, PartialTypeSignatures
, PatternSynonyms
, QuasiQuotes
, Rank2Types
, RankNTypes
, RecordWildCards
, ScopedTypeVariables
, StandaloneDeriving
, TemplateHaskell
, TupleSections
, TypeApplications
, TypeFamilies
, TypeOperators
, UnboxedTuples
, UnicodeSyntax
, ViewPatterns

3
pkg/hs/proto/.gitignore vendored Normal file
View File

@ -0,0 +1,3 @@
.stack-work/
proto.cabal
*~

21
pkg/hs/proto/LICENSE Normal file
View File

@ -0,0 +1,21 @@
The MIT License (MIT)
Copyright (c) 2016 urbit
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

41
pkg/hs/proto/app/Main.hs Normal file
View File

@ -0,0 +1,41 @@
module Main where
import ClassyPrelude
import Control.Lens ((&))
import Untyped.Parser hiding (main)
import Untyped.CST
import Untyped.Hoon
import Untyped.Core
import Nock
import SimpleNoun
import Dashboard
import Text.Show.Pretty (pPrint)
import qualified Prelude as P
-------------------------------------------------------------------------------
main :: IO ()
main = (P.head <$> getArgs) >>= compileHoonTest
compileHoonTest :: Text -> IO ()
compileHoonTest ln = do
cst <- parse ln & \case
Left x -> error (unpack x)
Right x -> pure x
-- pPrint cst
hon <- pure $ hone cst
pPrint hon
exp <- pure $ desugar hon
pPrint exp
nok <- pure $ copy exp
putStrLn "==== input ===="
putStrLn ln
putStrLn "==== nock ===="
pPrint nok
putStrLn "==== output ===="
res <- runCare $ nock (A 140) nok
pPrint res

View File

@ -0,0 +1,135 @@
module Dashboard
( pattern FastAtom
, pattern FastHint
, Jet
, Dashboard (match)
, Freeboard
, Hashboard
, Fastboard
, Careboard
, runFree
, runHash
, runFast
, runCare
) where
import ClassyPrelude
import Control.Monad.State.Strict
import SimpleNoun
type Jet = Noun -> Noun
type JetName = Atom
type Hash = Int
pattern FastAtom = 1953718630 -- %fast
pattern FastHint id n =
C (A 11)
(C
(C (A FastAtom) (C (A 1) (A id)))
n)
-- | A context in which to run nock which supports jet lookup.
class Monad m => Dashboard m where
-- | Find the jet associated with the formula represented by the given noun,
-- if any.
match :: Noun -> m (Maybe Jet)
-- | A dashboard which doesn't jet.
newtype Freeboard a = Freeboard (Identity a)
deriving newtype Functor
deriving newtype Applicative
deriving newtype Monad
-- | A dashboard which looks for jets by formula hash
newtype Hashboard a = Hashboard (Identity a)
deriving newtype Functor
deriving newtype Applicative
deriving newtype Monad
-- | A dashboard which checks the head of formulas for "fast
-- hints" and uses the name contained in such a hint to look for jets.
newtype Fastboard a = Fastboard (Identity a)
deriving newtype Functor
deriving newtype Applicative
deriving newtype Monad
-- | A dashboard which uses both lookup strategies, checking for consistency
-- between them and that each fast hint is applied to a unique formula.
-- Violations of these principles are written to standard out.
newtype Careboard a = Careboard (StateT (HashMap JetName Noun) IO a)
deriving newtype Functor
deriving newtype Applicative
deriving newtype Monad
runFree :: Freeboard a -> a
runFree (Freeboard x) = runIdentity x
runHash :: Hashboard a -> a
runHash (Hashboard x) = runIdentity x
runFast :: Fastboard a -> a
runFast (Fastboard x) = runIdentity x
runCare :: Careboard a -> IO a
runCare (Careboard x) = evalStateT x mempty
instance Dashboard Freeboard where
match _ = Freeboard $ pure Nothing
instance Dashboard Hashboard where
match = Hashboard . pure . byHash . hash
instance Dashboard Fastboard where
match = Fastboard . \case
FastHint id n -> pure (byFast id)
_ -> pure Nothing
-- TODO maybe also detect hash collisions
instance Dashboard Careboard where
match = Careboard . \case
n@(FastHint nm _) -> case namely nm of
Just (h, j) -> do
when (h /= hash n) $
putStrLn ("careboard: jet " <> tshowA nm <> " should have its hash "
<> "updated from " <> tshow h <> " to " <> tshow (hash n))
get <&> lookup nm >>= \case
Just n' ->
when (n' /= n) $
putStrLn ("careboard: jet hint " <> tshowA nm <> " has been "
<> "detected on unequal formulae " <> tshow n
<> " and " <> tshow n' <> ", which is very bad")
Nothing -> modify' (insertMap nm n)
pure (Just j)
Nothing -> do
putStrLn ("careboard: unmatched fast hint: " ++ tshowA nm)
pure $ byHash $ hash n
n -> pure $ byHash $ hash n
byFast :: JetName -> Maybe Jet
byFast = flip lookup fast
where
fast :: HashMap JetName Jet
fast = mapFromList $ map (\(n, _, j) -> (n, j)) jets
byHash :: Hash -> Maybe Jet
byHash = flip lookup hash
where
hash :: HashMap Hash Jet
hash = mapFromList $ map (\(_, h, j) -> (h, j)) jets
namely :: JetName -> Maybe (Hash, Jet)
namely = flip lookup fash
where
fash :: HashMap JetName (Hash, Jet)
fash = mapFromList $ map (\(n, h, j) -> (n, (h, j))) jets
tx = textToAtom
type Entry = (JetName, Hash, Jet)
-- | Your jets here
jets :: [Entry]
jets =
[ (tx "dec", 1520491622440108403, \(A a) -> trace "jetting" $ A (a - 1))
]

View File

@ -0,0 +1,302 @@
module Deppy.Core where
import ClassyPrelude
import Bound
import Data.Deriving (deriveEq1, deriveOrd1, deriveRead1, deriveShow1)
import Data.Maybe (fromJust)
import Data.Set (isSubsetOf)
import qualified Data.Set as Set
import Numeric.Natural
type Typ = Exp
data Exp a
= Var a
-- types
| Typ
| Fun (Abs a)
| Cel (Abs a)
| Wut (Set Tag)
-- introduction forms
| Lam (Abs a)
| Cns (Exp a) (Exp a)
| Tag Tag
-- elimination forms
| App (Exp a) (Exp a)
| Hed (Exp a)
| Tal (Exp a)
| Cas (Typ a) (Exp a) (Map Tag (Exp a))
-- recursion, flow control
| Let (Exp a) (Scope () Exp a)
| Rec (Abs a)
deriving (Functor, Foldable, Traversable)
type Tag = Natural
data Abs a = Abs
{ spec :: Typ a
, body :: Scope () Exp a
}
deriving (Functor, Foldable, Traversable)
deriveEq1 ''Abs
deriveOrd1 ''Abs
deriveRead1 ''Abs
deriveShow1 ''Abs
--makeBound ''Abs
deriveEq1 ''Exp
deriveOrd1 ''Exp
deriveRead1 ''Exp
deriveShow1 ''Exp
--makeBound ''Exp
deriving instance Eq a => Eq (Abs a)
deriving instance Ord a => Ord (Abs a)
deriving instance Read a => Read (Abs a)
deriving instance Show a => Show (Abs a)
deriving instance Eq a => Eq (Exp a)
deriving instance Ord a => Ord (Exp a)
deriving instance Read a => Read (Exp a)
deriving instance Show a => Show (Exp a)
instance Applicative Exp where
pure = Var
(<*>) = ap
instance Monad Exp where
return = Var
Var a >>= f = f a
Typ >>= _ = Typ
Fun a >>= f = Fun (bindAbs a f)
Cel a >>= f = Cel (bindAbs a f)
Wut ls >>= _ = Wut ls
Lam a >>= f = Lam (bindAbs a f)
Cns x y >>= f = Cns (x >>= f) (y >>= f)
Tag l >>= _ = Tag l
App x y >>= f = App (x >>= f) (y >>= f)
Hed x >>= f = Hed (x >>= f)
Tal x >>= f = Tal (x >>= f)
Cas t x cs >>= f = Cas (t >>= f) (x >>= f) (cs <&> (>>= f))
Let a b >>= f = Let (a >>= f) (b >>>= f)
Rec a >>= f = Rec (bindAbs a f)
bindAbs :: Abs a -> (a -> Exp b) -> Abs b
bindAbs (Abs s b) f = Abs (s >>= f) (b >>>= f)
lam :: Eq a => a -> Typ a -> Exp a -> Exp a
lam v t e = Lam (Abs t (abstract1 v e))
fun :: Eq a => a -> Typ a -> Typ a -> Typ a
fun v t u = Fun (Abs t (abstract1 v u))
fun_ :: Typ a -> Typ a -> Typ a
fun_ t u = Fun (Abs t (abstract (const Nothing) u))
cel :: Eq a => a -> Typ a -> Typ a -> Typ a
cel v t u = Cel (Abs t (abstract1 v u))
cel_ :: Typ a -> Typ a -> Typ a
cel_ t u = Cel (Abs t (abstract (const Nothing) u))
rec :: Eq a => a -> Typ a -> Exp a -> Exp a
rec v t e = Rec (Abs t (abstract1 v e))
ledt :: Eq a => a -> Exp a -> Exp a -> Exp a
ledt v e e' = Let e (abstract1 v e')
wut = Wut . setFromList
cas t e cs = Cas t e (mapFromList cs)
infixl 9 @:
(@:) = App
-- | typing environment
type Env a = a -> Typ a
extend :: (b -> Typ a) -> Env a -> Env (Var b a)
extend handleNewBindings oldEnv = \case
-- TODO can we use Scope to decrease the cost of this?
B v -> F <$> handleNewBindings v
F v -> F <$> oldEnv v
extend1 :: Typ a -> Env a -> Env (Var () a)
extend1 t = extend \() -> t
-- | amber rule assumptions
type Asm a = Set (Typ a, Typ a)
extendAsm :: (Ord a, Ord b) => Asm a -> Asm (Var b a)
extendAsm = Set.map \(t, u) -> (F <$> t, F <$> u)
-- | Remove types that mention variables that are no longer in scope
retractAsm :: (Ord a, Ord b) => Asm (Var b a) -> Asm a
retractAsm = foldMap wither
where
wither = \case
(cleanTyp -> Just t, cleanTyp -> Just u) -> singleton (t, u)
_ -> mempty
cleanTyp = traverse \case
F v -> pure v
B _ -> Nothing
type Typing = Maybe
-- TODO
-- - better errors
-- - state monad for Asm (how to handle polymorphic recursion?)
nest :: (Show a, Ord a) => Env a -> Typ a -> Typ a -> Typing ()
nest env = fmap void . go env mempty
where
go :: (Show a, Ord a) => Env a -> Asm a -> Typ a -> Typ a -> Typing (Asm a)
-- FIXME use a better more aggro normal form
go env asm0 (whnf -> t0) (whnf -> u0) =
if t0 == u0 || member (t0, u0) asm0
then pure asm0
else let asm = Set.insert (t0, u0) asm0 in
case (t0, u0) of
(Typ, Typ) -> pure asm
-- FIXME yeah actually I think this is wrong
-- we're comparing the type of a type variable with
-- (Var v, u) -> go env asm (env v) u
-- (t, Var v) -> go env asm t (env v)
-- following Cardelli 80something, we check the RHSs assuming
-- the putatively *lesser* of the LHSs for both
(Fun (Abs a b), Fun (Abs a' b')) -> do
asm' <- go env asm a' a
retractAsm <$>
go (extend1 a' env) (extendAsm asm') (fromScope b) (fromScope b')
(Cel (Abs a b), Cel (Abs a' b')) -> do
asm' <- go env asm a a'
retractAsm <$>
go (extend1 a env) (extendAsm asm') (fromScope b) (fromScope b')
(Wut ls, Wut ls') -> do
guard (ls `isSubsetOf` ls')
pure asm
-- TODO put into Typing errors
(Lam{}, _) -> error "nest: lambda"
(_, Lam{}) -> error "nest: lambda"
(Cns{}, _) -> error "nest: cons"
(_, Cns{}) -> error "nest: cons"
(Tag{}, _) -> error "nest: tag"
(_, Tag{}) -> error "nest: tag"
-- Special rule for the Cas eliminator to enable sums and products
(Cas _ e cs, Cas _ e' cs') -> do
guard (whnf e == whnf e')
Wut s <- infer env e
-- TODO I should thread changing asm through the traversal
-- but I can't be bothered right now. Perf regression.
asm <$ traverse_ chk (setToList s)
where
chk tag = case (lookup tag cs, lookup tag cs') of
(Just t, Just u) -> go env asm t u
_ -> error "the Spanish inquisition"
(Cas _ e cs, u) -> do
Wut s <- infer env e
-- TODO thread asms
asm <$ traverse_
(\tag -> go env asm (fromJust $ lookup tag cs) u)
s
(t, Cas _ e cs) -> do
Wut s <- infer env e
-- TODO thread asms
asm <$ traverse_
(\tag -> go env asm t (fromJust $ lookup tag cs))
s
(t@Cas{}, u) -> go env asm (whnf t) u
(t, u@Cas{}) -> go env asm t (whnf u)
(t@(Rec (Abs _ b)), u) -> go env asm (instantiate1 t b) u
(t, u@(Rec (Abs _ b))) -> go env asm t (instantiate1 u b)
_ -> Nothing
check :: (Show a, Ord a) => Env a -> Exp a -> Typ a -> Typing ()
check env e t = do
t' <- infer env e
nest env t' t
infer :: forall a. (Show a, Ord a) => Env a -> Exp a -> Typing (Typ a)
infer env = \case
Var v -> pure $ env v
Typ -> pure Typ
Fun (Abs t b) -> do
Typ <- infer env t
Typ <- infer (extend1 t env) (fromScope b)
pure Typ
Cel (Abs t b) -> do
Typ <- infer env t
Typ <- infer (extend1 t env) (fromScope b)
pure Typ
Wut _ -> pure Typ
Lam (Abs t b) -> do
-- TODO do I need (whnf -> Typ)? (and elsewhere)
Typ <- infer env t
(toScope -> t') <- infer (extend1 t env) (fromScope b)
pure $ Fun (Abs t t')
Cns x y -> do
-- Infer non-dependent pairs; if you want dependency, you must annotate
t <- infer env x
u <- infer env y
pure $ Cel (Abs t (abstract (const Nothing) u))
Tag t -> pure $ Wut (singleton t)
App x y -> do
Fun (Abs t b) <- infer env x
check env y t
pure $ whnf (instantiate1 y b)
Hed x -> do
Cel (Abs t _) <- infer env x
pure t
Tal x -> do
Cel (Abs _ u) <- infer env x
pure $ instantiate1 (whnf $ Hed $ x) u
Cas t x cs -> do
Typ <- infer env t
Wut ts <- infer env x
-- pretty restrictive - do we want?
guard (ts == keysSet cs)
traverse_ (\e -> check env e t) cs
pure t
-- Let e b -> do
-- -- TODO is below faster, or infer env (instantiate1 e b)?
-- t <- infer env e
-- instantiate1 e $ infer (extend1 t env) (fromScope b)
Rec (Abs t b) -> do
Typ <- infer env t
-- todo can F <$> be made faster?
check (extend1 t env) (fromScope b) (F <$> t)
pure t
whnf :: (Show a, Eq a) => Exp a -> Exp a
whnf = \case
App (whnf -> Lam (Abs _ b)) x -> whnf $ instantiate1 x b
Hed (whnf -> Cns x _) -> whnf x
Tal (whnf -> Cns _ y) -> whnf y
Cas _ (whnf -> Tag t) cs -> whnf $ fromJust $ lookup t cs
e@(Rec (Abs _ b)) -> whnf $ instantiate1 e b
e -> trace "sadface" e
{-
= Var a
-- types
| Typ
| Fun (Abs a)
| Cel (Abs a)
| Wut (Set Tag)
-- introduction forms
| Lam (Abs a)
| Cns (Exp a) (Exp a)
| Tag Tag
-- elimination forms
| App (Exp a) (Exp a)
| Hed (Exp a)
| Tal (Exp a)
| Cas (Typ a) (Exp a) (Map Tag (Exp a))
-- recursion
| Rec (Abs a)
-}
nf :: (Show a, Eq a) => Exp a -> Exp a
nf = traceShowId . \case
Typ -> Typ
_ -> undefined
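-- A minimal sketch of the typing interface above, assuming this module is
-- importable as Deppy.Core; the term, its type, and the variable names are
-- illustrative.
module DeppyExample where

import ClassyPrelude
import Deppy.Core

-- \(x : Type) -> x checked against the (non-dependent) function type
-- Type -> Type. Note that whnf traces "sadface" on forms it does not reduce,
-- so evaluating this prints some debug output.
idChecks :: Bool
idChecks = check env (lam "x" Typ (Var "x")) (fun_ Typ Typ) == Just ()
  where
    -- the term is closed, so the environment is never consulted
    env :: Env String
    env v = error ("unbound variable: " <> v)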

pkg/hs/proto/lib/Nock.hs Normal file

@ -0,0 +1,114 @@
module Nock where
import ClassyPrelude
import Dashboard
import SimpleNoun
data Nock
= NC Nock Nock -- ^ ^: autocons
| N0 Axis -- ^ 0, axis: tree addressing
| N1 Noun -- ^ 1, const
| N2 Nock Nock -- ^ 2, compose: compute subject, formula; apply
| N3 Nock -- ^ 3, is cell
| N4 Nock -- ^ 4, succ
| N5 Nock Nock -- ^ 5, eq
| N6 Nock Nock Nock -- ^ 6, if
| N7 Nock Nock -- ^ 7, then: =>
| N8 Nock Nock -- ^ 8, push: =+
| N9 Axis Nock -- ^ 9, invoke
| N10 (Axis, Nock) Nock -- ^ 10, edit
| N11 Hint Nock -- ^ 11, hint
| N12 Nock Nock -- ^ 12, scry
deriving (Eq, Ord, Read, Generic)
data Hint
= Tag Atom
| Assoc Atom Nock
deriving (Eq, Ord, Read, Show, Generic)
instance Hashable Nock
instance Hashable Hint
instance Show Nock where
show = show . nockToNoun
nockToNoun :: Nock -> Noun
nockToNoun = go
where
go = \case
NC f g -> C (go f) (go g)
N0 a -> C (A 0) (A a)
N1 n -> C (A 1) n
N2 f g -> C (A 2) (C (go f) (go g))
N3 f -> C (A 3) (go f)
N4 f -> C (A 4) (go f)
N5 f g -> C (A 5) (C (go f) (go g))
N6 f g h -> C (A 6) (C (go f) (C (go g) (go h)))
N7 f g -> C (A 7) (C (go f) (go g))
N8 f g -> C (A 8) (C (go f) (go g))
N9 a f -> C (A 9) (C (A a) (go f))
N10 (a, f) g -> C (A 10) (C (C (A a) (go f)) (go g))
N11 (Tag a) f -> C (A 11) (C (A a) (go f))
N11 (Assoc a f) g -> C (A 11) (C (C (A a) (go f)) (go g))
N12 f g -> C (A 12) (C (go f) (go g))
nounToNock :: Noun -> Nock
nounToNock = go
where
go = \case
A{} -> error "nounToNock: atom"
C n@C{} m -> NC (go n) (go m)
C (A op) n -> case op of
0 | (A a) <- n -> N0 a
1 -> N1 n
2 | (C m o) <- n -> N2 (go m) (go o)
3 -> N3 (go n)
4 -> N4 (go n)
5 | (C m o) <- n -> N5 (go m) (go o)
6 | (C m (C o p)) <- n -> N6 (go m) (go o) (go p)
7 | (C m o) <- n -> N7 (go m) (go o)
8 | (C m o) <- n -> N8 (go m) (go o)
9 | (C (A a) m) <- n -> N9 a (go m)
10 | (C (C (A a) m) o) <- n -> N10 (a, (go m)) (go o)
11 | (C (C (A a) m) o) <- n -> N11 (Assoc a (go m)) (go o)
| (C (A a) m) <- n -> N11 (Tag a) (go m)
12 | (C m o) <- n -> N12 (go m) (go o)
_ -> error ("nounToNock: invalid " <> show op <> " " <> show n)
-- | Nock interpreter
nock :: (Dashboard d) => Noun -> Nock -> d Noun
nock n = \case
NC f g -> C <$> nock n f <*> nock n g
N0 a -> pure $ axis a n
N1 n' -> pure n'
N2 sf ff -> do
s <- nock n sf
f <- nock n ff
match f >>= \case
Just jet -> pure (jet s)
Nothing -> nock s (nounToNock f)
N3 f -> nock n f <&> \case
C{} -> yes
A{} -> no
N4 f -> nock n f <&> \case
C{} -> error "nock: cannot increment cell"
A a -> A (a + 1)
N5 f g -> loob <$> ((==) <$> nock n f <*> nock n g)
N6 f g h -> nock n f >>= \case
(A 0) -> nock n g
(A 1) -> nock n h
_ -> error "nock: invalid test value"
N7 f g -> do
n' <- nock n f
nock n' g
N8 f g -> do
n' <- nock n f
nock (C n' n) g
N9 a f -> do
c <- nock n f
nock c (nounToNock (axis a c))
N10 (a, f) g -> edit a <$> nock n f <*> nock n g
N11 _ f -> nock n f
N12{} -> error "nock: scrying is not allowed"
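-- A minimal sketch of driving the interpreter above with the jet-free
-- dashboard; assumes the Nock, Dashboard and SimpleNoun modules as written.
module NockExample where

import ClassyPrelude
import Dashboard (runFree)
import Nock
import SimpleNoun

-- *[[40 2] 4 0 2]: fetch the head of the subject (axis 2) and increment it.
incrHead :: Noun
incrHead = runFree (nock (C (A 40) (A 2)) (N4 (N0 2)))
-- incrHead == A 41

-- The same formula rendered as a noun, via the conversion above.
incrHeadFormula :: Noun
incrHeadFormula = nockToNoun (N4 (N0 2))
-- incrHeadFormula == [4 0 2]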


@ -0,0 +1,122 @@
module SimpleNoun where
import ClassyPrelude
import Numeric.Natural
import qualified Urbit.Noun as N
type Atom = Natural
type Noun = Tree Atom
data Tree a
= A !a
| C !(Tree a) !(Tree a)
deriving (Eq, Ord, Read, Functor, Generic)
instance Hashable a => Hashable (Tree a)
data Fern a
= FernA !a
| FernF [Fern a]
toFern :: Tree a -> Fern a
toFern = \case
A a -> FernA a
C h t -> case toFern t of
a@FernA{} -> FernF [toFern h, a]
FernF fs -> FernF (toFern h : fs)
instance Show a => Show (Fern a) where
show = \case
FernA a -> show a
FernF xs -> "[" <> intercalate " " (map show xs) <> "]"
instance Show a => Show (Tree a) where
show = show . toFern
yes, no :: Noun
yes = A 0
no = A 1
loob :: Bool -> Noun
loob = \case
True -> yes
False -> no
textToAtom :: Text -> Atom
textToAtom t = case N.textToUtf8Atom t of
N.A a -> a
showA :: Atom -> String
showA a = show (N.A a)
tshowA :: Atom -> Text
tshowA = pack . showA
-- | Tree address
type Axis = Atom
data Dir = L | R
deriving (Eq, Ord, Enum, Read, Show)
type Path = [Dir]
-- some stuff from hoon.hoon
cap :: Axis -> Dir
cap = \case
2 -> L
3 -> R
a | a <= 1 -> error "cap: bad axis"
| otherwise -> cap (div a 2)
mas :: Axis -> Axis
mas = \case
2 -> 1
3 -> 1
a | a <= 1 -> error "mas: bad axis"
| otherwise -> (mod a 2) + 2 * mas (div a 2)
capMas :: Axis -> (Dir, Axis)
capMas = \case
2 -> (L, 1)
3 -> (R, 1)
a | a <= 1 -> error "capMas: bad axis"
| otherwise -> (d, (mod a 2) + 2 * r)
where
(d, r) = capMas (div a 2)
peg :: Axis -> Axis -> Axis
peg a = \case
1 -> a
2 -> a * 2
3 -> a * 2 + 1
b -> (mod b 2) + 2 * peg a (div b 2)
axis :: Axis -> Tree a -> Tree a
axis 1 n = n
axis (capMas -> (d, r)) (C n m) = case d of
L -> axis r n
R -> axis r m
axis a _ = error ("bad axis: " ++ show a)
edit :: Axis -> Tree a -> Tree a -> Tree a
edit 1 v n = v
edit (capMas -> (d, r)) v (C n m) = case d of
L -> C (edit r v n) m
R -> C n (edit r v m)
edit a _ _ = error ("bad edit: " ++ show a)
-- Write an axis as a binary number; e.g. 5 as 101.
-- The rule is: after dropping the 1 in the msb, you read from left to right.
-- 0 becomes L and 1 becomes R. So 5 becomes [L,R]
toPath :: Axis -> Path
toPath = \case
1 -> []
(capMas -> (d, r)) -> d : toPath r
toAxis :: Path -> Axis
toAxis = foldl' step 1
where
step r = \case
L -> 2 * r
R -> 2 * r + 1
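-- A small worked example of the axis helpers above; the nouns are illustrative.
module AxisExample where

import ClassyPrelude
import SimpleNoun

axisDemo :: Bool
axisDemo =
     toPath 5 == [L, R]                           -- 5 is 101: drop the leading 1
  && toAxis [L, R] == 5
  && peg 2 3 == 5                                 -- axis 3 within axis 2
  && axis 5 (C (C (A 1) (A 2)) (A 3)) == A 2      -- axis 5: the tail of the head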


@ -0,0 +1,54 @@
module Untyped.CST where
import ClassyPrelude
import Prelude (foldr1)
import SimpleNoun
import qualified Untyped.Hoon as H
import Untyped.Parser -- remove after we've moved the CST type
hone :: CST -> H.Hoon Sym
hone = go
where
go = \case
WutCol c d e -> H.WutCol (go c) (go d) (go e)
WutPat c d e -> H.WutPat (go c) (go d) (go e)
WutKet c d e -> H.WutKet (go c) (go d) (go e)
WutPam cs -> foldr H.WutPam (H.HAtom 0) $ map go cs
WutBar cs -> foldr H.WutBar (H.HAtom 1) $ map go cs
WutHep c pcs -> H.WutHep (go c) (map tr pcs)
TisFas s c d -> H.TisFas s (go c) (go d)
ColHep c d -> H.HCons (go c) (go d)
ColLus{} -> error "hone: offensive rune :+ -- use :*"
ColKet{} -> error "hone: offensive rune :^ -- use :*"
ColTar [] -> error "hone: empty :*"
ColTar cs -> foldr1 H.HCons $ map go cs
ColSig cs -> foldr H.HCons (H.HAtom 0) $ map go cs
BarTis s c -> H.BarTis s (go c)
BarHep r v i c -> H.BarHep r v (go i) (go c)
BarCen pcs -> H.BarCen (map tr pcs)
CenHep c d -> H.CenHep (go c) (go d)
CenDot c d -> H.CenDot (go c) (go d)
DotDot s c -> H.DotDot s (go c)
SigFas (go -> H.HAtom a) c -> H.SigFas a (go c)
SigFas{} -> error "hone: invalid ~/ tag"
ZapZap -> H.ZapZap
Tupl cs -> go (ColTar cs)
Var s -> H.HVar s
Atom a -> H.HAtom a
Tag tx -> H.HAtom (textToAtom tx)
Cord tx -> H.HAtom (textToAtom tx)
Tape tx -> undefined
Incr c -> H.DotLus (go c)
IncrIrr c -> H.DotLus (go c)
AppIrr c d -> H.CenHep (go c) (go d)
IsEq c d -> H.DotTis (go c) (go d)
IsEqIrr c d -> H.DotTis (go c) (go d)
Pam -> H.HAtom 0
Bar -> H.HAtom 1
Yes -> H.HAtom 0
No -> H.HAtom 1
Sig -> H.HAtom 0
tr (PatTar, c) = (H.Wild, go c)
tr (PatTag s, c) = (H.Exact (A $ textToAtom s), go c)


@ -0,0 +1,230 @@
module Untyped.Core where
import ClassyPrelude
import Bound
import Control.Monad.Writer hiding (fix)
import Data.Deriving (deriveEq1, deriveOrd1, deriveRead1, deriveShow1)
import qualified Data.Function as F
import Data.List (elemIndex)
import Data.Maybe (fromJust)
import qualified Data.Set as Set
import Data.Void
import Dashboard (pattern FastAtom)
import Nock
import SimpleNoun
type Nat = Int
data Exp a
= Var a
| App (Exp a) (Exp a)
| Lam (Scope () Exp a)
| Atm Atom
| Cel (Exp a) (Exp a)
| IsC (Exp a)
| Suc (Exp a)
| Eql (Exp a) (Exp a)
| Ift (Exp a) (Exp a) (Exp a)
| Let (Exp a) (Scope () Exp a)
| Jet Atom (Exp a)
| Fix (Scope () Exp a)
| Zap
deriving (Functor, Foldable, Traversable)
deriveEq1 ''Exp
deriveOrd1 ''Exp
deriveRead1 ''Exp
deriveShow1 ''Exp
makeBound ''Exp
deriving instance Eq a => Eq (Exp a)
deriving instance Ord a => Ord (Exp a)
deriving instance Read a => Read (Exp a)
deriving instance Show a => Show (Exp a)
lam :: Eq a => a -> Exp a -> Exp a
lam v e = Lam (abstract1 v e)
ledt :: Eq a => a -> Exp a -> Exp a -> Exp a
ledt v e f = Let e (abstract1 v f)
fix :: Eq a => a -> Exp a -> Exp a
fix v e = Fix (abstract1 v e)
-- | The expression that returns the given noun as a constant.
con :: Noun -> Exp a
con = \case
A a -> Atm a
C n m -> Cel (con n) (con m)
data CExp a
= CVar a
| CSef a
| CApp (CExp a) (CExp a)
| CLam [a] (CExp (Var () Int))
| CAtm Atom
| CCel (CExp a) (CExp a)
| CIsC (CExp a)
| CSuc (CExp a)
| CEql (CExp a) (CExp a)
| CIft (CExp a) (CExp a) (CExp a)
| CLet (CExp a) (CExp (Var () a))
| CJet Atom (CExp a)
| CFix [a] (CExp (Var () Int))
| CZap
deriving (Functor, Foldable, Traversable)
deriveEq1 ''CExp
deriveOrd1 ''CExp
deriveRead1 ''CExp
deriveShow1 ''CExp
deriving instance Eq a => Eq (CExp a)
deriving instance Ord a => Ord (CExp a)
deriving instance Read a => Read (CExp a)
deriving instance Show a => Show (CExp a)
data Manner a
= Direct a
| Selfish a
deriving (Functor, Foldable, Traversable)
rude :: Manner a -> a
rude = \case
Direct x -> x
Selfish x -> x
toCopy :: Ord a => Exp a -> CExp b
toCopy = fst . runWriter . go \v -> error "toCopy: free variable"
where
go :: Ord a => (a -> Manner c) -> Exp a -> Writer (Set a) (CExp c)
go env = \case
Var v -> do
tell (singleton v)
case env v of
Direct v' -> pure (CVar v')
Selfish v' -> pure (CSef v')
App e f -> CApp <$> go env e <*> go env f
Atm a -> pure (CAtm a)
Cel e f -> CCel <$> go env e <*> go env f
IsC e -> CIsC <$> go env e
Suc e -> CSuc <$> go env e
Eql e f -> CEql <$> go env e <*> go env f
Ift e t f -> CIft <$> go env e <*> go env t <*> go env f
Jet a e -> CJet a <$> go env e
Zap -> pure CZap
Let e s -> do
ce <- go env e
let
env' = \case
B () -> Direct (B ())
F x -> fmap F (env x)
cf <- retcon removeBound (go env' (fromScope s))
pure (CLet ce cf)
Fix s -> lam s env CFix Selfish
Lam s -> lam s env CLam Direct
lam s env ctor manner =
writer
( ctor (rude . env <$> Set.toAscList usedLexicals) ce
, usedLexicals
)
where
(ce, usedVars) = runWriter $ go env' $ fromScope s
env' = \case
B () -> manner $ B ()
F v -> env v $> F (Set.findIndex v usedLexicals)
usedLexicals = removeBound usedVars
removeBound :: (Ord a, Ord b) => Set (Var b a) -> Set a
removeBound = mapMaybeSet \case
B _ -> Nothing
F v -> Just v
-- | Like censor, except you can change the type of the log
retcon :: (w -> uu) -> Writer w a -> Writer uu a
retcon f = mapWriter \(a, m) -> (a, f m)
-- I begin to wonder why there aren't primary abstractions around filtering.
mapMaybeSet :: (Ord a, Ord b) => (a -> Maybe b) -> Set a -> Set b
mapMaybeSet f = setFromList . mapMaybe f . toList
-- Possible improvements:
-- - a "quote and unquote" framework for nock code generation (maybe)
copyToNock :: CExp a -> Nock
copyToNock = go \v -> error "copyToNock: free variable"
where
-- if you comment out this declaration, you get a type error!
go :: (a -> Path) -> CExp a -> Nock
go env = \case
CVar v -> N0 (toAxis $ env v)
CSef v -> N2 (N0 $ toAxis $ env v) (N0 $ toAxis $ env v)
CApp e f -> N2 (go env f) (go env e)
CAtm a -> N1 (A a)
CCel e f -> cell (go env e) (go env f)
CIsC e -> N3 (go env e)
CSuc e -> N4 (go env e)
CEql e f -> N5 (go env e) (go env f)
CIft e t f -> N6 (go env e) (go env t) (go env f)
CJet a e -> jet a (go env e)
CZap -> N0 0
CLet e f -> N8 (go env e) (go env' f)
where
env' = \case
B () -> [L]
F v -> R : env v
CLam vs e -> lam (map (go env . CVar) vs) (go (lamEnv vs) e)
CFix vs e ->
N7
(lam (map (go env . CVar) vs) (go (lamEnv vs) e))
(N2 (N0 1) (N0 1))
lamEnv vs = if null vs
then \case
B () -> []
F _ -> error "copyToNock: unexpected lexical"
else \case
B () -> [R]
F i -> L : posIn i (length vs)
jet a ef =
NC
(N1 (A 11))
(NC
(N1
(C (A FastAtom)
(C (A 1) (A a))))
ef)
lam vfs ef = case layOut vfs of
Nothing -> N1 (nockToNoun ef)
Just pr -> NC (N1 (A 8)) $ NC (NC (N1 (A 1)) pr) $ N1 (nockToNoun ef)
cell :: Nock -> Nock -> Nock
cell (N1 n) (N1 m) = N1 (C n m)
cell ef ff = NC ef ff
layOut :: [Nock] -> Maybe Nock
layOut = \case
[] -> Nothing
[x] -> Just x
xs -> Just $ NC (fromJust $ layOut l) (fromJust $ layOut r)
where
(l, r) = splitAt (length xs `div` 2) xs
posIn :: Int -> Int -> Path
posIn 0 1 = []
posIn i n
| i < 0 || n <= i = error ("posIn: " <> show i <> " out of bound " <> show n)
| i < mid = L : posIn i mid
| otherwise = R : posIn (i - mid) (n - mid)
where mid = n `div` 2
-- | The proposed new calling convention
copy :: Ord a => Exp a -> Nock
copy = copyToNock . toCopy
-- | Decrements its argument.
decrement :: Exp String
decrement = lam "a" $ App (fix "f" $ lam "b" $ Ift (Eql (Var "a") (Suc (Var "b"))) (Var "b") (App (Var "f") (Suc (Var "b")))) (Atm 0)
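-- A minimal sketch tying the pieces above together: compile an application of
-- the sample `decrement` expression with the proposed calling convention and
-- run the generated nock with the jet-free dashboard. Assumes the Dashboard,
-- Nock and SimpleNoun modules as written; the subject is arbitrary because the
-- term is closed.
module CopyExample where

import ClassyPrelude
import Dashboard (runFree)
import Nock (nock)
import SimpleNoun
import Untyped.Core

runDecrement :: Atom -> Noun
runDecrement n = runFree (nock (A 0) (copy (App decrement (Atm n))))
-- e.g. runDecrement 5 is expected to yield A 4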


@ -0,0 +1,77 @@
module Untyped.Hoon where
import ClassyPrelude
import Bound
import Bound.Name
import SimpleNoun
import Untyped.Core
data Hoon a
= HVar a
| HAtom Atom
| HCons (Hoon a) (Hoon a)
| BarCen (Cases a)
| BarHep a a (Hoon a) (Hoon a)
| BarTis a (Hoon a)
| CenDot (Hoon a) (Hoon a)
| CenHep (Hoon a) (Hoon a)
-- | CenKet (Hoon a) (Hoon a) (Hoon a)
-- | CenTar [Hoon a]
| TisFas a (Hoon a) (Hoon a)
| DotDot a (Hoon a)
| DotLus (Hoon a)
| DotTis (Hoon a) (Hoon a)
| SigFas Atom (Hoon a)
| WutBar (Hoon a) (Hoon a)
| WutCol (Hoon a) (Hoon a) (Hoon a)
| WutHep (Hoon a) (Cases a)
| WutKet (Hoon a) (Hoon a) (Hoon a)
| WutPam (Hoon a) (Hoon a)
| WutPat (Hoon a) (Hoon a) (Hoon a)
| ZapZap
deriving (Functor)
deriving instance Show a => Show (Hoon a)
type Cases a = [(Pat, Hoon a)]
data Pat
= Exact Noun
| Wild
deriving (Show)
desugar :: Eq a => Hoon a -> Exp a
desugar = go
where
go = \case
HVar v -> Var v
HAtom a -> Atm a
HCons h j -> Cel (go h) (go j)
BarCen cs -> Lam $ Scope $ branch (Var . F . go) (Var (B ())) cs
BarHep r s i h -> go $ CenDot i $ DotDot r $ BarTis s $ h
BarTis v h -> lam v (go h)
CenDot h j -> App (go j) (go h)
CenHep h j -> App (go h) (go j)
TisFas v h j -> ledt v (go h) (go j)
DotDot v h -> fix v (go h)
DotLus h -> Suc (go h)
DotTis h j -> Eql (go h) (go j)
SigFas a h -> Jet a (go h)
WutBar h j -> Ift (go h) (Atm 0) (go j)
WutCol h j k -> Ift (go h) (go j) (go k)
-- or branch go (go h) cs
WutHep h cs -> Let (go h) $ Scope $ branch (Var . F . go) (Var (B ())) cs
WutKet h j k -> Ift (IsC (go h)) (go j) (go k)
WutPam h j -> Ift (go h) (go j) (Atm 1)
WutPat h j k -> go $ WutKet h k j
ZapZap -> Zap
branch :: (Hoon b -> Exp a) -> Exp a -> Cases b -> Exp a
branch go e = foldr f Zap
where
f c acc = case c of
(Exact n, h) -> Ift (Eql e (con n)) (go h) acc
(Wild, h) -> go h
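-- A couple of illustrative desugarings, assuming this module and Untyped.Core
-- are importable as written.
module HoonExample where

import ClassyPrelude
import Untyped.Core
import Untyped.Hoon

examples :: [Exp String]
examples =
  [ desugar (WutCol (HAtom 0) (HAtom 1) (HAtom 2))  -- Ift (Atm 0) (Atm 1) (Atm 2)
  , desugar (BarTis "x" (HVar "x"))                 -- lam "x" (Var "x")
  ]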


@ -0,0 +1,345 @@
module Untyped.Parser where
import ClassyPrelude hiding (head, many, some, try)
import Control.Lens
import GHC.Natural
import Text.Megaparsec
import Text.Megaparsec.Char
import Control.Monad.State.Lazy
import Data.List.NonEmpty (NonEmpty(..))
import Data.Void (Void)
import Prelude (head)
import Text.Format.Para (formatParas)
import qualified Data.MultiMap as MM
import qualified Data.Text as T
import qualified Data.Text.Lazy as LT
import qualified Data.Text.Lazy.IO as LT
import qualified Prelude
-- Types -----------------------------------------------------------------------
type Nat = Natural
type Sym = Text
-- CST -------------------------------------------------------------------------
data Pat
= PatTar
| PatTag Sym
deriving (Eq, Ord, Show)
data CST
= WutCol CST CST CST -- ?:(c t f)
| WutPat CST CST CST -- ?@(c t f)
| WutKet CST CST CST -- ?^(c t f)
| WutPam [CST] -- ?&(c cs ...)
| WutBar [CST] -- ?|(c cs ...)
| WutHep CST [(Pat, CST)] -- ?-(c p e ps es ...)
| TisFas Sym CST CST -- =/(x 3 x)
| ColHep CST CST -- :-(a b)
| ColLus CST CST CST -- :+(a b c)
| ColKet CST CST CST CST -- :^(a b c d)
| ColTar [CST] -- :*(a as ...)
| ColSig [CST] -- :~(a as ...)
| BarTis Sym CST -- |=(s h)
| BarHep Sym Sym CST CST -- |-(rec var init body)
| BarCen [(Pat, CST)] -- |% %a 3 ==
| CenHep CST CST -- %- f x
| CenDot CST CST -- %. x f
| DotDot Sym CST -- .. $ f
| SigFas CST CST
| ZapZap -- !!
| Tupl [CST] -- [a b ...]
| Var Sym -- a
| Atom Nat -- 3
| Tag Text -- %asdf
| Cord Text -- 'cord'
| Tape Text -- "tape"
| Incr CST -- .+(3)
| IncrIrr CST -- +(3)
| AppIrr CST CST -- (x y)
| IsEq CST CST -- .=(3 4)
| IsEqIrr CST CST -- =(3 4)
| Pam -- &
| Bar -- |
| Yes -- %.y
| No -- %.n
| Sig -- ~
deriving (Eq, Ord, Show)
-- Parser Monad ----------------------------------------------------------------
data Mode = Wide | Tall
deriving (Eq, Ord, Show)
type Parser = StateT Mode (Parsec Void Text)
withLocalState :: Monad m => s -> StateT s m a -> StateT s m a
withLocalState val x = do { old <- get; put val; x <* put old }
inWideMode :: Parser a -> Parser a
inWideMode = withLocalState Wide
ace, pal, par :: Parser ()
ace = void (char ' ')
pal = void (char '(')
par = void (char ')')
-- Simple Lexers ---------------------------------------------------------------
gap :: Parser ()
gap = choice [ char ' ' >> void (some spaceChar)
, newline >> void (many spaceChar)
]
whitespace :: Parser ()
whitespace = ace <|> void gap
-- Literals --------------------------------------------------------------------
alpha :: Parser Char
alpha = oneOf (['a'..'z'] ++ ['A'..'Z'])
sym :: Parser Sym
sym = bucSym <|> pack <$> some alpha
where bucSym = char '$' *> pure ""
atom :: Parser Nat
atom = do
init <- some digitChar
rest <- many (char '.' *> some digitChar)
guard True -- TODO Validate '.'s
pure (Prelude.read $ concat $ init:rest)
nat :: Parser Nat
nat = Prelude.read <$> some digitChar
tape :: Parser Text
tape = do
between (char '"') (char '"') $
pack <$> many (label "tape char" (anySingleBut '"'))
cord :: Parser Text
cord = do
between (char '\'') (char '\'') $
pack <$> many (label "cord char" (anySingleBut '\''))
tag :: Parser Text
tag = try (char '%' >> sym)
literal :: Parser CST
literal = choice
[ Yes <$ string "%.y"
, No <$ string "%.n"
, Var <$> sym
, Atom <$> atom
, Pam <$ char '&'
, Bar <$ char '|'
, Sig <$ char '~'
, Tag <$> tag
, Cord <$> cord
, Tape <$> tape
]
-- Rune Helpers ----------------------------------------------------------------
{-
- If the parser is in `Wide` mode, only accept the `wide` form.
- If the parser is in `Tall` mode, either
- accept the `tall` form or:
- switch to `Wide` mode and then accept the wide form.
-}
parseRune :: Parser a -> Parser a -> Parser a
parseRune tall wide = get >>= \case
  Wide -> wide
  Tall -> tall <|> inWideMode wide
rune0 :: a -> Parser a
rune0 = pure
rune1 :: (a -> b) -> Parser a -> Parser b
rune1 node x = parseRune tall wide
  where tall = do gap; p <- x; pure (node p)
        wide = do pal; p <- x; par; pure (node p)
rune2 :: (a -> b -> c) -> Parser a -> Parser b -> Parser c
rune2 node x y = parseRune tall wide
  where tall = do gap; p <- x; gap; q <- y; pure (node p q)
        wide = do pal; p <- x; ace; q <- y; par; pure (node p q)
rune3 :: (a -> b -> c -> d) -> Parser a -> Parser b -> Parser c -> Parser d
rune3 node x y z = parseRune tall wide
  where tall = do gap; p <- x; gap; q <- y; gap; r <- z; pure (node p q r)
        wide = do pal; p <- x; ace; q <- y; ace; r <- z; par; pure (node p q r)
rune4 :: (a -> b -> c -> d -> e) -> Parser a -> Parser b -> Parser c -> Parser d -> Parser e
rune4 node x y z g = parseRune tall wide
  where tall = do gap; p <- x; gap; q <- y; gap; r <- z; gap; s <- g; pure (node p q r s)
        wide = do pal; p <- x; ace; q <- y; ace; r <- z; ace; s <- g; par; pure (node p q r s)
runeN :: ([a] -> b) -> Parser a -> Parser b
runeN node elem = node <$> parseRune tall wide
where tall = gap >> elems
where elems = term <|> elemAnd
elemAnd = do x <- elem; gap; xs <- elems; pure (x:xs)
term = string "==" *> pure []
wide = pal *> option [] elems <* par
where elems = (:) <$> elem <*> many (ace >> elem)
runeNE :: (NonEmpty a -> b) -> Parser a -> Parser b
runeNE node elem = node <$> parseRune tall wide
where tall = do
let elems = term <|> elemAnd
elemAnd = do x <- elem; gap; xs <- elems; pure (x:xs)
term = string "==" *> pure []
fst <- gap *> elem
rst <- gap *> elems
pure (fst :| rst)
wide = mzero -- No wide form for cores
-- Irregular Syntax ------------------------------------------------------------
inc :: Parser CST -- +(3)
inc = do
string "+("
h <- cst
char ')'
pure h
equals :: Parser (CST, CST) -- =(3 4)
equals = do
string "=("
x <- cst
ace
y <- cst
char ')'
pure (x, y)
tuple :: forall a. Parser a -> Parser [a]
tuple p = char '[' >> elems
where
xs :: Parser [a]
xs = do { x <- p; (x:) <$> tail }
tail :: Parser [a]
tail = (pure [] <* char ']')
<|> (ace >> elems)
elems :: Parser [a]
elems = (pure [] <* char ']') <|> xs
appIrr :: Parser CST
appIrr = do
char '('
x <- cst
char ' '
y <- cst
char ')'
pure (AppIrr x y)
irregular :: Parser CST
irregular =
inWideMode $
choice [ Tupl <$> tuple cst
, IncrIrr <$> inc
, uncurry IsEqIrr <$> equals
, appIrr
]
-- Runes -----------------------------------------------------------------------
pat :: Parser Pat
pat = choice [ PatTag <$> tag
, char '*' $> PatTar
]
cases :: Parser [(Pat, CST)]
cases = do
  mode <- get
guard (mode == Tall)
end <|> lop
where
goo = lop <|> end
end = string "==" $> []
lop = do { p <- pat; gap; b <- cst; gap; ((p,b):) <$> goo }
wutHep :: Parser CST
wutHep = do
  mode <- get
guard (mode == Tall)
gap
ex <- cst
gap
cs <- cases
pure (WutHep ex cs)
barCen :: Parser CST
barCen = do
  mode <- get
guard (mode == Tall)
gap
cs <- cases
pure (BarCen cs)
rune :: Parser CST
rune = runeSwitch [ ("|=", rune2 BarTis sym cst)
, ("|-", rune4 BarHep sym sym cst cst)
, (":-", rune2 ColHep cst cst)
, (":+", rune3 ColLus cst cst cst)
, (":^", rune4 ColKet cst cst cst cst)
, (":*", runeN ColTar cst)
, (":~", runeN ColSig cst)
, ("%-", rune2 CenHep cst cst)
, ("%.", rune2 CenDot cst cst)
, ("..", rune2 DotDot sym cst)
, ("!!", rune0 ZapZap)
, ("?:", rune3 WutCol cst cst cst)
, ("?@", rune3 WutPat cst cst cst)
, ("?&", runeN WutPam cst)
, ("?|", runeN WutBar cst)
, ("?^", rune3 WutKet cst cst cst)
, ("=/", rune3 TisFas sym cst cst)
, (".+", rune1 Incr cst)
, (".=", rune2 IsEq cst cst)
, ("?-", wutHep)
, ("|%", barCen)
, ("~/", rune2 SigFas cst cst)
]
runeSwitch :: [(Text, Parser a)] -> Parser a
runeSwitch = choice . fmap (\(s, p) -> string s *> p)
-- CST Parser ------------------------------------------------------------------
cst :: Parser CST
cst = irregular <|> rune <|> literal
-- Entry Point -----------------------------------------------------------------
hoonFile = do
option () whitespace
h <- cst
option () whitespace
eof
pure h
parse :: Text -> Either Text CST
parse txt =
runParser (evalStateT hoonFile Tall) "stdin" txt & \case
Left e  -> Left (pack $ errorBundlePretty e)
Right x -> pure x
parseHoonTest :: Text -> IO ()
parseHoonTest = parseTest (evalStateT hoonFile Tall)
main :: IO ()
main = (head <$> getArgs) >>= parseHoonTest
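-- A minimal end-to-end sketch, assuming the modules above are importable as
-- written: parse a hoon snippet, translate the CST to Hoon, desugar it, and
-- compile it to nock. The snippet in the final comment is illustrative.
module PipelineExample where

import ClassyPrelude
import Nock (Nock)
import qualified Untyped.CST as CST
import qualified Untyped.Core as Core
import qualified Untyped.Hoon as Hoon
import qualified Untyped.Parser as Parser

compile :: Text -> Either Text Nock
compile txt = do
  cst <- Parser.parse txt
  pure (Core.copy (Hoon.desugar (CST.hone cst)))
-- e.g. compile "?:(%.y 1 2)" should yield a Right nock formula, going through
-- the CST WutCol Yes (Atom 1) (Atom 2) on the way.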


@ -0,0 +1,24 @@
module Untyped.ShittyCorePrinter where
-- it's pretty clowny but whatever
-- TODO: handle the new cases (maybe don't do)
import Prelude
import Bound
import Data.Foldable
import Untyped.Core
prettyPrec :: [String] -> Bool -> Int -> Exp String -> ShowS
prettyPrec _ d n (Var a) = showString a
prettyPrec vs d n (App x y) = showParen d $
prettyPrec vs False n x . showChar ' ' . prettyPrec vs True n y
prettyPrec (v:vs) d n (Lam b) = showParen d $
showString v . showString ". " . prettyPrec vs False n (instantiate1 (Var v) b)
prettyWith :: [String] -> Exp String -> String
prettyWith vs t = prettyPrec (filter (`notElem` toList t) vs) False 0 t ""
pretty :: Exp String -> String
pretty = prettyWith $ [ [i] | i <- ['a'..'z']] ++ [i : show j | j <- [1 :: Int ..], i <- ['a'..'z'] ]

pkg/hs/proto/package.yaml Normal file

@ -0,0 +1,91 @@
name: proto
version: 0.1.0
license: MIT
license-file: LICENSE
dependencies:
- base
- bound
- classy-prelude
- containers
- deriving-compat
- lens
- megaparsec
- mtl
- multimap
- para
- pretty-show
- text
- transformers
- transformers-compat
- unordered-containers
- urbit-king
default-extensions:
- ApplicativeDo
- BangPatterns
- BlockArguments
- DeriveAnyClass
- DeriveDataTypeable
- DeriveFoldable
- DeriveGeneric
- DeriveTraversable
- DerivingStrategies
- EmptyDataDecls
- FlexibleContexts
- FlexibleInstances
- FunctionalDependencies
- GADTs
- GeneralizedNewtypeDeriving
- LambdaCase
- MultiParamTypeClasses
- NamedFieldPuns
- NoImplicitPrelude
- NumericUnderscores
- OverloadedStrings
- PartialTypeSignatures
- PatternSynonyms
- QuasiQuotes
- Rank2Types
- RankNTypes
- RecordWildCards
- ScopedTypeVariables
- StandaloneDeriving
- TemplateHaskell
- TupleSections
- TypeApplications
- TypeFamilies
- UnicodeSyntax
- ViewPatterns
library:
source-dirs: lib
ghc-options:
- -fwarn-incomplete-patterns
- -fwarn-unused-binds
- -fwarn-unused-imports
- -O2
executables:
proto:
main: Main.hs
source-dirs: app
dependencies:
- proto
ghc-options:
- -threaded
- -rtsopts
- -O2
- "-with-rtsopts=-N"
- -fwarn-incomplete-patterns
tests:
proto-test:
main: Spec.hs
source-dirs: test
dependencies:
- proto
ghc-options:
- -threaded
- -rtsopts
- -with-rtsopts=-N


@ -0,0 +1,6 @@
module Main where
import ClassyPrelude
main :: IO ()
main = putStrLn "Test suite not yet implemented"

pkg/hs/stack.yaml Normal file

@ -0,0 +1,26 @@
resolver: lts-14.21
packages:
- lmdb-static
- proto
- terminal-progress-bar
- urbit-atom
- urbit-azimuth
- urbit-king
extra-deps:
- flat-0.3.4@sha256:002a0e0ae656ea8cc02a772d0bcb6ea7dbd7f2e79070959cc748ad1e7138eb38
- base58-bytestring-0.1.0@sha256:a1da72ee89d5450bac1c792d9fcbe95ed7154ab7246f2172b57bd4fd9b5eab79
- lock-file-0.7.0.0@sha256:3ad84b5e454145e1d928063b56abb96db24a99a21b493989520e58fa0ab37b00
- urbit-hob-0.3.1@sha256:afbdc7ad071eefc6ca85f5b598b6c62ed49079d15d1840dac27438a3b3150303
- para-1.1@sha256:a90eebb063ad70271e6e2a7f00a93e8e8f8b77273f100f39852fbf8301926f81
# This allows building on NixOS.
nix:
packages:
- pkgconfig
- zlib
# TODO: Why is this here?
ghc-options:
urbit-king: '-optP-Wno-nonportable-include-path'


@ -0,0 +1 @@
../LICENSE


@ -0,0 +1 @@
../README.markdown


@ -0,0 +1,2 @@
import Distribution.Simple
main = defaultMain


@ -0,0 +1,51 @@
{-# language PackageImports #-}
module Main where
import "base" Data.Monoid ( (<>) )
import "criterion" Criterion.Main
import "terminal-progress-bar" System.ProgressBar
import "time" Data.Time.Clock ( UTCTime(..) )
main :: IO ()
main = defaultMain
[ renderProgressBarBenchmark 10 0
, renderProgressBarBenchmark 10 50
, renderProgressBarBenchmark 10 100
, renderProgressBarBenchmark 100 0
, renderProgressBarBenchmark 100 50
, renderProgressBarBenchmark 100 100
, renderProgressBarBenchmark 200 0
, renderProgressBarBenchmark 200 50
, renderProgressBarBenchmark 200 100
, labelBenchmark "percentage" percentage (Progress 0 100 ())
, labelBenchmark "percentage" percentage (Progress 50 100 ())
, labelBenchmark "percentage" percentage (Progress 100 100 ())
, labelBenchmark "exact" exact (Progress 0 100 ())
, labelBenchmark "exact" exact (Progress 50 100 ())
, labelBenchmark "exact" exact (Progress 100 100 ())
]
renderProgressBarBenchmark :: Int -> Int -> Benchmark
renderProgressBarBenchmark width done =
bench name $ nf (\(s, p, t) -> renderProgressBar s p t)
( defStyle{styleWidth = ConstantWidth width}
, Progress done 100 ()
, someTiming
)
where
name = "progressBar/default - "
<> show width <> " wide - progress " <> show done <> " % 100"
labelBenchmark :: String -> Label () -> Progress () -> Benchmark
labelBenchmark labelName label progress =
bench name $ nf (\(p, t) -> runLabel label p t) (progress, someTiming)
where
name = "label/" <> labelName <> " "
<> show (progressDone progress) <> " % "
<> show (progressTodo progress)
someTime :: UTCTime
someTime = UTCTime (toEnum 0) 0
someTiming :: Timing
someTiming = Timing someTime someTime


@ -0,0 +1 @@
../changelog.md


@ -0,0 +1,2 @@
(import ../.).haskellPackages.terminal-progress-bar.env
# (import ../.).haskell.packages.ghc844.terminal-progress-bar.env

Some files were not shown because too many files have changed in this diff.