Merge branch 'next/kelvin/413' into m/typed-paths

commit 3622c5f33e
Author: Ted Blackman, 2023-03-17 12:07:39 -04:00 (committed via GitHub)
GPG Key ID: 4AEE18F83AFDEB23 (no known key found for this signature in the database)
1520 changed files with 63721 additions and 410736 deletions

@@ -1,3 +0,0 @@
module.exports = {
ignorePatterns: ["**/*"]
};

.github/ISSUE_TEMPLATE/blank-issue.md (new file)

@@ -0,0 +1,10 @@
---
name: Blank Issue
about: Other
title: ''
labels: ''
assignees: ''
---

@@ -1,8 +1,8 @@
 blank_issues_enabled: true
 contact_links:
   - name: Submit a Landscape issue
-    url: https://github.com/urbit/landscape/issues/new/choose
-    about: Issues with Landscape (Tlon's flagship client) should be filed at urbit/landscape. This includes groups, chats, collections, notebooks, and more.
+    url: https://github.com/tloncorp/landscape-apps/issues/new/choose
+    about: Issues with Landscape should be filed at tloncorp/landscape-apps. This includes Groups, Talk, and Landscape proper.
   - name: urbit-dev mailing list
     url: https://groups.google.com/a/urbit.org/g/dev
     about: Developer questions and discussions also take place on the urbit-dev mailing list.

@@ -1,32 +0,0 @@
#!/usr/bin/env bash
cd "$GITHUB_WORKSPACE" || exit
echo "$2" | base64 -d > service-account
echo "$3" | base64 -d > id_ssh
echo "$4" | base64 -d > id_ssh.pub
chmod 600 service-account
chmod 600 id_ssh
chmod 600 id_ssh.pub
janeway release glob-all --dev --no-pill \
--credentials service-account \
--ssh-key id_ssh \
--ci \
| bash
SHORTHASH=$(git rev-parse --short HEAD)
janeway release prepare-ota arvo-glob-"$SHORTHASH" "$1" \
--credentials service-account \
--ssh-key id_ssh \
--ci \
| bash
janeway release perform-ota "$1" \
--credentials service-account \
--ssh-key id_ssh \
--ci \
| bash

@@ -1,4 +1,4 @@
-FROM tloncorp/janeway:v0.15.4
+FROM tloncorp/janeway:v0.17.0
 COPY entrypoint.sh /entrypoint.sh
 EXPOSE 22/tcp
 ENTRYPOINT ["/entrypoint.sh"]

@@ -1,18 +1,21 @@
-name: 'glob'
-description: 'Create a glob and deploy it to a moon'
+name: 'ota'
+description: 'perform an OTA update of arvo on a remote ship'
 inputs:
   ship:
-    description: "Ship to deploy to"
+    description: "target ship"
     required: true
   credentials:
     description: "base64-encoded GCP Service Account credentials"
     required: true
   ssh-sec-key:
-    description: "A base64-encoded SSH secret key for the container to use"
+    description: "base64-encoded SSH secret key for the container to use"
     required: true
   ssh-pub-key:
-    description: "The corresponding base64-encoded SSH public key"
+    description: "base64-encoded corresponding SSH public key"
     required: true
+  ref:
+    description: "git ref of arvo source to check out"
+    required: false
 runs:
   using: 'docker'
@@ -22,4 +25,4 @@ runs:
       - ${{ inputs.credentials }}
       - ${{ inputs.ssh-sec-key }}
       - ${{ inputs.ssh-pub-key }}
+      - ${{ inputs.ref }}

.github/actions/ota/entrypoint.sh (new executable file)

@@ -0,0 +1,20 @@
#!/usr/bin/env bash
echo "$2" | base64 -d > /service-account
echo "$3" | base64 -d > /id_ssh
echo "$4" | base64 -d > /id_ssh.pub
chmod 600 /service-account
chmod 600 /id_ssh
chmod 600 /id_ssh.pub
janeway \
--ci \
--verbose \
--credentials /service-account \
--ssh-key /id_ssh \
release ota \
arvo \
"$1" \
${5:+"--ref"} ${5:+"$5"} \
| bash

@@ -1,119 +0,0 @@
# Notes:
#
# jobs.<job_id>
#
# A separate job id results in a lot of duplication of nix/cachix work.
# The build will have to download any substituted derivations from cachix
# for the steps with each distinct job id and upload built derivations to
# cachix after each job has completed, either successfully or on failure.
#
# jobs.<job_id>.steps.run
#
# build + test are distinct as each step entry results in a collapsible title
# within the log output, which makes it easier to view failing builds or
# tests independently.
#
# jobs.<job_id>.strategy.fail-fast
#
# Set to false so developers working on vere or king-haskell can have their
# respective builds proceed without the other causing failure.
#
# shell.nix
#
# mkShell doesn't allow you to build it - so instantiate all the subshells
# defined for the individual pkg/*/shell.nix as a sanity check and to create
# some artefacts suitable for developers to pull from cachix. The top-level
# shell.nix build time is dominated by Haskell dependencies so it's built as
# part of the haskell build steps.
#
# Syntax:
#
# https://docs.github.com/en/free-pro-team@latest/actions/reference/workflow-syntax-for-github-actions
name: build
on:
push:
paths:
- 'pkg/arvo/**'
- 'pkg/docker-image/**'
- 'pkg/ent/**'
- 'pkg/ge-additions/**'
- 'pkg/libaes_siv/**'
- 'pkg/urbit/**'
- 'pkg/urcrypt/**'
- 'bin/**'
- 'nix/**'
- default.nix
pull_request:
paths:
- 'pkg/arvo/**'
- 'pkg/docker-image/**'
- 'pkg/ent/**'
- 'pkg/ge-additions/**'
- 'pkg/libaes_siv/**'
- 'pkg/urbit/**'
- 'pkg/urcrypt/**'
- 'bin/**'
- 'nix/**'
- default.nix
jobs:
urbit:
strategy:
fail-fast: false
matrix:
include:
- { os: ubuntu-latest }
- { os: macos-latest }
runs-on: ${{ matrix.os }}
steps:
- uses: actions/checkout@v2
# We only want the extra nix config on linux, where it is necessary
# for the docker build. We don't want it on Mac, where it isn't needed and
# it breaks the nix install. The two `if` clauses should be mutually
# exclusive.
- uses: cachix/install-nix-action@v16
with:
extra_nix_config: |
system-features = nixos-test benchmark big-parallel kvm
if: ${{ matrix.os == 'ubuntu-latest' }}
- uses: cachix/install-nix-action@v16
if: ${{ matrix.os != 'ubuntu-latest' }}
- uses: cachix/cachix-action@v10
with:
name: ares
authToken: ${{ secrets.CACHIX_AUTH_TOKEN }}
- run: nix-build -A urbit --arg enableStatic true
- if: ${{ matrix.os == 'ubuntu-latest' }}
run: nix-build -A urbit-tests
- if: ${{ matrix.os == 'ubuntu-latest' }}
run: nix-build -A docker-image
mingw:
runs-on: windows-latest
defaults:
run:
shell: C:\msys64\msys2_shell.cmd -mingw64 -defterm -no-start -here -c ". <(cygpath '{0}')"
working-directory: ./pkg/urbit
steps:
- uses: actions/checkout@v2
with:
lfs: true
# echo suppresses pacman prompt
- run: echo|./configure
env:
CACHIX_CACHE: ares
CACHIX_AUTH_TOKEN: ${{ secrets.CACHIX_AUTH_TOKEN }}
- run: mingw32-make build/urbit
- run: build/urbit -l -d -B ../../bin/solid.pill -F bus && curl -f --data '{"source":{"dojo":"+hood/exit"},"sink":{"app":"hood"}}' http://localhost:12321

@@ -1,27 +0,0 @@
name: Chromatic Deployment
on:
pull_request:
paths:
- 'pkg/interface/**'
push:
paths:
- 'pkg/interface/**'
branches:
- 'release/next-userspace'
jobs:
chromatic-deployment:
runs-on: ubuntu-latest
name: "Deploy Chromatic"
steps:
- uses: actions/checkout@v2
with:
fetch-depth: 0
- run: npm i && npm run bootstrap
- name: Publish to Chromatic
uses: chromaui/action@v1
with:
token: ${{ secrets.GITHUB_TOKEN }}
projectToken: ${{ secrets.CHROMATIC_PROJECT_TOKEN }}
workingDir: pkg/interface

.github/workflows/develop.yml (new file)

@@ -0,0 +1,34 @@
name: Push to develop
on:
push:
branches:
- 'develop'
paths:
- '.github/workflows/feature.yml'
- '.github/workflows/develop.yml'
- '.github/workflows/release.yml'
- '.github/workflows/master.yml'
- '.github/workflows/vere.yml'
- 'pkg/arvo/**'
- 'pkg/docker-image/**'
- 'pkg/ent/**'
- 'pkg/ge-additions/**'
- 'pkg/libaes_siv/**'
- 'pkg/urbit/**'
- 'pkg/urcrypt/**'
- 'tests/**'
- 'bin/**'
- 'nix/**'
- 'default.nix'
jobs:
call-vere:
uses: ./.github/workflows/vere.yml
with:
pace: 'edge'
upload: >-
${{
(github.ref_name == 'next/vere' && github.ref_type == 'branch')
}}
secrets: inherit

.github/workflows/feature.yml (new file)

@@ -0,0 +1,32 @@
name: Feature pull request
on:
pull_request:
paths:
- '.github/workflows/feature.yml'
- '.github/workflows/develop.yml'
- '.github/workflows/release.yml'
- '.github/workflows/master.yml'
- '.github/workflows/vere.yml'
- 'pkg/arvo/**'
- 'pkg/docker-image/**'
- 'pkg/ent/**'
- 'pkg/ge-additions/**'
- 'pkg/libaes_siv/**'
- 'pkg/urbit/**'
- 'pkg/urcrypt/**'
- 'tests/**'
- 'bin/**'
- 'nix/**'
- 'default.nix'
jobs:
call-vere:
uses: ./.github/workflows/vere.yml
with:
pace: 'edge'
upload: >-
${{
(github.ref_name == 'next/vere' && github.ref_type == 'branch')
}}
secrets: inherit

@@ -1,24 +0,0 @@
name: frontend-test
on:
pull_request:
paths:
- 'pkg/interface/**'
- 'pkg/btc-wallet/**'
- 'pkg/npm/**'
jobs:
frontend-test:
runs-on: ubuntu-latest
name: "Test changed frontend packages"
steps:
- uses: actions/checkout@v2
with:
fetch-depth: 0
- run: git fetch --prune
- name: 'Setup root deps'
run: npm ci
- name: 'Setup dependencies'
run: npm run bootstrap
- name: 'Run tests'
run: npm run test -- --since origin/$GITHUB_BASE_REF --include-dependents

@@ -1,20 +0,0 @@
name: glob
on:
push:
branches:
- 'release/next-userspace'
jobs:
glob:
runs-on: ubuntu-latest
name: "Create and deploy a glob to ~hanruc-nalfus-nidsut-tomdun"
steps:
- uses: actions/checkout@v2
with:
lfs: true
- uses: ./.github/actions/glob
with:
ship: 'hanruc-nalfus-nidsut-tomdun'
credentials: ${{ secrets.JANEWAY_SERVICE_KEY }}
ssh-sec-key: ${{ secrets.JANEWAY_SSH_SEC_KEY }}
ssh-pub-key: ${{ secrets.JANEWAY_SSH_PUB_KEY }}

.github/workflows/master.yml (new file)

@@ -0,0 +1,34 @@
name: Push to master
on:
push:
branches:
- 'master'
paths:
- '.github/workflows/feature.yml'
- '.github/workflows/develop.yml'
- '.github/workflows/release.yml'
- '.github/workflows/master.yml'
- '.github/workflows/vere.yml'
- 'pkg/arvo/**'
- 'pkg/docker-image/**'
- 'pkg/ent/**'
- 'pkg/ge-additions/**'
- 'pkg/libaes_siv/**'
- 'pkg/urbit/**'
- 'pkg/urcrypt/**'
- 'tests/**'
- 'bin/**'
- 'nix/**'
- 'default.nix'
jobs:
call-vere:
uses: ./.github/workflows/vere.yml
with:
pace: 'live'
upload: >-
${{
(github.ref_name == 'next/vere' && github.ref_type == 'branch')
}}
secrets: inherit

@@ -1,27 +0,0 @@
name: merge
on:
push:
branches:
- 'master'
jobs:
merge-to-next-js:
runs-on: ubuntu-latest
name: "Merge master to release/next-userspace"
steps:
- uses: actions/checkout@v2
- uses: devmasx/merge-branch@v1.3.1
with:
type: now
target_branch: release/next-userspace
github_token: ${{ secrets.JANEWAY_BOT_TOKEN }}
merge-to-group-timer:
runs-on: ubuntu-latest
name: "Merge master to ops/group-timer"
steps:
- uses: actions/checkout@v2
- uses: devmasx/merge-branch@v1.3.1
with:
type: now
target_branch: ops/group-timer
github_token: ${{ secrets.JANEWAY_BOT_TOKEN }}

@@ -1,17 +0,0 @@
name: ops-merge
on:
push:
branches:
- 'release/*'
jobs:
merge-release-to-ops:
runs-on: ubuntu-latest
name: "Merge to ops-tlon"
steps:
- uses: actions/checkout@v2
- uses: devmasx/merge-branch@v1.3.1
with:
type: now
target_branch: ops-tlon
github_token: ${{ secrets.JANEWAY_BOT_TOKEN }}

.github/workflows/ota.yml (new file)

@@ -0,0 +1,19 @@
name: ota
on:
workflow_dispatch:
push:
branches:
- 'develop'
jobs:
deploy:
runs-on: ubuntu-latest
name: "make an OTA update to arvo on ~binnec-dozzod-marzod"
steps:
- uses: actions/checkout@v3
- uses: ./.github/actions/ota
with:
ship: 'canary'
credentials: ${{ secrets.JANEWAY_SERVICE_KEY }}
ssh-sec-key: ${{ secrets.JANEWAY_SSH_SEC_KEY }}
ssh-pub-key: ${{ secrets.JANEWAY_SSH_PUB_KEY }}
ref: 'develop'

@@ -1,62 +0,0 @@
name: publish-npm-packages
on:
push:
branches:
- 'master'
paths:
- 'pkg/npm/**'
jobs:
publish-api:
runs-on: ubuntu-latest
name: "Publish '@urbit/api' if a new version is available"
steps:
- uses: actions/checkout@v2
with:
lfs: true
- uses: actions/setup-node@v2
with:
node-version: '14'
- run: 'npm install'
working-directory: 'pkg/npm/api'
- uses: JS-DevTools/npm-publish@v1
with:
check-version: true
package: './pkg/npm/api/package.json'
token: ${{ secrets.NPM_TOKEN }}
publish-http-api:
runs-on: ubuntu-latest
name: "Publish '@urbit/http-api' if a new version is available"
steps:
- uses: actions/checkout@v2
with:
lfs: true
- uses: actions/setup-node@v2
with:
node-version: '14'
- run: 'npm install'
working-directory: 'pkg/npm/http-api'
- uses: JS-DevTools/npm-publish@v1
with:
check-version: true
package: './pkg/npm/http-api/package.json'
token: ${{ secrets.NPM_TOKEN }}
publish-eslint-config:
runs-on: ubuntu-latest
name: "Publish '@urbit/eslint-config' if a new version is available"
steps:
- uses: actions/checkout@v2
with:
lfs: true
- uses: actions/setup-node@v2
with:
node-version: '14'
- run: 'npm install'
working-directory: 'pkg/npm/eslint-config'
- uses: JS-DevTools/npm-publish@v1
with:
check-version: true
package: './pkg/npm/eslint-config/package.json'
token: ${{ secrets.NPM_TOKEN }}

@@ -1,51 +0,0 @@
name: release-docker
on:
release: null
push:
tags: 'urbit-v[0-9]+.[0-9]+'
jobs:
upload:
strategy:
matrix:
include:
- { os: ubuntu-latest, system: x86_64-linux }
runs-on: ${{ matrix.os }}
steps:
- uses: actions/checkout@v2
- uses: cachix/install-nix-action@v16
with:
extra_nix_config: |
system-features = nixos-test benchmark big-parallel kvm
- uses: cachix/cachix-action@v10
with:
name: ares
authToken: ${{ secrets.CACHIX_AUTH_TOKEN }}
- uses: docker/docker-login-action@v1.8.0
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
- uses: christian-korneck/update-container-description-action@v1
env:
DOCKER_USER: ${{ secrets.DOCKERHUB_USERNAME }}
DOCKER_PASS: ${{ secrets.DOCKERHUB_TOKEN }}
with:
destination_container_repo: ${{ secrets.DOCKERHUB_USERNAME }}/urbit
provider: dockerhub
short_description: 'Urbit: a clean-slate OS and network for the 21st century'
readme_file: 'pkg/docker-image/README.md'
- run: |
version="$(cat ./pkg/urbit/version)"
image="$(nix-build -A docker-image)"
imageName="$(nix-instantiate --eval -A docker-image.imageName | cut -d'"' -f2)"
imageTag="$(nix-instantiate --eval -A docker-image.imageTag | cut -d'"' -f2)"
# Load the image from the nix-built tarball
docker load -i $image
docker tag "$imageName:$imageTag" ${{secrets.DOCKERHUB_USERNAME }}/urbit:v$version
docker tag "$imageName:$imageTag" ${{secrets.DOCKERHUB_USERNAME }}/urbit:latest
docker push ${{secrets.DOCKERHUB_USERNAME }}/urbit:v$version
docker push ${{secrets.DOCKERHUB_USERNAME }}/urbit:latest

@@ -1,44 +1,34 @@
-name: release
+name: Push to release branch
 on:
-  release: null
   push:
-    tags: ['*']
+    branches:
+      - 'release/*'
+    paths:
+      - '.github/workflows/feature.yml'
+      - '.github/workflows/develop.yml'
+      - '.github/workflows/release.yml'
+      - '.github/workflows/master.yml'
+      - '.github/workflows/vere.yml'
+      - 'pkg/arvo/**'
+      - 'pkg/docker-image/**'
+      - 'pkg/ent/**'
+      - 'pkg/ge-additions/**'
+      - 'pkg/libaes_siv/**'
+      - 'pkg/urbit/**'
+      - 'pkg/urcrypt/**'
+      - 'tests/**'
+      - 'bin/**'
+      - 'nix/**'
+      - 'default.nix'
 jobs:
-  upload:
-    strategy:
-      matrix:
-        include:
-          - { os: ubuntu-latest, system: x86_64-linux }
-          - { os: macos-latest, system: x86_64-darwin }
-    runs-on: ${{ matrix.os }}
-    steps:
-      - uses: actions/checkout@v2
-      - uses: cachix/install-nix-action@v16
-      - uses: cachix/cachix-action@v10
-        with:
-          name: ${{ secrets.CACHIX_NAME }}
-          authToken: ${{ secrets.CACHIX_AUTH_TOKEN }}
-      - uses: google-github-actions/setup-gcloud@v0.2.0
-        with:
-          version: '290.0.1'
-          service_account_key: ${{ secrets.GCS_SERVICE_ACCOUNT_KEY }}
-          project_id: ${{ secrets.GCS_PROJECT }}
-          export_default_credentials: true
-      - run: nix-build -A tarball --arg enableStatic true
-      - name: Run upload to bootstrap.urbit.org
-        run: |
-          version="$(cat ./pkg/urbit/version)"
-          system="$(nix-instantiate --eval --expr 'builtins.currentSystem')"
-          system=${system:1:${#system}-2}
-          target="gs://bootstrap.urbit.org/ci/urbit-v${version}-${system}-${GITHUB_SHA:0:9}.tgz"
-          gsutil cp -n ./result "$target"
-          echo "upload to $target complete."
+  call-vere:
+    uses: ./.github/workflows/vere.yml
+    with:
+      pace: 'soon'
+      upload: >-
+        ${{
+          (github.ref_name == 'next/vere' && github.ref_type == 'branch')
+        }}
+    secrets: inherit

.github/workflows/vere.yml (new file)

@@ -0,0 +1,83 @@
name: vere
on:
workflow_call:
inputs:
upload:
description: 'upload binaries to gcp'
type: boolean
default: false
required: false
pace:
description: 'release pace'
type: string
default: 'edge'
required: false
secrets:
CACHIX_AUTH_TOKEN:
required: false
GCP_CREDENTIALS:
required: false
GCS_SERVICE_ACCOUNT_KEY:
required: false
GCS_PROJECT:
required: false
workflow_dispatch:
inputs:
upload:
description: 'upload binaries to gcp'
type: boolean
default: false
required: false
pace:
description: 'release pace'
type: choice
options:
- edge
- soon
- live
jobs:
urbit:
strategy:
fail-fast: false
matrix:
include:
- { os: ubuntu-latest, type: linux }
runs-on: ${{ matrix.os }}
steps:
- uses: actions/checkout@v2
# We only want the extra nix config on linux, where it is necessary
# for the docker build. We don't want it on Mac, where it isn't needed and
# it breaks the nix install. The two `if` clauses should be mutually
# exclusive.
- uses: cachix/install-nix-action@v20
with:
extra_nix_config: |
system-features = nixos-test benchmark big-parallel kvm
if: ${{ matrix.type == 'linux' }}
- uses: cachix/install-nix-action@v20
if: ${{ matrix.os != 'ubuntu-latest' }}
- uses: cachix/cachix-action@v10
with:
name: ares
authToken: ${{ secrets.CACHIX_AUTH_TOKEN }}
- if: ${{ matrix.os == 'ubuntu-latest' }}
name: run urbit-tests
run: |
cp -RL tests pkg/arvo/tests
vere=$(curl https://bootstrap.urbit.org/vere/${{ inputs.pace }}/last)
echo $vere
url="$(echo https://bootstrap.urbit.org/vere/${{ inputs.pace }}/v${vere}/vere-v${vere}-linux-x86_64)"
echo $url
# put in .jam so it doesn't crash when it gets -A'd in
curl -Lo pkg/arvo/vere.jam "$url"
chmod +x pkg/arvo/vere.jam
nix-build -A urbit-tests

.nvmrc

@@ -1 +0,0 @@
16.14.0

@@ -1,16 +0,0 @@
bin
doc
extras
nix
pkg/arvo
pkg/base-dev
pkg/docker-image
pkg/ent
pkg/garden
pkg/garden-dev
pkg/ge-additions
pkg/herb
pkg/hs
pkg/libaes_siv
pkg/urbit
sh

@@ -1,141 +1,101 @@
# Contributing to Urbit

Thank you for your interest in contributing to Urbit.

## Workflow

Before beginning any unit of work, you should have a GitHub issue detailing the
scope of the work. This could be an issue someone else filed and has been
assigned to you (or you've assigned to yourself) or a new issue you filed
specifically for this unit of work. As much as possible, discussion of the work
should take place in the issue. When this is not possible, please update the
issue with relevant details from any offline conversations. Each issue should
provide a clear and thorough history of the work from inception to completion.
See [urbit.org/using/install][start] for basic orientation and usage ## Issues
instructions. You may also want to subscribe to [urbit-dev][list], the Urbit
development mailing list. For specific information on contributing to the Urbit
interface, see its [contribution guidelines][interface].
For information on Arvo's maintainers, see [pkg/arvo][main]. The [GitHub tracker][issu] is our canonical source of truth around issues, bugs,
performance problems and feature requests. If you encounter any issues when
developing on Urbit, feel free to submit a report about it here.
For more extensive information on Urbit development, such as how to set up an A good bug report, description of a crash, etc., should ideally be
environment and how to submit a grant, see the [developer documentation][dev]. *reproducible*, with clear steps as to how another developer can replicate and
examine your problem. That said, this isn't always possible -- some bugs depend
on having created a complicated or unusual state, or can otherwise simply be
difficult to trigger again.
[start]: https://urbit.org/using/install Your issue should thus at a minimum be *informative*. The best advice here is
[interface]: /pkg/interface/CONTRIBUTING.md probably "don't write bad issues," where "bad" is a matter of judgment and
[dev]: https://urbit.org/docs/development taste. Issues that the maintainers don't judge to be sufficiently useful or
informative may be closed.
## Fake ships Feature requests are welcome, but they should include sufficient detail and
explanation, as well as a discussion of perceived benefits one could expect from
them. "It would be cool if.." probably does not, in itself, constitute a good
feature request; instead, try to be specific about what you're requesting, and
what your desired feature would accomplish.
You may have an identity on the live network, but doing all your development on ### Feature Branch Names
the live network would be cumbersome and unnecessary. Standard practice in
Urbit development is to work on a fake `~zod`. Fake ships use deterministic
keys (derived from the ship address) and don't talk to the live network. They
can talk to each other over the local loopback.
To start a fake ship, simply specify the name with `-F`: Every branch that you intend to put up for review must adhere to the form
`i/<N>/<...>`, where `<N>` is the number of the issue that the branch
corresponds to and `<...>` is an optional short description of the branch to aid
in readability. If `<...>` is omitted, the `/` should be omitted as well, which
makes `i/<N>` a well-formed branch name. These feature branches should be based
off of `develop`.
``` ### Commits
$ urbit -F zod
```
You can also pass a name for the *pier* (or ship directory): Commits should generally be relevant, atomic, and have descriptions formatted in
the following manner:
```
$ urbit -F zod -c my-fake-zod
```
To resume a fake ship, just pass the name of the pier:
```
$ urbit my-fake-zod
```
Fake ships by default use the same pre-compiled kernelspace ('pills') as livenet
ships do: boot pills, which are not always current with `master`. If you wish to
develop using code off the master branch, run the following from the repo
directory:
```
git lfs install
git lfs pull
urbit -F zod -B "bin/solid.pill" -A "pkg/arvo"
```
## Git practice
### Contributing
The canonical source tree is the `master` branch of
[https://github.com/urbit/urbit][repo]. You should typically branch off of
`master` when commencing new work. Most pull requests should be merging into
one of the `next/*` branches, depending on what part of the system the pull
request is targeting.
Since we use GitHub, we request you contribute via a GitHub pull request. Tag
the [maintainer][main] for the component. If you have a question for the
maintainer, you can direct message them from your Urbit ship using that
information.
When contributing changes, via whatever means, make sure you describe them
appropriately. You should attach a reasonably high-level summary of what the
changes are and what they do; reference any useful background material that may
exist, e.g. a GitHub issue, a mailing list discussion, a UP, etc. [Here][jbpr]
is a good example of a pull request with a useful, concise description.
If your changes replace significant extant functionality, be sure to compare
them with the thing you're replacing. You may also want to cc reviewers,
or other parties who might have a particular interest in what you're
contributing.
[jbpr]: https://github.com/urbit/urbit/pull/1782
### Hygiene
Commits should generally be relevant, atomic, and have descriptions formatted
in the following manner:
> component: short description > component: short description
> >
> long description > long description
The 'component' is a short prefix of what area of the codebase the commit The 'component' is a short prefix of what area of the codebase the commit
applies to. If a commit patches `%gall`, for example, the description should applies to. If a commit patches `%gall`, for example, the description should be
be prefixed by 'gall'. If it touches `:aqua`, it should be prefixed by 'aqua'. prefixed by 'gall'. If it touches `:aqua`, it should be prefixed by 'aqua'. If
If it touches multiple components, then separate these by commas, e.g. "gall, it touches multiple components, then separate these by commas, e.g. "gall, aqua,
aqua, ph" -- but note that this may be a warning that too many changes are ph" -- but note that this may be a warning that too many changes are being
being packed into a single commit. The 'component' and 'short description' packed into a single commit. The 'component' and 'short description' combined
combined should be no more than 50 characters. should be no more than 50 characters.
A lengthier description is encouraged, where useful, but is not always strictly Every individual commit should at a minimum be in a compiling and runnable state.
required. You should use the longer description to give any useful background Broken commits or commits simply marked "wip" are not allowed. If you need to
on or motivation for the commit, provide a summary of what it does, link to clean up the commits in your branch, you can soft reset to an earlier state and
relevant issues, proposals, or other commits, and so on. recommit with better metadata (or if the change is small enough, squash to one
good commit at the end).
A lengthier description is encouraged, but is not always strictly required. You
should use the longer description to give any useful background on or motivation
for the commit, provide a summary of what it does, link to relevant issues,
proposals, or other commits, and so on.
Here is an example of our commit format, taken from a commit in the history: Here is an example of our commit format, taken from a commit in the history:
> zuse: remove superfluous 'scup' and 'culm' types. > zuse: remove superfluous 'scup' and 'culm' types.
> >
> %zuse includes definitions for 'scup' and 'culm', both of which are > %zuse includes definitions for 'scup' and 'culm', both of which are
> superfluous. 'scup' is simply (pair ship desk) and is used only in > superfluous. 'scup' is simply (pair ship desk) and is used only in the
> the definition of 'culm', a tagged union in which three of the four > definition of 'culm', a tagged union in which three of the four branches are
> branches are commented out (i.e. are unused). > commented out (i.e. are unused).
> >
> This commit deletes 'scup' and 'culm' and refactors what little code > This commit deletes 'scup' and 'culm' and refactors what little code made use
> made use of them. > of them.
Note that the short description is prefixed by `zuse:`, which is what the Note that the short description is prefixed by `zuse:`, which is what the commit
commit touches. Otherwise it just includes a summary of the change. touches. Otherwise it just includes a summary of the change.
Here's another example: Here's another example:
> build: give arvo a high priority > build: give arvo a high priority
> >
> 0bdced981e4 introduced the 'arvo-ropsten' derivation. Attempting to > 0bdced981e4 introduced the 'arvo-ropsten' derivation. Attempting to install
> install both 'arvo' and 'arvo-ropsten' via nix-env will result in a > both 'arvo' and 'arvo-ropsten' via nix-env will result in a priority error;
> priority error; this assigns a higher priority to 'arvo' to resolve the > this assigns a higher priority to 'arvo' to resolve the conflict.
> conflict.
> >
> Fixes #1912. > Fixes #1912.
Note that it cites a previous relevant commit, `0bdced981e4`, in its summary, Note that it cites a previous relevant commit, `0bdced981e4`, in its summary,
and also points at the issue that it resolves. and also points at the issue that it resolves.
If you're in doubt about how to format your commit descriptions, take a look at
the recent history and try to mimic the style that you can see others broadly
follow there.
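For instance (not part of the original guide, just one quick way to skim recent style before writing your own message), list the last twenty commit subjects:

```
git log --oneline -n 20
```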
When we say commits should be "atomic", we mean with respect to some distinct When we say commits should be "atomic", we mean with respect to some distinct
logical unit, e.g. a type definition used across many files, or a single file, logical unit, e.g. a type definition used across many files, or a single file,
or just a single function in a single file. Commits should be atomic at the or just a single function in a single file. Commits should be atomic at the
@@ -144,104 +104,83 @@ into a single one that captures everything you're trying to do -- the history
will never make for pleasant bedtime reading, so focus instead on making your will never make for pleasant bedtime reading, so focus instead on making your
commits useful for tools like `git-blame` and `git-bisect`. commits useful for tools like `git-blame` and `git-bisect`.
Your contribution must apply cleanly to `master` in order to be considered Your contribution must apply cleanly to `develop` in order to be considered
mergeable. You may want to regularly [rebase your changes][reba] onto `master` mergeable. You may want to regularly [rebase your changes][reba] onto
in order to both clean up any intermediate "development" commits you make and `develop` in order to both clean up any intermediate "development" commits you
to ensure that you're up to date. make and to ensure that you're up to date.
If you're making a GitHub pull request, it's good practice to make it from a ### Pull Requests and Merges
topic branch, rather than `master`, on your fork.
### Pills When your work is ready for review, open a pull request, making sure to link to
the tracking issue in the description, which should be formatted as follows
(where `<N>` is the number of this work's tracking issue):
```
### Description
Resolves #<N>.
Thoroughly describe the changes made.
### Related
Reference any related issues, links, papers, etc. here.
```
Tests will run automatically via GitHub Actions when you open a pull request or
push new commits to an existing pull request.
Once you've collected and addressed feedback, tests are passing, and your PR has
been approved, merge the pull request.
**Note**: If you are merging into develop, you *must* be syncing OTAs from
`~binnec-dozzod-marzod` which gets the tip of develop deployed to it. If
your merge breaks `binnec` it's your responsibility to alert people and
fix it. Your PR is shipped when it's successfully been deployed to
`~binnec` and picked up by your personal ship. If you're merging on behalf
of an external developer, this is also your responsibility.
If you properly included the "Resolves
#N." directive in the pull request description, merging will automatically close
the tracking issue associated with the pull request.
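As a quick sanity check after your merge deploys (a sketch, not an official step; the exact output varies by release), you can ask the Dojo which source your `%base` desk is syncing from and at which revision, using the same generator the maintainers' guide references:

```
> +vat %base
```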
## Code style
Hoon will be a less familiar language to many contributors. We've published
some [style guidelines for Hoon][hoon], but above all you should try to mimic
the style of the code around you. With regards to the style used throughout the
codebase: the more recently the code was written, the more standard and accepted
its style is likely to be.
### Kernel Development and Pills
Urbit bootstraps itself from a pill (you can see it being fetched from
`bootstrap.urbit.org` on boot). This is the compiled version of the kernel
(which you can find in the `sys` directory of [Arvo][arvo]), along with a
complete copy of the Arvo source.
You can find the latest solid pill, as well as the latest so-called *brass*
and *ivory* pills, in the `bin/` directory at the repository root.
Any contribution that touches the kernel (i.e., anything in `pkg/arvo/sys`), Any contribution that touches the kernel (i.e., anything in `pkg/arvo/sys`),
should be accompanied by an updated [solid pill](#the-kernel-and-pills). Pills should be accompanied by an updated [solid pill](#the-kernel-and-pills). Pills
are tracked in the repository via [git LFS][git-lfs]. are tracked in the repository via [git LFS][git-lfs].
Whenever you make a contribution to the kernel, please create a new solid pill
via:
``` ```
sh/update-solid-pill $ git lfs init
$ git lfs pull
``` ```
and include it along with your contribution. [git-lfs]: https://git-lfs.github.com
Historically, we've sometimes included these updated pills in separate, The +solid command is used to write the compiled kernel to a file.
standalone commits (you will see plenty of "pills: update solid" and similar
commits if you look through the history), but this practice is considered to be
deprecated -- you should usually just include the updated pill in the same
commit that updates the source.
## Releases
We typically create releases by tagging appropriate commits on `master`, so any
given commit in `master` may not actually be present in the latest release.
We perform updates by pushing releases over-the-air to `~zod` approximately
once per week, so any contribution that can be deployed OTA will usually find
its way onto the network pretty rapidly.
If you want to propose a hotfix (i.e. a small, OTA-updateable change, usually a
bugfix, to some currently-deployed release) then simply make it clear that your
contribution -- whether it be a pull request, patch, or whatever -- is intended
to be a hotfix. A maintainer can then deploy it to the network outside of the
normal release schedule.
Less frequently we release new Vere versions, which requires users to download
new binaries, and occasionally, while Urbit is still in early development, we
breach network continuity in order to release large changes that are difficult
to push out over-the-air. Contributions to Vere, or non-OTA-able updates to
Arvo, will find their way into releases before terribly long.
## Code style
The Urbit project uses two-space indentation and avoids tab characters.
In C code, it should not be too difficult to mimic the style of the code
around you, which is just fairly standard K&R with braces on every
compound statement. One thing to watch out for is top-level sections in
source files that are denoted by comments and are actually indented one
level.
Hoon will be a less familiar language to many contributors. We've published
some [style guidelines for Hoon][hoon], but above all you should try to mimic
the style of the code around you. With regards to the style used throughout
the codebase: the more recently the code was written, the more standard and
accepted its style is likely to be.
## Kernel development
Working on either C or non-kernel Hoon should not bring any surprises, but the
Hoon kernel (anything under [`pkg/arvo/sys/`][sys]) is bootstrapped from a
so-called *pill*, and must be recompiled if any changes are made. This should
happen automatically when you make changes, but if it doesn't, the command to
manually recompile and install the new kernel is `|reset` in `dojo`. This
rebuilds from the `sys` directory in the `base` desk in `%clay`.
Currently, `|reset` does not reload apps like `dojo` itself, which will still
reference the old kernel. To force them to reload, make a trivial edit to their
main source file (under the `app` directory) in `%clay`.
[arvo]: https://github.com/urbit/urbit/tree/master/pkg/arvo
[sys]: https://github.com/urbit/urbit/tree/master/pkg/arvo/sys
## The kernel and pills
Urbit bootstraps itself using a binary blob called a pill (you can see it being
fetched from `bootstrap.urbit.org` on boot). This is the compiled version of
the kernel (which you can find in the `sys` directory of [Arvo][arvo]), along
with a complete copy of the Arvo source.
The procedure for creating a pill is often called "soliding." It is somewhat
similar to `|reset`, but instead of replacing your running kernel, it writes
the compiled kernel to a file. The command to solid is:
``` ```
> .urbit/pill +solid > .urbit/pill +solid
``` ```
When the compilation finishes, your pill will be found in the When the compilation finishes, your pill will be found in the `[pier]/.urb/put/`
`[pier]/.urb/put/` directory as `urbit.pill`. directory as `urbit.pill`.
You can boot a new ship from your local pill with `-B`: You can boot a new ship from your local pill with `-B`:
@@ -253,48 +192,57 @@ Release pills, i.e. those corresponding to vere releases, are cached at
`https://bootstrap.urbit.org` and are indexed by the vere version number, e.g. `https://bootstrap.urbit.org` and are indexed by the vere version number, e.g.
`urbit-0.8.2.pill`. `urbit-0.8.2.pill`.
Pills are also cached in version control via [git LFS][git-lfs]. You can find Whenever you make a contribution to the kernel, please create a new solid pill
the latest solid pill, as well as the latest so-called *brass* and *ivory* via:
pills, in the `bin/` directory at the repository root. Note that you'll need
to initialise git LFS in order to check these pills out:
``` ```
$ git lfs init sh/update-solid-pill
$ git lfs pull
``` ```
[git-lfs]: https://git-lfs.github.com You should include the updated pill in the same commit that updates the source.
## Issues ## Development Environment
The [GitHub tracker][issu] is our canonical source of truth around issues, Although you likely have an identity on the live network, developing on the live
bugs, performance problems, feature requests, and so on. If you encounter any network is high-risk and largely unnecessary. Instead, standard practice is to
issues when developing on Urbit, feel free to submit a report about it here. work on a fake ship. Fake ships use deterministic keys derived from the ship's
address, don't communicate on the live network, and can communicate with other
fake ships over the local loopback.
A good bug report, description of a crash, etc., should ideally be ### Boot a New Fake Ship
*reproducible*, with clear steps as to how another developer can replicate and
examine your problem. That said, this isn't always possible -- some bugs
depend on having created a complicated or unusual state, or can otherwise
simply be difficult to trigger again (say, you encountered it in the last
continuity era).
Your issue should thus at a minimum be *informative*. The best advice here is
probably "don't write bad issues," where "bad" is a matter of judgment and
taste. Issues that the maintainers don't judge to be sufficiently useful or
informative may be closed.

Feature requests are welcome, but they should include sufficient detail and
explanation, as well as a discussion of perceived benefits one could expect from
them. "It would be cool if.." probably does not, in itself, constitute a good
feature request; instead, try to be specific about what you're requesting, and
what your desired feature would accomplish.

To boot a new fake ship, pass the `-F` flag and a valid Urbit ship name to
`urbit`:

```console
$ bazel build :urbit
$ ln -s bazel-bin/pkg/vere/urbit urbit
$ ./urbit -F <ship>
```
## Staying in touch

By default, booting a fake ship will use the same pill that livenet ships use,
which leads to a non-trivial boot time on the order of tens of minutes. However,
using a development specific "solid" pill reduces this time to a couple minutes.
To boot using the solid pill, download or create one as described in the Kernel
Development and Pills section above and then run the following:
```console
$ ./urbit -F <ship> -B solid.pill
```
### Launch an Existing Fake Ship
To launch an existing fake ship, supply the pier (the ship directory), which is
simply the name of the ship[^1], to `urbit`:
```console
$ ./urbit <ship>
```
[^1]: Unless you specified the pier name using the `-c` flag.
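A minimal sketch of the `-c` variant mentioned in the footnote (the pier name `my-fake-zod` is only an example, carried over from the older guide):

```console
$ ./urbit -F zod -c my-fake-zod
$ ./urbit my-fake-zod
```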
Questions or other communications about contributing to Urbit can go to
[support@urbit.org][mail].
[mail]: mailto:support@urbit.org
[list]: https://groups.google.com/a/urbit.org/forum/#!forum/dev [list]: https://groups.google.com/a/urbit.org/forum/#!forum/dev
[repo]: https://github.com/urbit/urbit [repo]: https://github.com/urbit/urbit
[reba]: https://git-rebase.io/ [reba]: https://git-rebase.io/

@@ -1,249 +1,98 @@
# Maintainers' Guide

## Branch organization

The essence of this branching scheme is that you create "release branches" of
independently releasable units of work. These can then be released by their
maintainers when ready.

### Master branch

Master is what's released on the network. Deployment instructions are in the
next section, but tagged releases should always come from this branch.

# Maintaining

## Overview

We use a three-stage release pipeline. Each stage of the release pipeline has
its own dedicated branch and corresponding testing moon. Features and bug fixes
progress through each stage--and are subject to testing along the way--until
they're eventually released to the live network. This pipeline automates our
release process, making it much easier to quickly and reliably ship code. It's
also simple to reason about.

## Branches and Moons
### Feature branches
Anyone can create feature branches. For those with commit access to
urbit/urbit, you're welcome to create them in this repo; otherwise, fork the
repo and create them there.
Usually, new development should start from master, but if your work depends on
work in another feature branch or release branch, start from there.
If, after starting your work, you need changes that are in master, merge it into
your branch. If you need changes that are in a release branch or feature
branch, merge it into your branch, but understand that your work now depends on
that release branch, which means it won't be released until that one is
released.
### Release branches
Release branches are code that is ready to release. All release branch names
should start with `next/`.
All code must be reviewed before being pushed to a release branch. Thus,
feature branches should be PR'd against a release branch, not master.
Create new release branches as needed. You don't need a new one for every PR,
since many changes are relatively small and can be merged together with little
risk. However, once you merge two branches, they're now coupled and will only
be released together -- unless one of the underlying commits is separately put
on a release branch.
Here's a worked example. The rule is to make however many branches are useful,
and no more. This example is not prescriptive; the developers making the
changes may add, remove, or rename branches in this flow at will.
Suppose you (plural, the dev community at large) complete some work in a
userspace app, and you put it in `next/landscape`. Separately, you make a small
JS change. If you PR it to `next/landscape`, then it will only be released at
the same time as the app changes. Maybe this is fine, or maybe you want this
change to go out quickly, and the change in `next/landscape` is relatively
risky, so you don't want to push it out on Friday afternoon. In this case, put
the change in another release branch, say `next/js`. Now either can be released
independently.
Suppose you do further work that you want to PR to `next/landscape`, but it
depends on your fixes in `next/js`. Simply merge `next/js` into either your
feature branch or `next/landscape` and PR your finished work to
`next/landscape`. Now there is a one-way coupling: `next/landscape` contains
`next/js`, so releasing it will implicitly release `next/js`. However, you can
still release `next/js` independently.
This scheme extends to other branches, like `next/base` or `next/os1.1` or
`next/ford-fusion`. Some branches may be long-lived and represent simply the
"next" release of something, while others will have a definite lifetime that
corresponds to development of a particular feature or numbered release.
Since they are "done", release branches should be considered "public", in the
sense that others may depend on them at will. Thus, never rebase a release
branch.
When cutting a new release, you can filter branches with `git branch --list
'next/*'` or by typing "next/" in the branch filter on Github. This will give
you the list of branches which have passed review and may be merged to master
and released. When choosing which branches to release, make sure you understand
the risks of releasing them immediately. If merging these produces nontrivial
conflicts, consider asking the developers on those branches to merge between
themselves. In many cases a developer can do this directly, but if it's
sufficiently nontrivial, this may be a reviewed PR of one release branch into
another.
#### Standard release branches
While you can always create non-standard release branches to stage for a
particular release, most changes should go through the following:
- next/base -- changes to the %base desk in pkg/arvo
- next/garden -- changes to the %garden desk
- next/landscape -- changes to the %landscape desk
- next/bitcoin -- changes to the %bitcoin desk
- next/webterm -- changes to the %webterm desk
- next/vere -- changes to the runtime
### Other cases
Outside contributors can generally target their PRs against master unless
specifically instructed. Maintainers should retarget those branches as
appropriate.
If a commit is not something that goes into a release (eg changes to README or
CI), it may be committed straight to master.
If a hotfix is urgent, it may be PR'd straight to master. This should only be
done if you reasonably expect that it will be released soon and before anything
else is released.
If a series of commits that you want to release is on a release branch, but you
really don't want to release the whole branch, you must cherry-pick them onto
another release branch. Cherry-picking isn't ideal because those commits will
be duplicated in the history, but it won't have any serious side effects.
## Hotfixes
Here lies an informal guide for making hotfix releases and deploying them to
the network.
Take [this PR][1], as an example. This constituted a great hotfix. It's a
single commit, targeting a problem that existed on the network at the time.
Here's how it should be released and deployed OTA.
[1]: https://github.com/urbit/urbit/pull/2025
### If the thing is acceptable to merge, merge it to master
Unless it's very trivial, it should probably have a single "credible looking"
review from somebody else on it.
You should avoid merging the PR in GitHub directly. Instead, use the
`sh/merge-with-custom-msg` script -- it will produce a merge commit with
message along the lines of:
The branches and their corresponding moons that comprise the stages of the
release pipeline are:
``` ```
Merge branch FOO (#PR_NUM) ----------------------------------------------------------------------------------------------
Branch | Moon | Target audience | Contains
* FOO: ----------------------------------------------------------------------------------------------
bar: ... `develop` | `~binnec-dozzod-marzod` | Kernel developers | Latest `develop` branch commit
baz: ... `release` | `~marnec-dozzod-marzod` | Early Adopters | Latest `release` branch commit
`release` | `~doznec-dozzod-marzod` | App Developers | Latest release candidate
Signed-off-by: SIGNER <signer@example.com> `master` | `~zod` | Everyone else | Latest release
``` ```
We do this as it's nice to have the commit log information in the merge commit, **WARNING**: If you lack the requisite skills to troubleshoot and fix kernel issues, you should not sync from develop/~binnec. If you're not prepared to breach your ship in response to an issue stemming from an early release, do not use pre-release moons.
which GitHub's "Merge PR" button doesn't do (at least by default).
`sh/merge-with-custom-msg` performs some useful last-minute urbit-specific
checks, as well.
You might want to alias `sh/merge-with-custom-msg` locally, to make it easier `develop` is the default branch in the repo, which means that all new pull
to use. My .git/config contains the following, for example: requests target it by default. The general flow of a new feature or bug fix
through the pipeline is:
```
[alias]
mu = !sh/merge-with-custom-msg
```
so that I can type e.g. `git mu origin/foo 1337`.
### Prepare a release commit
If you're making a Vere release, just play it safe and update all the pills.
To produce multi pills, you will need to set up an environment with the
appropriate desks with the appropriate contents, doing something like the
following (where `> ` denotes an urbit command and `% ` denotes a unix shell
command):
```console ```console
> |merge %garden our %base feature branch ----> develop ----> release ---------> master
> |merge %landscape our %base | | |
> |merge %bitcoin our %base deployed to deployed to deployed to
> |merge %webterm our %base ~binnec moon ~marnec/~doznec moon network
> |mount %
> |mount %garden
> |mount %landscape
> |mount %bitcoin
> |mount %webterm
% rsync -avL --delete pkg/arvo/ zod/base/
% rm -rf zod/base/tests/
% for desk in garden landscape bitcoin webterm; do \
rsync -avL --delete pkg/$desk/ zod/$desk/ \
done
> |commit %base
> |commit %garden
> |commit %landscape
> |commit %bitcoin
> |commit %webterm
> .multi/pill +solid %base %garden %landscape %bitcoin %webterm
> .multi-brass/pill +brass %base %garden %landscape %bitcoin %webterm
``` ```
And then of course: If an issue arises in the course of testing the `release` branch (because more
people are using `marnec` than `binnec`), a PR can be opened to target
`release`. If that's the case, the `master` needs to be merged back into
`develop` after `release` merges into `master` to ensure that `develop` gets the
fix.
```console ## Release Workflow
> .solid/pill +solid
> .brass/pill +brass
> .ivory/pill +ivory
```
For an Urbit OS release, after all the merge commits, make a release with the Developers work on feature branches built against `develop`. While doing this,
commit message "release: urbit-os-v1.0.xx". This commit should have up-to-date they continually merge in changes from `develop` to their feature branch. When
artifacts from pkg/interface and a new version number in the desk.docket-0 of their feature is ready (and they've tested it), they open a pull request. After
any desk which changed. If neither the pill nor the JS need to be updated (e.g code review approval and passing tests, their feature can merge into `develop`.
if the pill was already updated in the previous merge commit), consider making Every merge into `develop` immediately triggers a deploy to the `binnec` moon.
the release commit with --allow-empty. If your merge breaks `binnec` it's your responsibility to fix it.
If anything in `pkg/interface` has changed, ensure it has been built and Once a week on Tuesday, a `release` branch is cut off of `develop`. This release
deployed properly. You'll want to do this before making a pill, since you want gets deployed to `marnec` to be tested for the rest of the week. Any fixes that
the pill to have the new files/hash. For most things, it is sufficient to run have to go into the release can go straight into the release branch. New work
`npm install; npm run build:prod` in `pkg/interface`. that didn't make the release continues on feature branches against `develop`
(eventually merging there). After initial testing on `marnec`, a release
candidate is tagged and merges into `~doznec` where early adopters and app
developers can pick it up and test or update their apps for a new kelvin. If
it's a new kelvin, we also send an email to urbit-dev with instructions for
testing the breaking changes.
However, if you've made a change to Landscape's JS, then you will need to build
a "glob" and upload it to bootstrap.urbit.org. To do this, run `npm install;
npm run build:prod` in `pkg/interface`, and add the resulting
`pkg/arvo/app/landscape/index.[hash].js` to a fakezod at that path (or just create a
new fakezod with `urbit -F zod -B bin/solid.pill -A pkg/arvo`). Run
`:glob|make`, and this will output a file in `fakezod/.urb/put/glob-0vXXX.glob`.

Then, on the next Tuesday, the release branch merges into `master` and is tagged
per the instructions below; we create a GitHub release (marked "latest") from
that tag on `master`, documenting the changes that went into the release. In the
GitHub UI you can generate the changelog by selecting the previous release's tag
when creating the new one. The release is then deployed to the broader network
via `zod`. `master` is then merged back into `develop`, where any fixes that went
straight to release get picked up. Lastly, a new release branch is cut from
`develop` and the process begins again.
Upload this file to bootstrap.urbit.org, and modify `+hash` at the top of ### Tagging
`pkg/arvo/app/glob.hoon` to match the hash in the filename of the `.glob` file.
Amend `pkg/arvo/app/landscape/index.html` to import the hashed JS bundle, instead
of the unversioned index.js. Do not commit the produced `index.js` and
make sure it doesn't end up in your pills (they should be less than 10MB each).
### Tag the resulting commit

What you should do here depends on the type of release being made.

First, for Urbit OS releases:

If it's a very trivial hotfix that you know isn't going to break anything, tag
it as `urbit-os-vx.y`. Here 'x' is the major version and 'y' is an OTA patch
counter. Change `urbit-os` to e.g. `landscape` or another desk if that's what you're
releasing. If you're releasing changes to more than one desk, add a separate
tag for each desk (but only make one announcement email/post, with all of the
desks listed).

Use an annotated tag, i.e.

```
git tag -a urbit-os-vx.y
```

### Tagging

When we branch release to deploy to `~marnec`, we need to tag it as a release candidate (RC), like `urbit-os-vx.y-rc1`. Here 'x' is the major version and 'y' is an OTA patch counter. After this any change that goes into release gets a new tag that increments the rc.

After we ship a release to the live network, add a tag that is not a release candidate, like `urbit-os-vx.y`, to the master branch, since that's what was released.

#### Applying the Tag Locally

Use an annotated tag with the `-a` git argument. Make sure to follow
the naming convention for RCs and live releases, described above.

To add a tag to the local repo, run this:

```
git tag -a <tagname>
```
The tag format should look something like this: This will bring up an editor, where you should add the release notes,
which should look like this:
``` ```
urbit-os-vx.y <tagname>
This release will be pushed to the network as an over-the-air update. This release will be pushed to the network as an over-the-air update.
@@ -256,99 +105,68 @@ Contributions:
[..] [..]
``` ```
You can get the "contributions" section by the shortlog between the To fill in the "contributions" section, copy in the shortlog between the last release and this release, obtained by running this command:
last release and this release:
``` ```
git shortlog LAST_RELEASE.. git shortlog --no-merges LAST_RELEASE..
``` ```
I originally tried to curate this list somewhat, but now just paste it #### Pushing the Tag to the Main Repo
verbatim. If it's too noisy, yell at your colleagues to improve their commit
messages.
Try to include a high-level summary of the changes in the "release notes" Once you have added a tag, push it to the main repository using the
section. You should be able to do this by simply looking at the git log and following command:
skimming the commit descriptions (or perhaps copying some of them in verbatim).
If the commit descriptions are too poor to easily do this, then again, yell at
your fellow contributors to make them better in the future.
If it's *not* a trivial hotfix, you should probably make any number of release
candidate tags (e.g. `urbit-os-vx.y.rc1`, `urbit-os-vx.y.rc2`, ..), test
them, and after you confirm one of them is good, tag the release as
`urbit-os-vx.y`.
For Vere releases:
Tag the release as `urbit-vx.y`. The tag format should look something like
this:
``` ```
urbit-vx.y git push origin <tagname>
Note that this Vere release will by default boot fresh ships using an Urbit OS
va.b.c pill.
Release binaries:
(linux64)
https://bootstrap.urbit.org/urbit-vx.y-linux64.tgz
(macOS)
https://bootstrap.urbit.org/urbit-vx.y-darwin.tgz
Release notes:
[..]
Contributions:
[..]
``` ```
Ensure the Vere release is marked as the 'latest' release and upload the two ## Releases
`.tgz` files to the release as `darwin.tgz` and `linux64.tgz`;
this allows us to programmatically retrieve the latest release at
[urbit.org/install/mac/latest/](https://urbit.org/install/mac/latest) and
[urbit.org/install/linux64/latest](https://urbit.org/install/linux64/latest),
respectively.
The same spiel re: release candidates applies here.

- [ ] Create a pull request from the relevant release branch (with the format `release/urbit-os-vX.XX`) to `master`.
- [ ] ssh into `~zod`
- [ ] Check to ensure that nobody else is ssh'd into `~zod`, by running `screen -ls` and verifying no sessions are attached.
- [ ] Attach to the screen session using `screen -x`
- [ ] Ensure that the release candidate was correctly propagated through the prerelease moons
- Use `-read %z ~SHIP %DESK da+now /` to check desk hashes (see the example after this checklist)
- [ ] Run the above check on the following [SHIP DESK] pairs: [~marnec-dozzod-marzod %base], [~marnec-dozzod-marzod %kids], [~doznec-dozzod-marzod %base], [~doznec-dozzod-marzod %kids] — **they should all match**
- [ ] Install the contents of the `%kids` desk on `~doznec-dozzod-marzod` into our `%base`: `|merge %base ~doznec-dozzod-marzod %kids, =gem %only-that`
- [ ] Check that `~zod` has updated to the latest release. For a Kelvin release, you can run `zuse` in the Dojo. Each non-Kelvin release might have its own way of checking whether the update has completed, for example checking that the `%cz` hash of the `%base` desk matches the hash on `~marnec-dozzod-marzod` by comparing the outputs of `+vat %base` on both ships. (The full dojo sequence for this list is sketched below.)
- [ ] Merge `~zod`'s `%base` desk into its `%kids` desk to distribute the new code to the network: `|merge %kids our %base, =gem %only-that`
- [ ] Before exiting the screen session on `~zod`, make sure the screen session is not left in copy mode for a long period of time, since that will disrupt `~zod`'s operation.
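For reference, here is the sequence above as it would be entered in `~zod`'s dojo, using the ship/desk pairs listed in this checklist. The `::` lines are annotations, not dojo input.

```
:: check that the prerelease moons agree on the desk hashes
-read %z ~marnec-dozzod-marzod %base da+now /
-read %z ~marnec-dozzod-marzod %kids da+now /
-read %z ~doznec-dozzod-marzod %base da+now /
-read %z ~doznec-dozzod-marzod %kids da+now /
:: install ~doznec-dozzod-marzod's %kids desk into ~zod's %base
|merge %base ~doznec-dozzod-marzod %kids, =gem %only-that
:: confirm the update landed (Kelvin releases: check the kelvin; otherwise compare hashes)
zuse
+vat %base
:: distribute the new code to the network via ~zod's %kids desk
|merge %kids our %base, =gem %only-that
```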
Note that the release notes indicate which version of Urbit OS the Vere release ### Release Communications
will use by default when booting fresh ships. Do not include implicit Urbit OS
changes in Vere releases; this used to be done, historically, but shouldn't be
any longer. If there are Urbit OS and Vere changes to be released, make two
separate releases.
### Deploy the update - [ ] Tag the commit that went onto the live network as a release, using GitHub's "Releases" interface. See the "Tagging" section of this document for details.
- [ ] Update (add a response) the mailing list post to include the base hash of the new release, and indicate that this has now been deployed to the network.
- [ ] Tweet from the `@zodisok` Twitter account linking to the GitHub release.
- [ ] Post links to the release in the Urbit Community Development channel and a channel in the UF public group.
(**Note**: the following steps are automated by some other Tlon-internal ### Post-Release Git Cleanup
tooling. Just ask `~nidsut-tomdun` for details.)
For Urbit OS updates, this means copying the files into ~zod's %base desk. The - [ ] Merge `master` back into `develop`.
changes should be merged into /~zod/kids and then propagated through other galaxies - [ ] Cut a new release branch from `develop`. The branch should have the format `release/urbit-os-vX.XX`
and stars to the rest of the network.
For consistency, I create a release tarball and then rsync the files in. ### Release Next Release Candidate
``` We'll now need to **IMMEDIATELY** deploy the new release candidate to the pre-release moon(s). Otherwise PRs merged during this window will bypass the testing period on `~binnec` and go straight to the release candidate. (The merge commands are collected in a sketch after the checklist below.)
$ wget https://github.com/urbit/urbit/archive/urbit-os-vx.y.tar.gz
$ tar xzf urbit-os-vx.y.tar.gz
$ herb zod -p hood -d "+hood/mount /=base="
$ rsync -zr --delete urbit-urbit-os-vx.y/pkg/arvo/ zod/base
$ herb zod -p hood -d "+hood/commit %base"
$ herb zod -p hood -d "+hood/merge %kids our %base"
```
For Vere updates, this means simply shutting down each desired ship, installing - On `~marnec-dozzod-marzod`:
the new binary, and restarting the pier with it. - [ ] `|merge %base ~binnec-dozzod-marzod %kids, =gem %only-that` to update `~marnec` with the contents of the GH release branch
- [ ] `|merge %kids our %base, =gem %only-that` to OTA the release candidate to subscribers
- When ready to deploy the release candidate to App Developers, on `~doznec-dozzod-marzod`:
- [ ] `|merge %base ~marnec-dozzod-marzod %kids, =gem %only-that` to update `~doznec` with the release candidate
- [ ] `|merge %kids our %base, =gem %only-that` to OTA the release candidate to subscribers
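Collected in order, the same commands as above (verbatim from the checklist; `::` lines are annotations only):

```
:: on ~marnec-dozzod-marzod
|merge %base ~binnec-dozzod-marzod %kids, =gem %only-that
|merge %kids our %base, =gem %only-that
:: later, when ready to release to app developers, on ~doznec-dozzod-marzod
|merge %base ~marnec-dozzod-marzod %kids, =gem %only-that
|merge %kids our %base, =gem %only-that
```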
### Announce the update ### Post-Release Checks
- [ ] Check that `~marzod` and other distribution stars are receiving the update by running `|ames-sift ~zod` and `|ames-verb %rcv %ges`. You should see lots of packets from `~zod`. Once you have confirmed packets are flowing, run `|ames-sift` and `|ames-verb` with no arguments to reset the verbosity state. (See the command sketch after this list.)
- [ ] Check that planets are receiving the update. They should start updating within an hour or so.
- [ ] Monitor the Urbit Community Help channel, UF public group channels, and Twitter to make sure users are not reporting problems with the update.
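A minimal sketch of the verbosity check described above, as run in a distribution star's dojo (`::` lines are annotations only):

```
:: watch for packets arriving from ~zod
|ames-sift ~zod
|ames-verb %rcv %ges
:: once packets are confirmed flowing, reset the verbosity state
|ames-sift
|ames-verb
```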
Post an announcement to urbit-dev. The tag annotation, basically, is fine here ### Post-Release Artifacts
-- I usually add the %cz hash (for Urbit OS releases) and the release binary After waiting at least 24 hours after the release to the network, make and distribute a pill.
URLs (for Vere releases). Check the urbit-dev archives for examples of these - [ ] Find a ship on the network (for now, use `~halbex-palheb`, which runs the UF public group) whose sources for `%base` and the standard app desks are mainline, not devstream.
announcements. - [ ] Ensure the `%cz` hashes of all desks match those on the distribution ships.
- [ ] Make a pill by running `.multi-vX-XX/pill +solid %base %garden %webterm %landscape %groups %talk` (replacing `X-XX` with the appropriate version numbers, in this and later steps; see the sketch after this list).
Post the same announcement to the group feed of Urbit Community. - [ ] Boot a fakezod off that pill to make sure the pill is viable.
- [ ] Upload the pill to `bootstrap.urbit.org` using the Google Cloud SDK by running: `gsutil cp /path/to/pier/.urb/put/multi-vX-XX.pill gs://bootstrap.urbit.org/urbit-vX.XX.pill` -- note that it should be `vX.XX`, not `vX-XX` as in the original pill filename.
- [ ] Boot a ship with the latest binary and check that it downloads the pill from `https://bootstrap.urbit.org/urbit-vX.XX.pill` where you just uploaded it.
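A sketch of the pill steps above, keeping the `vX-XX`/`vX.XX` placeholders from the checklist; `/path/to/pier` likewise stands in for the chosen ship's pier:

```
:: dojo, on the chosen mainline ship: writes the pill to <pier>/.urb/put/multi-vX-XX.pill
.multi-vX-XX/pill +solid %base %garden %webterm %landscape %groups %talk

:: unix shell, once the pill exists (note the destination uses vX.XX, not vX-XX)
$ gsutil cp /path/to/pier/.urb/put/multi-vX-XX.pill gs://bootstrap.urbit.org/urbit-vX.XX.pill
```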

View File

@ -1,10 +1,10 @@
.PHONY: build build-all install cross release test pills ropsten-pills clean .PHONY: build build-all install cross release test pills goerli-pills clean
build: build:
nix-build -A urbit -A herb --no-out-link nix-build -A urbit --no-out-link
install: install:
nix-env -f . -iA urbit -iA herb nix-env -f . -iA urbit
release: release:
sh/release sh/release
@ -17,8 +17,8 @@ pills:
sh/update-brass-pill sh/update-brass-pill
sh/update-ivory-pill sh/update-ivory-pill
ropsten-pills: goerli-pills:
sh/create-ropsten-pills sh/create-goerli-pills
interface: interface:
sh/build-interface sh/build-interface

View File

@ -7,101 +7,36 @@ has an identity layer (Azimuth), virtual machine (Vere), and operating system
A running Urbit "ship" is designed to operate with other ships peer-to-peer. A running Urbit "ship" is designed to operate with other ships peer-to-peer.
Urbit is a general-purpose, peer-to-peer computer and network. Urbit is a general-purpose, peer-to-peer computer and network.
This repository contains: This repository contains the [Arvo Kernel][arvo]
- The [Arvo OS][arvo] For the Runtime, see [Vere][vere].
- [herb][herb], a tool for Unix control of an Urbit ship For more on the identity layer, see [Azimuth][azim].
- Source code for [Landscape's web interface][land] To manage your Urbit identity, use [Bridge][brid].
- Source code for the [vere][vere] virtual machine.
For more on the identity layer, see [Azimuth][azim]. To manage your Urbit
identity, use [Bridge][brid].
[arvo]: https://github.com/urbit/urbit/tree/master/pkg/arvo
[azim]: https://github.com/urbit/azimuth
[brid]: https://github.com/urbit/bridge
[herb]: https://github.com/urbit/urbit/tree/master/pkg/herb
[land]: https://github.com/urbit/urbit/tree/master/pkg/interface
[vere]: https://github.com/urbit/urbit/tree/master/pkg/urbit
## Install ## Install
To install and run Urbit, please follow the instructions at To install and run Urbit, please follow the instructions at
[urbit.org/using/install][start]. You'll be on the live network in a [urbit.org/getting-started][start]. You'll be on the live network in a
few minutes. few minutes.
If you're interested in Urbit development, keep reading. [start]: https://urbit.org/getting-started/
[start]: https://urbit.org/using/install/
## Development
[![License][license-badge]][license]
[![Build][build-badge]][build]
[![Nix][nix-badge]][nix]
[![Cachix][cachix-badge]][cachix]
Urbit uses [Nix][nix] to manage builds. On Linux and macOS you can install Nix
via:
```
curl -L https://nixos.org/nix/install | sh
```
You can optionally set up Nix to pull build artefacts from the binary cache
that continuous integration uses. This will improve build times and avoid
unnecessary recompilations of common dependencies. Once Nix has been installed
you can set up Cachix via:
```
nix-env -iA cachix -f https://cachix.org/api/v1/install
cachix use ares
```
The Makefile in the project's root directory contains useful phony targets for
building, installing, testing, and so on. You can use it to avoid dealing with
Nix explicitly.
To build the Urbit virtual machine binary, for example, use:
```
make build
```
The test suite can similarly be run via a simple:
```
make test
```
Note that some of the Makefile targets need access to pills tracked via [git
LFS][git-lfs], so you'll also need to have those available locally:
```
git lfs install
git lfs pull
```
[license]: https://raw.githubusercontent.com/urbit/urbit/master/LICENSE.txt
[license-badge]: https://img.shields.io/badge/license-MIT-blue.svg
[build]: https://github.com/urbit/urbit/actions
[build-badge]: https://github.com/urbit/urbit/workflows/build/badge.svg
[cachix]: https://ares.cachix.org
[cachix-badge]: https://img.shields.io/badge/cachix-ares-purple.svg
[nix]: https://nixos.org
[nix-badge]: https://img.shields.io/badge/builtwith-nix-purple.svg
[git-lfs]: https://git-lfs.github.com
## Contributing ## Contributing
Contributions of any form are more than welcome! Please take a look at our Contributions of any form are more than welcome! Please take a look at our
[contributing guidelines][cont] for details on our git practices, coding [contributing guidelines][cont] for details on our git practices, coding
styles, how we manage issues, and so on. styles, and how we manage issues.
For instructions on contributing to Landscape, see [its][lcont] guidelines.
You might also be interested in joining the [urbit-dev][list] mailing list. You might also be interested in joining the [urbit-dev][list] mailing list.
## Release
For details about our release process, see the [maintainers guidelines][main].
[arvo]: https://github.com/urbit/urbit/tree/master/pkg/arvo
[azim]: https://github.com/urbit/azimuth
[brid]: https://github.com/urbit/bridge
[vere]: https://github.com/urbit/vere
[list]: https://groups.google.com/a/urbit.org/forum/#!forum/dev [list]: https://groups.google.com/a/urbit.org/forum/#!forum/dev
[cont]: https://github.com/urbit/urbit/blob/master/CONTRIBUTING.md [cont]: https://github.com/urbit/urbit/blob/master/CONTRIBUTING.md
[lcont]: https://github.com/urbit/urbit/blob/master/pkg/interface/CONTRIBUTING.md [main]: https://github.com/urbit/urbit/blob/master/MAINTAINERS.md

View File

@ -1,3 +1,3 @@
version https://git-lfs.github.com/spec/v1 version https://git-lfs.github.com/spec/v1
oid sha256:c4247c64a7d9fc0c0f1d2f017c21dd3464ddfe56529c7d6eef0e64554bd453e8 oid sha256:bd487cdb8294fdef6878f623bceb893553b36b2a616d22d30017b430361586fb
size 7611162 size 3889185

View File

@ -1,3 +1,3 @@
version https://git-lfs.github.com/spec/v1 version https://git-lfs.github.com/spec/v1
oid sha256:5123a1ac30b83ec026587574df1ce13a73e72d06588ff68b5c41c09e1bebb5b7 oid sha256:26ff86808886beb831e4a135f478e42ce83ef4a09ad24808b3fe97248ce7a6b7
size 949962 size 1136643

View File

@ -1,3 +0,0 @@
version https://git-lfs.github.com/spec/v1
oid sha256:2f46209c31bc7be965b6ba32db92fb0746be15d9613b1c3c8d09ce7fa0e5e157
size 8280141

View File

@ -1,3 +1,3 @@
version https://git-lfs.github.com/spec/v1 version https://git-lfs.github.com/spec/v1
oid sha256:187ea751a274dba7ed69df3a5b8f6f7ac620e3f9787abd75b18cf494d0c41f05 oid sha256:4e4c99cd57805f38ffa1c8d0abe2e21cf1b93d5dbb76e32135721a9b8f46aa31
size 11174099 size 7740551

View File

@ -1,3 +1,3 @@
version https://git-lfs.github.com/spec/v1 version https://git-lfs.github.com/spec/v1
oid sha256:c45166ff0f8ab8dc1552bcef519c77c0afa6ca52f8ed1ba31ed632012667d619 oid sha256:9d0ff563027d47436d12f00b7500352ec844a1db7d336b0e5b9cfd5c4c49c66c
size 8674763 size 6017943

View File

@ -40,7 +40,9 @@
, crossOverlays ? [ ] , crossOverlays ? [ ]
# Whether to use pkgs.pkgsStatic.* to obtain statically linked package # Whether to use pkgs.pkgsStatic.* to obtain statically linked package
# dependencies - ie. when building fully-static libraries or executables. # dependencies - ie. when building fully-static libraries or executables.
, enableStatic ? false }: , enableStatic ? false
# release channel (when static)
, verePace ? "" }:
let let
@ -55,7 +57,10 @@ let
if system == "x86_64-linux" && crossSystem == null && enableStatic then if system == "x86_64-linux" && crossSystem == null && enableStatic then
"x86_64-unknown-linux-musl" "x86_64-unknown-linux-musl"
else else
crossSystem; if system == "aarch64-linux" && crossSystem == null && enableStatic then
"aarch64-unknown-linux-musl"
else
crossSystem;
}; };
# Use nixpkgs' top-level/static overlay if enableStatic = true. # Use nixpkgs' top-level/static overlay if enableStatic = true.
@ -95,8 +100,6 @@ let
marsSources = callPackage ./nix/pkgs/marsSources { }; marsSources = callPackage ./nix/pkgs/marsSources { };
urbit = callPackage ./nix/pkgs/urbit { inherit enableStatic; };
urcrypt = callPackage ./nix/pkgs/urcrypt { inherit enableStatic; }; urcrypt = callPackage ./nix/pkgs/urcrypt { inherit enableStatic; };
docker-image = callPackage ./nix/pkgs/docker-image { }; docker-image = callPackage ./nix/pkgs/docker-image { };
@ -107,16 +110,14 @@ let
# Expose packages with local customisations (like patches) for dev access. # Expose packages with local customisations (like patches) for dev access.
inherit (pkgsStatic) libsigsegv lmdb; inherit (pkgsStatic) libsigsegv lmdb;
urbit-debug = urbit.override { enableDebug = true; };
urbit-tests = libLocal.testFakeShip { urbit-tests = libLocal.testFakeShip {
inherit arvo; inherit arvo;
urbit = urbit-debug;
pill = solid.lfs; pill = solid.lfs;
}; };
ivory-ropsten = ivory.override { arvo = arvo.ropsten; }; ivory-goerli = ivory.override { arvo = arvo.goerli; };
brass-ropsten = brass.override { arvo = arvo.ropsten; }; brass-goerli = brass.override { arvo = arvo.goerli; };
# Create a .tgz of the primary binaries. # Create a .tgz of the primary binaries.
tarball = let tarball = let

70
doc/spec/nock/10.txt Normal file
View File

@ -0,0 +1,70 @@
Author: Mencius Moldbug [moldbug@gmail.com]
Date: 9/15/2008
Version: 10K
1. Introduction
This file defines one function, "nock."
nock is in the public domain.
2. Data
A "noun" is either an "atom" or a "cell." An "atom" is an unsigned
integer of any size. A "cell" is an ordered pair of any two nouns,
the "head" and "tail."
3. Semantics
nock maps one noun to another. It doesn't always terminate.
4. Pseudocode
nock is defined in a pattern-matching pseudocode, below.
Brackets enclose cells. [a b c] is [a [b c]].
5. Definition
5.1 Transformations
*[a [b c] d] => [*[a b c] *[a d]]
*[a 0 b] => /[b a]
*[a 1 b] => [b]
*[a 2 b c d] => *[a 3 [0 1] 3 [1 c d] [1 0] 3 [1 2 3] [1 0] 5 5 b]
*[a 3 b] => **[a b]
*[a 4 b] => &*[a b]
*[a 5 b] => ^*[a b]
*[a 6 b] => =*[a b]
*[a] => *[a]
5.2 Operators
5.2.1 Goto [*]
*[a] -> nock[a]
5.2.2 Deep [&]
&[a b] -> 0
&[a] -> 1
5.2.4 Bump [^]
^[a b] -> ^[a b]
^[a] -> (a + 1)
5.2.5 Like [=]
=[a a] -> 0
=[a b] -> 1
=[a] -> =[a]
5.2.6 Snip [/]
/[1 a] -> a
/[2 a b] -> a
/[3 a b] -> b
/[(a + a) b] -> /[2 /[a b]]
/[(a + a + 1) b] -> /[3 /[a b]]
/[a] -> /[a]

74
doc/spec/nock/11.txt Normal file
View File

@ -0,0 +1,74 @@
Author: Mencius Moldbug (moldbug@gmail.com)
Date: 5/25/2008
Version: 11K
1. Introduction
This file defines one function, "nock."
nock is in the public domain.
2. Data
A "noun" is either an "atom" or a "cell." An "atom" is an unsigned
integer of any size. A "cell" is an ordered pair of any two nouns,
the "head" and "tail."
3. Semantics
nock maps one noun to another. It doesn't always terminate.
4. Pseudocode
nock is defined in a pattern-matching pseudocode, below.
Parentheses enclose cells. (a b c) is (a (b c)).
5. Definition
5.1 Transformations
*(a (b c) d) => (*(a b c) *(a d))
*(a 0 b) => /(b a)
*(a 1 b) => (b)
*(a 2 b c d) => *(a 3 (0 1) 3 (1 c d) (1 0) 3 (1 2 3) (1 0) 5 5 b)
*(a 3 b) => **(a b)
*(a 4 b) => &*(a b)
*(a 5 b) => ^*(a b)
*(a 6 b) => =*(a b)
*(a 7 b c) => *(a 3 (((1 0) b) c) 1 0 3)
*(a 8 b c) => *(a c)
*(a) => *(a)
5.2 Operators
5.2.1 Goto (*)
*(a) -> nock(a)
5.2.2 Deep (&)
&(a b) -> 0
&(a) -> 1
5.2.4 Bump (^)
^(a b) -> ^(a b)
^(a) -> a + 1
5.2.5 Same (=)
=(a a) -> 0
=(a b) -> 1
=(a) -> =(a)
5.2.6 Snip (/)
/(1 a) -> a
/(2 a b) -> a
/(3 a b) -> b
/((a + a) b) -> /(2 /(a b))
/((a + a + 1) b) -> /(3 /(a b))
/(a) -> /(a)

75
doc/spec/nock/12.txt Normal file
View File

@ -0,0 +1,75 @@
Author: Curtis Yarvin (curtis.yarvin@gmail.com)
Date: 3/28/2008
Version: 0.12
1. Introduction
This file defines one function, "nock."
nock is in the public domain.
2. Data
A "noun" is either an "atom" or a "cell." An "atom" is an unsigned
integer of any size. A "cell" is an ordered pair of any two nouns,
the "head" and "tail."
3. Semantics
nock maps one noun to another. It doesn't always terminate.
4. Pseudocode
nock is defined in a pattern-matching pseudocode, below.
Parentheses enclose cells. (a b c) is (a (b c)).
5. Definition
5.1 Transformations
*(a (b c) d) => (*(a b c) *(a d))
*(a 0 b) => /(b a)
*(a 1 b) => (b)
*(a 2 b c) => *(*(a b) c)
*(a 3 b) => **(a b)
*(a 4 b) => &*(a b)
*(a 5 b) => ^*(a b)
*(a 6 b) => =*(a b)
*(a 7 b c d) => *(a 3 (0 1) 3 (1 c d) (1 0) 3 (1 2 3) (1 0) 5 5 b)
*(a 8 b c) => *(a 2 (((1 0) b) c) 0 3)
*(a 9 b c) => *(a c)
*(a) => *(a)
5.2 Operators
5.2.1 Goto (*)
*(a) -> nock(a)
5.2.2 Deep (&)
&(a b) -> 0
&(a) -> 1
5.2.4 Bump (^)
^(a b) -> ^(a b)
^(a) -> a + 1
5.2.5 Same (=)
=(a a) -> 0
=(a b) -> 1
=(a) -> =(a)
5.2.6 Snip (/)
/(1 a) -> a
/(2 a b) -> a
/(3 a b) -> b
/((a + a) b) -> /(2 /(a b))
/((a + a + 1) b) -> /(3 /(a b))
/(a) -> /(a)

71
doc/spec/nock/13.txt Normal file
View File

@ -0,0 +1,71 @@
Author: Curtis Yarvin (curtis.yarvin@gmail.com)
Date: 3/8/2008
Version: 0.13
1. Manifest
This file defines one Turing-complete function, "nock."
nock is in the public domain. So far as I know, it is
neither patentable nor patented. Use it at your own risk.
2. Data
Both the domain and range of nock are "nouns."
A "noun" is either an "atom" or a "cell." An "atom" is an unsigned
integer of any size. A "cell" is an ordered pair of any two nouns,
the "head" and "tail."
3. Pseudocode
nock is defined in a pattern-matching pseudocode.
Match precedence is top-down. Operators are prefix. Parens
denote cells, and group right: (a b c) is (a (b c)).
4. Definition
4.1 Transformations
*(a 0 b c) => *(*(a b) c)
*(a 0 b) => /(b a)
*(a 1 b) => (b)
*(a 2 b) => **(a b)
*(a 3 b) => &*(a b)
*(a 4 b) => ^*(a b)
*(a 5 b) => =*(a b)
*(a 6 b c d) => *(a 2 (0 1) 2 (1 c d) (1 0) 2 (1 2 3) (1 0) 4 4 b)
*(a b c) => (*(a b) *(a c))
*(a) => *(a)
4.2 Operators
4.2.1 Goto (*)
*(a) -> nock(a)
4.2.2 Deep (&)
&(a b) -> 0
&(a) -> 1
4.2.3 Bump (^)
^(a b) -> ^(a b)
^(a) -> a + 1
4.2.4 Same (=)
=(a a) -> 0
=(a b) -> 1
=(a) -> =(a)
4.2.5 Snip (/)
/(1 a) -> a
/(2 a b) -> a
/(3 a b) -> b
/((a + a) b) -> /(2 /(a b))
/((a + a + 1) b) -> /(3 /(a b))
/(a) -> /(a)

42
doc/spec/nock/6.txt Normal file
View File

@ -0,0 +1,42 @@
1 Structures
A noun is an atom or a cell. An atom is any natural number.
A cell is an ordered pair of nouns.
2 Reductions
nock(a) *a
[a b c] [a [b c]]
?[a b] 0
?a 1
+a 1 + a
=[a a] 0
=[a b] 1
/[1 a] a
/[2 a b] a
/[3 a b] b
/[(a + a) b] /[2 /[a b]]
/[(a + a + 1) b] /[3 /[a b]]
*[a [b c] d] [*[a b c] *[a d]]
*[a 0 b] /[b a]
*[a 1 b] b
*[a 2 b c] *[*[a b] *[a c]]
*[a 3 b] ?*[a b]
*[a 4 b] +*[a b]
*[a 5 b] =*[a b]
*[a 6 b c d] *[a 2 [0 1] 2 [1 c d] [1 0] 2 [1 2 3] [1 0] 4 4 b]
*[a 7 b c] *[a 2 b 1 c]
*[a 8 b c] *[a 7 [[0 1] b] c]
*[a 9 b c] *[a 7 c 0 b]
*[a 10 b c] *[a c]
*[a 10 [b c] d] *[a 8 c 7 [0 2] d]
+[a b] +[a b]
=a =a
/a /a
*a *a

42
doc/spec/nock/7.txt Normal file
View File

@ -0,0 +1,42 @@
1 Structures
A noun is an atom or a cell. An atom is any natural number.
A cell is any ordered pair of nouns.
2 Pseudocode
[a b c] [a [b c]]
nock(a) *a
?[a b] 0
?a 1
^a 1 + a
=[a a] 0
=[a b] 1
/[1 a] a
/[2 a b] a
/[3 a b] b
/[(a + a) b] /[2 /[a b]]
/[(a + a + 1) b] /[3 /[a b]]
*[a [b c] d] [*[a b c] *[a d]]
*[a 0 b] /[b a]
*[a 1 b] b
*[a 2 b c] *[*[a b] *[a c]]
*[a 3 b] ?*[a b]
*[a 4 b] ^*[a b]
*[a 5 b] =*[a b]
*[a 6 b c d] *[a 2 [0 1] 2 [1 c d] [1 0] 2 [1 2 3] [1 0] 4 4 b]
*[a 7 b c] *[a 2 b 1 c]
*[a 8 b c] *[a 7 [[7 [0 1] b] 0 1] c]
*[a 9 b c] *[a 7 c 0 b]
*[a 10 b c] *[a c]
*[a 10 [b c] d] *[a 8 c 7 [0 3] d]
^[a b] ^[a b]
=a =a
/a /a
*a *a

45
doc/spec/nock/8.txt Normal file
View File

@ -0,0 +1,45 @@
1 Structures
A noun is an atom or a cell. An atom is any unsigned integer.
A cell is an ordered pair of nouns.
2 Pseudocode
[a b c] is [a [b c]]; *a is nock(a). Reductions match top-down.
3 Reductions
?[a b] 0
?a 1
^a (a + 1)
=[a a] 0
=[a b] 1
/[1 a] a
/[2 a b] a
/[3 a b] b
/[(a + a) b] /[2 /[a b]]
/[(a + a + 1) b] /[3 /[a b]]
*[a [b c] d] [*[a b c] *[a d]]
*[a 0 b] /[b a]
*[a 1 b] b
*[a 2 b c] *[*[a b] *[a c]]
*[a 3 b] ?*[a b]
*[a 4 b] ^*[a b]
*[a 5 b] =*[a b]
*[a 6 b c d] *[a 2 [0 1] 2 [1 c d] [1 0] 2 [1 2 3] [1 0] 4 4 b]
*[a 7 b c] *[a 2 b 1 c]
*[a 8 b c] *[a 7 [7 b [0 1]] c]
*[a 9 b c] *[a 8 b 2 [[7 [0 3] d] [0 5]] 0 5]
*[a 10 b c] *[a 8 b 8 [7 [0 3] c] 0 2]
*[a 11 b c] *[a 8 b 7 [0 3] c]
*[a 12 b c] *[a [1 0] 1 c]
^[a b] ^[a b]
=a =a
/a /a
*a *a

43
doc/spec/nock/9.txt Normal file
View File

@ -0,0 +1,43 @@
1 Context
This spec defines one function, Nock.
2 Structures
A noun is an atom or a cell. An atom is any unsigned integer.
A cell is an ordered pair of any two nouns.
3 Pseudocode
Brackets enclose cells. [a b c] is [a [b c]].
*a is Nock(a). Reductions match top-down.
4 Reductions
?[a b] => 0
?a => 1
^[a b] => ^[a b]
^a => (a + 1)
=[a a] => 0
=[a b] => 1
=a => =a
/[1 a] => a
/[2 a b] => a
/[3 a b] => b
/[(a + a) b] => /[2 /[a b]]
/[(a + a + 1) b] => /[3 /[a b]]
/a => /a
*[a 0 b] => /[b a]
*[a 1 b] => b
*[a 2 b c d] => *[a 3 [0 1] 3 [1 c d] [1 0] 3 [1 2 3] [1 0] 5 5 b]
*[a 3 b] => **[a b]
*[a 4 b] => ?*[a b]
*[a 5 b] => ^*[a b]
*[a 6 b] => =*[a b]
*[a [b c] d] => [*[a b c] *[a d]]
*a => *a

View File

@ -1,9 +0,0 @@
{
"packages": [
"pkg/npm/*",
"pkg/btc-wallet",
"pkg/interface",
"pkg/grid"
],
"version": "independent"
}

View File

@ -25,6 +25,7 @@ let
(import ./overlays/native.nix) (import ./overlays/native.nix)
# Specific overrides guarded by the host platform. # Specific overrides guarded by the host platform.
(import ./overlays/musl.nix) (import ./overlays/musl.nix)
(import ./overlays/arm.nix)
]; ];
}; };

View File

@ -1,6 +1,6 @@
{ lib, stdenvNoCC, curl }: { lib, stdenvNoCC, curl }:
{ urbit, arvo ? null, pill, ship, arguments ? [ "-l" ] }: { arvo ? null, pill, ship, arguments ? [ "-l" ] }:
let let
@ -10,14 +10,14 @@ let
in stdenvNoCC.mkDerivation { in stdenvNoCC.mkDerivation {
name = "fake-${ship}"; name = "fake-${ship}";
buildInputs = [ curl urbit ]; buildInputs = [ curl ];
phases = [ "buildPhase" "installPhase " ]; phases = [ "buildPhase" "installPhase " ];
buildPhase = '' buildPhase = ''
set -xeuo pipefail set -xeuo pipefail
urbit ${lib.concatStringsSep " " args} ./pier ${arvo}/vere.jam ${lib.concatStringsSep " " args} -c ./pier
cleanup () { cleanup () {
if [ -f ./pier/.vere.lock ]; then if [ -f ./pier/.vere.lock ]; then

View File

@ -1,16 +1,15 @@
{ lib, stdenvNoCC, curl, python3, bootFakeShip }: { lib, stdenvNoCC, curl, python3, bootFakeShip }:
{ urbit, arvo ? null, pill, ship ? "bus", arguments ? urbit.meta.arguments { arvo ? null, pill, ship ? "bus", doCheck ? true }:
, doCheck ? true }:
stdenvNoCC.mkDerivation { stdenvNoCC.mkDerivation {
name = "test-${ship}"; name = "test-${ship}";
src = bootFakeShip { inherit urbit arvo pill ship; }; src = bootFakeShip { inherit arvo pill ship; };
phases = [ "unpackPhase" "buildPhase" "checkPhase" ]; phases = [ "unpackPhase" "buildPhase" "checkPhase" ];
buildInputs = [ curl python3 urbit ]; buildInputs = [ curl python3 ];
unpackPhase = '' unpackPhase = ''
cp -R $src ./pier cp -R $src ./pier
@ -20,7 +19,7 @@ stdenvNoCC.mkDerivation {
buildPhase = '' buildPhase = ''
set -x set -x
urbit ${lib.concatStringsSep " " arguments} -d ./pier 2> urbit-output ${arvo}/vere.jam -d ./pier 2> urbit-output
# Sledge Hammer! # Sledge Hammer!
# See: https://github.com/travis-ci/travis-ci/issues/4704#issuecomment-348435959 # See: https://github.com/travis-ci/travis-ci/issues/4704#issuecomment-348435959

View File

@ -3,10 +3,11 @@ final: prev:
let let
isAarch64 = prev.stdenv.hostPlatform.isAarch64; isAarch64 = prev.stdenv.hostPlatform.isAarch64;
isDarwin = prev.stdenv.isDarwin;
in prev.lib.optionalAttrs isAarch64 { in prev.lib.optionalAttrs (isAarch64 && !isDarwin) {
libsigsegv = prev.libsigsegv.overrideAttrs (attrs: { libsigsegv = prev.libsigsegv.overrideAttrs (attrs: {
preConfigure = (old.preConfigure or "") + '' preConfigure = (prev.preConfigure or "") + ''
sed -i 's/^CFG_FAULT=$/CFG_FAULT=fault-linux-arm.h/' configure sed -i 's/^CFG_FAULT=$/CFG_FAULT=fault-linux-arm.h/' configure
''; '';
}); });

View File

@ -27,6 +27,10 @@ in {
brotliSupport = false; brotliSupport = false;
}; };
# lies, all lies
openssl-static-osx = prev.openssl;
zlib-static-osx = prev.zlib;
lmdb = prev.lmdb.overrideAttrs (attrs: { lmdb = prev.lmdb.overrideAttrs (attrs: {
patches = patches =
optionalList attrs.patches ++ prev.lib.optional prev.stdenv.isDarwin [ optionalList attrs.patches ++ prev.lib.optional prev.stdenv.isDarwin [

View File

@ -16,12 +16,19 @@ let
in { in {
gmp = enableStatic prev.gmp; gmp = enableStatic prev.gmp;
curlUrbit = enableStatic prev.curlUrbit; curlUrbit = enableStatic (prev.curlUrbit.override { openssl = final.openssl-static-osx; zlib = final.zlib-static-osx; });
libuv = enableStatic prev.libuv; libuv = enableStatic prev.libuv;
libffi = enableStatic prev.libffi; libffi = enableStatic prev.libffi;
openssl-static-osx = prev.openssl.override {
static = true;
withPerl = false;
};
zlib-static-osx = if final.stdenv.isDarwin then prev.zlib.static else prev.zlib;
secp256k1 = enableStatic prev.secp256k1; secp256k1 = enableStatic prev.secp256k1;
lmdb = prev.lmdb.overrideAttrs (old: lmdb = prev.lmdb.overrideAttrs (old:

View File

@ -5,16 +5,16 @@ stdenvNoCC.mkDerivation {
src = marsSources; src = marsSources;
outputs = [ "out" "ropsten" ]; outputs = [ "out" "goerli" ];
phases = [ "mainnetPhase" "ropstenPhase" ]; phases = [ "mainnetPhase" "goerliPhase" ];
mainnetPhase = '' mainnetPhase = ''
ln -s ${marsSources.out}/arvo $out ln -s ${marsSources.out}/arvo $out
''; '';
ropstenPhase = '' goerliPhase = ''
ln -s ${marsSources.ropsten}/arvo $ropsten ln -s ${marsSources.goerli}/arvo $goerli
''; '';
preferLocalBuild = true; preferLocalBuild = true;

View File

@ -6,16 +6,16 @@ stdenvNoCC.mkDerivation {
buildInputs = [ bc ]; buildInputs = [ bc ];
outputs = [ "out" "ropsten" ]; outputs = [ "out" "goerli" ];
phases = [ "mainnetPhase" "ropstenPhase" ]; phases = [ "mainnetPhase" "goerliPhase" ];
mainnetPhase = '' mainnetPhase = ''
cp -r $src $out cp -r $src $out
chmod -R u+w $out chmod -R u+w $out
''; '';
ropstenPhase = '' goerliPhase = ''
cp -r $src tmp cp -r $src tmp
chmod -R u+w tmp chmod -R u+w tmp
@ -23,9 +23,9 @@ stdenvNoCC.mkDerivation {
AMES=tmp/arvo/sys/vane/ames.hoon AMES=tmp/arvo/sys/vane/ames.hoon
ACME=tmp/arvo/app/acme.hoon ACME=tmp/arvo/app/acme.hoon
# Replace the mainnet azimuth contract with the ropsten contract # Replace the mainnet azimuth contract with the goerli contract
sed --in-place \ sed --in-place \
's/\(\+\+ contracts \)mainnet\-contracts/\1ropsten-contracts/' \ 's/\(\+\+ contracts \)mainnet\-contracts/\1goerli-contracts/' \
$ZUSE $ZUSE
# Increment the %ames protocol version # Increment the %ames protocol version
@ -38,8 +38,8 @@ stdenvNoCC.mkDerivation {
's_https://acme-v02.api.letsencrypt.org/directory_https://acme-staging-v02.api.letsencrypt.org/directory_' \ 's_https://acme-v02.api.letsencrypt.org/directory_https://acme-staging-v02.api.letsencrypt.org/directory_' \
$ACME $ACME
cp -r tmp $ropsten cp -r tmp $goerli
chmod -R u+w $ropsten chmod -R u+w $goerli
''; '';
preferLocalBuild = true; preferLocalBuild = true;

View File

@ -1,5 +1,5 @@
{ lib, stdenvNoCC, fetchGitHubLFS, bootFakeShip, solid, urbit, arvo, curl { lib, stdenvNoCC, fetchGitHubLFS, bootFakeShip, solid, urbit, arvo, curl
, withRopsten ? false }: , withGoerli ? false }:
let let
@ -9,9 +9,9 @@ in {
build = import ./builder.nix { build = import ./builder.nix {
inherit stdenvNoCC urbit curl; inherit stdenvNoCC urbit curl;
name = "brass" + lib.optionalString withRopsten "-ropsten"; name = "brass" + lib.optionalString withGoerli "-goerli";
builder = ./brass.sh; builder = ./brass.sh;
arvo = if withRopsten then arvo.ropsten else arvo; arvo = if withGoerli then arvo.goerli else arvo;
pier = bootFakeShip { pier = bootFakeShip {
inherit urbit; inherit urbit;
@ -19,4 +19,4 @@ in {
ship = "zod"; ship = "zod";
}; };
}; };
} // lib.optionalAttrs (!withRopsten) { inherit lfs; } } // lib.optionalAttrs (!withGoerli) { inherit lfs; }

View File

@ -1,5 +1,5 @@
{ lib, stdenvNoCC, fetchGitHubLFS, bootFakeShip, solid, urbit, arvo, curl, xxd { lib, stdenvNoCC, fetchGitHubLFS, bootFakeShip, solid, urbit, arvo, curl, xxd
, withRopsten ? false }: , withGoerli ? false }:
let let
@ -9,9 +9,9 @@ in {
build = import ./builder.nix { build = import ./builder.nix {
inherit stdenvNoCC urbit curl; inherit stdenvNoCC urbit curl;
name = "ivory" + lib.optionalString withRopsten "-ropsten"; name = "ivory" + lib.optionalString withGoerli "-goerli";
builder = ./ivory.sh; builder = ./ivory.sh;
arvo = if withRopsten then arvo.ropsten else arvo; arvo = if withGoerli then arvo.goerli else arvo;
pier = bootFakeShip { pier = bootFakeShip {
inherit urbit; inherit urbit;
@ -39,4 +39,4 @@ in {
preferLocalBuild = true; preferLocalBuild = true;
}; };
} // lib.optionalAttrs (!withRopsten) { inherit lfs; } } // lib.optionalAttrs (!withGoerli) { inherit lfs; }

View File

@ -1,9 +1,11 @@
{ lib, stdenv, coreutils, pkgconfig # build/env { lib, stdenv, coreutils, pkgconfig # build/env
, cacert, ca-bundle, ivory # codegen , cacert, ca-bundle, ivory # codegen
, curlUrbit, ent, gmp, h2o, libsigsegv, libuv, lmdb # libs , curlUrbit, ent, gmp, h2o, libsigsegv, libuv, lmdb # libs
, murmur3, openssl, softfloat3, urcrypt, zlib # , murmur3, openssl, openssl-static-osx, softfloat3 #
, urcrypt, zlib, zlib-static-osx #
, enableStatic ? stdenv.hostPlatform.isStatic # opts , enableStatic ? stdenv.hostPlatform.isStatic # opts
, enableDebug ? false , enableDebug ? false
, verePace ? ""
, doCheck ? true , doCheck ? true
, enableParallelBuilding ? true , enableParallelBuilding ? true
, dontStrip ? true }: , dontStrip ? true }:
@ -40,10 +42,10 @@ in stdenv.mkDerivation {
libuv libuv
lmdb lmdb
murmur3 murmur3
openssl (if stdenv.isDarwin && enableStatic then openssl-static-osx else openssl)
softfloat3 softfloat3
urcrypt urcrypt
zlib (if stdenv.isDarwin && enableStatic then zlib-static-osx else zlib)
]; ];
# Ensure any `/usr/bin/env bash` shebang is patched. # Ensure any `/usr/bin/env bash` shebang is patched.
@ -69,6 +71,7 @@ in stdenv.mkDerivation {
MEMORY_DEBUG = enableDebug; MEMORY_DEBUG = enableDebug;
CPU_DEBUG = enableDebug; CPU_DEBUG = enableDebug;
EVENT_TIME_DEBUG = false; EVENT_TIME_DEBUG = false;
VERE_PACE = if enableStatic then verePace else "";
# See https://github.com/NixOS/nixpkgs/issues/18995 # See https://github.com/NixOS/nixpkgs/issues/18995
hardeningDisable = lib.optionals enableDebug [ "all" ]; hardeningDisable = lib.optionals enableDebug [ "all" ];

View File

@ -1,5 +1,5 @@
{ stdenv, autoreconfHook, pkgconfig { stdenv, autoreconfHook, pkgconfig
, libaes_siv, openssl, secp256k1 , libaes_siv, openssl, openssl-static-osx, secp256k1
, enableStatic ? stdenv.hostPlatform.isStatic }: , enableStatic ? stdenv.hostPlatform.isStatic }:
stdenv.mkDerivation rec { stdenv.mkDerivation rec {

18
nix/sources-openssl.json Normal file
View File

@ -0,0 +1,18 @@
{
"openssl": {
"branch": "1_1_1n",
"homepage": "https://www.openssl.org/",
"pmnsh": {
"include": "build/include",
"lib": "build",
"prepare": "./config --prefix=`mkdir -p build && readlink -f ./build` --libdir=. no-shared no-tests",
"make": "install_dev"
},
"owner": "openssl",
"repo": "openssl",
"rev": "OpenSSL_1_1_1n",
"type": "tarball",
"url": "https://github.com/openssl/openssl/archive/refs/tags/OpenSSL_1_1_1n.tar.gz",
"url_template": "https://github.com/<owner>/<repo>/archive/refs/tags/<rev>.tar.gz"
}
}

View File

@ -4,6 +4,11 @@
"description": "A command line tool and library for transferring data with URL syntax", "description": "A command line tool and library for transferring data with URL syntax",
"homepage": "http://curl.se/", "homepage": "http://curl.se/",
"pmnsh": { "pmnsh": {
"compat": {
"openbsd": {
"prepare": "autoreconf -vfi && ./configure --disable-shared --disable-ldap --disable-rtsp --without-brotli --without-libidn2 --without-libpsl --without-nghttp2 --with-openssl=`readlink -f ../openssl/build`"
}
},
"include": "include", "include": "include",
"lib": "lib/.libs", "lib": "lib/.libs",
"prepare": "autoreconf -vfi && ./configure --disable-shared --disable-ldap --disable-rtsp --without-brotli --without-libidn2 --without-libpsl --without-nghttp2 --with-openssl", "prepare": "autoreconf -vfi && ./configure --disable-shared --disable-ldap --disable-rtsp --without-brotli --without-libidn2 --without-libpsl --without-nghttp2 --with-openssl",
@ -76,6 +81,13 @@
}, },
"urcrypt": { "urcrypt": {
"pmnsh": { "pmnsh": {
"compat": {
"openbsd": {
"make": "install prefix=`readlink -f .` exec_prefix=`readlink -f .`",
"include": "include",
"lib": "lib"
}
},
"prepare": "./autogen.sh && ./configure --disable-shared PKG_CONFIG_PATH=../secp256k1 CFLAGS=\"-I../secp256k1/include -I../libaes_siv\" LDFLAGS=-L../libaes_siv", "prepare": "./autogen.sh && ./configure --disable-shared PKG_CONFIG_PATH=../secp256k1 CFLAGS=\"-I../secp256k1/include -I../libaes_siv\" LDFLAGS=-L../libaes_siv",
"make": "install" "make": "install"
} }

View File

@ -8,6 +8,9 @@
"compat": { "compat": {
"mingw": { "mingw": {
"prepare": "cmake -G\"MSYS Makefiles\" -DCMAKE_INSTALL_PREFIX=. ." "prepare": "cmake -G\"MSYS Makefiles\" -DCMAKE_INSTALL_PREFIX=. ."
},
"openbsd": {
"prepare": "cmake -DOPENSSL_ROOT_DIR=`readlink -f ../openssl/build` ."
} }
}, },
"include": "include", "include": "include",
@ -35,6 +38,10 @@
"mingw": { "mingw": {
"make": "aes_siv_static", "make": "aes_siv_static",
"prepare": "cmake -G\"MSYS Makefiles\" -DDISABLE_DOCS:BOOL=ON ." "prepare": "cmake -G\"MSYS Makefiles\" -DDISABLE_DOCS:BOOL=ON ."
},
"openbsd": {
"make": "aes_siv_static",
"prepare": "cmake -DDISABLE_DOCS:BOOL=ON -DOPENSSL_ROOT_DIR=`readlink -f ../openssl/build` ."
} }
} }
}, },
@ -98,6 +105,10 @@
"mingw": { "mingw": {
"lib": "build/Win64-MinGW-w64", "lib": "build/Win64-MinGW-w64",
"make": "-C build/Win64-MinGW-w64 libsoftfloat3.a" "make": "-C build/Win64-MinGW-w64 libsoftfloat3.a"
},
"openbsd": {
"lib": "build/template-FAST_INT64",
"make": "-C build/template-FAST_INT64 libsoftfloat3.a"
} }
}, },
"include": "source/include" "include": "source/include"

15716
package-lock.json generated

File diff suppressed because it is too large Load Diff

View File

@ -1,21 +0,0 @@
{
"name": "root",
"private": true,
"engines": {
"node": "16.14.0"
},
"devDependencies": {
"eslint": "^7.29.0",
"husky": "^6.0.0",
"lerna": "^4.0.0",
"lint-staged": "^11.1.2",
"prettier": "^2.3.2"
},
"scripts": {
"watch-libs": "lerna run watch --no-private --parallel",
"build-libs": "lerna run build --no-private",
"test": "lerna run test",
"bootstrap": "lerna bootstrap",
"build:prod": "lerna run build:prod"
}
}

View File

@ -584,10 +584,11 @@
~| [%no-next-domain idx=idx] ~| [%no-next-domain idx=idx]
(head (skim pending |=([turf idx=@ud ?] =(idx ^idx)))) (head (skim pending |=([turf idx=@ud ?] =(idx ^idx))))
:: XX should confirm that :turf points to us :: XX should confirm that :turf points to us
:: confirms that domain exists (and an urbit is on :80) :: confirms that domain exists (and an urbit is on the standard port)
:: ::
=/ sec=? p:.^(hart:eyre %e /(scot %p our.bow)/host/(scot %da now.bow))
=/ =purl =/ =purl
:- [sec=| por=~ host=[%& turf.next]] :- [sec=sec por=~ host=[%& turf.next]]
[[ext=~ path=/'~debug'] query=~] [[ext=~ path=/'~debug'] query=~]
=/ =wire =/ =wire
(acme-wire try %validate-domain /idx/(scot %ud idx.next)) (acme-wire try %validate-domain /idx/(scot %ud idx.next))
@ -754,9 +755,8 @@
?> ?=(%wake sas.u.rod) ?> ?=(%wake sas.u.rod)
=* aut u.active.aut.u.rod =* aut u.active.aut.u.rod
=/ pat=path /'.well-known'/acme-challenge/[tok.cal.aut] =/ pat=path /'.well-known'/acme-challenge/[tok.cal.aut]
:: note: requires port 80, just as the ACME service will =/ sec=? p:.^(hart:eyre %e /(scot %p our.bow)/host/(scot %da now.bow))
:: =/ url=purl [[sec=sec por=~ hos=[%& dom.aut]] [ext=~ pat] hed=~]
=/ url=purl [[sec=| por=~ hos=[%& dom.aut]] [ext=~ pat] hed=~]
:: =/ url=purl [[sec=| por=`8.081 hos=[%& /localhost]] [ext=~ pat] hed=~] :: =/ url=purl [[sec=| por=`8.081 hos=[%& /localhost]] [ext=~ pat] hed=~]
:: XX idx in wire? :: XX idx in wire?
:: ::

View File

@ -7,6 +7,7 @@
:: OR :: OR
:: :aqua &pill +solid :: :aqua &pill +solid
:: ::
:: XX: update these examples
:: Then try stuff: :: Then try stuff:
:: :aqua [%init ~[~bud ~dev]] :: :aqua [%init ~[~bud ~dev]]
:: :aqua [%dojo ~[~bud ~dev] "[our eny (add 3 5)]"] :: :aqua [%dojo ~[~bud ~dev] "[our eny (add 3 5)]"]
@ -480,14 +481,15 @@
=^ ms state (poke-pill pil) =^ ms state (poke-pill pil)
(emit-cards ms) (emit-cards ms)
:: ::
[%swap-files ~] [%swap-files @tas]
=/ =desk +.val
=. userspace-ova.pil =. userspace-ova.pil
=/ slim-dirs=(list path) :: take all files from a userspace desk
~[/app /ted /gen /lib /mar /sur /hoon/sys /arvo/sys /zuse/sys] =/ all-dirs=(list path) ~[/]
:_ ~ :_ ~
%- unix-event:pill-lib %- unix-event:pill-lib
%- %*(. file-ovum:pill-lib directories slim-dirs) %+ %*(. file-ovum:pill-lib directories all-dirs)
/(scot %p our.hid)/work/(scot %da now.hid) desk /(scot %p our.hid)/[desk]/(scot %da now.hid)
=^ ms state (poke-pill pil) =^ ms state (poke-pill pil)
(emit-cards ms) (emit-cards ms)
:: ::

View File

@ -6,22 +6,16 @@
default-agent, default-agent,
verb, verb,
dbug dbug
:: Generally don't update the snapshot until we have clay tombstoning.
::
/* snap %azimuth-snapshot /app/azimuth/version-0/azimuth-snapshot
:: To update, run from dojo: :: To update, run from dojo:
:: -azimuth-snap-state %default 'version-0' :: -azimuth-snap-state %default 'version-0'
:: ::
:: To recreate from a full list of logs (at /app/azimuth/logs/eth-logs): :: To recreate from a full list of logs (at /app/azimuth/logs/eth-logs):
:: -azimuth-snap-logs %default 'version-0' :: -azimuth-snap-logs %default 'version-0'
:: ::
=/ snap=snap-state snap
=/ last-snap=@ number.id.snap
::
=, jael =, jael
|% |%
+$ app-state +$ app-state
$: %6 $: %7
url=@ta url=@ta
=net =net
refresh=_~m5 refresh=_~m5
@ -30,10 +24,14 @@
own=owners own=owners
spo=sponsors spo=sponsors
logs=(list =event-log:rpc:ethereum) logs=(list =event-log:rpc:ethereum)
sap=snap-state
== ==
:: ::
+$ poke-data +$ poke-data
$% :: %listen $% :: %load: load snapshot
::
[%load snap=snap-state]
:: %listen
:: ::
[%listen whos=(list ship) =source:jael] [%listen whos=(list ship) =source:jael]
:: %watch: configure node url and network :: %watch: configure node url and network
@ -75,14 +73,18 @@
== ==
:: ::
++ init-timer ++ init-timer
|= =bowl:gall |= at=@da
^- card ^- card
[%pass /init %arvo %b %wait now.bowl] [%pass /init %arvo %b %wait at]
:: ::
++ start-log-retrieval ++ start-log-retrieval
|= [=ship args=vase] |= [=ship args=vase]
^- card ^- card
[%pass /wa %agent [ship %eth-watcher] %poke %eth-watcher-poke args] [%pass /wa %agent [ship %eth-watcher] %poke %eth-watcher-poke args]
::
++ start-azimuth-load
^- card
[%pass /al %arvo %k %fard %base %azimuth-load %noun !>(~)]
-- --
:: ::
=< =<
@ -92,22 +94,10 @@
def ~(. (default-agent this %|) bowl) def ~(. (default-agent this %|) bowl)
:: ::
++ on-init ++ on-init
^- (quip card _this)
=/ points=@ud ~(wyt by points.nas.snap)
%- %- slog
[leaf+"ship: loading azimuth snapshot ({<points>} points)"]~
::
=: net.state %default
nas.state nas.snap
own.state owners.snap
spo.state sponsors.snap
url.state 'http://eth-mainnet.urbit.org:8545'
==
:_ this :_ this
?: .^(? %j /(scot %p our.bowl)/fake/(scot %da now.bowl)) ?: .^(? %j /(scot %p our.bowl)/fake/(scot %da now.bowl))
~ ~
~[(nuke-azimuth-tracker bowl) (init-timer bowl)] ~[(init-timer now.bowl)]
::
++ on-save !>(state) ++ on-save !>(state)
++ on-load ++ on-load
|= old=vase |= old=vase
@ -122,7 +112,7 @@
`old-state `old-state
%- %- slog :_ ~ %- %- slog :_ ~
leaf+"ship: loading snapshot with {<(lent logs.old-state)>} events" leaf+"ship: loading snapshot with {<(lent logs.old-state)>} events"
=. +.state +:(state-5-to-6 old-state) =. +.state +:(state-6-to-7 (state-5-to-6 old-state))
=^ cards state =^ cards state
(%*(run-logs do nas.state *^state:naive) logs.state) (%*(run-logs do nas.state *^state:naive) logs.state)
[(jael-update:do (to-udiffs:do cards)) state] [(jael-update:do (to-udiffs:do cards)) state]
@ -130,12 +120,12 @@
?. ?=(%2 -.old-state) ?. ?=(%2 -.old-state)
`old-state `old-state
~& > '%azimuth: updating to state 3' ~& > '%azimuth: updating to state 3'
=. +.state +:(state-5-to-6 old-state) =. +.state +:(state-6-to-7 (state-5-to-6 old-state))
:: replace naive state and indices with snapshot :: replace naive state and indices with snapshot
:: ::
=: nas.state nas.snap =: nas.state nas.sap.state
own.state owners.snap own.state owners.sap.state
spo.state sponsors.snap spo.state sponsors.sap.state
logs.state ~ logs.state ~
:: TODO: shouldn't be needed but have seen eth-watcher :: TODO: shouldn't be needed but have seen eth-watcher
:: threads use a url='' if this is not used :: threads use a url='' if this is not used
@ -144,8 +134,8 @@
== ==
=/ points=@ud ~(wyt by points.nas.state) =/ points=@ud ~(wyt by points.nas.state)
%- %- slog :_ ~ %- %- slog :_ ~
leaf+"ship: processing azimuth snapshot ({<points>} points)" leaf+"ship: processing azimuth snapshot (~{<points>} points)"
=/ snap-cards=udiffs:point (run-state:do id.snap points.nas.state) =/ snap-cards=udiffs:point (run-state:do id.sap.state points.nas.state)
:_ [%3 url net whos nas own spo logs]:state :_ [%3 url net whos nas own spo logs]:state
%+ weld %+ weld
(jael-update:do snap-cards) (jael-update:do snap-cards)
@ -160,22 +150,30 @@
=^ cards-4 old-state =^ cards-4 old-state
?. ?=(%4 -.old-state) [cards-3 old-state] ?. ?=(%4 -.old-state) [cards-3 old-state]
=^ cards this =^ cards this
%- %*(. on-poke +.state.this +:(state-5-to-6 old-state)) %- %*(. on-poke +.state.this +:(state-6-to-7 (state-5-to-6 old-state)))
[%azimuth-poke !>([%watch [url net]:old-state])] [%azimuth-poke !>([%watch [url net]:old-state])]
~& > '%azimuth: updating to state 5' ~& > '%azimuth: updating to state 5'
[cards [%5 url net whos nas own spo logs]:state.this] [cards [%5 url net whos nas own spo logs]:state.this]
=? old-state ?=(%5 -.old-state) =? old-state ?=(%5 -.old-state)
(state-5-to-6 old-state) (state-5-to-6 old-state)
?> ?=(%6 -.old-state) =? old-state ?=(%6 -.old-state)
(state-6-to-7 old-state)
?> ?=(%7 -.old-state)
[cards-4 this(state old-state)] [cards-4 this(state old-state)]
:: ::
++ app-states $%(state-0 state-1-2-3-4-5 app-state) ++ app-states $%(state-0 state-1-2-3-4-5 state-6 app-state)
::
++ state-5-to-6
|= state-1-2-3-4-5
^- app-state
[%6 url net ~m5 whos nas own spo logs]
:: ::
+$ state-6
$: %6
url=@ta
=net
refresh=_~m5
whos=(set ship)
nas=^state:naive
own=owners
spo=sponsors
logs=(list =event-log:rpc:ethereum)
==
+$ state-1-2-3-4-5 +$ state-1-2-3-4-5
$: ?(%1 %2 %3 %4 %5) $: ?(%1 %2 %3 %4 %5)
url=@ta url=@ta
@ -196,6 +194,15 @@
own=owners own=owners
logs=(list =event-log:rpc:ethereum) logs=(list =event-log:rpc:ethereum)
== ==
++ state-5-to-6
|= state-1-2-3-4-5
^- state-6
[%6 url net ~m5 whos nas own spo logs]
::
++ state-6-to-7
|= state-6
^- app-state
[%7 url net refresh whos nas own spo logs *snap-state]
-- --
:: ::
++ on-poke ++ on-poke
@ -219,9 +226,9 @@
[(subscribe-to-eth-watcher bowl)]~ [(subscribe-to-eth-watcher bowl)]~
:: ::
%resnap %resnap
=: nas.state nas.snap =: nas.state nas.sap.state
own.state owners.snap own.state owners.sap.state
spo.state sponsors.snap spo.state sponsors.sap.state
== ==
`this `this
== ==
@ -229,7 +236,23 @@
?. ?=(%azimuth-poke mark) ?. ?=(%azimuth-poke mark)
(on-poke:def mark vase) (on-poke:def mark vase)
=+ !<(poke=poke-data vase) =+ !<(poke=poke-data vase)
|-
?- -.poke ?- -.poke
%load
=/ points=@ud ~(wyt by points.nas.snap.poke)
%- %- slog
[leaf+"ship: loading azimuth snapshot ({<points>} points)"]~
::
=: net.state %default
nas.state nas.snap.poke
own.state owners.snap.poke
spo.state sponsors.snap.poke
url.state 'http://eth-mainnet.urbit.org:8545'
sap.state snap.poke
logs.state ~
==
$(poke [%kick ~])
::
%listen %listen
[[(listen-to-azimuth (silt whos.poke) source.poke)]~ this] [[(listen-to-azimuth (silt whos.poke) source.poke)]~ this]
:: ::
@ -237,8 +260,8 @@
=/ last-block=@ =/ last-block=@
?^ logs.state ?^ logs.state
number:(last-block-id:dice logs.state) number:(last-block-id:dice logs.state)
~& >> %no-logs-in-azimuth-state :: ~& >> %no-logs-in-azimuth-state
last-snap number.id.sap.state
=+ [our=(scot %p our.bowl) now=(scot %da now.bowl)] =+ [our=(scot %p our.bowl) now=(scot %da now.bowl)]
=+ .^(dudes=(set [dude:gall ?]) %ge our %base now /) =+ .^(dudes=(set [dude:gall ?]) %ge our %base now /)
=/ running=? (~(has in dudes) [%eth-watcher &]) =/ running=? (~(has in dudes) [%eth-watcher &])
@ -246,43 +269,44 @@
|((~(has in dudes) [%eth-watcher &]) (~(has in dudes) [%eth-watcher |])) |((~(has in dudes) [%eth-watcher &]) (~(has in dudes) [%eth-watcher |]))
:_ this :_ this
=/ cards=(list card) =/ cards=(list card)
:- :: %jael will re-subscribe to get all azimuth diffs ?: installed
:: ~
(listen-to-azimuth ~ [%| dap.bowl])
:: we poke eth-watcher to retrieve logs from the latest we have
::
%*(start do last-snap last-block)
=? cards !running
:: restart %eth-watcher
::
~& >> %starting-eth-watcher
=/ rein=[desk rein] [%base %.y [%eth-watcher ~ ~] ~]
:_ cards
[%pass /rein %agent [our.bowl %hood] %poke kiln-rein+!>(rein)]
=? cards !installed
:: reinstall %base desk :: reinstall %base desk
:: ::
=+ spo=(sein:title [our now our]:bowl) =+ spo=(sein:title [our now our]:bowl)
~& >> re-installing-base-from+spo ~& >> re-installing-base-from+spo
=/ fresh=[desk ship desk] [%base spo %kids] =/ fresh=[desk ship desk] [%base spo %kids]
[%pass /fresh %agent [our.bowl %hood] %poke kiln-install+!>(fresh)]~
=? cards !running
:: restart %eth-watcher
::
~& >> %starting-eth-watcher
=/ rein=[desk rein] [%base [%eth-watcher %&] ~ ~]
:_ cards :_ cards
[%pass /fresh %agent [our.bowl %hood] %poke kiln-install+!>(fresh)] [%pass /rein %agent [our.bowl %hood] %poke kiln-rein+!>(rein)]
:: resubscribe if we somehow get unsubscribed from eth-watcher =. cards
:: :: we poke eth-watcher to retrieve logs from the latest we have
?: (~(has by wex.bowl) [/eth-watcher our.bowl %eth-watcher]) ::
cards (weld %*(start do number.id.sap.state last-block) cards)
~& >> %resubscribing-to-eth-watcher =? cards !(~(has by wex.bowl) [/eth-watcher our.bowl %eth-watcher])
[(subscribe-to-eth-watcher bowl) cards] :: resubscribe if we somehow get unsubscribed from eth-watcher
::
[(subscribe-to-eth-watcher bowl) cards]
=. cards
:: %jael will re-subscribe to get all azimuth diffs
::
[(listen-to-azimuth ~ [%| dap.bowl]) cards]
(flop cards)
:: ::
%watch %watch
=: nas.state ?:(?=(%default net.poke) nas.snap *^state:naive) =: nas.state ?:(?=(%default net.poke) nas.sap.state *^state:naive)
own.state ?:(?=(%default net.poke) owners.snap ~) own.state ?:(?=(%default net.poke) owners.sap.state ~)
spo.state ?:(?=(%default net.poke) sponsors.snap ~) spo.state ?:(?=(%default net.poke) sponsors.sap.state ~)
net.state net.poke net.state net.poke
url.state url.poke url.state url.poke
logs.state ~ logs.state ~
== ==
[start:do this] `this
== ==
:: ::
++ on-watch ++ on-watch
@ -309,7 +333,7 @@
:- %leaf :- %leaf
"ship: processing azimuth snapshot ({<points>} points)" "ship: processing azimuth snapshot ({<points>} points)"
=/ snap-cards=udiffs:point =/ snap-cards=udiffs:point
(%*(run-state do logs.state ~) id.snap points.nas.state) (%*(run-state do logs.state ~) id.sap.state points.nas.state)
[(weld (jael-update:do snap-cards) start:do) this] [(weld (jael-update:do snap-cards) start:do) this]
:: ::
++ on-leave on-leave:def ++ on-leave on-leave:def
@ -318,13 +342,14 @@
^- (unit (unit cage)) ^- (unit (unit cage))
|^ |^
?+ path (on-peek:def path) ?+ path (on-peek:def path)
[%x %logs ~] ``noun+!>(logs.state) [%x %logs ~] ``noun+!>(logs.state)
[%x %nas ~] ``noun+!>(nas.state) [%x %nas ~] ``noun+!>(nas.state)
[%x %dns ~] ``noun+!>(dns.nas.state) [%x %dns ~] ``noun+!>(dns.nas.state)
[%x %own ~] ``noun+!>(own.state) [%x %own ~] ``noun+!>(own.state)
[%x %spo ~] ``noun+!>(spo.state) [%x %spo ~] ``noun+!>(spo.state)
[%x %refresh ~] ``atom+!>(refresh.state) [%x %refresh ~] ``atom+!>(refresh.state)
[%x %point @ ~] ``noun+(point i.t.t.path) [%x %point @ ~] ``noun+(point i.t.t.path)
[%x %last-snap ~] ``noun+!>(sap.state)
== ==
:: ::
++ point ++ point
@ -369,19 +394,24 @@
:: ::
++ on-arvo ++ on-arvo
|= [=wire =sign-arvo] |= [=wire =sign-arvo]
?: &(=(/al wire) ?=(%arow +<.sign-arvo))
?- -.p.sign-arvo
%& `this
%|
%- (slog 'loading azimuth snapshot failed! still trying' p.p.sign-arvo)
[~[(init-timer (add ~s10 now.bowl))] this]
==
?. &(=(/init wire) ?=(%wake +<.sign-arvo)) ?. &(=(/init wire) ?=(%wake +<.sign-arvo))
(on-arvo:def wire sign-arvo) (on-arvo:def wire sign-arvo)
?^ error.sign-arvo ?^ error.sign-arvo
%- (slog 'azimuth: failed to initialize!' ~) %- (slog 'azimuth: failed to initialize!' ~)
`this `this
:_ this :_ this
~[(subscribe-to-eth-watcher bowl) (listen-to-azimuth ~ [%| dap.bowl])] ~[start-azimuth-load]
:: ::
++ on-fail on-fail:def ++ on-fail on-fail:def
-- --
|_ =bowl:gall |_ =bowl:gall
:: TODO: maybe flop the endianness here so metamask signs it in normal
:: order?
:: ::
++ verifier ++ verifier
^- ^verifier:naive ^- ^verifier:naive
@ -514,7 +544,7 @@
:+ %watch /[dap.bowl] :+ %watch /[dap.bowl]
^- config:eth-watcher ^- config:eth-watcher
:* url.state =(%czar (clan:title our.bowl)) refresh.state ~h30 :* url.state =(%czar (clan:title our.bowl)) refresh.state ~h30
(max launch.net ?:(=(net.state %default) +(last-snap) 0)) (max launch.net ?:(=(net.state %default) +(number.id.sap.state) 0))
~ ~
~[azimuth.net] ~[azimuth.net]
~[naive.net] ~[naive.net]

View File

@ -185,7 +185,7 @@
:: ::
?- network ?- network
%mainnet 0x1 %mainnet 0x1
%ropsten 0x3 %goerli 0x5
%fakenet `@ux``@`1.337 %fakenet `@ux``@`1.337
[%other *] id.network [%other *] id.network
== ==

View File

@ -716,7 +716,7 @@
'rtt'^(numb (div rtt ~s1)) 'rtt'^(numb (div rtt ~s1))
'rttvar'^(numb (div rttvar ~s1)) 'rttvar'^(numb (div rttvar ~s1))
'ssthresh'^(numb ssthresh) 'ssthresh'^(numb ssthresh)
'num-live'^(numb num-live) 'num-live'^(numb ~(wyt by live))
'cwnd'^(numb cwnd) 'cwnd'^(numb cwnd)
'counter'^(numb counter) 'counter'^(numb counter)
== ==
@ -801,8 +801,8 @@
=/ heads=(list [tako desk]) =/ heads=(list [tako desk])
%+ turn ~(tap in desks) %+ turn ~(tap in desks)
|= =desk |= =desk
=+ .^(=dome %cv /(scot %p our.bowl)/[desk]/(scot %da now.bowl)) =+ .^(=domo %cv /(scot %p our.bowl)/[desk]/(scot %da now.bowl))
=/ =tako (~(got by hit.dome) let.dome) =/ =tako (~(got by hit.domo) let.domo)
[tako desk] [tako desk]
=/ yakis=(set yaki) =/ yakis=(set yaki)
%- silt %- silt

File diff suppressed because one or more lines are too long

View File

@ -3,16 +3,16 @@
:: :: :: :: :: ::
/? 309 :: arvo kelvin /? 309 :: arvo kelvin
/- *sole, lens :: console structures /- *sole, lens :: console structures
/+ sole, pprint, :: /+ sole, pprint, dprint, ::
auto=language-server-complete, :: auto=language-server-complete, ::
easy-print=language-server-easy-print :: easy-print=language-server-easy-print ::
:: :: :: :: :: ::
:::: :: :::: :::: :: ::::
:: :: :: :: :: ::
=> |% :: external structures => |% :: external structures
+$ id @tasession :: session id +$ id sole-id :: session id
+$ house :: all state +$ house :: all state
$: %8 $: %9
egg=@u :: command count egg=@u :: command count
hoc=(map id session) :: conversations hoc=(map id session) :: conversations
acl=(set ship) :: remote access whitelist acl=(set ship) :: remote access whitelist
@ -54,6 +54,7 @@
r=@t r=@t
== ==
[%poke p=goal] :: poke app [%poke p=goal] :: poke app
[%help p=(list term)] :: doccords
[%show p=?(%0 %1 %2 %3 %4 %5)] :: val/type/hoon/xray [%show p=?(%0 %1 %2 %3 %4 %5)] :: val/type/hoon/xray
[%verb p=term] :: store variable [%verb p=term] :: store variable
== :: == ::
@ -174,8 +175,22 @@
:: ::
;~ pfix fas ;~ pfix fas
;~ pose ;~ pose
(parse-variable (cold %sur hep) ;~(pfix gap parse-cables)) (parse-variable (cold %sur hep) ;~(pfix gap (parse-cables %sur)))
(parse-variable (cold %lib lus) ;~(pfix gap parse-cables)) (parse-variable (cold %lib lus) ;~(pfix gap (parse-cables %lib)))
;~(pfix tis gap (parse-variable sym ;~(pfix gap parse-path)))
;~(pfix cen gap (parse-variable sym ;~(pfix gap parse-mark)))
==
==
::
;~ pfix hax
;~ pose
;~ pfix ace
%+ cook
|= a=(list term)
[[%help (flop a)] 0 %ex [%cnts p=~[[%.y p=1]] q=~]]
(most fas sym)
==
(easy [[%help ~[%$]] 0 %ex [%cnts p=~[[%.y p=1]] q=~]])
== ==
== ==
:: ::
@ -194,22 +209,17 @@
== ==
:: ::
++ parse-cables ++ parse-cables
%+ cook |= base-path=@ta
|= cables=(list cable:clay) %- cook :_ (most ;~(plug com gaw) parse-cable)
:+ 0 %ex |= cables=(list cable:clay)
^- hoon :+ 0 %tu
:: ::
:- %clsg %+ turn cables
%+ turn cables |= cable=cable:clay
|= cable=cable:clay ^- dojo-source
^- hoon =+ add-face=?~(face.cable "|*(n=* n)" ;:(weld "|*(n=* ^=(" (trip u.face.cable) " n))"))
:: :^ 0 %do (scan add-face parse-hoon)
:+ %clhp :+ 0 %dv [-.dir `path`[base-path file-path.cable ~]]
?~ face.cable
[%rock %n ~]
[%clhp [%rock %n ~] [%sand %tas u.face.cable]]
[%sand %tas file-path.cable]
(most ;~(plug com gaw) parse-cable)
:: ::
++ parse-cable ++ parse-cable
%+ cook |=(a=cable:clay a) %+ cook |=(a=cable:clay a)
@ -218,6 +228,16 @@
(cook |=([face=term tis=@ file=term] [`face file]) ;~(plug sym tis sym)) (cook |=([face=term tis=@ file=term] [`face file]) ;~(plug sym tis sym))
(cook |=(a=term [`a a]) sym) (cook |=(a=term [`a a]) sym)
== ==
::
++ parse-mark
%- cook :_ ;~(pfix cen sym)
|= mark=@tas
[0 %dv -.dir `path`[~.mar mark ~]]
::
++ parse-path
%+ cook |=(=path [0 %dv -.dir path])
;~(pfix fas (more fas sym))
::
++ parse-source (stag 0 parse-build) ++ parse-source (stag 0 parse-build)
++ parse-build ++ parse-build
%+ knee *dojo-build |. ~+ %+ knee *dojo-build |. ~+
@ -532,45 +552,35 @@
?: ?=([%show %3] -.mad) ?: ?=([%show %3] -.mad)
(dy-rash %tan (dy-show-source q.mad) ~) (dy-rash %tan (dy-show-source q.mad) ~)
?: ?=(%brev -.mad) ?: ?=(%brev -.mad)
?: ?=(?(%eny %now %our) p.mad)
(dy-rash %tan (cat 3 p.mad ' is immutable') ~)
=. var (~(del by var) p.mad) =. var (~(del by var) p.mad)
=< dy-amok =< dy-amok
?+ p.mad . ?+ p.mad .
$?(%eny %now %our) !!
%lib .(lib ~)
%sur .(sur ~)
%dir .(dir [[our.hid %base ud+0] /]) %dir .(dir [[our.hid %base ud+0] /])
== ==
=+ cay=(~(got by rez) p.q.mad) =+ cay=(~(got by rez) p.q.mad)
?- -.p.mad ?- -.p.mad
%verb %verb
?: ?=(?(%eny %now %our) p.p.mad)
(dy-rash %tan (cat 3 p.p.mad ' is immutable') ~)
=. var (~(put by var) p.p.mad cay) =. var (~(put by var) p.p.mad cay)
~| bad-set+[p.p.mad p.q.cay] ~| bad-set+[p.p.mad p.q.cay]
=< dy-amok =< dy-amok
?+ p.p.mad . ?+ p.p.mad .
%eny ~|(%entropy-is-eternal !!) %dir
%now ~|(%time-is-immutable !!) =/ bem=beam
%our ~|(%self-is-immutable !!) %- need %- de-beam
%lib =+ pax=((dy-cast path !>(*path)) q.cay)
%_ . ?: ?=(~ pax) ~[(scot %p our.hid) %base '0']
lib ?: ?=([@ ~] pax) ~[i.pax %base '0']
((dy-cast (list cable:clay) !>(*(list cable:clay))) q.cay) ?: ?=([@ @ ~] pax) ~[i.pax i.t.pax '0']
== pax
:: ?: =(~ .^((list path) %ct (en-beam he-beam(dir bem))))
%sur +(..dy (he-diff %tan 'dojo: dir does not exist' ~))
%_ . =. dir bem
sur =- +>(..dy (he-diff %tan - ~))
((dy-cast (list cable:clay) !>(*(list cable:clay))) q.cay) rose+[" " `~]^~[leaf+"=%" (smyt (en-beam he-beak s.dir))]
==
::
%dir =+ ^= pax ^- path
=+ pax=((dy-cast path !>(*path)) q.cay)
?: ?=(~ pax) ~[(scot %p our.hid) %base '0']
?: ?=([@ ~] pax) ~[i.pax %base '0']
?: ?=([@ @ ~] pax) ~[i.pax i.t.pax '0']
pax
=. dir (need (de-beam pax))
=- +>(..dy (he-diff %tan - ~))
rose+[" " `~]^~[leaf+"=%" (smyt (en-beam he-beak s.dir))]
== ==
:: ::
%poke %poke
@ -624,6 +634,9 @@
++ maar ?: =(%noun p.cay) ~ ++ maar ?: =(%noun p.cay) ~
[[%rose [~ " " ~] >p.cay< ~] ~] [[%rose [~ " " ~] >p.cay< ~] ~]
-- --
::
%help
(dy-inspect p.p.mad p.q.cay)
== ==
:: ::
++ dy-show |=(cay=cage (dy-print cay ~)) ++ dy-show |=(cay=cage (dy-print cay ~))
@ -663,6 +676,20 @@
:- i="" :- i=""
t=(turn `wain`?~(r.hit ~ (to-wain:format q.u.r.hit)) trip) t=(turn `wain`?~(r.hit ~ (to-wain:format q.u.r.hit)) trip)
== ==
::
++ dy-inspect
|= [topics=(list term) sut=type]
%+ dy-rash %mor
=+ to-display=(mule |.((find-item-in-type:dprint (flop topics) sut)))
?: ?=(%| -.to-display)
[%tan [%leaf "Could not find help A"] p.to-display]~
?~ p.to-display
[%tan [%leaf "Could not find help B"]~]~
=/ item (mule |.((print-item:dprint u.p.to-display)))
?: ?=(%| -.item)
[%tan [%leaf "Could not find help C"] p.item]~
p.item
::
++ dy-show-type-noun ++ dy-show-type-noun
|= a=type ^- tank |= a=type ^- tank
=- >[-]< =- >[-]<
@ -679,11 +706,16 @@
[%face ^] a(q $(a q.a)) [%face ^] a(q $(a q.a))
[%cell ^] a(p $(a p.a), q $(a q.a)) [%cell ^] a(p $(a p.a), q $(a q.a))
[%fork *] a(p (silt (turn ~(tap in p.a) |=(b=type ^$(a b))))) [%fork *] a(p (silt (turn ~(tap in p.a) |=(b=type ^$(a b)))))
[%hint *] ?. ?=(%know -.q.p.a) $(a q.a) [%hint *] ?+ q.p.a $(a q.a)
?@ p.q.p.a [(cat 3 '#' mark.p.q.p.a)]~ [%know *]
[(rap 3 '#' auth.p.q.p.a (spat type.p.q.p.a) ~)]~ ?@ p.q.p.a [(cat 3 '#' mark.p.q.p.a)]~
[(rap 3 '#' auth.p.q.p.a '+' (spat type.p.q.p.a) ~)]~
::
[%help *]
[summary.crib.p.q.p.a]~
==
[%core ^] `wain`/core [%core ^] `wain`/core
[%hold *] a(p $(a p.a)) [%hold *] $(a (~(play ut p.a) q.a))
== ==
:: ::
:: XX needs filter :: XX needs filter
@ -735,9 +767,9 @@
^+ +>+> ^+ +>+>
=^ dat say (~(transceive sole say) cal) =^ dat say (~(transceive sole say) cal)
?: |(?=(^ per) ?=(^ pux) ?=(~ pro)) ?: |(?=(^ per) ?=(^ pux) ?=(~ pro))
~& %dy-edit-busy
=^ lic say (~(transmit sole say) dat) =^ lic say (~(transmit sole say) dat)
(dy-diff %mor [%det lic] [%bel ~] ~) =/ tip=@t 'dojo: busy (press backspace to abort)'
(dy-diff %mor [%det lic] [%bel ~] [%tan [tip ~]] ~)
=> .(per `dat) => .(per `dat)
=/ res (mule |.((slam u.pro !>((tufa buf.say))))) =/ res (mule |.((slam u.pro !>((tufa buf.say)))))
?: ?=(%| -.res) ?: ?=(%| -.res)
@ -823,12 +855,23 @@
=/ poz=vase (dy-sore p.cig) =/ poz=vase (dy-sore p.cig)
=/ kev=vase =/ kev=vase
=/ kuv=(unit vase) (slew 7 som) =/ kuv=(unit vase) (slew 7 som)
?: =(~ q.cig)
(fall kuv !>(~))
=/ soz=(list [var=term vax=vase]) =/ soz=(list [var=term vax=vase])
%~ tap by %~ tap by
%- ~(run by q.cig) %- ~(run by q.cig)
|=(val=(unit dojo-source) ?~(val !>([~ ~]) (dy-vase p.u.val))) |=(val=(unit dojo-source) ?~(val !>([~ ~]) (dy-vase p.u.val)))
:: if the generator takes a named argument "drum-session",
:: then if a value isn't already supplied, we set it to the session
:: that this dojo instance is being run in.
:: (dojo is, indeed, quite coupled with drum.)
::
=? soz
?& ?=(^ kuv)
(slab %both %drum-session p.u.kuv)
!(~(has by q.cig) %drum-session)
==
[[%drum-session !>(ses.id)] soz] ::TODO does the who matter?
?: =(~ soz)
(fall kuv !>(~))
~| keyword-arg-failure+~(key by q.cig) ~| keyword-arg-failure+~(key by q.cig)
%+ slap %+ slap
(with-faces kuv+(need kuv) rep+(with-faces soz) ~) (with-faces kuv+(need kuv) rep+(with-faces soz) ~)
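The keyword-argument plumbing above means any generator that declares a named drum-session argument gets the invoking dojo session filled in automatically (as the |link/|unlink generators later in this diff do). A minimal sketch of such a generator, for illustration only: the generator itself and its %noun output are not part of this change; only the drum-session=@ta face mirrors the code above.

:: sketch: a %say generator whose %drum-session keyword argument
:: dojo populates with the name of the invoking session
:- %say
|= [[now=@da eny=@uvJ bec=beak] ~ drum-session=@ta]
:- %noun
drum-session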
@ -1021,13 +1064,14 @@
|= =card:agent:gall |= =card:agent:gall
^+ +> ^+ +>
=? card ?=(%pass -.card) =? card ?=(%pass -.card)
card(p [id p.card]) ^- card:agent:gall
card(p [(scot %p who.id) ses.id p.card])
%_(+> moz [card moz]) %_(+> moz [card moz])
:: ::
++ he-diff :: emit update ++ he-diff :: emit update
|= fec=sole-effect |= fec=sole-effect
^+ +> ^+ +>
(he-card %give %fact ~[/sole/[id]] %sole-effect !>(fec)) (he-card %give %fact ~[(id-to-path:sole id)] %sole-effect !>(fec))
:: ::
++ he-stop :: abort work ++ he-stop :: abort work
^+ . ^+ .
@ -1535,21 +1579,47 @@
:: ::
++ on-load ++ on-load
|= ole=vase |= ole=vase
^- (quip card:agent:gall _..on-init)
|^ =+ old=!<(house-any ole) |^ =+ old=!<(house-any ole)
=? old ?=(%5 -.old) =? old ?=(%5 -.old)
^- house-any
^- house-6
(house-5-to-6 old) (house-5-to-6 old)
=? old ?=(?(%6 %7) -.old) =? old ?=(?(%6 %7) -.old)
(house-6-7-to-8 +.old) (house-6-7-to-8 +.old)
?> ?=(%8 -.old) =^ caz old
`..on-init(state old) ?. ?=(%8 -.old) [~ old]
(house-8-to-9 old)
?> ?=(%9 -.old)
[caz ..on-init(state old)]
:: ::
+$ house-any $%(house house-7 house-6 house-5) +$ house-any $%(house house-8 house-7 house-6 house-5)
::
+$ id-8 @tasession
+$ house-8
$: %8
egg=@u
hoc=(map id-8 session)
acl=(set ship)
==
++ house-8-to-9
|= old=house-8
^- (quip card:agent:gall house)
:- %+ turn ~(tap in ~(key by hoc.old))
|= id=@ta
^- card:agent:gall
[%give %kick ~[/sole/[id]] ~]
=- [%9 egg.old - acl.old]
%- ~(gas by *(map sole-id session))
%+ murn ~(tap by hoc.old)
|= [id=@ta s=session]
(bind (upgrade-id:sole id) (late s))
:: ::
+$ house-7 [%7 house-6-7] +$ house-7 [%7 house-6-7]
+$ house-6 [%6 house-6-7] +$ house-6 [%6 house-6-7]
+$ house-6-7 +$ house-6-7
$: egg=@u :: command count $: egg=@u :: command count
hoc=(map id session-6) :: conversations hoc=(map id-8 session-6) :: conversations
acl=(set ship) :: remote access whitelist acl=(set ship) :: remote access whitelist
== :: == ::
+$ session-6 :: per conversation +$ session-6 :: per conversation
@ -1576,9 +1646,10 @@
old(poy ~, -.dir [our.hid %base ud+0]) old(poy ~, -.dir [our.hid %base ud+0])
:: ::
+$ house-5 +$ house-5
[%5 egg=@u hoc=(map id session)] [%5 egg=@u hoc=(map id-8 session-6)]
++ house-5-to-6 ++ house-5-to-6
|= old=house-5 |= old=house-5
^- house-6
[%6 egg.old hoc.old *(set ship)] [%6 egg.old hoc.old *(set ship)]
-- --
:: ::
@ -1594,7 +1665,8 @@
he-abet:(~(he-type he hid id.act ~ (~(got by hoc) id.act)) act) he-abet:(~(he-type he hid id.act ~ (~(got by hoc) id.act)) act)
:: ::
%lens-command %lens-command
=+ !<([=id =command:lens] vase) =+ !<([ses=@ta =command:lens] vase)
=/ =id [our.hid ses]
he-abet:(~(he-lens he hid id ~ (~(got by hoc) id)) command) he-abet:(~(he-lens he hid id ~ (~(got by hoc) id)) command)
:: ::
%allow-remote-login %allow-remote-login
@ -1632,8 +1704,7 @@
?> ?| (team:title our.hid src.hid) ?> ?| (team:title our.hid src.hid)
(~(has in acl) src.hid) (~(has in acl) src.hid)
== ==
?> ?=([%sole @ ~] path) =/ =id (need (path-to-id:sole path))
=/ id i.t.path
=? hoc (~(has by hoc) id) =? hoc (~(has by hoc) id)
~& [%dojo-peer-replaced id] ~& [%dojo-peer-replaced id]
(~(del by hoc) id) (~(del by hoc) id)
@ -1645,7 +1716,7 @@
++ on-leave ++ on-leave
|= =path |= =path
?> ?=([%sole *] path) ?> ?=([%sole *] path)
=. hoc (~(del by hoc) t.path) =. hoc (~(del by hoc) (need (path-to-id:sole path)))
[~ ..on-init] [~ ..on-init]
:: ::
++ on-peek ++ on-peek
@ -1654,13 +1725,15 @@
:: ::
++ on-agent ++ on-agent
|= [=wire =sign:agent:gall] |= [=wire =sign:agent:gall]
?> ?=([@ @ *] wire) ^- (quip card:agent:gall _..on-init)
=/ =session (~(got by hoc) i.wire) ?> ?=([@ @ @ *] wire)
=/ he-full ~(. he hid i.wire ~ session) =/ =id [(slav %p i.wire) i.t.wire]
=/ =session (~(got by hoc) id)
=/ he-full ~(. he hid id ~ session)
=^ moves state =^ moves state
=< he-abet =< he-abet
^+ he ^+ he
?+ i.t.wire ~|([%dojo-bad-on-agent wire -.sign] !!) ?+ i.t.t.wire ~|([%dojo-bad-on-agent wire -.sign] !!)
%poke (he-unto:he-full t.wire sign) %poke (he-unto:he-full t.wire sign)
%wool (he-wool:he-full t.wire sign) %wool (he-wool:he-full t.wire sign)
== ==
@ -1668,14 +1741,16 @@
:: ::
++ on-arvo ++ on-arvo
|= [=wire =sign-arvo] |= [=wire =sign-arvo]
?> ?=([@ *] wire) ^- (quip card:agent:gall _..on-init)
=/ =session (~(got by hoc) i.wire) ?> ?=([@ @ *] wire)
=/ he-full ~(. he hid i.wire ~ session) =/ =id [(slav %p i.wire) i.t.wire]
=/ =session (~(got by hoc) id)
=/ he-full ~(. he hid id ~ session)
=^ moves state =^ moves state
=< he-abet =< he-abet
?+ +<.sign-arvo ~|([%dojo-bad-take +<.sign-arvo] !!) ?+ +<.sign-arvo ~|([%dojo-bad-take +<.sign-arvo] !!)
%writ (he-writ:he-full t.wire +>.sign-arvo) %writ (he-writ:he-full t.t.wire +>.sign-arvo)
%http-response (he-http-response:he-full t.wire +>.sign-arvo) %http-response (he-http-response:he-full t.t.wire +>.sign-arvo)
== ==
[moves ..on-init] [moves ..on-init]
:: if dojo fails unexpectedly, kill whatever each session is working on :: if dojo fails unexpectedly, kill whatever each session is working on
View File

@ -469,6 +469,10 @@
[~ this(dogs.state (~(put by dogs.state) path u.dog(running ~)))] [~ this(dogs.state (~(put by dogs.state) path u.dog(running ~)))]
:: ::
%thread-done %thread-done
:: if empty, that means we cancelled this thread
::
?: =(*vase q.cage.sign)
`this
=+ !<([vows=disavows pup=watchpup] q.cage.sign) =+ !<([vows=disavows pup=watchpup] q.cage.sign)
=. u.dog =. u.dog
%_ u.dog %_ u.dog
View File
@ -1,8 +1,13 @@
:: herm: stand-in for term.c with http interface :: herm: stand-in for term.c with http interface
:: ::
/- herm
/+ default-agent, dbug, verb /+ default-agent, dbug, verb
:: keep relevant mark conversions in cache for performance
::
/$ blit-to-json %blit %json /$ blit-to-json %blit %json
/$ json-to-blit %json %blit /$ json-to-blit %json %blit
/$ json-to-task %json %herm-task
::
=, jael =, jael
|% |%
+$ state-0 [%0 ~] +$ state-0 [%0 ~]
@ -13,15 +18,18 @@
%+ verb | %+ verb |
%- agent:dbug %- agent:dbug
^- agent:gall ^- agent:gall
=> |%
++ pass-session
|= [ses=@tas tas=session-task:dill]
[%pass /dill/[ses] %arvo %d %shot ses tas]
--
|_ =bowl:gall |_ =bowl:gall
+* this . +* this .
def ~(. (default-agent this %|) bowl) def ~(. (default-agent this %|) bowl)
:: ::
++ on-init ++ on-init
^- (quip card:agent:gall _this) ^- (quip card:agent:gall _this)
:: set up dill session subscription [~ this]
::
[[%pass [%view %$ ~] %arvo %d %view ~]~ this]
:: ::
++ on-save !>([%0 ~]) ++ on-save !>([%0 ~])
++ on-load ++ on-load
@ -32,47 +40,68 @@
++ on-watch ++ on-watch
|= =path |= =path
^- (quip card:agent:gall _this) ^- (quip card:agent:gall _this)
?> =(our src):bowl
?> ?=([%session @ %view ~] path)
:_ this :_ this
:: scry prompt and cursor position out of dill for initial response ~| path
?> ?=([%session @ %view ~] path)
=* ses i.t.path
:: subscribe to the requested session
:: ::
=/ base=^path ::NOTE multiple views do not result in multiple subscriptions
/dx/(scot %p our.bowl)//(scot %da now.bowl)/sessions :: because they go over the same wire/duct
:~ [%give %fact ~ %blit !>(.^(blit:dill (weld base //line)))] ::
[%give %fact ~ %blit !>(`blit:dill`hop+.^(@ud (weld base //cursor)))] [(pass-session ses %view ~)]~
==
:: ::
++ on-arvo ++ on-arvo
|= [=wire =sign-arvo] |= [=wire =sign-arvo]
^- (quip card:agent:gall _this) ^- (quip card:agent:gall _this)
~| wire
?+ wire (on-arvo:def wire sign-arvo) ?+ wire (on-arvo:def wire sign-arvo)
[%tube *] [~ this] :: we no longer care about these [%tube *] [~ this] :: we no longer care about these
:: ::
:: pass on dill blits for the session :: pass on dill blits for the session
:: ::
[%view %$ ~] [%dill @ ~]
=* ses i.t.wire
?. ?=([%dill %blit *] sign-arvo) ?. ?=([%dill %blit *] sign-arvo)
~| [%unexpected-sign [- +<]:sign-arvo] ~| [%unexpected-sign [- +<]:sign-arvo]
!! !!
:_ this :_ this
%+ turn p.sign-arvo %+ turn p.sign-arvo
|= =blit:dill |= =blit:dill
[%give %fact [%session %$ %view ~]~ %blit !>(blit)] [%give %fact [%session ses %view ~]~ %blit !>(blit)]
::
:: clean up old-style subscriptions
::
[%view @ ~]
=* ses i.t.wire
:_ this
[%pass wire %arvo %d %shot ses %flee ~]~
== ==
:: ::
++ on-poke ++ on-poke
|= [=mark =vase] |= [=mark =vase]
^- (quip card:agent:gall _this) ^- (quip card:agent:gall _this)
?> =(our src):bowl
?. ?=(%belt mark)
~| [%unexpected-mark mark]
!!
:_ this :_ this
[%pass [%belt %$ ~] %arvo %d %belt !<(belt:dill vase)]~ :_ ~
?+ mark ~|([%unexpected-mark mark] !!)
%belt (pass-session %$ %belt !<(belt:dill vase))
%herm-task (pass-session !<(task:herm vase))
==
::
++ on-peek
|= =path
^- (unit (unit cage))
?+ path ~
[%x %sessions ~]
:+ ~ ~
:- %json
!> ^- json
=- a+(turn ~(tap in -) (lead %s))
.^((set @tas) %dy /(scot %p our.bowl)//(scot %da now.bowl)/sessions)
==
:: ::
++ on-leave on-leave:def ++ on-leave on-leave:def
++ on-peek on-peek:def ::
++ on-agent on-agent:def ++ on-agent on-agent:def
++ on-fail on-fail:def ++ on-fail on-fail:def
-- --
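The new on-peek arm exposes the set of dill sessions as JSON under the /x/sessions scry path. A hedged example of hitting it from the dojo, assuming the usual gall scry conventions (the %gx care, the =herm= beak shorthand, and the trailing mark segment are conventions, not spelled out in this diff):

.^(json %gx /=herm=/sessions/json)

This should yield an array of session names; presumably the default session shows up as the empty string.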
View File
@ -2,8 +2,8 @@
/+ drum=hood-drum, helm=hood-helm, kiln=hood-kiln /+ drum=hood-drum, helm=hood-helm, kiln=hood-kiln
|% |%
+$ state +$ state
$~ [%23 *state:drum *state:helm *state:kiln] $~ [%26 *state:drum *state:helm *state:kiln]
$>(%23 any-state) $>(%26 any-state)
:: ::
+$ any-state +$ any-state
$% [ver=?(%1 %2 %3 %4 %5 %6) lac=(map @tas fin-any-state)] $% [ver=?(%1 %2 %3 %4 %5 %6) lac=(map @tas fin-any-state)]
@ -24,6 +24,9 @@
[%21 drum=state-4:drum helm=state-1:helm kiln=state-8:kiln] [%21 drum=state-4:drum helm=state-1:helm kiln=state-8:kiln]
[%22 drum=state-4:drum helm=state-1:helm kiln=state-9:kiln] [%22 drum=state-4:drum helm=state-1:helm kiln=state-9:kiln]
[%23 drum=state-4:drum helm=state-2:helm kiln=state-9:kiln] [%23 drum=state-4:drum helm=state-2:helm kiln=state-9:kiln]
[%24 drum=state-4:drum helm=state-2:helm kiln=state-10:kiln]
[%25 drum=state-5:drum helm=state-2:helm kiln=state-10:kiln]
[%26 drum=state-6:drum helm=state-2:helm kiln=state-10:kiln]
== ==
+$ any-state-tuple +$ any-state-tuple
$: drum=any-state:drum $: drum=any-state:drum
@ -91,8 +94,7 @@
:: ::
?+ mark (on-poke:def mark vase) ?+ mark (on-poke:def mark vase)
%atom poke-helm(mark %helm-atom) %atom poke-helm(mark %helm-atom)
%dill-belt poke-drum(mark %drum-dill-belt) %dill-poke poke-drum
%dill-blit poke-drum(mark %drum-dill-blit)
%hood-sync poke-kiln(mark %kiln-sync) %hood-sync poke-kiln(mark %kiln-sync)
%write-sec-atom poke-helm(mark %helm-write-sec-atom) %write-sec-atom poke-helm(mark %helm-write-sec-atom)
== ==
@ -107,6 +109,7 @@
?+ path (on-watch:def +<) ?+ path (on-watch:def +<)
[%drum *] =^(c drum.state (peer:drum-core t.path) [c this]) [%drum *] =^(c drum.state (peer:drum-core t.path) [c this])
[%kiln *] =^(c kiln.state (peer:kiln-core t.path) [c this]) [%kiln *] =^(c kiln.state (peer:kiln-core t.path) [c this])
[%dill *] =^(c drum.state (peer:drum-core +<) [c this])
== ==
:: ::
++ on-agent ++ on-agent
@ -122,6 +125,7 @@
|= [=wire syn=sign-arvo] |= [=wire syn=sign-arvo]
^- step:agent:gall ^- step:agent:gall
?+ wire ~|([%hood-bad-wire wire] !!) ?+ wire ~|([%hood-bad-wire wire] !!)
[%drum *] =^(c drum.state (take-arvo:drum-core t.wire syn) [c this])
[%helm *] =^(c helm.state (take-arvo:helm-core t.wire syn) [c this]) [%helm *] =^(c helm.state (take-arvo:helm-core t.wire syn) [c this])
[%kiln *] =^(c kiln.state (take-arvo:kiln-core t.wire syn) [c this]) [%kiln *] =^(c kiln.state (take-arvo:kiln-core t.wire syn) [c this])
== ==
View File
@ -83,7 +83,8 @@
:: ::
?+ -.source.com ?+ -.source.com
:_ this(job.state (some [eyre-id com])) :_ this(job.state (some [eyre-id com]))
[%pass /sole %agent [our.bowl %dojo] %watch /sole/[eyre-id]]~ =/ =path /sole/(scot %p our.bowl)/[eyre-id]
[%pass /sole %agent [our.bowl %dojo] %watch path]~
:: ::
%export %export
:_ this(job.state (some [eyre-id com])) :_ this(job.state (some [eyre-id com]))
View File
@ -65,7 +65,7 @@
:: frequency: time to wait between sending batches (TODO fancier) :: frequency: time to wait between sending batches (TODO fancier)
:: endpoint: ethereum rpc endpoint to use :: endpoint: ethereum rpc endpoint to use
:: contract: ethereum contract address :: contract: ethereum contract address
:: chain-id: mainnet, ropsten, local (https://chainid.network/) :: chain-id: mainnet, goerli, local (https://chainid.network/)
:: resend-time: time to resend a batch with higher gas price :: resend-time: time to resend a batch with higher gas price
:: update-rate: frequency to update the roller's predicted state :: update-rate: frequency to update the roller's predicted state
:: fallback-gas-price: default batch gas price :: fallback-gas-price: default batch gas price
@ -1017,9 +1017,9 @@
=/ [contract=@ux chain-id=@] =/ [contract=@ux chain-id=@]
=< [naive chain-id] =< [naive chain-id]
=, azimuth =, azimuth
?- net.config ?+ net.config !!
%mainnet mainnet-contracts %mainnet mainnet-contracts
%ropsten ropsten-contracts %goerli goerli-contracts
%local local-contracts %local local-contracts
%default contracts %default contracts
== ==
View File
@ -43,13 +43,13 @@
++ on-fail on-fail:def ++ on-fail on-fail:def
:: ::
++ command-parser ++ command-parser
|= sole-id=@ta |= =sole-id:shoe
^+ |~(nail *(like [? command])) ^+ |~(nail *(like [? command]))
%+ stag & %+ stag &
(perk %demo %row %table ~) (perk %demo %row %table ~)
:: ::
++ tab-list ++ tab-list
|= sole-id=@ta |= =sole-id:shoe
^- (list [@t tank]) ^- (list [@t tank])
:~ ['demo' leaf+"run example command"] :~ ['demo' leaf+"run example command"]
['row' leaf+"print a row"] ['row' leaf+"print a row"]
@ -57,7 +57,7 @@
== ==
:: ::
++ on-command ++ on-command
|= [sole-id=@ta =command] |= [=sole-id:shoe =command]
^- (quip card _this) ^- (quip card _this)
=; [to=(list _sole-id) fec=shoe-effect:shoe] =; [to=(list _sole-id) fec=shoe-effect:shoe]
[[%shoe to fec]~ this] [[%shoe to fec]~ this]
@ -87,7 +87,7 @@
== ==
:: ::
++ can-connect ++ can-connect
|= sole-id=@ta |= =sole-id:shoe
^- ? ^- ?
?| =(~zod src.bowl) ?| =(~zod src.bowl)
(team:title [our src]:bowl) (team:title [our src]:bowl)
View File
@ -166,10 +166,10 @@
(on-load on-save) (on-load on-save)
=^ cards state =^ cards state
?+ mark (on-poke:def mark vase) ?+ mark (on-poke:def mark vase)
%spider-input (on-poke-input:sc !<(input vase)) %spider-input (on-poke-input:sc !<(input vase))
%spider-start (handle-start-thread:sc !<(start-args:spider vase)) %spider-start (handle-start-thread:sc !<(start-args:spider vase))
%spider-stop (handle-stop-thread:sc !<([tid ?] vase)) %spider-inline (handle-inline-thread:sc !<(inline-args:spider vase))
:: %spider-stop (handle-stop-thread:sc !<([tid ?] vase))
%handle-http-request %handle-http-request
(handle-http-request:sc !<([@ta =inbound-request:eyre] vase)) (handle-http-request:sc !<([@ta =inbound-request:eyre] vase))
== ==
@ -303,12 +303,31 @@
++ handle-start-thread ++ handle-start-thread
~/ %handle-start-thread ~/ %handle-start-thread
|= [parent-tid=(unit tid) use=(unit tid) =beak file=term =vase] |= [parent-tid=(unit tid) use=(unit tid) =beak file=term =vase]
(prep-thread parent-tid use beak %| file vase)
::
++ handle-inline-thread
~/ %handle-inline-thread
|= [parent-tid=(unit tid) use=(unit tid) =beak =shed:khan]
(prep-thread parent-tid use beak %& shed)
::
++ prep-thread
|= $: parent-tid=(unit tid) use=(unit tid) =beak
source=(each shed:khan [file=term =vase])
==
^- (quip card ^state) ^- (quip card ^state)
=/ parent-yarn=yarn =/ parent-yarn=yarn
?~ parent-tid ?~ parent-tid
/ /
(~(got by tid.state) u.parent-tid) (~(got by tid.state) u.parent-tid)
=/ new-tid (fall use (new-thread-id file)) =/ new-tid
?^ use
u.use
%- new-thread-id
?- -.source
%& (cat 3 'inline-' q.beak)
%| file.p.source
==
::
=/ =yarn (snoc parent-yarn new-tid) =/ =yarn (snoc parent-yarn new-tid)
:: ::
?: (~(has of running.state) yarn) ?: (~(has of running.state) yarn)
@ -321,16 +340,19 @@
=? serving.state !(~(has by serving.state) new-tid) =? serving.state !(~(has by serving.state) new-tid)
(~(put by serving.state) new-tid [~ %noun q.beak]) (~(put by serving.state) new-tid [~ %noun q.beak])
:: ::
=: starting.state (~(put by starting.state) yarn [%build vase]) =. tid.state (~(put by tid.state) new-tid yarn)
tid.state (~(put by tid.state) new-tid yarn) ?- -.source
%& (begin-shed yarn p.source)
%|
=. starting.state (~(put by starting.state) yarn [%build vase.p.source])
=/ pax=path
~| no-file-for-thread+file.p.source
(need (get-fit:clay beak %ted file.p.source))
:_ state
:_ ~
:+ %pass /build/[new-tid]
[%arvo %c %warp p.beak q.beak ~ %sing %a r.beak pax]
== ==
=/ pax=path
~| no-file-for-thread+file
(need (get-fit:clay beak %ted file))
:_ state
:_ ~
:+ %pass /build/[new-tid]
[%arvo %c %warp p.beak q.beak ~ %sing %a r.beak pax]
:: ::
++ handle-build ++ handle-build
~/ %handle-build ~/ %handle-build
@ -349,23 +371,25 @@
=/ maybe-thread (mule |.(!<(thread !<(vase q.r.u.riot)))) =/ maybe-thread (mule |.(!<(thread !<(vase q.r.u.riot))))
?: ?=(%| -.maybe-thread) ?: ?=(%| -.maybe-thread)
(thread-fail-not-running tid %thread-not-thread ~) (thread-fail-not-running tid %thread-not-thread ~)
(start-thread yarn p.maybe-thread) (slam-thread yarn p.maybe-thread)
:: ::
++ start-thread ++ slam-thread
~/ %start-thread ~/ %slam-thread
|= [=yarn =thread] |= [=yarn =thread]
^- (quip card ^state) ^- (quip card ^state)
=/ =vase vase:(~(got by starting.state) yarn) =/ =vase vase:(~(got by starting.state) yarn)
?< (~(has of running.state) yarn)
=/ m (strand ,^vase)
=/ res (mule |.((thread vase))) =/ res (mule |.((thread vase)))
?: ?=(%| -.res) ?: ?=(%| -.res)
(thread-fail-not-running (yarn-to-tid yarn) %false-start p.res) (thread-fail-not-running (yarn-to-tid yarn) %false-start p.res)
=/ =eval-form:eval:m =. starting.state (~(del by starting.state) yarn)
(from-form:eval:m p.res) (begin-shed yarn p.res)
=: starting.state (~(del by starting.state) yarn) ::
running.state (~(put of running.state) yarn eval-form) ++ begin-shed
== |= [=yarn =shed:khan]
?< (~(has of running.state) yarn)
=/ m (strand ,vase)
=/ =eval-form:eval:m (from-form:eval:m shed)
=. running.state (~(put of running.state) yarn eval-form)
(take-input yarn ~) (take-input yarn ~)
:: ::
++ handle-stop-thread ++ handle-stop-thread
View File
@ -0,0 +1,46 @@
:: print [len] %ames flows, sorted by number-per-ship
::
:- %say
|= [[now=@da eny=@uvJ bec=beak] arg=$@(~ [len=@ ~]) ~]
:- %noun
::
=; flows
^- (list [=ship open=[out-open=@ out-closing=@ in=@ nax=@] corked=@])
=/ len ?^(arg len.arg 50)
(scag len (sort flows |=([[@ [a=@ud *] *] @ [b=@ud *] *] (gth a b))))
::
=/ peers-map
.^ (map ship ?(%alien %known))
%ax /(scot %p p.bec)//(scot %da now)/peers
==
=/ peers=(list ship)
%+ murn ~(tap by peers-map)
|= [=ship val=?(%alien %known)]
?: =(ship p.bec)
~ :: this is weird, but we saw it
?- val
%alien ~
%known (some ship)
==
::
^- (list [=ship open=[out-open=@ out-closing=@ in=@ nax=@] corked=@])
%+ turn peers
|= =ship
=+ .^ =ship-state:ames
%ax /(scot %p p.bec)//(scot %da now)/peers/(scot %p ship)
==
=/ =peer-state:ames ?>(?=(%known -.ship-state) +.ship-state)
=/ corked ~(wyt in corked.peer-state)
=- [ship - corked]
::
=+ %+ roll ~(tap in ~(key by snd.peer-state))
|= [b=bone [out=(list bone) in=(list bone) nax=(list bone)]]
=/ m (mod b 4)
?+ m ~|([%odd-bone b] !!)
%0 [[b out] in nax]
%1 [out [b in] nax]
%3 [out in [b nax]]
==
=/ [out-closing=(list bone) out-open=(list bone)]
(skid out ~(has ^in closing.peer-state))
[(lent out-open) (lent out-closing) (lent in) (lent nax)]
View File
@ -0,0 +1,20 @@
:: print [len] %ames message-pump timers, sorted by number-per-ship
::
:- %say
|= [[now=@da eny=@uvJ bec=beak] arg=$@(~ [len=@ ~]) ~]
:- %noun
::
=; who
^- (list [@ta @ud])
=/ len ?^(arg len.arg 50)
(scag len (sort ~(tap by who) |=([[@ a=@ud] @ b=@ud] (gth a b))))
::
=| who=(map @ta @ud)
=/ tim .^((list (pair @da duct)) bx+/(scot %p p.bec)//(scot %da now)/debug/timers)
|- ^+ who
?~ tim who
?. &(?=(^ q.i.tim) ?=([%ames %pump ^] i.q.i.tim))
$(tim t.tim)
=* her i.t.t.i.q.i.tim
=/ i (~(gut by who) her 0)
$(tim t.tim, who (~(put by who) her +(i)))
View File
@ -5,8 +5,8 @@
:- %aqua-events :- %aqua-events
%+ turn %+ turn
^- (list unix-event) ^- (list unix-event)
:~ [/d/term/1 %belt %ctl `@c`%e] :~ [/d/term/1 %belt %mod %ctl `@c`%e]
[/d/term/1 %belt %ctl `@c`%u] [/d/term/1 %belt %mod %ctl `@c`%u]
[/d/term/1 %belt %txt ((list @c) command)] [/d/term/1 %belt %txt ((list @c) command)]
[/d/term/1 %belt %ret ~] [/d/term/1 %belt %ret ~]
== ==
View File
@ -1,5 +0,0 @@
:: Kick azimuth
::
:- %say
|= *
[%azimuth-poke %kick ~]
View File
@ -1,5 +1,5 @@
:: Change node url and network for azimuth :: Change node url and network for azimuth
:: ::
:- %say :- %say
|= [* [url=@ta net=?(%mainnet %ropsten %local %default) ~] ~] |= [* [url=@ta net=?(%mainnet %goerli %local %default) ~] ~]
[%azimuth-poke %watch url net] [%azimuth-poke %watch url net]
View File
@ -24,7 +24,7 @@
rest=(list desk) rest=(list desk)
== ==
:: ::
~ prime=_|
== ==
:- %pill :- %pill
^- pill:pill ^- pill:pill
@ -39,50 +39,10 @@
?~ arg %base ?~ arg %base
?>(?=(@ base.arg) base.arg) ?>(?=(@ base.arg) base.arg)
/(scot %p p.bec)/[desk]/(scot %da now)/sys /(scot %p p.bec)/[desk]/(scot %da now)/sys
=/ bas=path
(scag 3 sys)
=/ dez=(list [desk path]) =/ dez=(list [desk path])
?~ arg ~ ?~ arg ~
%+ turn rest.arg %+ turn rest.arg
|= =desk |= =desk
[desk /(scot %p p.bec)/[desk]/(scot %da now)] [desk /(scot %p p.bec)/[desk]/(scot %da now)]
:: ::
:: compiler-source: hoon source file producing compiler, `sys/hoon` (brass:pill sys dez prime)
::
=+ compiler-source=.^(@t %cx (welp sys /hoon/hoon))
::
:: compiler-twig: compiler as hoon expression
::
~& %brass-parsing
=+ compiler-twig=(rain /sys/hoon/hoon compiler-source)
~& %brass-parsed
::
:: compiler-formula: compiler as nock formula
::
~& %brass-compiling
=+ compiler-formula=q:(~(mint ut %noun) %noun compiler-twig)
~& %brass-compiled
::
:: arvo-source: hoon source file producing arvo kernel, `sys/arvo`
::
=+ arvo-source=.^(@t %cx (welp sys /arvo/hoon))
::
:: boot-ova: startup events
::
=/ boot-ova=(list)
:~ aeon:eden:part
boot:eden:part
compiler-formula
compiler-source
arvo-source
==
:: a pill is a 3-tuple of event-lists: [boot kernel userspace]
::
:+ %pill %brass
:+ boot-ova
:~ (boot-ovum:pill compiler-source arvo-source)
(file-ovum2:pill bas)
==
%+ turn
(snoc dez [%base bas])
file-ovum:pill
View File
@ -0,0 +1,28 @@
:: +desk-requests: count pending requests for a desk
::
:- %say
|= $: [now=@da eny=@uvJ bec=beak]
[=desk ~]
~
==
:- %tang
^- tang
=/ cul=(list [@p rave:clay])
%~ tap in
.^ (set [@p rave:clay])
/cx/(scot %p p.bec)//(scot %da now)/cult/[desk]
==
::
=/ [loc=_cul inc=_cul]
(skid cul |=([=@p rave:clay] =(p p.bec)))
=/ syc=_cul
=/ nex=@ud
+(ud:.^(cass:clay %cw /(scot %p p.bec)/[desk]/(scot %da now)))
(skim inc |=([@p =rave:clay] =([%sing %w ud+nex /] rave)))
::
%- flop
:~ leaf+"total: {<(lent cul)>}"
leaf+"- local: {<(lent loc)>}"
leaf+"- incoming: {<(lent inc)>}"
leaf+" - for next: {<(lent syc)>}"
==
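Given the +desk-requests name in the header comment, invoking it from the dojo would presumably look like this (the %base desk is just an example target):

+desk-requests %base

which prints the total, local, and incoming request counts, plus how many incoming requests are waiting on the desk's next revision.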
View File
@ -0,0 +1,10 @@
:: +gall-nonces: print %gall agent subscription nonces, highest-last
::
:- %say
|= [[now=@da eny=@uvJ bec=beak] ~ ~]
:- %noun
^- (list [dude:gall @ud])
%+ sort
%~ tap by
.^((map dude:gall @ud) %gf /(scot %p p.bec)//(scot %da now))
|=([[* a=@ud] [* b=@ud]] (lth a b))
View File
@ -39,7 +39,7 @@
|= a=* ^- [cord path] |= a=* ^- [cord path]
[;;(@t a) (welp (slag len pax) /[nam])] [;;(@t a) (welp (slag len pax) /[nam])]
-- --
:: ::TODO: make this work with doccords
:- %say :- %say
|= [[now=time @ our=ship ^] typ=$@(~ [p=term ~]) ~] |= [[now=time @ our=ship ^] typ=$@(~ [p=term ~]) ~]
=/ pax=path /(scot %p our)/base/(scot %da now)/gen :: XX hardcoded =/ pax=path /(scot %p our)/base/(scot %da now)/gen :: XX hardcoded
View File
@ -0,0 +1,6 @@
:: Helm: Adjust Ames congestion control constants
::
:- %say
|= [^ [msg=@ud mem=@ud ~] ~]
:- %helm-ames-cong
msg^mem
View File
@ -0,0 +1,10 @@
:: Helm: Set Ames Blocklist
::
/? 310
::
::::
::
:- %say
|= [^ ships=(list ship) ~]
:- %helm-ames-snub
ships
View File
@ -1,14 +0,0 @@
:: Helm: Reload vane/s from /=base=
::
:::: /hoon/breload/hood/gen
::
/? 310
::
::::
::
:- %say
|= $: [now=@da eny=@uvJ bec=beak]
[arg=(list term) ~]
==
:+ %helm-reload-desk %base
arg
View File
@ -1,7 +1,3 @@
:- %say :- %say
|= $: [now=@da eny=@uvJ bec=beak] |= [[now=@da eny=@uvJ bec=beak] ~ ~]
~ [%kiln-bump ~]
force=_|
except=(set desk)
==
[%kiln-bump except force]
View File
@ -0,0 +1,9 @@
:: Deletes all stale ames flows from failed (re) subscriptions
::
:: It runs in dry mode by default, printing the flows that can be closed.
:: To actually close the flows, run with |close-flows, =dry |
::
:- %say
|= [^ arg=~ dry=?]
::
[%helm-ames-kroc dry]
View File
@ -0,0 +1,8 @@
:- %say
|= $: [now=@da eny=@uvJ bec=beak]
~
[dude=_`dude:gall`%$ ship=_`@p`(bex 128)]
==
=/ darg=(unit dude:gall) ?:(=(%$ dude) ~ `dude)
=/ sarg=(unit ^ship) ?:(=((bex 128) ship) ~ `ship)
[%helm-doff darg sarg]
View File
@ -0,0 +1,8 @@
:: Helm: Set Gall Verbosity by Agent
::
/? 310
::
:- %say
|= [^ dudes=(list dude:gall) ~]
:- %helm-gall-sift
dudes
View File
@ -0,0 +1,11 @@
:: Helm: Adjust Gall verbosity
::
:: List of diagnostic flags is in verb:gall in zuse.hoon, documented in
:: gall.hoon
::
/? 310
::
:- %say
|= [^ veb=(list verb:gall) ~]
:- %helm-gall-verb
veb
View File
@ -1,4 +1,4 @@
:: Helm: Adjust vane error verbosity knob :: Drum: Adjust vane error verbosity knob
:: ::
/? 310 /? 310
:: ::
@ -6,5 +6,5 @@
:: ::
:- %say :- %say
|= [^ [error-tag=@tas level=?(%hush %soft %loud) ~] ~] |= [^ [error-tag=@tas level=?(%hush %soft %loud) ~] ~]
:- %helm-knob :- %drum-knob
[error-tag level] [error-tag level]
View File
@ -8,9 +8,11 @@
:: ::
:- %say :- %say
|= $: [now=@da eny=@uvJ byk=beak] |= $: [now=@da eny=@uvJ byk=beak]
[arg=$?([dap=term ~] [who=ship dap=term ~]) ~] arg=$?([dap=term ~] [who=ship dap=term ~])
drum-session=@ta
== ==
:- %drum-link :- %drum-link
:- drum-session
?~ +.arg ?~ +.arg
[p.byk dap.arg] [p.byk dap.arg]
[who.arg dap.arg] [who.arg dap.arg]
View File
@ -0,0 +1,50 @@
:: |new-desk: creates a minimal desk
::
/+ *generators
::
:- %ask
|= $: [now=@da eny=@uvJ bek=beak]
[=desk ~]
[from=$~(%base desk) hard=_|]
==
::
=; make-new-desk
?. ?& !hard
(~(has in .^((set ^desk) %cd (en-beam bek(q %$) /))) desk)
==
(make-new-desk)
%+ print (rap 3 'the desk %' desk ' already exists. overwrite it?' ~)
%+ prompt [%& %prompt "overwrite? (y/N) "]
|= in=tape
?. |(=("y" in) =("Y" in) =("yes" in))
no-product
(make-new-desk)
::
|. %- produce
:- %helm-pass
%^ new-desk:cloy desk
~
%- ~(gas by *(map path page:clay))
|^ =- (turn - mage)
^- (list path)
:~ /mar/noun/hoon
/mar/hoon/hoon
/mar/txt/hoon
/mar/kelvin/hoon
/sys/kelvin
==
::
++ mage
|= =path
:- path
^- page:clay
:- (rear path)
~| [%missing-source-file from path]
.^ *
%cx
(scot %p p.bek)
from
(scot %da now)
path
==
--
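Possible invocations of the |new-desk generator above, based on its argument spec (the desk names here are illustrative, not taken from the diff):

|new-desk %sandbox
|new-desk %scratch, =from %landscape, =hard &

The first copies the minimal file set from %base; the second sources the files from another desk and, with =hard &, skips the overwrite prompt if %scratch already exists.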
View File
@ -8,9 +8,7 @@
== ==
:- %kiln-rein :- %kiln-rein
:- desk :- desk
=+ .^(=cone:clay %cx /(scot %p p.bec)//(scot %da now)/domes)
%+ roll arg %+ roll arg
=| =rein:hood |: [*[on=? =dude:gall] rein=ren:(~(got by cone) [p.bec desk])]
|: [*[on=? =dude:gall] rein=rein(liv liv)] (~(put by rein) dude on)
?: on
rein(add (~(put in add.rein) dude))
rein(sub (~(put in sub.rein) dude))
View File
@ -15,4 +15,7 @@
?@ +.arg [q.bec -.arg] ?@ +.arg [q.bec -.arg]
?> ((sane %tas) +<.arg) ?> ((sane %tas) +<.arg)
[-.arg +<.arg] [-.arg +<.arg]
[%kiln-rein des & [dap ~ ~] ~] =+ .^(=cone:clay %cx /(scot %p p.bec)//(scot %da now)/domes)
=/ =dome:clay (~(gut by cone) [p.bec des] *dome:clay)
=+ ((slog ?:(=(%live liv.dome) ~ ['kiln: desk not live' ~])) ~)
[%kiln-rein des (~(put by ren.dome) dap &)]
View File
@ -13,8 +13,7 @@
=/ =lobe u.fil.arch =/ =lobe u.fil.arch
=+ .^(=rang %cx /(scot %p p.bec)//(scot %da now)/rang) =+ .^(=rang %cx /(scot %p p.bec)//(scot %da now)/rang)
=+ .^(=cone %cx /(scot %p p.bec)//(scot %da now)/domes) =+ .^(=cone %cx /(scot %p p.bec)//(scot %da now)/domes)
=/ domes=(list [[=ship =desk] =dome tom=(map tako norm) nor=norm]) =/ domes=(list [[=ship =desk] dome]) ~(tap by cone)
~(tap by cone)
=/ norms =/ norms
|^ |^
|- ^- (set [ship desk tako norm path]) |- ^- (set [ship desk tako norm path])
@ -24,14 +23,14 @@
=/ =aeon 1 =/ =aeon 1
%- ~(uni in $(domes t.domes)) %- ~(uni in $(domes t.domes))
|- ^- (set [ship desk tako norm path]) |- ^- (set [ship desk tako norm path])
?: (lth let.dome.i.domes aeon) ?: (lth let.i.domes aeon)
~ ~
=/ =tako (~(got by hit.dome.i.domes) aeon) =/ =tako (~(got by hit.i.domes) aeon)
=/ paths (draw-tako ship.i.domes desk.i.domes +.i.domes tako) =/ paths (draw-tako ship.i.domes desk.i.domes +.i.domes tako)
(~(uni in paths) $(aeon +(aeon))) (~(uni in paths) $(aeon +(aeon)))
:: ::
++ draw-tako ++ draw-tako
|= [=ship =desk [dome tom=(map tako norm) nor=norm] =tako] |= [=ship =desk dome =tako]
^- (set [^ship ^desk ^tako norm path]) ^- (set [^ship ^desk ^tako norm path])
~+ ~+
=/ =yaki (~(got by hut.rang) tako) =/ =yaki (~(got by hut.rang) tako)

View File
:: Kiln: merge each version of remote desk? XX clarify
::
:::: /hoon/track/hood/gen
::
/? 310
::
::::
::
:- %say
|= $: [now=@da eny=@uvJ bec=beak]
[arg=[syd=@tas her=@p sud=@tas ~] ~]
==
:- %kiln-track
[syd her sud]:arg
View File
@ -8,9 +8,11 @@
:: ::
:- %say :- %say
|= $: [now=@da eny=@uvJ byk=beak] |= $: [now=@da eny=@uvJ byk=beak]
[arg=$?([dap=term ~] [who=ship dap=term ~]) ~] arg=$?([dap=term ~] [who=ship dap=term ~])
drum-session=@ta
== ==
:- %drum-unlink :- %drum-unlink
:- drum-session
?~ +.arg ?~ +.arg
[p.byk dap.arg] [p.byk dap.arg]
[who.arg dap.arg] [who.arg dap.arg]
View File
@ -17,60 +17,4 @@
=/ sys=path =/ sys=path
?^ arg top.arg ?^ arg top.arg
/(scot %p p.bec)/[q.bec]/(scot %da now)/sys /(scot %p p.bec)/[q.bec]/(scot %da now)/sys
=/ lib (ivory:pill sys)
(welp (flop (tail (flop sys))) /lib)
::
|^ =/ ver
=/ sub *(trap vase)
=. sub (build-sys sub %hoon)
=. sub (build-sys sub %arvo)
=. sub (build-sys sub %lull)
=. sub (build-sys sub %zuse)
=. sub (build-lib sub & %ethereum)
=. sub (build-lib sub & %azimuth)
(build-lib sub | %vere)
=/ nok !.
=> *[ver=(trap vase) ~]
!= q:$:ver
ivory/[nok ver ~]
::
++ build-sys
|= [sub=(trap vase) nam=term] ^- (trap vase)
~> %slog.[0 leaf+"ivory: building /sys/{(trip nam)}"]
(swat sub (rain /sys/[nam]/hoon .^(@t cx+(welp sys /[nam]/hoon))))
::
++ build-lib
|= [sub=(trap vase) imp=? nam=term] ^- (trap vase)
~> %slog.[0 leaf+"ivory: building /lib/{(trip nam)}"]
=/ hun=hoon
%+ mist /lib/[nam]/hoon
.^(@t cx+(welp lib /[nam]/hoon))
?. imp (swat sub hun)
(swel sub [%ktts nam hun])
:: +mist: +rain but skipping past ford runes
::
++ mist
|= [bon=path txt=@]
^- hoon
=+ vas=vast
~| bon
%+ scan (trip txt)
%- full
=; fud
(ifix [;~(plug gay fud) gay] tall:vas(wer bon))
%- star
;~ pose vul
%+ ifix [fas (just `@`10)]
(star ;~(less (just `@`10) next))
==
:: +swel: +swat but with +slop
::
++ swel
|= [tap=(trap vase) gen=hoon]
^- (trap vase)
=/ gun (~(mint ut p:$:tap) %noun gen)
=> [tap=tap gun=gun]
|. ~+
=/ pro q:$:tap
[[%cell p.gun p:$:tap] [.*(pro q.gun) pro]]
--
Some files were not shown because too many files have changed in this diff