Mirror of https://github.com/urbit/shrub.git, synced 2024-12-01 06:35:32 +03:00

Commit 897d00334f: Merge branch 'next/kelvin/412' into yu/enable-close-flows

.github/workflows/shared.yml (vendored): 14 changed lines
@@ -55,7 +55,9 @@ jobs:
    runs-on: ${{ matrix.os }}

    steps:
      - uses: actions/checkout@v2
      - uses: actions/checkout@v3
        with:
          lfs: true

      # We only want the extra nix config on linux, where it is necessary
      # for the docker build. We don't want in on Mac, where it isn't but
@@ -77,7 +79,6 @@ jobs:
      - if: ${{ matrix.os == 'ubuntu-latest' }}
        name: run urbit-tests
        run: |
          cp -RL tests pkg/arvo/tests
          if [[ "${{ inputs.next }}" == "next/kelvin/"* ]]; then
          next=$(echo ${{ inputs.next }} | sed 's/[^0-9]//g')
          base="https://bootstrap.urbit.org/vere/next/kelvin/${next}"
@@ -91,7 +92,8 @@ jobs:
          url="$(echo ${base}/v${vere}/vere-v${vere}-linux-x86_64)"
          echo $vere
          echo $url
          # put in .jam so it doesn't crash when it gets -A'd in
          curl -Lo pkg/arvo/vere.jam "$url"
          chmod +x pkg/arvo/vere.jam
          nix-build -A urbit-tests
          curl -Lo urbit "$url"
          chmod +x urbit
          git add urbit

          nix flake check --keep-build-log -L
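The hunks above assemble the download URL for a prerelease vere binary from the kelvin branch name and fetch it as ./urbit for the flake check. A minimal stand-alone sketch of that logic in Bash, with placeholder values for the two inputs (in CI, next comes from ${{ inputs.next }} and the vere version is resolved by a step not shown in this diff):

  next="next/kelvin/412"   # placeholder for the workflow input
  vere="2.12"              # placeholder; the real value is resolved elsewhere in the workflow
  next=$(echo "$next" | sed 's/[^0-9]//g')    # -> 412
  base="https://bootstrap.urbit.org/vere/next/kelvin/${next}"
  url="${base}/v${vere}/vere-v${vere}-linux-x86_64"
  curl -Lo urbit "$url"
  chmod +x urbit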
flake.lock (new file): 76 added lines
@@ -0,0 +1,76 @@
{
  "nodes": {
    "flake-utils": {
      "inputs": {
        "systems": "systems"
      },
      "locked": {
        "lastModified": 1687709756,
        "narHash": "sha256-Y5wKlQSkgEK2weWdOu4J3riRd+kV/VCgHsqLNTTWQ/0=",
        "owner": "numtide",
        "repo": "flake-utils",
        "rev": "dbabf0ca0c0c4bce6ea5eaf65af5cb694d2082c7",
        "type": "github"
      },
      "original": {
        "owner": "numtide",
        "repo": "flake-utils",
        "type": "github"
      }
    },
    "nixpkgs": {
      "locked": {
        "lastModified": 1688465017,
        "narHash": "sha256-xzFcCnzPOgQaX7Acprfqo+tqHJ2UKWC38pXrcqvdXHU=",
        "owner": "NixOS",
        "repo": "nixpkgs",
        "rev": "0d5682acc109add20f70440269587a1b169cc2fe",
        "type": "github"
      },
      "original": {
        "id": "nixpkgs",
        "type": "indirect"
      }
    },
    "root": {
      "inputs": {
        "flake-utils": "flake-utils",
        "nixpkgs": "nixpkgs",
        "tools": "tools"
      }
    },
    "systems": {
      "locked": {
        "lastModified": 1681028828,
        "narHash": "sha256-Vy1rq5AaRuLzOxct8nz4T6wlgyUR7zLU309k9mBC768=",
        "owner": "nix-systems",
        "repo": "default",
        "rev": "da67096a3b9bf56a91d16901293e51ba5b49a27e",
        "type": "github"
      },
      "original": {
        "owner": "nix-systems",
        "repo": "default",
        "type": "github"
      }
    },
    "tools": {
      "flake": false,
      "locked": {
        "lastModified": 1687273483,
        "narHash": "sha256-41X9PWXGM7lFzPiIj4dw3i8bgMvpT85iYHSeyIxPJcc=",
        "owner": "urbit",
        "repo": "tools",
        "rev": "2ad116fdb539e4116dc18d86772a241415cc1ef4",
        "type": "github"
      },
      "original": {
        "owner": "urbit",
        "repo": "tools",
        "type": "github"
      }
    }
  },
  "root": "root",
  "version": 7
}
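flake.lock pins every input of the new flake (flake-utils, nixpkgs, systems, urbit/tools) to the exact revisions and NAR hashes above. A hedged sketch of how the pins could be inspected or refreshed locally, assuming a flakes-enabled Nix run from the repository root:

  nix flake metadata                     # show the locked inputs and revisions
  nix flake lock --update-input tools    # re-pin a single input (here: urbit/tools)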
flake.nix (new file): 75 added lines
@@ -0,0 +1,75 @@
{
  inputs = {
    flake-utils.url = "github:numtide/flake-utils";
    tools = {
      flake = false;
      url = "github:urbit/tools";
    };
  };

  outputs = { self, nixpkgs, flake-utils, tools }:
    flake-utils.lib.eachDefaultSystem (system:
      let
        usableTools = pkgs.runCommand "patched-tools" { } ''
          cp -r ${tools} $out
          chmod +w -R $out
          patchShebangs $out
        '';
        pkgs = import nixpkgs { inherit system; };
        bootFakeShip = { pill, arvo }:
          pkgs.runCommand "fake-pier" { } ''
            ${./urbit} --pier $out -F zod -B ${pill} -l -x -t -A ${arvo}
          '';
        fakePier = bootFakeShip {
          pill = ./bin/solid.pill;
          arvo = "${./pkg}/arvo";
        };
        testPier = bootFakeShip {
          pill = ./bin/solid.pill;
          arvo = pkgs.runCommand "test-arvo" {} ''
            cp -r ${./pkg} $out
            chmod +w -R $out
            cp -r ${./tests} $out/arvo/tests
            cp -r ${./test-desk.bill} $out/arvo/desk.bill
          '' + "/arvo";
        };
        buildPillThread = pill:
          pkgs.writeTextFile {
            name = "";
            text = ''
              =/ m (strand ,vase)
              ;< [=ship =desk =case] bind:m get-beak
              ;< ~ bind:m (poke [ship %dojo] %lens-command !>([%$ [%dojo '+${pill}'] [%output-pill '${pill}/pill']]))
              ;< ~ bind:m (poke [ship %hood] %drum-exit !>(~))
              (pure:m !>(~))
            '';
          };
        buildPill = pill:
          pkgs.runCommand ("${pill}.pill") { buildInputs = [ pkgs.netcat ]; } ''
            cp -r ${fakePier} pier
            chmod +w -R pier
            ${./urbit} -d pier
            ${usableTools}/pkg/click/click -k -p -i ${buildPillThread pill} pier

            # Sleep to let urbit spin down properly
            sleep 5

            cp pier/.urb/put/${pill}.pill $out
          '';

      in {
        checks = {
          testFakeShip = import ./nix/test-fake-ship.nix {
            inherit pkgs;
            pier = testPier;
            click = usableTools + "/pkg/click/click";
          };
        };
        packages = {
          inherit fakePier testPier;
          brass = buildPill "brass";
          ivory = buildPill "ivory";
          solid = buildPill "solid";
        };
      });
}
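The flake exposes the fake piers and the three pill builds as packages, and the fake-ship test as a check. Assuming a flakes-enabled Nix invoked from the repository root, typical usage would look like:

  # Build a solid pill (also available: .#brass, .#ivory, .#fakePier, .#testPier).
  nix build .#solid

  # Run the testFakeShip check, keeping build logs, as the CI job above does.
  nix flake check --keep-build-log -L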
@@ -1,34 +0,0 @@
# The build system where packages will be _built_.
{ system ? builtins.currentSystem
# The host system where packages will _run_.
, crossSystem ? null
# Additional sources.json overrides.
, sources ? { }
# Additional nixpkgs.config overrides.
, config ? { }
# Additional nixpkgs.overlays.
, overlays ? [ ]
# Overlays to apply to the last package set in cross compilation.
, crossOverlays ? [ ] }:

let

  finalSources = import ./sources.nix { } // sources;

  pkgs = import finalSources.nixpkgs {
    inherit system config crossSystem crossOverlays;

    overlays = [
      # Make prev.sources available to subsequent overlays.
      (_final: _prev: { sources = finalSources; })
      # General unguarded (native) overrides for nixpkgs.
      (import ./overlays/native.nix)
      # Specific overrides guarded by the host platform.
      (import ./overlays/musl.nix)
      (import ./overlays/arm.nix)
    ];
  };

in pkgs // {
  pkgsStatic = pkgs.pkgsStatic.extend (import ./overlays/static.nix);
}
@@ -1,72 +0,0 @@
{ lib, stdenvNoCC, curl }:

{ arvo ? null, pill, ship, arguments ? [ "-l" ] }:

let

  args = arguments ++ [ "-d" "-F" "${ship}" "-B" "${pill}" ]
    ++ lib.optionals (arvo != null) [ "-A" "${arvo}" ];

in stdenvNoCC.mkDerivation {
  name = "fake-${ship}";

  buildInputs = [ curl ];

  phases = [ "buildPhase" "installPhase " ];

  buildPhase = ''
    set -xeuo pipefail

    ${arvo}/vere.jam ${lib.concatStringsSep " " args} -c ./pier

    cleanup () {
      if [ -f ./pier/.vere.lock ]; then
        kill $(< ./pier/.vere.lock) || true
      fi

      set +x
    }

    trap cleanup EXIT

    port=$(cat ./pier/.http.ports | grep loopback | tr -s ' ' '\n' | head -n 1)

    lensd() {
      curl -f -s \
        --data "{\"source\":{\"dojo\":\"$1\"},\"sink\":{\"stdout\":null}}" \
        "http://localhost:$port" | xargs printf %s | sed 's/\\n/\n/g'
    }

    lensa() {
      curl -f -s \
        --data "{\"source\":{\"dojo\":\"$2\"},\"sink\":{\"app\":\"$1\"}}" \
        "http://localhost:$port" | xargs printf %s | sed 's/\\n/\n/g'
    }

    check () {
      [ 3 -eq "$(lensd 3)" ]
    }

    if check && sleep 10 && check; then
      header "boot success"
      lensa hood '+hood/exit'
      while [ -f ./pier/.vere.lock ]; do
        echo "waiting for pier to shut down"
        sleep 5
      done
    else
      header "boot failure"
      kill $(< ./pier/.vere.lock) || true
      set +x
      exit 1
    fi

    set +x
  '';

  installPhase = ''
    ls
    ls -a ./pier
    mv ./pier $out
  '';
}
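The lensd and lensa helpers in the fake-ship boot derivation above drive the ship over its loopback HTTP "lens" interface: a JSON body naming a dojo source and a sink, POSTed to the port read from ./pier/.http.ports. A stand-alone Bash sketch of the same request, with a hypothetical port value:

  port=12321   # placeholder; the derivation reads the real port from ./pier/.http.ports
  curl -f -s \
    --data '{"source":{"dojo":"(add 2 2)"},"sink":{"stdout":null}}' \
    "http://localhost:$port"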
@@ -1,13 +0,0 @@
# Functions that are expected run on the native (non-cross) system.

{ callPackage }:

rec {
  bootFakeShip = callPackage ./boot-fake-ship.nix { };

  testFakeShip = callPackage ./test-fake-ship.nix { inherit bootFakeShip; };

  fetchGitHubLFS = callPackage ./fetch-github-lfs.nix { };

  makeReleaseTarball = callPackage ./make-release-tarball.nix { };
}
@@ -1,107 +0,0 @@
{ lib, stdenvNoCC, runCommandLocal, cacert, curl, jq }:

{ src
# `name` shouldn't use `baseNameOf` otherwise we'll
# get `is not allowed to refer to a store path` errors.
, name ? baseNameOf src, owner ? "urbit", repo ? "urbit"
, preferLocalBuild ? true }:

assert builtins.isPath src;

let

  # Parse the first 7 characters of the supplied `src` path for the required
  # `version` key as defined by the lfs specification:
  # https://github.com/git-lfs/git-lfs/blob/master/docs/spec.md
  #
  # If `version` exists we assume we're dealing with a lfs pointer and parse
  # the `oid` and `size` from the pointer and write these into a JSON object.
  #
  # If the first 7 characters are unrecognised we assume the path is a binary
  # file and set both `oid` and `size` to `null`.
  #
  # The `oid` and `size` are then JSON decoded into an expression to use
  # as the fixed-output derivation's `sha256 = oid`, and to form a download
  # operation payload to request the actual lfs blob's real url.
  pointer = builtins.fromJSON (builtins.readFile
    (runCommandLocal "lfs-pointer-${name}" { } ''
      oid="null"
      size="null"

      if [[ "$(head -c 7 "${src}")" != "version" ]]; then
        header "lfs ${src} is a binary blob, skipping"
      else
        header "reading lfs pointer from ${src}"

        contents=($(awk '{print $2}' "${src}"))
        oid="''${contents[1]#sha256:}"
        size="''${contents[2]}"
      fi

      cat <<EOF > "$out"
      {"oid": "$oid", "size": $size}
      EOF
    ''));

  downloadUrl =
    "https://github.com/${owner}/${repo}.git/info/lfs/objects/batch";

  # Encode `oid` and `size` into a download operation per:
  # https://github.com/git-lfs/git-lfs/blob/master/docs/api/batch.md
  #
  # This is done using toJSON to avoid bash quotation issues.
  downloadPayload = builtins.toJSON {
    operation = "download";
    objects = [ pointer ];
  };

  # Define a fixed-output derivation using the lfs pointer's `oid` as the
  # expected sha256 output hash, if `oid` is not null.
  #

  # 1. Request the actual url of the binary file from the lfs batch api.
  # 2. Download the binary file contents to `$out`.
  download = stdenvNoCC.mkDerivation {
    name = "lfs-blob-${name}";
    nativeBuildInputs = [ curl jq ];
    phases = [ "installPhase" ];
    installPhase = ''
      curl=(
        curl
        --location
        --max-redirs 20
        --retry 3
        --disable-epsv
        --cookie-jar cookies
        $NIX_CURL_FLAGS
      )

      header "reading lfs metadata from ${downloadUrl}"

      href=$("''${curl[@]}" \
        -d '${downloadPayload}' \
        -H 'Accept: application/vnd.git-lfs+json' \
        '${downloadUrl}' \
        | jq -r '.objects[0].actions.download.href')

      header "download lfs data from remote"

      # Pozor/Achtung: the href contains credential and signature information,
      # so we avoid echoing it to stdout/err.
      "''${curl[@]}" -s --output "$out" "$href"
    '';

    impureEnvVars = lib.fetchers.proxyImpureEnvVars;

    SSL_CERT_FILE = "${cacert}/etc/ssl/certs/ca-bundle.crt";

    outputHashAlgo = "sha256";
    outputHashMode = "flat";
    outputHash = pointer.oid;

    inherit preferLocalBuild;
  };

# If `pointer.oid` is null then supplied the `src` must be a binary
# blob and can be returned directly.
in if pointer.oid == null || pointer.size == null then src else download
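The LFS fetcher above distinguishes pointers from real binaries by the leading "version" key, then takes the second whitespace-separated field of each pointer line. A Bash sketch of what such a pointer looks like (per the linked git-lfs spec) and how the same awk/parameter-expansion step extracts oid and size; the hash and size values are made up:

  cat > pointer <<'EOF'
  version https://git-lfs.github.com/spec/v1
  oid sha256:0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef
  size 7208960
  EOF

  contents=($(awk '{print $2}' pointer))
  oid="${contents[1]#sha256:}"   # the 64-character hash, used as the fixed-output sha256
  size="${contents[2]}"          # 7208960
  echo "{\"oid\": \"$oid\", \"size\": $size}"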
@@ -1,30 +0,0 @@
{ lib, stdenvNoCC, coreutils }:

{ name, extension ? "tgz", contents # { target = source, ... }
}:

let

  transforms = builtins.concatStringsSep " " (lib.mapAttrsToList
    (target: source: ''--transform "s,${source},${target},"'') contents);

  sources = builtins.concatStringsSep " "
    (lib.mapAttrsToList (_target: source: "${source}") contents);

in stdenvNoCC.mkDerivation {
  name = "${name}.${extension}";
  phases = [ "buildPhase" ];

  nativeBuildInputs = [ coreutils ];

  buildPhase = ''
    tar -vczf $out \
      --owner=0 --group=0 --mode=u+rw,uga+r \
      --absolute-names \
      --hard-dereference \
      ${transforms} \
      ${sources}
  '';

  preferLocalBuild = true;
}
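The release-tarball helper above builds its tar command from an attribute set mapping target names inside the tarball to source paths; each pair becomes one GNU tar --transform rename. A minimal stand-alone sketch of the expanded invocation, using a made-up mapping of { "urbit" = "./build/urbit"; }:

  tar -vczf release.tgz \
    --owner=0 --group=0 --mode=u+rw,uga+r \
    --absolute-names \
    --hard-dereference \
    --transform "s,./build/urbit,urbit," \
    ./build/urbit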
@@ -1,188 +0,0 @@
{ lib, stdenvNoCC, curl, python3, bootFakeShip }:

{ arvo ? null, pill, ship ? "bus", doCheck ? true }:

stdenvNoCC.mkDerivation {
  name = "test-${ship}";

  src = bootFakeShip { inherit arvo pill ship; };

  phases = [ "unpackPhase" "buildPhase" "checkPhase" ];

  buildInputs = [ curl python3 ];

  unpackPhase = ''
    cp -R $src ./pier
    chmod -R u+rw ./pier
  '';

  buildPhase = ''
    set -x

    ${arvo}/vere.jam -d ./pier 2> urbit-output

    # Sledge Hammer!
    # See: https://github.com/travis-ci/travis-ci/issues/4704#issuecomment-348435959
    python3 -c $'import os\n[os.set_blocking(i, True) for i in range(3)]\n'

    port=$(cat ./pier/.http.ports | grep loopback | tr -s ' ' '\n' | head -n 1)

    lensd() {
      # -f elided, this can hit server-side timeouts
      curl -s \
        --data "{\"source\":{\"dojo\":\"$1\"},\"sink\":{\"stdout\":null}}" \
        "http://localhost:$port" | xargs printf %s | sed 's/\\n/\n/g'
    }

    lensa() {
      # -f elided, this can hit server-side timeouts
      curl -s \
        --data "{\"source\":{\"dojo\":\"$2\"},\"sink\":{\"app\":\"$1\"}}" \
        "http://localhost:$port" | xargs printf %s | sed 's/\\n/\n/g'
    }

    tail -F urbit-output >&2 &

    tailproc=$!

    cleanup () {
      kill $(cat ./pier/.vere.lock) || true
      kill "$tailproc" 2>/dev/null || true

      set +x
    }

    trap cleanup EXIT

    # measure initial memory usage
    #
    lensd '~& ~ ~& %init-mass-start ~'
    lensa hood '+hood/mass'
    lensd '~& ~ ~& %init-mass-end ~'

    # run the unit tests
    #
    lensd '~& ~ ~& %test-unit-start ~'
    lensd '-test %/tests ~'
    lensd '~& ~ ~& %test-unit-end ~'

    # use the :test app to build all agents, generators, and marks
    #
    lensa hood '+hood/start %test'

    lensd '~& ~ ~& %test-agents-start ~'
    lensa test '%agents'
    lensd '~& ~ ~& %test-agents-end ~'

    lensd '~& ~ ~& %test-generators-start ~'
    lensa test '%generators'
    lensd '~& ~ ~& %test-generators-end ~'

    lensd '~& ~ ~& %test-marks-start ~'
    lensa test '%marks'
    lensd '~& ~ ~& %test-marks-end ~'

    # measure memory usage post tests
    #
    lensd '~& ~ ~& %test-mass-start ~'
    lensa hood '+hood/mass'
    lensd '~& ~ ~& %test-mass-end ~'

    # defragment the loom
    #
    lensd '~& ~ ~& %pack-start ~'
    lensa hood '+hood/pack'
    lensd '~& ~ ~& %pack-end ~'

    # reclaim space within arvo
    #
    lensd '~& ~ ~& %trim-start ~'
    lensa hood '+hood/trim'
    lensd '~& ~ ~& %trim-end ~'

    # measure memory usage pre |meld
    #
    lensd '~& ~ ~& %trim-mass-start ~'
    lensa hood '+hood/mass'
    lensd '~& ~ ~& %trim-mass-end ~'

    # globally deduplicate
    #
    lensd '~& ~ ~& %meld-start ~'
    lensa hood '+hood/meld'
    lensd '~& ~ ~& %meld-end ~'

    # measure memory usage post |meld
    #
    lensd '~& ~ ~& %meld-mass-start ~'
    lensa hood '+hood/mass'
    lensd '~& ~ ~& %meld-mass-end ~'

    lensa hood '+hood/exit'

    cleanup

    # Collect output
    cp urbit-output test-output-unit
    cp urbit-output test-output-agents
    cp urbit-output test-output-generators
    cp urbit-output test-output-marks

    sed -i '0,/test-unit-start/d' test-output-unit
    sed -i '/test-unit-end/,$d' test-output-unit

    sed -i '0,/test-agents-start/d' test-output-agents
    sed -i '/test-agents-end/,$d' test-output-agents

    sed -i '0,/test-generators-start/d' test-output-generators
    sed -i '/test-generators-end/,$d' test-output-generators

    sed -i '0,/test-marks-start/d' test-output-marks
    sed -i '/test-marks-end/,$d' test-output-marks

    mkdir -p $out

    cp test-output-* $out/

    set +x
  '';

  checkPhase = ''
    hdr () {
      echo =====$(sed 's/./=/g' <<< "$1")=====
      echo ==== $1 ====
      echo =====$(sed 's/./=/g' <<< "$1")=====
    }

    for f in $(find "$out/" -type f); do
      hdr "$(basename $f)"
      cat "$f"
    done

    fail=0

    for f in $(find "$out/" -type f); do
      if egrep "((FAILED|CRASHED)|warn:) " $f >/dev/null; then
        if [[ $fail -eq 0 ]]; then
          hdr "Test Failures"
        fi

        echo "ERROR Test failure in $(basename $f)"

        ((fail++))
      fi
    done

    if [[ $fail -eq 0 ]]; then
      hdr "Success"
    fi

    exit "$fail"
  '';

  inherit doCheck;

  # Fix 'bind: operation not permitted' when nix.useSandbox = true on darwin.
  # See https://github.com/NixOS/nix/blob/5f6840fbb49ae5b534423bd8a4360646ee93dbaf/src/libstore/build.cc#L2961
  __darwinAllowLocalNetworking = true;
}
@@ -1,14 +0,0 @@
final: prev:

let

  isAarch64 = prev.stdenv.hostPlatform.isAarch64;
  isDarwin = prev.stdenv.isDarwin;

in prev.lib.optionalAttrs (isAarch64 && !isDarwin) {
  libsigsegv = prev.libsigsegv.overrideAttrs (attrs: {
    preConfigure = (prev.preConfigure or "") + ''
      sed -i 's/^CFG_FAULT=$/CFG_FAULT=fault-linux-arm.h/' configure
    '';
  });
}
@@ -1,26 +0,0 @@
final: prev:

let

  isMusl = prev.stdenv.hostPlatform.isMusl;

  optionalList = xs: if xs == null then [ ] else xs;

  overrideStdenv = pkg: pkg.override { stdenv = prev.gcc9Stdenv; };

in prev.lib.optionalAttrs isMusl {
  libsigsegv = prev.libsigsegv.overrideAttrs (attrs: {
    preConfigure = (attrs.preConfigure or "") + ''
      sed -i 's/^CFG_FAULT=$/CFG_FAULT=fault-linux-i386.h/' configure
    '';
  });

  secp256k1 = prev.secp256k1.overrideAttrs (attrs: {
    nativeBuildInputs = (attrs.nativeBuildInputs or [ ])
      ++ [ prev.buildPackages.stdenv.cc ];
  });

  rhash = overrideStdenv prev.rhash;

  numactl = overrideStdenv prev.numactl;
}
@@ -1,40 +0,0 @@
final: prev:

let

  optionalList = xs: if xs == null then [ ] else xs;

in {
  h2o = prev.h2o.overrideAttrs (_attrs: {
    version = final.sources.h2o.rev;
    src = final.sources.h2o;
    outputs = [ "out" "dev" "lib" ];
    meta.platforms = prev.lib.platforms.linux ++ prev.lib.platforms.darwin;
  });

  libsigsegv = prev.libsigsegv.overrideAttrs (attrs: {
    patches = optionalList attrs.patches ++ [
      ../pkgs/libsigsegv/disable-stackvma_fault-linux-arm.patch
      ../pkgs/libsigsegv/disable-stackvma_fault-linux-i386.patch
    ];
  });

  curlUrbit = prev.curlMinimal.override {
    http2Support = false;
    scpSupport = false;
    gssSupport = false;
    ldapSupport = false;
    brotliSupport = false;
  };

  # lies, all lies
  openssl-static-osx = prev.openssl;
  zlib-static-osx = prev.zlib;

  lmdb = prev.lmdb.overrideAttrs (attrs: {
    patches =
      optionalList attrs.patches ++ prev.lib.optional prev.stdenv.isDarwin [
        ../pkgs/lmdb/darwin-fsync.patch
      ];
  });
}
@@ -1,40 +0,0 @@
final: prev:

let

  # https://github.com/NixOS/nixpkgs/pull/97047/files
  # Will make pkgs.stdenv.isStatic available indepedent of the platform.
  # isStatic = prev.stdenv.hostPlatform.isStatic;

  configureFlags = attrs: {
    configureFlags = (attrs.configureFlags or [ ])
      ++ [ "--disable-shared" "--enable-static" ];
  };

  enableStatic = pkg: pkg.overrideAttrs configureFlags;

in {
  gmp = enableStatic prev.gmp;

  curlUrbit = enableStatic (prev.curlUrbit.override { openssl = final.openssl-static-osx; zlib = final.zlib-static-osx; });

  libuv = enableStatic prev.libuv;

  libffi = enableStatic prev.libffi;

  openssl-static-osx = prev.openssl.override {
    static = true;
    withPerl = false;
  };

  zlib-static-osx = if final.stdenv.isDarwin then prev.zlib.static else prev.zlib;

  secp256k1 = enableStatic prev.secp256k1;

  lmdb = prev.lmdb.overrideAttrs (old:
    configureFlags old // {
      postPatch = ''
        sed '/^ILIBS\t/s/liblmdb\$(SOEXT)//' -i Makefile
      '';
    });
}
@@ -1,21 +0,0 @@
{ lib, stdenvNoCC, marsSources }:

stdenvNoCC.mkDerivation {
  name = "arvo";

  src = marsSources;

  outputs = [ "out" "goerli" ];

  phases = [ "mainnetPhase" "goerliPhase" ];

  mainnetPhase = ''
    ln -s ${marsSources.out}/arvo $out
  '';

  goerliPhase = ''
    ln -s ${marsSources.goerli}/arvo $goerli
  '';

  preferLocalBuild = true;
}
@@ -1,29 +0,0 @@
{ stdenvNoCC, xxd, cacert }:

stdenvNoCC.mkDerivation {
  name = "ca-bundle";

  nativeBuildInputs = [ cacert xxd ];

  phases = [ "installPhase" ];

  installPhase = ''
    set -euo pipefail

    if ! [ -f "$SSL_CERT_FILE" ]; then
      header "$SSL_CERT_FILE doesn't exist"
      exit 1
    fi

    mkdir include

    cat $SSL_CERT_FILE > include/ca-bundle.crt
    xxd -i include/ca-bundle.crt > ca-bundle.h

    mkdir -p $out/include

    mv ca-bundle.h $out/include
  '';

  preferLocalBuild = true;
}
@@ -1,112 +0,0 @@
{ urbit, curl, libcap, coreutils, bashInteractive, dockerTools, writeScriptBin, amesPort ? 34343 }:
let
  startUrbit = writeScriptBin "start-urbit" ''
    #!${bashInteractive}/bin/bash

    set -eu

    # set defaults
    amesPort=${toString amesPort}

    # check args
    for i in "$@"
    do
      case $i in
        -p=*|--port=*)
          amesPort="''${i#*=}"
          shift
          ;;
      esac
    done

    # If the container is not started with the `-i` flag
    # then STDIN will be closed and we need to start
    # Urbit/vere with the `-t` flag.
    ttyflag=""
    if [ ! -t 0 ]; then
      echo "Running with no STDIN"
      ttyflag="-t"
    fi

    # Check if there is a keyfile, if so boot a ship with its name, and then remove the key
    if [ -e *.key ]; then
      # Get the name of the key
      keynames="*.key"
      keys=( $keynames )
      keyname=''${keys[0]}
      mv $keyname /tmp

      # Boot urbit with the key, exit when done booting
      urbit $ttyflag -w $(basename $keyname .key) -k /tmp/$keyname -c $(basename $keyname .key) -p $amesPort -x

      # Remove the keyfile for security
      rm /tmp/$keyname
      rm *.key || true
    elif [ -e *.comet ]; then
      cometnames="*.comet"
      comets=( $cometnames )
      cometname=''${comets[0]}
      rm *.comet

      urbit $ttyflag -c $(basename $cometname .comet) -p $amesPort -x
    fi

    # Find the first directory and start urbit with the ship therein
    dirnames="*/"
    dirs=( $dirnames )
    dirname=''${dirnames[0]}

    exec urbit $ttyflag -p $amesPort $dirname
  '';

  getUrbitCode = writeScriptBin "get-urbit-code" ''
    #!${bashInteractive}/bin/bash

    raw=$(curl -s -X POST -H "Content-Type: application/json" \
      -d '{ "source": { "dojo": "+code" }, "sink": { "stdout": null } }' \
      http://127.0.0.1:12321)

    # trim \n" from the end
    trim="''${raw%\\n\"}"

    # trim " from the start
    code="''${trim#\"}"

    echo "$code"
  '';

  resetUrbitCode = writeScriptBin "reset-urbit-code" ''
    #!${bashInteractive}/bin/bash

    curl=$(curl -s -X POST -H "Content-Type: application/json" \
      -d '{ "source": { "dojo": "+hood/code %reset" }, "sink": { "app": "hood" } }' \
      http://127.0.0.1:12321)

    if [[ $? -eq 0 ]]
    then
      echo "OK"
    else
      echo "Curl error: $?"
    fi
  '';

in dockerTools.buildImage {
  name = "urbit";
  tag = "v${urbit.version}";
  contents = [ bashInteractive urbit curl startUrbit getUrbitCode resetUrbitCode coreutils ];
  runAsRoot = ''
    #!${bashInteractive}
    mkdir -p /urbit
    mkdir -p /tmp
    ${libcap}/bin/setcap 'cap_net_bind_service=+ep' /bin/urbit
  '';
  config = {
    Cmd = [ "/bin/start-urbit" ];
    Env = [ "PATH=/bin" ];
    WorkingDir = "/urbit";
    Volumes = {
      "/urbit" = {};
    };
    Expose = [ "80/tcp" "${toString amesPort}/udp" ];
  };
}
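The dockerTools.buildImage call above produces an image tarball whose container expects a pier directory (or a *.key / *.comet file) under /urbit, exposes HTTP on 80 and Ames on the configured UDP port, and ships the helper scripts on its PATH. A hedged Bash usage sketch; the image tag, container name, and volume name are illustrative:

  docker load < result
  docker run -d \
    -p 8080:80 \
    -p 34343:34343/udp \
    -v urbit-data:/urbit \
    --name zod \
    urbit:v1.10          # tag is v${urbit.version}; substitute the version actually built
  # Print the +code of the running ship (assumes the default loopback port 12321
  # that get-urbit-code targets).
  docker exec zod get-urbit-code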
@@ -1,14 +0,0 @@
{ lib, stdenv, enableParallelBuilding ? true }:

stdenv.mkDerivation {
  name = "ent";
  src = lib.cleanSource ../../../pkg/ent;

  postPatch = ''
    patchShebangs ./configure
  '';

  installFlags = [ "PREFIX=$(out)" ];

  inherit enableParallelBuilding;
}
@@ -1,29 +0,0 @@
{ lib, stdenvNoCC, python }:

# Avoid using `python.withPackages` as it creates a wrapper script to set
# PYTHONPATH, and the script is used verbatim as a python shebang.
#
# Unfortunately Darwin does not allow scripts as a shebang - so to get a
# cross platform python interpreter with appropriate site-packages setup
# we use `wrapPython/Packages` which handles these cases correctly.

stdenvNoCC.mkDerivation {
  name = "herb";
  src = ../../../pkg/herb/herb;

  nativeBuildInputs = [ python.pkgs.wrapPython ];
  buildInputs = [ python python.pkgs.requests ];
  pythonPath = [ python.pkgs.requests ];

  phases = [ "installPhase" "fixupPhase" ];

  installPhase = ''
    mkdir -p $out/bin
    cp $src $out/bin/herb
    chmod +x $out/bin/herb
  '';

  postFixup = ''
    wrapPythonPrograms
  '';
}
@ -1,34 +0,0 @@
|
||||
--- a/CMakeLists.txt
|
||||
+++ b/CMakeLists.txt
|
||||
@@ -5,6 +5,8 @@ if("${CMAKE_BUILD_TYPE}" STREQUAL "")
|
||||
set(CMAKE_BUILD_TYPE Release)
|
||||
endif("${CMAKE_BUILD_TYPE}" STREQUAL "")
|
||||
|
||||
+option(BUILD_SHARED_LIBS "Build shared libraries" ON)
|
||||
+
|
||||
include(GNUInstallDirs)
|
||||
|
||||
# Warning: don't use the UB sanitizer in production builds. It can introduce timing side-channels
|
||||
@@ -31,10 +33,12 @@ endif(NOT DISABLE_DOCS)
|
||||
configure_file(config.h.in config.h)
|
||||
include_directories(${CMAKE_CURRENT_BINARY_DIR})
|
||||
|
||||
+if(BUILD_SHARED_LIBS)
|
||||
add_library(aes_siv SHARED aes_siv.c)
|
||||
target_include_directories(aes_siv PUBLIC ${OPENSSL_INCLUDE_DIR})
|
||||
target_link_libraries(aes_siv ${OPENSSL_CRYPTO_LIBRARY})
|
||||
set_target_properties(aes_siv PROPERTIES VERSION "1.0.1" SOVERSION 1)
|
||||
+endif()
|
||||
|
||||
add_library(aes_siv_static STATIC aes_siv.c)
|
||||
target_include_directories(aes_siv_static PUBLIC ${OPENSSL_INCLUDE_DIR})
|
||||
@@ -63,7 +67,9 @@ endif(ENABLE_SANITIZER)
|
||||
add_executable(bench EXCLUDE_FROM_ALL bench.c)
|
||||
target_link_libraries(bench aes_siv_static)
|
||||
|
||||
+if(BUILD_SHARED_LIBS)
|
||||
install(TARGETS aes_siv LIBRARY DESTINATION ${CMAKE_INSTALL_LIBDIR})
|
||||
+endif()
|
||||
install(TARGETS aes_siv_static ARCHIVE DESTINATION ${CMAKE_INSTALL_LIBDIR})
|
||||
install(FILES aes_siv.h DESTINATION ${CMAKE_INSTALL_INCLUDEDIR})
|
||||
|
@ -1,17 +0,0 @@
|
||||
{ stdenv, sources, cmake, openssl, enableParallelBuilding ? true }:
|
||||
|
||||
stdenv.mkDerivation {
|
||||
name = "libaes_siv";
|
||||
version = sources.libaes_siv.rev;
|
||||
src = sources.libaes_siv;
|
||||
patches = [ ./cmakefiles_static.patch ];
|
||||
|
||||
nativeBuildInputs = [ cmake ];
|
||||
buildInputs = [ openssl ];
|
||||
|
||||
cmakeFlags = [
|
||||
"-DBUILD_SHARED_LIBS=OFF"
|
||||
];
|
||||
|
||||
inherit enableParallelBuilding;
|
||||
}
|
@ -1,11 +0,0 @@
|
||||
--- a/src/fault-linux-arm.h
|
||||
+++ b/src/fault-linux-arm.h
|
||||
@@ -17,6 +17,8 @@
|
||||
|
||||
#include "fault-posix-ucontext.h"
|
||||
|
||||
+#define HAVE_STACKVMA 0
|
||||
+
|
||||
#if defined(__aarch64__) || defined(__ARM_64BIT_STATE) || defined(__ARM_PCS_AAPCS64) /* 64-bit */
|
||||
|
||||
/* See glibc/sysdeps/unix/sysv/linux/aarch64/sys/ucontext.h.
|
@ -1,11 +0,0 @@
|
||||
--- a/src/fault-linux-i386.h
|
||||
+++ b/src/fault-linux-i386.h
|
||||
@@ -18,6 +18,8 @@
|
||||
|
||||
#include "fault-posix-ucontext.h"
|
||||
|
||||
+#define HAVE_STACKVMA 0
|
||||
+
|
||||
#if defined __x86_64__
|
||||
/* 64 bit registers */
|
||||
|
@ -1,13 +0,0 @@
|
||||
diff --git a/libraries/liblmdb/mdb.c b/libraries/liblmdb/mdb.c
|
||||
index fe65e30..0070215 100644
|
||||
--- a/libraries/liblmdb/mdb.c
|
||||
+++ b/libraries/liblmdb/mdb.c
|
||||
@@ -2526,7 +2526,7 @@ mdb_env_sync(MDB_env *env, int force)
|
||||
rc = ErrCode();
|
||||
} else
|
||||
#endif
|
||||
- if (MDB_FDATASYNC(env->me_fd))
|
||||
+ if (fcntl(env->me_fd, F_FULLFSYNC, 0))
|
||||
rc = ErrCode();
|
||||
}
|
||||
}
|
@ -1,46 +0,0 @@
|
||||
{ lib, stdenvNoCC, bc }:
|
||||
|
||||
stdenvNoCC.mkDerivation {
|
||||
name = "sources";
|
||||
src = lib.cleanSource ../../../pkg;
|
||||
|
||||
buildInputs = [ bc ];
|
||||
|
||||
outputs = [ "out" "goerli" ];
|
||||
|
||||
phases = [ "mainnetPhase" "goerliPhase" ];
|
||||
|
||||
mainnetPhase = ''
|
||||
cp -r $src $out
|
||||
chmod -R u+w $out
|
||||
'';
|
||||
|
||||
goerliPhase = ''
|
||||
cp -r $src tmp
|
||||
chmod -R u+w tmp
|
||||
|
||||
ZUSE=tmp/arvo/sys/zuse.hoon
|
||||
AMES=tmp/arvo/sys/vane/ames.hoon
|
||||
ACME=tmp/arvo/app/acme.hoon
|
||||
|
||||
# Replace the mainnet azimuth contract with the goerli contract
|
||||
sed --in-place \
|
||||
's/\(\+\+ contracts \)mainnet\-contracts/\1goerli-contracts/' \
|
||||
$ZUSE
|
||||
|
||||
# Increment the %ames protocol version
|
||||
sed -r --in-place \
|
||||
's_^(=/ protocol\-version=\?\(.*\) %)([0-7])_echo "\1$(echo "(\2+1) % 8" | bc)"_e' \
|
||||
$AMES
|
||||
|
||||
# Use the staging API in :acme
|
||||
sed --in-place \
|
||||
's_https://acme-v02.api.letsencrypt.org/directory_https://acme-staging-v02.api.letsencrypt.org/directory_' \
|
||||
$ACME
|
||||
|
||||
cp -r tmp $goerli
|
||||
chmod -R u+w $goerli
|
||||
'';
|
||||
|
||||
preferLocalBuild = true;
|
||||
}
|
@ -1,17 +0,0 @@
|
||||
{ stdenv, sources }:
|
||||
|
||||
stdenv.mkDerivation {
|
||||
pname = "murmur3";
|
||||
version = sources.murmur3.rev;
|
||||
src = sources.murmur3;
|
||||
|
||||
buildPhase = ''
|
||||
$CC -fPIC -O3 -o murmur3.o -c $src/murmur3.c
|
||||
'';
|
||||
|
||||
installPhase = ''
|
||||
mkdir -p $out/{lib,include}
|
||||
$AR rcs $out/lib/libmurmur3.a murmur3.o
|
||||
cp $src/*.h $out/include/
|
||||
'';
|
||||
}
|
@ -1,22 +0,0 @@
|
||||
{ lib, stdenvNoCC, fetchGitHubLFS, bootFakeShip, solid, urbit, arvo, curl
|
||||
, withGoerli ? false }:
|
||||
|
||||
let
|
||||
|
||||
lfs = fetchGitHubLFS { src = ../../../bin/brass.pill; };
|
||||
|
||||
in {
|
||||
build = import ./builder.nix {
|
||||
inherit stdenvNoCC urbit curl;
|
||||
|
||||
name = "brass" + lib.optionalString withGoerli "-goerli";
|
||||
builder = ./brass.sh;
|
||||
arvo = if withGoerli then arvo.goerli else arvo;
|
||||
pier = bootFakeShip {
|
||||
inherit urbit;
|
||||
|
||||
pill = solid.lfs;
|
||||
ship = "zod";
|
||||
};
|
||||
};
|
||||
} // lib.optionalAttrs (!withGoerli) { inherit lfs; }
|
@ -1,40 +0,0 @@
|
||||
source $stdenv/setup
|
||||
|
||||
set -euo pipefail
|
||||
|
||||
cp -r $src ./pier
|
||||
chmod -R u+rw ./pier
|
||||
|
||||
urbit -d ./pier
|
||||
|
||||
cleanup () {
|
||||
if [ -f ./pier/.vere.lock ]; then
|
||||
kill $(< ./pier/.vere.lock) || true
|
||||
fi
|
||||
}
|
||||
|
||||
trap cleanup EXIT
|
||||
|
||||
header "running +brass"
|
||||
|
||||
port=$(cat ./pier/.http.ports | grep loopback | tr -s ' ' '\n' | head -n 1)
|
||||
|
||||
lensa() {
|
||||
# -f elided, this can hit server-side timeouts
|
||||
curl -s \
|
||||
--data "{\"source\":{\"dojo\":\"$2\"},\"sink\":{\"app\":\"$1\"}}" \
|
||||
"http://localhost:$port" | xargs printf %s | sed 's/\\n/\n/g'
|
||||
}
|
||||
|
||||
lensf() {
|
||||
# -f elided, this can hit server-side timeouts
|
||||
d=$(echo $1 | sed 's/\./\//g')
|
||||
curl -sJO \
|
||||
--data "{\"source\":{\"dojo\":\"$2\"},\"sink\":{\"output-pill\":\"$d\"}}" \
|
||||
"http://localhost:$port"
|
||||
}
|
||||
|
||||
lensf brass.pill '+brass'
|
||||
lensa hood '+hood/exit'
|
||||
|
||||
stopNest
|
@ -1,16 +0,0 @@
|
||||
{ stdenvNoCC, urbit, arvo, curl, name, builder, pier }:
|
||||
|
||||
stdenvNoCC.mkDerivation {
|
||||
name = "${name}.pill";
|
||||
src = pier;
|
||||
buildInputs = [ curl urbit ];
|
||||
dontUnpack = true;
|
||||
|
||||
buildPhase = builtins.readFile builder;
|
||||
|
||||
installPhase = ''
|
||||
mv ${name}.pill $out
|
||||
'';
|
||||
|
||||
ARVO = arvo;
|
||||
}
|
@ -1,42 +0,0 @@
|
||||
{ lib, stdenvNoCC, fetchGitHubLFS, bootFakeShip, solid, urbit, arvo, curl, xxd
|
||||
, withGoerli ? false }:
|
||||
|
||||
let
|
||||
|
||||
lfs = fetchGitHubLFS { src = ../../../bin/ivory.pill; };
|
||||
|
||||
in {
|
||||
build = import ./builder.nix {
|
||||
inherit stdenvNoCC urbit curl;
|
||||
|
||||
name = "ivory" + lib.optionalString withGoerli "-goerli";
|
||||
builder = ./ivory.sh;
|
||||
arvo = if withGoerli then arvo.goerli else arvo;
|
||||
pier = bootFakeShip {
|
||||
inherit urbit;
|
||||
|
||||
pill = solid.lfs;
|
||||
ship = "zod";
|
||||
};
|
||||
};
|
||||
|
||||
# The hexdump of the `.lfs` pill contents as a C header.
|
||||
header = stdenvNoCC.mkDerivation {
|
||||
name = "ivory-header";
|
||||
src = lfs;
|
||||
nativeBuildInputs = [ xxd ];
|
||||
phases = [ "installPhase" ];
|
||||
|
||||
installPhase = ''
|
||||
file=u3_Ivory.pill
|
||||
|
||||
header "writing $file"
|
||||
|
||||
mkdir -p $out/include
|
||||
cat $src > $file
|
||||
xxd -i $file > $out/include/ivory_impl.h
|
||||
'';
|
||||
|
||||
preferLocalBuild = true;
|
||||
};
|
||||
} // lib.optionalAttrs (!withGoerli) { inherit lfs; }
|
@ -1,40 +0,0 @@
|
||||
source $stdenv/setup
|
||||
|
||||
set -euo pipefail
|
||||
|
||||
cp -r $src ./pier
|
||||
chmod -R u+rw ./pier
|
||||
|
||||
urbit -d ./pier
|
||||
|
||||
cleanup () {
|
||||
if [ -f ./pier/.vere.lock ]; then
|
||||
kill $(< ./pier/.vere.lock) || true
|
||||
fi
|
||||
}
|
||||
|
||||
trap cleanup EXIT
|
||||
|
||||
header "running +ivory"
|
||||
|
||||
port=$(cat ./pier/.http.ports | grep loopback | tr -s ' ' '\n' | head -n 1)
|
||||
|
||||
lensa() {
|
||||
# -f elided, this can hit server-side timeouts
|
||||
curl -s \
|
||||
--data "{\"source\":{\"dojo\":\"$2\"},\"sink\":{\"app\":\"$1\"}}" \
|
||||
"http://localhost:$port" | xargs printf %s | sed 's/\\n/\n/g'
|
||||
}
|
||||
|
||||
lensf() {
|
||||
# -f elided, this can hit server-side timeouts
|
||||
d=$(echo $1 | sed 's/\./\//g')
|
||||
curl -sJO \
|
||||
--data "{\"source\":{\"dojo\":\"$2\"},\"sink\":{\"output-pill\":\"$d\"}}" \
|
||||
"http://localhost:$port"
|
||||
}
|
||||
|
||||
lensf ivory.pill '+ivory'
|
||||
lensa hood '+hood/exit'
|
||||
|
||||
stopNest
|
@ -1,23 +0,0 @@
|
||||
{ stdenvNoCC, fetchGitHubLFS, bootFakeShip, solid, urbit, arvo, curl }:
|
||||
|
||||
let
|
||||
|
||||
lfs = fetchGitHubLFS { src = ../../../bin/solid.pill; };
|
||||
|
||||
in {
|
||||
inherit lfs;
|
||||
|
||||
build = import ./builder.nix {
|
||||
inherit stdenvNoCC urbit arvo curl;
|
||||
|
||||
name = "solid";
|
||||
builder = ./solid.sh;
|
||||
pier = bootFakeShip {
|
||||
inherit urbit;
|
||||
|
||||
arvo = null;
|
||||
pill = solid.lfs;
|
||||
ship = "zod";
|
||||
};
|
||||
};
|
||||
}
|
@ -1,99 +0,0 @@
|
||||
source $stdenv/setup
|
||||
|
||||
set -euo pipefail
|
||||
|
||||
ARVO=${ARVO?:ARVO location is unset}
|
||||
|
||||
cp -r $src ./pier
|
||||
chmod -R u+rw ./pier
|
||||
|
||||
urbit -d ./pier
|
||||
|
||||
cleanup () {
|
||||
if [ -f ./pier/.vere.lock ]; then
|
||||
kill $(< ./pier/.vere.lock) || true
|
||||
fi
|
||||
}
|
||||
|
||||
trap cleanup EXIT
|
||||
|
||||
port=$(cat ./pier/.http.ports | grep loopback | tr -s ' ' '\n' | head -n 1)
|
||||
|
||||
lensa() {
|
||||
# -f elided, this can hit server-side timeouts
|
||||
curl -s \
|
||||
--data "{\"source\":{\"dojo\":\"$2\"},\"sink\":{\"app\":\"$1\"}}" \
|
||||
"http://localhost:$port" | xargs printf %s | sed 's/\\n/\n/g'
|
||||
}
|
||||
|
||||
lensf() {
|
||||
# -f elided, this can hit server-side timeouts
|
||||
d=$(echo $1 | sed 's/\./\//g')
|
||||
curl -sJO \
|
||||
--data "{\"source\":{\"dojo\":\"$2\"},\"sink\":{\"output-pill\":\"$d\"}}" \
|
||||
"http://localhost:$port"
|
||||
}
|
||||
|
||||
header "updating %base"
|
||||
|
||||
# Update pill strategy to ensure correct staging
|
||||
lensa hood "+hood/mount /=base="
|
||||
|
||||
until [ -d ./pier/base ]; do
|
||||
sleep 1
|
||||
done
|
||||
|
||||
# Update :lens, :dojo and dependencies
|
||||
# FIXME: reduce this list
|
||||
cp $ARVO/app/lens.hoon ./pier/base/app/
|
||||
cp $ARVO/app/dojo.hoon ./pier/base/app/
|
||||
cp $ARVO/lib/plume.hoon ./pier/base/lib/
|
||||
cp $ARVO/lib/server.hoon ./pier/base/lib/
|
||||
cp $ARVO/lib/sole.hoon ./pier/base/lib/
|
||||
cp $ARVO/lib/xray.hoon ./pier/base/lib/
|
||||
cp $ARVO/lib/pprint.hoon ./pier/base/lib/
|
||||
|
||||
mkdir -p ./pier/base/mar/lens/
|
||||
|
||||
cp $ARVO/mar/lens/* ./pier/base/mar/lens/
|
||||
|
||||
cp $ARVO/sur/lens.hoon ./pier/base/sur/
|
||||
cp $ARVO/sur/plum.hoon ./pier/base/sur/
|
||||
cp $ARVO/sur/sole.hoon ./pier/base/sur/
|
||||
cp $ARVO/sur/xray.hoon ./pier/base/sur/
|
||||
|
||||
# Update +solid and its dependencies
|
||||
cp $ARVO/lib/pill.hoon ./pier/base/lib/
|
||||
cp $ARVO/gen/solid.hoon ./pier/base/gen/
|
||||
|
||||
chmod -R u+rw ./pier/base/
|
||||
|
||||
lensa hood "+hood/commit %base"
|
||||
lensa hood "+hood/unmount %base"
|
||||
|
||||
# FIXME: horrible hack to ensure the update is applied first
|
||||
sleep 10
|
||||
|
||||
header "updating %stage"
|
||||
|
||||
# Stage new desk for pill contents
|
||||
lensa hood '+hood/merge %stage our %base'
|
||||
lensa hood "+hood/mount /=stage="
|
||||
|
||||
until [ -d ./pier/stage ]; do
|
||||
sleep 1
|
||||
done
|
||||
|
||||
rm -rf ./pier/stage
|
||||
cp -r $ARVO ./pier/stage
|
||||
chmod -R u+rw ./pier/stage
|
||||
|
||||
lensa hood "+hood/commit %stage"
|
||||
lensa hood "+hood/unmount %stage"
|
||||
|
||||
header "running +solid"
|
||||
|
||||
lensf solid.pill '+solid /=stage=/sys, =dub &'
|
||||
lensa hood '+hood/exit'
|
||||
|
||||
stopNest
|
@ -1,28 +0,0 @@
|
||||
{ stdenv, sources, enableParallelBuilding ? true }:
|
||||
|
||||
stdenv.mkDerivation {
|
||||
pname = "softfloat3";
|
||||
version = sources.softfloat3.rev;
|
||||
src = sources.softfloat3;
|
||||
|
||||
postPatch = ''
|
||||
for f in $(find build -type f -name 'Makefile'); do
|
||||
substituteInPlace $f \
|
||||
--replace 'gcc' '$(CC)' \
|
||||
--replace 'ar crs' '$(AR) crs'
|
||||
done
|
||||
'';
|
||||
|
||||
preBuild = ''
|
||||
cd build/Linux-x86_64-GCC
|
||||
'';
|
||||
|
||||
installPhase = ''
|
||||
mkdir -p $out/{lib,include}
|
||||
cp $src/source/include/*.h $out/include/
|
||||
cp softfloat.a $out/lib/libsoftfloat3.a
|
||||
'';
|
||||
|
||||
inherit enableParallelBuilding;
|
||||
}
|
||||
|
@ -1,85 +0,0 @@
|
||||
{ lib, stdenv, coreutils, pkgconfig # build/env
|
||||
, cacert, ca-bundle, ivory # codegen
|
||||
, curlUrbit, ent, gmp, h2o, libsigsegv, libuv, lmdb # libs
|
||||
, murmur3, openssl, openssl-static-osx, softfloat3 #
|
||||
, urcrypt, zlib, zlib-static-osx #
|
||||
, enableStatic ? stdenv.hostPlatform.isStatic # opts
|
||||
, enableDebug ? false
|
||||
, verePace ? ""
|
||||
, doCheck ? true
|
||||
, enableParallelBuilding ? true
|
||||
, dontStrip ? true }:
|
||||
|
||||
let
|
||||
|
||||
src = lib.cleanSource ../../../pkg/urbit;
|
||||
|
||||
version = builtins.readFile "${src}/version";
|
||||
|
||||
# See https://github.com/urbit/urbit/issues/5561
|
||||
oFlags =
|
||||
if stdenv.isDarwin
|
||||
then (if enableDebug then [ "-O0" "-g" ] else [ "-O3" ])
|
||||
else [ (if enableDebug then "-O0" else "-O3") "-g" ];
|
||||
|
||||
in stdenv.mkDerivation {
|
||||
inherit src version;
|
||||
|
||||
pname = "urbit" + lib.optionalString enableDebug "-debug"
|
||||
+ lib.optionalString enableStatic "-static";
|
||||
|
||||
nativeBuildInputs = [ pkgconfig ];
|
||||
|
||||
buildInputs = [
|
||||
cacert
|
||||
ca-bundle
|
||||
curlUrbit
|
||||
ent
|
||||
gmp
|
||||
h2o
|
||||
ivory.header
|
||||
libsigsegv
|
||||
libuv
|
||||
lmdb
|
||||
murmur3
|
||||
(if stdenv.isDarwin && enableStatic then openssl-static-osx else openssl)
|
||||
softfloat3
|
||||
urcrypt
|
||||
(if stdenv.isDarwin && enableStatic then zlib-static-osx else zlib)
|
||||
];
|
||||
|
||||
# Ensure any `/usr/bin/env bash` shebang is patched.
|
||||
postPatch = ''
|
||||
patchShebangs ./configure
|
||||
'';
|
||||
|
||||
checkTarget = "test";
|
||||
|
||||
installPhase = ''
|
||||
mkdir -p $out/bin
|
||||
cp ./build/urbit $out/bin/urbit
|
||||
'';
|
||||
|
||||
dontDisableStatic = enableStatic;
|
||||
|
||||
configureFlags = if enableStatic
|
||||
then [ "--disable-shared" "--enable-static" ]
|
||||
else [];
|
||||
|
||||
CFLAGS = oFlags ++ lib.optionals (!enableDebug) [ "-Werror" ];
|
||||
|
||||
MEMORY_DEBUG = enableDebug;
|
||||
CPU_DEBUG = enableDebug;
|
||||
EVENT_TIME_DEBUG = false;
|
||||
VERE_PACE = if enableStatic then verePace else "";
|
||||
|
||||
# See https://github.com/NixOS/nixpkgs/issues/18995
|
||||
hardeningDisable = lib.optionals enableDebug [ "all" ];
|
||||
|
||||
inherit enableParallelBuilding doCheck dontStrip;
|
||||
|
||||
meta = {
|
||||
debug = enableDebug;
|
||||
arguments = lib.optionals enableDebug [ "-g" ];
|
||||
};
|
||||
}
|
@ -1,21 +0,0 @@
|
||||
{ stdenv, autoreconfHook, pkgconfig
|
||||
, libaes_siv, openssl, openssl-static-osx, secp256k1
|
||||
, enableStatic ? stdenv.hostPlatform.isStatic }:
|
||||
|
||||
stdenv.mkDerivation rec {
|
||||
name = "urcrypt";
|
||||
src = ../../../pkg/urcrypt;
|
||||
|
||||
# XX why are these required for darwin?
|
||||
dontDisableStatic = enableStatic;
|
||||
|
||||
configureFlags = if enableStatic
|
||||
then [ "--disable-shared" "--enable-static" ]
|
||||
else [];
|
||||
|
||||
nativeBuildInputs =
|
||||
[ autoreconfHook pkgconfig ];
|
||||
|
||||
propagatedBuildInputs =
|
||||
[ openssl secp256k1 libaes_siv ];
|
||||
}
|
@ -1,18 +0,0 @@
|
||||
{
|
||||
"openssl": {
|
||||
"branch": "1_1_1n",
|
||||
"homepage": "https://www.openssl.org/",
|
||||
"pmnsh": {
|
||||
"include": "build/include",
|
||||
"lib": "build",
|
||||
"prepare": "./config --prefix=`mkdir -p build && readlink -f ./build` --libdir=. no-shared no-tests",
|
||||
"make": "install_dev"
|
||||
},
|
||||
"owner": "openssl",
|
||||
"repo": "openssl",
|
||||
"rev": "OpenSSL_1_1_1n",
|
||||
"type": "tarball",
|
||||
"url": "https://github.com/openssl/openssl/archive/refs/tags/OpenSSL_1_1_1n.tar.gz",
|
||||
"url_template": "https://github.com/<owner>/<repo>/archive/refs/tags/<rev>.tar.gz"
|
||||
}
|
||||
}
|
@ -1,95 +0,0 @@
|
||||
{
|
||||
"curl": {
|
||||
"branch": "master",
|
||||
"description": "A command line tool and library for transferring data with URL syntax",
|
||||
"homepage": "http://curl.se/",
|
||||
"pmnsh": {
|
||||
"compat": {
|
||||
"openbsd": {
|
||||
"prepare": "autoreconf -vfi && ./configure --disable-shared --disable-ldap --disable-rtsp --without-brotli --without-libidn2 --without-libpsl --without-nghttp2 --with-openssl=`readlink -f ../openssl/build`"
|
||||
}
|
||||
},
|
||||
"include": "include",
|
||||
"lib": "lib/.libs",
|
||||
"prepare": "autoreconf -vfi && ./configure --disable-shared --disable-ldap --disable-rtsp --without-brotli --without-libidn2 --without-libpsl --without-nghttp2 --with-openssl",
|
||||
"make": "-C lib libcurl.la"
|
||||
},
|
||||
"owner": "curl",
|
||||
"repo": "curl",
|
||||
"rev": "curl-7_77_0",
|
||||
"type": "tarball",
|
||||
"url": "https://github.com/curl/curl/archive/curl-7_77_0.tar.gz",
|
||||
"url_template": "https://github.com/<owner>/<repo>/archive/<rev>.tar.gz"
|
||||
},
|
||||
"lmdb": {
|
||||
"branch": "mdb.master",
|
||||
"description": "LMDB library",
|
||||
"homepage": "http://www.lmdb.tech/",
|
||||
"pmnsh": {
|
||||
"strip": 2,
|
||||
"make": "liblmdb.a"
|
||||
},
|
||||
"owner": "LMDB",
|
||||
"repo": "lmdb",
|
||||
"rev": "48a7fed59a8aae623deff415dda27097198ca0c1",
|
||||
"type": "tarball",
|
||||
"url": "https://github.com/LMDB/lmdb/archive/48a7fed59a8aae623deff415dda27097198ca0c1.tar.gz",
|
||||
"url_template": "https://github.com/<owner>/<repo>/archive/<rev>.tar.gz"
|
||||
},
|
||||
"secp256k1": {
|
||||
"branch": "master",
|
||||
"description": "Optimized C library for ECDSA signatures and secret/public key operations on curve secp256k1.",
|
||||
"homepage": null,
|
||||
"owner": "bitcoin-core",
|
||||
"pmnsh": {
|
||||
"include": "include",
|
||||
"lib": ".libs",
|
||||
"make": "libsecp256k1.la",
|
||||
"prepare": "./autogen.sh && ./configure --disable-shared --enable-benchmark=no --enable-exhaustive-tests=no --enable-experimental --enable-module-ecdh --enable-module-recovery --enable-module-schnorrsig --enable-tests=yes CFLAGS=-DSECP256K1_API="
|
||||
},
|
||||
"repo": "secp256k1",
|
||||
"rev": "5dcc6f8dbdb1850570919fc9942d22f728dbc0af",
|
||||
"sha256": "x9qG2S6tBSRseWaFIN9N2fRpY1vkv8idT3d3rfJnmaU",
|
||||
"type": "tarball",
|
||||
"url": "https://github.com/bitcoin-core/secp256k1/archive/5dcc6f8dbdb1850570919fc9942d22f728dbc0af.tar.gz",
|
||||
"url_template": "https://github.com/<owner>/<repo>/archive/<rev>.tar.gz"
|
||||
},
|
||||
"uv": {
|
||||
"branch": "v1.x",
|
||||
"description": "Cross-platform asynchronous I/O",
|
||||
"homepage": "http://libuv.org/",
|
||||
"pmnsh": {
|
||||
"include": "include",
|
||||
"lib": ".libs",
|
||||
"prepare": "./autogen.sh && ./configure --disable-shared",
|
||||
"make": "libuv.la",
|
||||
"compat": {
|
||||
"m1brew": false
|
||||
}
|
||||
},
|
||||
"owner": "libuv",
|
||||
"repo": "libuv",
|
||||
"rev": "v1.40.0",
|
||||
"type": "tarball",
|
||||
"url": "https://github.com/libuv/libuv/archive/v1.40.0.tar.gz",
|
||||
"url_template": "https://github.com/<owner>/<repo>/archive/<rev>.tar.gz"
|
||||
},
|
||||
"ent": {
|
||||
"pmnsh": {
|
||||
"prepare": "./configure"
|
||||
}
|
||||
},
|
||||
"urcrypt": {
|
||||
"pmnsh": {
|
||||
"compat": {
|
||||
"openbsd": {
|
||||
"make": "install prefix=`readlink -f .` exec_prefix=`readlink -f .`",
|
||||
"include": "include",
|
||||
"lib": "lib"
|
||||
}
|
||||
},
|
||||
"prepare": "./autogen.sh && ./configure --disable-shared PKG_CONFIG_PATH=../secp256k1 CFLAGS=\"-I../secp256k1/include -I../libaes_siv\" LDFLAGS=-L../libaes_siv",
|
||||
"make": "install"
|
||||
}
|
||||
}
|
||||
}
|
nix/sources.json: 123 deleted lines
@ -1,123 +0,0 @@
|
||||
{
|
||||
"h2o": {
|
||||
"branch": "master",
|
||||
"description": "H2O - the optimized HTTP/1, HTTP/2, HTTP/3 server",
|
||||
"homepage": "https://h2o.examp1e.net",
|
||||
"owner": "h2o",
|
||||
"pmnsh": {
|
||||
"compat": {
|
||||
"mingw": {
|
||||
"prepare": "cmake -G\"MSYS Makefiles\" -DCMAKE_INSTALL_PREFIX=. ."
|
||||
},
|
||||
"openbsd": {
|
||||
"prepare": "cmake -DOPENSSL_ROOT_DIR=`readlink -f ../openssl/build` ."
|
||||
}
|
||||
},
|
||||
"include": "include",
|
||||
"make": "libh2o",
|
||||
"prepare": "cmake ."
|
||||
},
|
||||
"repo": "h2o",
|
||||
"rev": "v2.2.6",
|
||||
"sha256": "0qni676wqvxx0sl0pw9j0ph7zf2krrzqc1zwj73mgpdnsr8rsib7",
|
||||
"type": "tarball",
|
||||
"url": "https://github.com/h2o/h2o/archive/v2.2.6.tar.gz",
|
||||
"url_template": "https://github.com/<owner>/<repo>/archive/<rev>.tar.gz"
|
||||
},
|
||||
"libaes_siv": {
|
||||
"branch": "master",
|
||||
"description": null,
|
||||
"homepage": null,
|
||||
"owner": "dfoxfranke",
|
||||
"pmnsh": {
|
||||
"compat": {
|
||||
"m1brew": {
|
||||
"make": "install CFLAGS=$(pkg-config --cflags openssl)",
|
||||
"prepare": "cmake ."
|
||||
},
|
||||
"mingw": {
|
||||
"make": "aes_siv_static",
|
||||
"prepare": "cmake -G\"MSYS Makefiles\" -DDISABLE_DOCS:BOOL=ON ."
|
||||
},
|
||||
"openbsd": {
|
||||
"make": "aes_siv_static",
|
||||
"prepare": "cmake -DDISABLE_DOCS:BOOL=ON -DOPENSSL_ROOT_DIR=`readlink -f ../openssl/build` ."
|
||||
}
|
||||
}
|
||||
},
|
||||
"repo": "libaes_siv",
|
||||
"rev": "9681279cfaa6e6399bb7ca3afbbc27fc2e19df4b",
|
||||
"sha256": "1g4wy0m5wpqx7z6nillppkh5zki9fkx9rdw149qcxh7mc5vlszzi",
|
||||
"type": "tarball",
|
||||
"url": "https://github.com/dfoxfranke/libaes_siv/archive/9681279cfaa6e6399bb7ca3afbbc27fc2e19df4b.tar.gz",
|
||||
"url_template": "https://github.com/<owner>/<repo>/archive/<rev>.tar.gz"
|
||||
},
|
||||
"murmur3": {
|
||||
"branch": "master",
|
||||
"description": null,
|
||||
"homepage": null,
|
||||
"owner": "urbit",
|
||||
"pmnsh": {
|
||||
"make": "static"
|
||||
},
|
||||
"repo": "murmur3",
|
||||
"rev": "71a75d57ca4e7ca0f7fc2fd84abd93595b0624ca",
|
||||
"sha256": "0k7jq2nb4ad9ajkr6wc4w2yy2f2hkwm3nkbj2pklqgwsg6flxzwg",
|
||||
"type": "tarball",
|
||||
"url": "https://github.com/urbit/murmur3/archive/71a75d57ca4e7ca0f7fc2fd84abd93595b0624ca.tar.gz",
|
||||
"url_template": "https://github.com/<owner>/<repo>/archive/<rev>.tar.gz"
|
||||
},
|
||||
"niv": {
|
||||
"branch": "master",
|
||||
"description": "Easy dependency management for Nix projects",
|
||||
"homepage": "https://github.com/nmattia/niv",
|
||||
"owner": "nmattia",
|
||||
"repo": "niv",
|
||||
"rev": "9d35b9e4837ab88517210b1701127612c260eccf",
|
||||
"sha256": "0q50xhnm8g2yfyakrh0nly4swyygxpi0a8cb9gp65wcakcgvzvdh",
|
||||
"type": "tarball",
|
||||
"url": "https://github.com/nmattia/niv/archive/9d35b9e4837ab88517210b1701127612c260eccf.tar.gz",
|
||||
"url_template": "https://github.com/<owner>/<repo>/archive/<rev>.tar.gz"
|
||||
},
|
||||
"nixpkgs": {
|
||||
"branch": "master",
|
||||
"description": "Nix Packages collection",
|
||||
"homepage": "",
|
||||
"owner": "NixOS",
|
||||
"repo": "nixpkgs",
|
||||
"rev": "376d67e1cd05d5ac8a64a3f47f17b80fb6394792",
|
||||
"sha256": "1l4ai1dppwdbkkjbvnavsivr5kx00b3q8640pilpnifskfmfc8mp",
|
||||
"type": "tarball",
|
||||
"url": "https://github.com/NixOS/nixpkgs/archive/376d67e1cd05d5ac8a64a3f47f17b80fb6394792.tar.gz",
|
||||
"url_template": "https://github.com/<owner>/<repo>/archive/<rev>.tar.gz"
|
||||
},
|
||||
"softfloat3": {
|
||||
"branch": "master",
|
||||
"description": null,
|
||||
"homepage": null,
|
||||
"owner": "urbit",
|
||||
"pmnsh": {
|
||||
"compat": {
|
||||
"m1brew": {
|
||||
"lib": "build/template-FAST_INT64",
|
||||
"make": "-C build/template-FAST_INT64 libsoftfloat3.a"
|
||||
},
|
||||
"mingw": {
|
||||
"lib": "build/Win64-MinGW-w64",
|
||||
"make": "-C build/Win64-MinGW-w64 libsoftfloat3.a"
|
||||
},
|
||||
"openbsd": {
|
||||
"lib": "build/template-FAST_INT64",
|
||||
"make": "-C build/template-FAST_INT64 libsoftfloat3.a"
|
||||
}
|
||||
},
|
||||
"include": "source/include"
|
||||
},
|
||||
"repo": "berkeley-softfloat-3",
|
||||
"rev": "ec4c7e31b32e07aad80e52f65ff46ac6d6aad986",
|
||||
"sha256": "1lz4bazbf7lns1xh8aam19c814a4n4czq5xsq5rmi9sgqw910339",
|
||||
"type": "tarball",
|
||||
"url": "https://github.com/urbit/berkeley-softfloat-3/archive/ec4c7e31b32e07aad80e52f65ff46ac6d6aad986.tar.gz",
|
||||
"url_template": "https://github.com/<owner>/<repo>/archive/<rev>.tar.gz"
|
||||
}
|
||||
}
|
nix/sources.nix: 174 deleted lines
@ -1,174 +0,0 @@
|
||||
# This file has been generated by Niv.
|
||||
|
||||
let
|
||||
|
||||
#
|
||||
# The fetchers. fetch_<type> fetches specs of type <type>.
|
||||
#
|
||||
|
||||
fetch_file = pkgs: name: spec:
|
||||
let
|
||||
name' = sanitizeName name + "-src";
|
||||
in
|
||||
if spec.builtin or true then
|
||||
builtins_fetchurl { inherit (spec) url sha256; name = name'; }
|
||||
else
|
||||
pkgs.fetchurl { inherit (spec) url sha256; name = name'; };
|
||||
|
||||
fetch_tarball = pkgs: name: spec:
|
||||
let
|
||||
name' = sanitizeName name + "-src";
|
||||
in
|
||||
if spec.builtin or true then
|
||||
builtins_fetchTarball { name = name'; inherit (spec) url sha256; }
|
||||
else
|
||||
pkgs.fetchzip { name = name'; inherit (spec) url sha256; };
|
||||
|
||||
fetch_git = name: spec:
|
||||
let
|
||||
ref =
|
||||
if spec ? ref then spec.ref else
|
||||
if spec ? branch then "refs/heads/${spec.branch}" else
|
||||
if spec ? tag then "refs/tags/${spec.tag}" else
|
||||
abort "In git source '${name}': Please specify `ref`, `tag` or `branch`!";
|
||||
in
|
||||
builtins.fetchGit { url = spec.repo; inherit (spec) rev; inherit ref; };
|
||||
|
||||
fetch_local = spec: spec.path;
|
||||
|
||||
fetch_builtin-tarball = name: throw
|
||||
''[${name}] The niv type "builtin-tarball" is deprecated. You should instead use `builtin = true`.
|
||||
$ niv modify ${name} -a type=tarball -a builtin=true'';
|
||||
|
||||
fetch_builtin-url = name: throw
|
||||
''[${name}] The niv type "builtin-url" will soon be deprecated. You should instead use `builtin = true`.
|
||||
$ niv modify ${name} -a type=file -a builtin=true'';
|
||||
|
||||
#
|
||||
# Various helpers
|
||||
#
|
||||
|
||||
# https://github.com/NixOS/nixpkgs/pull/83241/files#diff-c6f540a4f3bfa4b0e8b6bafd4cd54e8bR695
|
||||
sanitizeName = name:
|
||||
(
|
||||
concatMapStrings (s: if builtins.isList s then "-" else s)
|
||||
(
|
||||
builtins.split "[^[:alnum:]+._?=-]+"
|
||||
((x: builtins.elemAt (builtins.match "\\.*(.*)" x) 0) name)
|
||||
)
|
||||
);
|
||||
|
||||
# The set of packages used when specs are fetched using non-builtins.
|
||||
mkPkgs = sources: system:
|
||||
let
|
||||
sourcesNixpkgs =
|
||||
import (builtins_fetchTarball { inherit (sources.nixpkgs) url sha256; }) { inherit system; };
|
||||
hasNixpkgsPath = builtins.any (x: x.prefix == "nixpkgs") builtins.nixPath;
|
||||
hasThisAsNixpkgsPath = <nixpkgs> == ./.;
|
||||
in
|
||||
if builtins.hasAttr "nixpkgs" sources
|
||||
then sourcesNixpkgs
|
||||
else if hasNixpkgsPath && ! hasThisAsNixpkgsPath then
|
||||
import <nixpkgs> {}
|
||||
else
|
||||
abort
|
||||
''
|
||||
Please specify either <nixpkgs> (through -I or NIX_PATH=nixpkgs=...) or
|
||||
add a package called "nixpkgs" to your sources.json.
|
||||
'';
|
||||
|
||||
# The actual fetching function.
|
||||
fetch = pkgs: name: spec:
|
||||
|
||||
if ! builtins.hasAttr "type" spec then
|
||||
abort "ERROR: niv spec ${name} does not have a 'type' attribute"
|
||||
else if spec.type == "file" then fetch_file pkgs name spec
|
||||
else if spec.type == "tarball" then fetch_tarball pkgs name spec
|
||||
else if spec.type == "git" then fetch_git name spec
|
||||
else if spec.type == "local" then fetch_local spec
|
||||
else if spec.type == "builtin-tarball" then fetch_builtin-tarball name
|
||||
else if spec.type == "builtin-url" then fetch_builtin-url name
|
||||
else
|
||||
abort "ERROR: niv spec ${name} has unknown type ${builtins.toJSON spec.type}";
|
||||
|
||||
# If the environment variable NIV_OVERRIDE_${name} is set, then use
|
||||
# the path directly as opposed to the fetched source.
|
||||
replace = name: drv:
|
||||
let
|
||||
saneName = stringAsChars (c: if isNull (builtins.match "[a-zA-Z0-9]" c) then "_" else c) name;
|
||||
ersatz = builtins.getEnv "NIV_OVERRIDE_${saneName}";
|
||||
in
|
||||
if ersatz == "" then drv else
|
||||
# this turns the string into an actual Nix path (for both absolute and
|
||||
# relative paths)
|
||||
if builtins.substring 0 1 ersatz == "/" then /. + ersatz else /. + builtins.getEnv "PWD" + "/${ersatz}";
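For readers unfamiliar with niv's override hook, the behaviour of the `replace` helper above can be summarised in a short Python sketch; the function name and the plain string return value are assumptions made for the example, not part of niv itself.

import os
import re

def niv_override(name, fetched_path):
    # Mirror of the logic above: sanitize the source name to [a-zA-Z0-9]
    # (everything else becomes "_"), then let NIV_OVERRIDE_<name> win over
    # the fetched source.
    sane = re.sub(r"[^a-zA-Z0-9]", "_", name)
    override = os.environ.get("NIV_OVERRIDE_" + sane, "")
    if override == "":
        return fetched_path
    # relative overrides are resolved against the current working directory
    return override if override.startswith("/") else os.path.join(os.getcwd(), override)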
|
||||
|
||||
# Ports of functions for older nix versions
|
||||
|
||||
# a Nix version of mapAttrs if the built-in doesn't exist
|
||||
mapAttrs = builtins.mapAttrs or (
|
||||
f: set: with builtins;
|
||||
listToAttrs (map (attr: { name = attr; value = f attr set.${attr}; }) (attrNames set))
|
||||
);
|
||||
|
||||
# https://github.com/NixOS/nixpkgs/blob/0258808f5744ca980b9a1f24fe0b1e6f0fecee9c/lib/lists.nix#L295
|
||||
range = first: last: if first > last then [] else builtins.genList (n: first + n) (last - first + 1);
|
||||
|
||||
# https://github.com/NixOS/nixpkgs/blob/0258808f5744ca980b9a1f24fe0b1e6f0fecee9c/lib/strings.nix#L257
|
||||
stringToCharacters = s: map (p: builtins.substring p 1 s) (range 0 (builtins.stringLength s - 1));
|
||||
|
||||
# https://github.com/NixOS/nixpkgs/blob/0258808f5744ca980b9a1f24fe0b1e6f0fecee9c/lib/strings.nix#L269
|
||||
stringAsChars = f: s: concatStrings (map f (stringToCharacters s));
|
||||
concatMapStrings = f: list: concatStrings (map f list);
|
||||
concatStrings = builtins.concatStringsSep "";
|
||||
|
||||
# https://github.com/NixOS/nixpkgs/blob/8a9f58a375c401b96da862d969f66429def1d118/lib/attrsets.nix#L331
|
||||
optionalAttrs = cond: as: if cond then as else {};
|
||||
|
||||
# fetchTarball version that is compatible between all the versions of Nix
|
||||
builtins_fetchTarball = { url, name ? null, sha256 }@attrs:
|
||||
let
|
||||
inherit (builtins) lessThan nixVersion fetchTarball;
|
||||
in
|
||||
if lessThan nixVersion "1.12" then
|
||||
fetchTarball ({ inherit url; } // (optionalAttrs (!isNull name) { inherit name; }))
|
||||
else
|
||||
fetchTarball attrs;
|
||||
|
||||
# fetchurl version that is compatible between all the versions of Nix
|
||||
builtins_fetchurl = { url, name ? null, sha256 }@attrs:
|
||||
let
|
||||
inherit (builtins) lessThan nixVersion fetchurl;
|
||||
in
|
||||
if lessThan nixVersion "1.12" then
|
||||
fetchurl ({ inherit url; } // (optionalAttrs (!isNull name) { inherit name; }))
|
||||
else
|
||||
fetchurl attrs;
|
||||
|
||||
# Create the final "sources" from the config
|
||||
mkSources = config:
|
||||
mapAttrs (
|
||||
name: spec:
|
||||
if builtins.hasAttr "outPath" spec
|
||||
then abort
|
||||
"The values in sources.json should not have an 'outPath' attribute"
|
||||
else
|
||||
spec // { outPath = replace name (fetch config.pkgs name spec); }
|
||||
) config.sources;
|
||||
|
||||
# The "config" used by the fetchers
|
||||
mkConfig =
|
||||
{ sourcesFile ? if builtins.pathExists ./sources.json then ./sources.json else null
|
||||
, sources ? if isNull sourcesFile then {} else builtins.fromJSON (builtins.readFile sourcesFile)
|
||||
, system ? builtins.currentSystem
|
||||
, pkgs ? mkPkgs sources system
|
||||
}: rec {
|
||||
# The sources, i.e. the attribute set of spec name to spec
|
||||
inherit sources;
|
||||
|
||||
# The "pkgs" (evaluated nixpkgs) to use for e.g. non-builtin fetchers
|
||||
inherit pkgs;
|
||||
};
|
||||
|
||||
in
|
||||
mkSources (mkConfig {}) // { __functor = _: settings: mkSources (mkConfig settings); }
|
125
nix/test-fake-ship.nix
Normal file
125
nix/test-fake-ship.nix
Normal file
@ -0,0 +1,125 @@
|
||||
{ click, pier, pkgs }:
|
||||
|
||||
let
|
||||
poke = ''
|
||||
=>
|
||||
|%
|
||||
++ take-poke-ack
|
||||
|= =wire
|
||||
=/ m (strand ,?)
|
||||
^- form:m
|
||||
|= tin=strand-input:strand
|
||||
?+ in.tin `[%skip ~]
|
||||
~ `[%wait ~]
|
||||
[~ %agent * %poke-ack *]
|
||||
?. =(wire wire.u.in.tin)
|
||||
`[%skip ~]
|
||||
?~ p.sign.u.in.tin
|
||||
`[%done %.y]
|
||||
`[%done %.n]
|
||||
==
|
||||
++ poke
|
||||
|= [=dock =cage]
|
||||
=/ m (strand ,?)
|
||||
^- form:m
|
||||
=/ =card:agent:gall [%pass /poke %agent dock %poke cage]
|
||||
;< ~ bind:m (send-raw-card card)
|
||||
(take-poke-ack /poke)
|
||||
--
|
||||
|
||||
'';
|
||||
testThread = dojoCommand:
|
||||
pkgs.writeTextFile {
|
||||
name = "${dojoCommand}.hoon";
|
||||
text = ''
|
||||
${poke}
|
||||
=/ m (strand ,vase)
|
||||
;< [=ship =desk =case] bind:m get-beak
|
||||
;< ok=? bind:m (poke [ship %dojo] %lens-command !>([%$ [%dojo '${dojoCommand}'] [%stdout ~]]))
|
||||
(pure:m !>(ok))
|
||||
'';
|
||||
};
|
||||
appThread = generator: app:
|
||||
pkgs.writeTextFile {
|
||||
name = ":${app}|${generator}.hoon";
|
||||
text = ''
|
||||
${poke}
|
||||
=/ m (strand ,vase)
|
||||
;< [=ship =desk =case] bind:m get-beak
|
||||
;< ok=? bind:m (poke [ship %dojo] %lens-command !>([%$ [%dojo '+${app}/${generator}'] [%app %${app}]]))
|
||||
(pure:m !>(ok))
|
||||
'';
|
||||
};
|
||||
pokeApp = hoon: mark: app:
|
||||
pkgs.writeTextFile {
|
||||
name = ":${app} &${mark} ${hoon}.hoon";
|
||||
text = ''
|
||||
${poke}
|
||||
=/ m (strand ,vase)
|
||||
;< [=ship =desk =case] bind:m get-beak
|
||||
;< ok=? bind:m (poke [ship %${app}] %${mark} !>(${hoon}))
|
||||
(pure:m !>(ok))
|
||||
'';
|
||||
};
|
||||
in pkgs.stdenvNoCC.mkDerivation {
|
||||
name = "test-urbit";
|
||||
|
||||
src = pier;
|
||||
|
||||
phases = [ "unpackPhase" "buildPhase" "checkPhase" ];
|
||||
|
||||
nativeBuildInputs = [ pkgs.netcat ];
|
||||
|
||||
unpackPhase = ''
|
||||
cp -R $src ./pier
|
||||
chmod -R u+rw ./pier
|
||||
'';
|
||||
|
||||
buildPhase = ''
|
||||
set -x
|
||||
set -e
|
||||
|
||||
${../urbit} -d ./pier 1>&2 2> $out
|
||||
|
||||
tail -F $out >&2 &
|
||||
|
||||
${click} -k -p -i ${appThread "mass" "hood"} ./pier
|
||||
|
||||
sleep 2
|
||||
|
||||
${click} -k -p -i ${testThread "-test %/tests ~"} ./pier
|
||||
|
||||
${click} -k -p -i ${pokeApp "%agents" "noun" "test"} ./pier
|
||||
${click} -k -p -i ${pokeApp "%generators" "noun" "test"} ./pier
|
||||
${click} -k -p -i ${pokeApp "%marks" "noun" "test"} ./pier
|
||||
|
||||
${click} -k -p -i ${appThread "mass" "hood"} ./pier
|
||||
sleep 2
|
||||
|
||||
${click} -k -p -i ${pokeApp "~" "helm-pack" "hood"} ./pier
|
||||
|
||||
${click} -k -p -i ${appThread "trim" "hood"} ./pier
|
||||
|
||||
${click} -k -p -i ${appThread "mass" "hood"} ./pier
|
||||
|
||||
${click} -k -p -i ${appThread "meld" "hood"} ./pier
|
||||
|
||||
${click} -k -p -i ${appThread "mass" "hood"} ./pier
|
||||
|
||||
${click} -k -p -i ${appThread "exit" "hood"} ./pier
|
||||
|
||||
set +x
|
||||
'';
|
||||
|
||||
checkPhase = ''
|
||||
if egrep "((FAILED|CRASHED|Failed)|warn:)" $out >/dev/null; then
|
||||
exit 1
|
||||
fi
|
||||
'';
|
||||
|
||||
doCheck = true;
|
||||
|
||||
# Fix 'bind: operation not permitted' when nix.useSandbox = true on darwin.
|
||||
# See https://github.com/NixOS/nix/blob/5f6840fbb49ae5b534423bd8a4360646ee93dbaf/src/libstore/build.cc#L2961
|
||||
__darwinAllowLocalNetworking = true;
|
||||
}
|
@ -1,618 +0,0 @@
|
||||
:: gaze: azimuth statistics
::
:: general flow:
:: - receive events
:: - process events whose timestamp is known
:: - request timestamps for unknown block numbers (if not already running)
:: - receive timestamps, process events
::
/- eth-watcher
/+ *ethereum, *azimuth, default-agent, verb
::
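As a rough illustration of the flow described in the header comment, here is a minimal Python sketch; `request_timestamps` and the field names on `state` are assumptions made for the example, not names from this agent.

def handle_logs(state, new_logs):
    # queue incoming logs, process the ones whose block timestamp is already
    # known, and kick off a single timestamp request for the blocks still missing
    state.queue.extend(new_logs)
    ready = [log for log in state.queue if log.block in state.time]
    state.queue = [log for log in state.queue if log.block not in state.time]
    state.seen = [(state.time[log.block], log) for log in ready] + state.seen
    missing = {log.block for log in state.queue}
    if missing and state.running is None:
        state.running = request_timestamps(missing)  # assumed helper; returns a thread id
    return state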
|
||||
=> |%
|
||||
+$ state-0
|
||||
$: %0
|
||||
:: qued: event logs waiting on block timestamp, oldest first
|
||||
:: time: timestamps of block numbers
|
||||
:: seen: events sorted by timestamp, newest first
|
||||
:: days: stats by day, newest first
|
||||
::
|
||||
running=(unit @ta)
|
||||
qued=loglist
|
||||
time=(map @ud @da)
|
||||
seen=(list [wen=@da wat=event])
|
||||
days=(list [day=@da sat=stats])
|
||||
==
|
||||
::
|
||||
+$ loglist loglist:eth-watcher
|
||||
+$ event
|
||||
$% [%azimuth who=ship dif=diff-point]
|
||||
[%invite by=ship of=ship gift=ship to=address]
|
||||
==
|
||||
::
|
||||
+$ stats
|
||||
$: spawned=(list @p)
|
||||
activated=(list @p)
|
||||
transfer-p=(list @p)
|
||||
transferred=(list @p)
|
||||
configured=(list @p)
|
||||
breached=(list @p)
|
||||
request=(list @p)
|
||||
sponsor=(list @p)
|
||||
management-p=(list @p)
|
||||
voting-p=(list @p)
|
||||
spawn-p=(list @p)
|
||||
invites-senders=(list @p)
|
||||
==
|
||||
::
|
||||
+$ card card:agent:gall
|
||||
::
|
||||
++ node-url 'http://eth-mainnet.urbit.org:8545'
|
||||
++ refresh-rate ~h1
|
||||
++ timeout-time ~h2
|
||||
--
|
||||
::
|
||||
=| state-0
|
||||
=* state -
|
||||
::
|
||||
%+ verb |
|
||||
^- agent:gall
|
||||
=<
|
||||
|_ =bowl:gall
|
||||
+* this .
|
||||
do ~(. +> bowl)
|
||||
def ~(. (default-agent this %|) bowl)
|
||||
bec byk.bowl(r da+now.bowl)
|
||||
::
|
||||
++ on-init
|
||||
^- (quip card _this)
|
||||
[setup-cards:do this]
|
||||
::
|
||||
++ on-save !>(state)
|
||||
++ on-load
|
||||
|= old=vase
|
||||
^- (quip card _this)
|
||||
[~ this(state !<(state-0 old))]
|
||||
::
|
||||
++ on-poke
|
||||
|= [=mark =vase]
|
||||
^- (quip card _this)
|
||||
?> ?=(%noun mark)
|
||||
=/ =noun !<(noun vase)
|
||||
|- ^- [cards=(list card) =_this]
|
||||
?+ noun ~|([dap.bowl %unknown-poke noun] !!)
|
||||
%reconnect
|
||||
:_ this
|
||||
:~ leave-eth-watcher:do
|
||||
watch-eth-watcher:do
|
||||
==
|
||||
::
|
||||
%reload
|
||||
:- cards:$(noun %reconnect)
|
||||
this(qued ~, seen ~, days ~)
|
||||
::
|
||||
%rewatch
|
||||
:_ this:$(noun %reset)
|
||||
:~ leave-eth-watcher:do
|
||||
clear-eth-watcher:do
|
||||
setup-eth-watcher:do
|
||||
await-eth-watcher:do
|
||||
==
|
||||
::
|
||||
%export
|
||||
[export:do this]
|
||||
::
|
||||
%debug
|
||||
~& latest=(turn (scag 5 seen) head)
|
||||
~& oldest=(turn (slag (sub (max 5 (lent seen)) 5) seen) head)
|
||||
~& :- 'order is'
|
||||
=- ?:(sane 'sane' 'insane')
|
||||
%+ roll seen
|
||||
|= [[this=@da *] last=@da sane=?]
|
||||
:- this
|
||||
?: =(*@da last) &
|
||||
(lte this last)
|
||||
~& time=~(wyt by time)
|
||||
~& qued=(lent qued)
|
||||
~& days=(lent days)
|
||||
[~ this]
|
||||
==
|
||||
::
|
||||
++ on-agent
|
||||
|= [=wire =sign:agent:gall]
|
||||
^- (quip card _this)
|
||||
?+ -.sign (on-agent:def wire sign)
|
||||
%kick
|
||||
?. =(/watcher wire) [~ this]
|
||||
[[watch-eth-watcher:do]~ this]
|
||||
::
|
||||
%fact
|
||||
?+ wire (on-agent:def wire sign)
|
||||
[%watcher ~]
|
||||
?. ?=(%eth-watcher-diff p.cage.sign)
|
||||
(on-agent:def wire sign)
|
||||
=^ cards state
|
||||
%- handle-eth-watcher-diff:do
|
||||
!<(diff:eth-watcher q.cage.sign)
|
||||
[cards this]
|
||||
::
|
||||
[%timestamps @ ~]
|
||||
?+ p.cage.sign (on-agent:def wire sign)
|
||||
%thread-fail
|
||||
=+ !<([=term =tang] q.cage.sign)
|
||||
=/ =tank leaf+"{(trip dap.bowl)} thread failed; will retry"
|
||||
%- (slog tank leaf+<term> tang)
|
||||
=^ cards state
|
||||
request-timestamps:do
|
||||
[cards this]
|
||||
::
|
||||
%thread-done
|
||||
=^ cards state
|
||||
%- save-timestamps:do
|
||||
!<((list [@ud @da]) q.cage.sign)
|
||||
[cards this]
|
||||
==
|
||||
==
|
||||
==
|
||||
::
|
||||
++ on-arvo
|
||||
|= [=wire =sign-arvo]
|
||||
^- (quip card _this)
|
||||
?+ +<.sign-arvo ~|([dap.bowl %strange-arvo-sign +<.sign-arvo] !!)
|
||||
%wake
|
||||
?: =(/export wire)
|
||||
[[wait-export:do export:do] this]
|
||||
?: =(/watch wire)
|
||||
[[watch-eth-watcher:do]~ this]
|
||||
~& [dap.bowl %strange-wake wire]
|
||||
[~ this]
|
||||
==
|
||||
::
|
||||
++ on-peek on-peek:def
|
||||
++ on-watch on-watch:def
|
||||
++ on-leave on-leave:def
|
||||
++ on-fail on-fail:def
|
||||
--
|
||||
::
|
||||
|_ =bowl:gall
|
||||
++ bec byk.bowl(r da+now.bowl)
|
||||
++ setup-cards
|
||||
^- (list card)
|
||||
:~ wait-export
|
||||
setup-eth-watcher
|
||||
:: we punt on subscribing to the eth-watcher for a little while.
|
||||
:: this way we get a %history diff containing all past events,
|
||||
:: instead of so many individual %log diffs that we bail meme.
|
||||
:: (to repro, replace this with `watch-eth-watcher`)
|
||||
::
|
||||
await-eth-watcher
|
||||
==
|
||||
::
|
||||
++ wait
|
||||
|= [=wire =@dr]
|
||||
^- card
|
||||
[%pass wire %arvo %b %wait (add now.bowl dr)]
|
||||
::
|
||||
++ wait-export (wait /export refresh-rate)
|
||||
::
|
||||
++ to-eth-watcher
|
||||
|= [=wire =task:agent:gall]
|
||||
^- card
|
||||
[%pass wire %agent [our.bowl %eth-watcher] task]
|
||||
::
|
||||
++ setup-eth-watcher
|
||||
%+ to-eth-watcher /setup
|
||||
:+ %poke %eth-watcher-poke
|
||||
!> ^- poke:eth-watcher
|
||||
:+ %watch /[dap.bowl]
|
||||
:* node-url
|
||||
|
|
||||
refresh-rate
|
||||
timeout-time
|
||||
public:mainnet-contracts
|
||||
~
|
||||
~[azimuth delegated-sending]:mainnet-contracts
|
||||
~
|
||||
~
|
||||
==
|
||||
::
|
||||
:: see also comment in +setup-cards
|
||||
++ await-eth-watcher (wait /watch ~m30)
|
||||
::
|
||||
++ watch-eth-watcher
|
||||
%+ to-eth-watcher /watcher
|
||||
[%watch /logs/[dap.bowl]]
|
||||
::
|
||||
++ leave-eth-watcher
|
||||
%+ to-eth-watcher /watcher
|
||||
[%leave ~]
|
||||
::
|
||||
++ clear-eth-watcher
|
||||
%+ to-eth-watcher /clear
|
||||
:+ %poke %eth-watcher-poke
|
||||
!> ^- poke:eth-watcher
|
||||
[%clear /logs/[dap.bowl]]
|
||||
::
|
||||
++ poke-spider
|
||||
|= [=wire =cage]
|
||||
^- card
|
||||
[%pass wire %agent [our.bowl %spider] %poke cage]
|
||||
::
|
||||
++ watch-spider
|
||||
|= [=wire =sub=path]
|
||||
^- card
|
||||
[%pass wire %agent [our.bowl %spider] %watch sub-path]
|
||||
::
|
||||
:: +handle-eth-watcher-diff: process new logs, clear state on rollback
|
||||
::
|
||||
:: processes logs for which we know the timestamp
|
||||
:: adds timestamp-less logs to queue
|
||||
::
|
||||
++ handle-eth-watcher-diff
|
||||
|= =diff:eth-watcher
|
||||
^- (quip card _state)
|
||||
=^ logs state
|
||||
^- [loglist _state]
|
||||
?- -.diff
|
||||
%history ~& [%got-history (lent loglist.diff)]
|
||||
[loglist.diff state(qued ~, seen ~)]
|
||||
%logs ~& %got-log
|
||||
[loglist.diff state]
|
||||
%disavow ~& %disavow-unimplemented
|
||||
[~ state]
|
||||
==
|
||||
%- process-logs
|
||||
%+ skip logs
|
||||
|= =event-log:rpc
|
||||
%- is-lockup-block
|
||||
block-number:(need mined.event-log)
|
||||
::
|
||||
:: +is-lockup-block: whether the block contains lockup/ignorable transactions
|
||||
::
|
||||
:: this is the crude, hard-coded equivalent of identifying lockup
:: transactions procedurally; that version is still in git history, but
:: didn't work quite right, for reasons that were never identified
|
||||
::
|
||||
++ is-lockup-block
|
||||
|= num=@ud
|
||||
^- ?
|
||||
%+ roll
|
||||
^- (list [@ud @ud])
|
||||
:~ [7.050.978 7.051.038]
|
||||
==
|
||||
|= [[start=@ud end=@ud] in=_|]
|
||||
?: in &
|
||||
&((gte num start) (lte num end))
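The range check above reduces to an inclusive interval test over a hard-coded list of block ranges; a minimal Python sketch, with the single range taken from the Hoon list above:

LOCKUP_RANGES = [(7_050_978, 7_051_038)]

def is_lockup_block(num):
    # True if the block number falls inside any known lockup range, bounds inclusive
    return any(start <= num <= end for start, end in LOCKUP_RANGES)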
|
||||
::
|
||||
:: +request-timestamps: request block timestamps for the logs as necessary
|
||||
::
|
||||
:: will come back as a thread result
|
||||
::
|
||||
++ request-timestamps
|
||||
^- (quip card _state)
|
||||
?~ qued [~ state]
|
||||
?^ running [~ state]
|
||||
=/ tid=@ta
|
||||
%+ scot %ta
|
||||
:((cury cat 3) dap.bowl '_' (scot %uv eny.bowl))
|
||||
:_ state(running `tid)
|
||||
:~ (watch-spider /timestamps/[tid] /thread-result/[tid])
|
||||
::
|
||||
%+ poke-spider /timestamps/[tid]
|
||||
:- %spider-start
|
||||
=- !>([~ `tid bec %eth-get-timestamps -])
|
||||
!> ^- [@t (list @ud)]
|
||||
:- node-url
|
||||
=- ~(tap in -)
|
||||
%- ~(gas in *(set @ud))
|
||||
^- (list @ud)
|
||||
%+ turn qued
|
||||
|= log=event-log:rpc
|
||||
block-number:(need mined.log)
|
||||
==
|
||||
::
|
||||
:: +save-timestamps: store timestamps into state
|
||||
::
|
||||
++ save-timestamps
|
||||
|= timestamps=(list [@ud @da])
|
||||
^- (quip card _state)
|
||||
=. time (~(gas by time) timestamps)
|
||||
=. running ~
|
||||
(process-logs ~)
|
||||
::
|
||||
:: +process-logs: handle new incoming logs
|
||||
::
|
||||
++ process-logs
|
||||
|= new=loglist :: oldest first
|
||||
^- (quip card _state)
|
||||
=. qued (weld qued new)
|
||||
?~ qued [~ state]
|
||||
=- %_ request-timestamps
|
||||
qued (flop rest) :: oldest first
|
||||
seen (weld logs seen) :: newest first
|
||||
days (count-events (flop logs)) :: oldest first
|
||||
==
|
||||
%+ roll `loglist`qued
|
||||
|= [log=event-log:rpc [rest=loglist logs=(list [wen=@da wat=event])]]
|
||||
:: to ensure logs are processed in sane order,
|
||||
:: stop processing as soon as we skipped one
|
||||
::
|
||||
?^ rest [[log rest] logs]
|
||||
=/ tim=(unit @da)
|
||||
%- ~(get by time)
|
||||
block-number:(need mined.log)
|
||||
?~ tim [[log rest] logs]
|
||||
:- rest
|
||||
=+ ven=(event-log-to-event log)
|
||||
?~ ven logs
|
||||
[[u.tim u.ven] logs]
|
||||
::
|
||||
:: +event-log-to-event: turn raw log into gaze noun
|
||||
::
|
||||
++ event-log-to-event
|
||||
|= log=event-log:rpc
|
||||
^- (unit event)
|
||||
?: =(azimuth:mainnet-contracts address.log)
|
||||
=+ (event-log-to-point-diff log)
|
||||
?~ - ~
|
||||
`azimuth+u
|
||||
?: =(delegated-sending:mainnet-contracts address.log)
|
||||
?. .= i.topics.log
|
||||
0x4763.8e3c.ddee.2204.81e4.c3f9.183d.639c.
|
||||
0efe.a7f0.5fcd.2df4.1888.5572.9f71.5419
|
||||
~
|
||||
=/ [of=@ pool=@]
|
||||
~| t.topics.log
|
||||
%+ decode-topics:abi:ethereum t.topics.log
|
||||
~[%uint %uint]
|
||||
=/ [by=@ gift=@ to=@]
|
||||
~| data.log
|
||||
%+ decode-topics:abi:ethereum
|
||||
%+ rash data.log
|
||||
=- ;~(pfix (jest '0x') -)
|
||||
%+ stun [3 3]
|
||||
(bass 16 (stun [64 64] hit))
|
||||
~[%uint %uint %address]
|
||||
`invite+[by of gift to]
|
||||
~
|
||||
::
|
||||
:: +count-events: add events to the daily stats
|
||||
::
|
||||
++ count-events
|
||||
|= logs=_seen :: oldest first
|
||||
^+ days
|
||||
=/ head=[day=@da sat=stats]
|
||||
?^ days i.days
|
||||
*[@da stats]
|
||||
=+ tail=?~(days ~ t.days)
|
||||
|-
|
||||
:: when done, store updated head, but only if it's set
|
||||
::
|
||||
?~ logs
|
||||
?: =(*[@da stats] head) tail
|
||||
[head tail]
|
||||
=* log i.logs
|
||||
:: calculate day for current event, set head if unset
|
||||
::
|
||||
=/ day=@da
|
||||
(sub wen.log (mod wen.log ~d1))
|
||||
=? day.head =(*@da day.head) day
|
||||
:: same day as head, so add to it
|
||||
::
|
||||
?: =(day day.head)
|
||||
%_ $
|
||||
sat.head (count-event wat.log sat.head)
|
||||
logs t.logs
|
||||
==
|
||||
~| [%weird-new-day old=day.head new=day]
|
||||
?> (gth day day.head)
|
||||
:: newer day than head of days, so start new head
|
||||
::
|
||||
%_ $
|
||||
tail [head tail]
|
||||
head [day *stats]
|
||||
==
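The bucketing in +count-events amounts to truncating each event's timestamp to the start of its day and either extending the newest bucket or opening a new one. A hedged Python sketch of just the bucketing (the real arm folds each event into a stats record, and additionally asserts that days only move forward):

DAY = 24 * 60 * 60  # seconds

def bucket_by_day(logs):
    # logs: (when, event) pairs, oldest first -> [day, [events]] buckets, newest first
    days = []
    for when, event in logs:
        day = when - (when % DAY)        # start of the event's day
        if days and days[0][0] == day:
            days[0][1].append(event)
        else:
            days.insert(0, [day, [event]])
    return days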
|
||||
::
|
||||
:: +count-event: add event to the stats, if it's relevant
|
||||
::
|
||||
++ count-event
|
||||
|= [eve=event sat=stats]
|
||||
^- stats
|
||||
?- -.eve
|
||||
%invite sat(invites-senders [by.eve invites-senders.sat])
|
||||
::
|
||||
%azimuth
|
||||
?+ -.dif.eve sat
|
||||
%spawned sat(spawned [who.dif.eve spawned.sat])
|
||||
%activated sat(activated [who.eve activated.sat])
|
||||
%transfer-proxy ?: =(0x0 new.dif.eve) sat
|
||||
sat(transfer-p [who.eve transfer-p.sat])
|
||||
%owner sat(transferred [who.eve transferred.sat])
|
||||
%keys sat(configured [who.eve configured.sat])
|
||||
%continuity sat(breached [who.eve breached.sat])
|
||||
%escape ?~ new.dif.eve sat
|
||||
sat(request [who.eve request.sat])
|
||||
%sponsor ?. has.new.dif.eve sat
|
||||
sat(sponsor [who.eve sponsor.sat])
|
||||
%management-proxy sat(management-p [who.eve management-p.sat])
|
||||
%voting-proxy sat(voting-p [who.eve voting-p.sat])
|
||||
%spawn-proxy sat(spawn-p [who.eve spawn-p.sat])
|
||||
==
|
||||
==
|
||||
::
|
||||
::
|
||||
:: +export: periodically export data
|
||||
::
|
||||
++ export
|
||||
^- (list card)
|
||||
:~ (export-move %days (export-days days))
|
||||
(export-move %months (export-months days))
|
||||
(export-move %events export-raw)
|
||||
==
|
||||
::
|
||||
:: +export-move: %info move to write exported .txt
|
||||
::
|
||||
++ export-move
|
||||
|= [nom=@t dat=(list @t)]
|
||||
^- card
|
||||
=- [%pass /export/[nom] %arvo %c %info -]
|
||||
%+ foal:space:userlib
|
||||
/(scot %p our.bowl)/base/(scot %da now.bowl)/gaze-exports/[nom]/txt
|
||||
[%txt !>(dat)]
|
||||
::
|
||||
:: +peek-x: accept gall scry
|
||||
::
|
||||
:: %/days/txt: per day, digest stats
|
||||
:: %/months/txt: per month, digest stats
|
||||
:: %/raw/txt: all observed events
|
||||
::
|
||||
++ peek-x ::TODO
|
||||
|= pax=path
|
||||
^- (unit (unit (pair mark *)))
|
||||
?~ pax ~
|
||||
?: =(%days i.pax)
|
||||
:^ ~ ~ %txt
|
||||
(export-days days)
|
||||
?: =(%months i.pax)
|
||||
:^ ~ ~ %txt
|
||||
(export-months days)
|
||||
?: =(%raw i.pax)
|
||||
``txt+export-raw
|
||||
~
|
||||
::
|
||||
:: +export-months: generate a csv of stats per month
|
||||
::
|
||||
++ export-months
|
||||
|= =_days
|
||||
%- export-days
|
||||
^+ days
|
||||
%+ roll (flop days)
|
||||
|= [[day=@da sat=stats] mos=(list [mod=@da sat=stats])]
|
||||
^+ mos
|
||||
=/ mod=@da
|
||||
%- year
|
||||
=+ (yore day)
|
||||
-(d.t 1)
|
||||
?~ mos [mod sat]~
|
||||
?: !=(mod mod.i.mos)
|
||||
[[mod sat] mos]
|
||||
:_ t.mos
|
||||
:- mod
|
||||
::TODO this is hideous. can we make a wet gate do this?
|
||||
:* (weld spawned.sat spawned.sat.i.mos)
|
||||
(weld activated.sat activated.sat.i.mos)
|
||||
(weld transfer-p.sat transfer-p.sat.i.mos)
|
||||
(weld transferred.sat transferred.sat.i.mos)
|
||||
(weld configured.sat configured.sat.i.mos)
|
||||
(weld breached.sat breached.sat.i.mos)
|
||||
(weld request.sat request.sat.i.mos)
|
||||
(weld sponsor.sat sponsor.sat.i.mos)
|
||||
(weld management-p.sat management-p.sat.i.mos)
|
||||
(weld voting-p.sat voting-p.sat.i.mos)
|
||||
(weld spawn-p.sat spawn-p.sat.i.mos)
|
||||
(weld invites-senders.sat invites-senders.sat.i.mos)
|
||||
==
|
||||
::
|
||||
:: +export-days: generate a csv of stats per day
|
||||
::
|
||||
++ export-days
|
||||
|= =_days
|
||||
:- %- crip
|
||||
;: weld
|
||||
"date,"
|
||||
"spawned,"
|
||||
"activated,"
|
||||
"transfer proxy,"
|
||||
"transferred,"
|
||||
"transferred (unique),"
|
||||
"configured,"
|
||||
"configured (unique),"
|
||||
"escape request,"
|
||||
"sponsor change,"
|
||||
"invites,"
|
||||
"invites (unique senders)"
|
||||
==
|
||||
|^ ^- (list @t)
|
||||
%+ turn days
|
||||
|= [day=@da stats]
|
||||
%- crip
|
||||
;: weld
|
||||
(scow %da day) ","
|
||||
(count spawned) ","
|
||||
(count activated) ","
|
||||
(count transfer-p) ","
|
||||
(unique transferred) ","
|
||||
(unique configured) ","
|
||||
(count request) ","
|
||||
(count sponsor) ","
|
||||
(unique invites-senders)
|
||||
==
|
||||
::
|
||||
++ count
|
||||
|* l=(list)
|
||||
(num (lent l))
|
||||
::
|
||||
++ unique
|
||||
|* l=(list)
|
||||
;: weld
|
||||
(count l)
|
||||
","
|
||||
(num ~(wyt in (~(gas in *(set)) l)))
|
||||
==
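The ++unique helper above renders a column as a total count plus a distinct count; the same idea in a tiny Python sketch:

def unique(col):
    # render "<total>,<distinct>" for a column, mirroring the total/unique pair
    # the export emits for transferred, configured and invite senders
    return "{},{}".format(len(col), len(set(col)))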
|
||||
::
|
||||
++ num (d-co:co 1)
|
||||
--
|
||||
::
|
||||
:: +export-raw: generate a csv of individual transactions
|
||||
::
|
||||
++ export-raw
|
||||
:- %- crip
|
||||
;: weld
|
||||
"date,"
|
||||
"point,"
|
||||
"event,"
|
||||
"field 1,field2,field3"
|
||||
==
|
||||
|^ ^- (list @t)
|
||||
%+ turn seen
|
||||
:: (cork tail event-to-row crip)
|
||||
|= [wen=@da =event]
|
||||
(crip "{(scow %da wen)},{(event-to-row event)}")
|
||||
::
|
||||
++ event-to-row
|
||||
|= =event
|
||||
?- -.event
|
||||
%azimuth (point-diff-to-row +.event)
|
||||
%invite (invite-to-row +.event)
|
||||
==
|
||||
::
|
||||
++ point-diff-to-row
|
||||
|= [who=ship dif=diff-point]
|
||||
^- tape
|
||||
%+ weld "{(pon who)},"
|
||||
?- -.dif
|
||||
%full "full,"
|
||||
%owner "owner,{(adr new.dif)}"
|
||||
%activated "activated,"
|
||||
%spawned "spawned,{(pon who.dif)}"
|
||||
%keys "keys,{(num life.dif)}"
|
||||
%continuity "breached,{(num new.dif)}"
|
||||
%sponsor "sponsor,{(spo has.new.dif)},{(pon who.new.dif)}"
|
||||
%escape "escape-req,{(req new.dif)}"
|
||||
%management-proxy "management-p,{(adr new.dif)}"
|
||||
%voting-proxy "voting-p,{(adr new.dif)}"
|
||||
%spawn-proxy "spawn-p,{(adr new.dif)}"
|
||||
%transfer-proxy "transfer-p,{(adr new.dif)}"
|
||||
==
|
||||
::
|
||||
++ invite-to-row
|
||||
|= [by=ship of=ship ship to=address]
|
||||
"{(pon by)},invite,{(pon of)},{(adr to)}"
|
||||
::
|
||||
++ num (d-co:co 1)
|
||||
++ pon (cury scow %p)
|
||||
++ adr |=(a=@ ['0' 'x' ((x-co:co (mul 2 20)) a)])
|
||||
++ spo |=(h=? ?:(h "escaped to" "detached from"))
|
||||
++ req |=(r=(unit @p) ?~(r "canceled" (pon u.r)))
|
||||
--
|
||||
--
|
@ -795,7 +795,7 @@
|
||||
%+ turn
|
||||
%+ skim desks
|
||||
|= dek=desk
|
||||
?: (~(has in .^((set desk) %cd /(scot %p our)/base/(scot %da now))) dek)
|
||||
?: (~(has in .^((set desk) %cd /(scot %p our)//(scot %da now))) dek)
|
||||
&
|
||||
~> %slog.(fmt "desk does not yet exist: {<dek>}") |
|
||||
|=(=desk [%pass /kiln/suspend %arvo %c %zest desk %dead])
|
||||
@ -821,10 +821,9 @@
|
||||
=+ .^(=rock:tire %cx /(scot %p our)//(scot %da now)/tire)
|
||||
?~ got=(~(get by rock) loc)
|
||||
abet:(spam leaf+"desk does not exist: {<loc>}" ~)
|
||||
?: =(+<:got %dead)
|
||||
abet:(spam leaf+"desk not installed: {<loc>}" ~)
|
||||
~> %slog.(fmt "uninstalling {<loc>}")
|
||||
=. ..on-init (emit %pass /kiln/uninstall %arvo %c %zest loc %dead)
|
||||
=? ..on-init !=(+<:got %dead)
|
||||
(emit %pass /kiln/uninstall %arvo %c %zest loc %dead)
|
||||
?~ sync=(~(get by sources) loc)
|
||||
abet
|
||||
(poke-unsync loc u.sync)
|
||||
@ -1128,6 +1127,12 @@
|
||||
=/ m (strand:rand ,vase)
|
||||
;< =riot:clay bind:m (warp:strandio her sud ~ %sing %w ud+let /)
|
||||
?> ?=(^ riot)
|
||||
:: The syncs may have changed, so get the latest
|
||||
::
|
||||
;< zyx=(map kiln-sync sync-state) bind:m
|
||||
(scry:strandio (map kiln-sync sync-state) /gx/hood/kiln/syncs/noun)
|
||||
?. (~(has by zyx) syd her sud)
|
||||
(pure:m !>(%done))
|
||||
~> %slog.(fmt "downloading update for {here}")
|
||||
;< =riot:clay bind:m (warp:strandio her sud ~ %sing %v ud+let /)
|
||||
?> ?=(^ riot)
|
||||
|
@ -13,7 +13,6 @@
|
||||
=> v=.
|
||||
|%
|
||||
++ mime => v [/text/plain (as-octs (of-wain pem))]
|
||||
++ elem => v ;pre: {(trip (of-wain pem))}
|
||||
--
|
||||
++ grad %mime
|
||||
--
|
||||
|
@ -3,42 +3,6 @@
|
||||
::
|
||||
/? 310
|
||||
=, html
|
||||
|%
|
||||
++ words 1
|
||||
++ hedtal
|
||||
=| met=marl
|
||||
|= a=marl ^- [hed=marl tal=marl]
|
||||
?~ a [~ ~]
|
||||
?. ?=(%h1 n.g.i.a)
|
||||
?: ?=(%meta n.g.i.a)
|
||||
$(a t.a, met [i.a met])
|
||||
=+ had=$(a c.i.a)
|
||||
?^ -.had had
|
||||
$(a t.a)
|
||||
[c.i.a (weld (flop met) (limit words t.a))]
|
||||
::
|
||||
++ limit
|
||||
|= [lim=@u mal=marl]
|
||||
=< res
|
||||
|- ^- [rem=@u res=marl]
|
||||
?~ mal [lim ~]
|
||||
?~ lim [0 ~]
|
||||
=/ [lam=@u hed=manx]
|
||||
?: ?=(_;/(**) i.mal)
|
||||
[lim ;/(tay)]:(deword lim v.i.a.g.i.mal)
|
||||
[rem ele(c res)]:[ele=i.mal $(mal c.i.mal)]
|
||||
[rem - res]:[hed $(lim lam, mal t.mal)]
|
||||
::
|
||||
++ deword
|
||||
|= [lim=@u tay=tape] ^- [lim=@u tay=tape]
|
||||
?~ tay [lim tay]
|
||||
?~ lim [0 ~]
|
||||
=+ wer=(dot 1^1 tay)
|
||||
?~ q.wer
|
||||
[lim - tay]:[i.tay $(tay t.tay)]
|
||||
=+ nex=$(lim (dec lim), tay q.q.u.q.wer)
|
||||
[-.nex [(wonk wer) +.nex]]
|
||||
--
|
||||
::
|
||||
::
|
||||
=, mimes:html
|
||||
@ -50,14 +14,18 @@
|
||||
++ mime
|
||||
=< mime
|
||||
|%
|
||||
++ elem ;div:(h1:"*{hed}" div:"*{tal}") :: convert to %elem
|
||||
++ hymn ;html:(head:title:"snip" body:"+{elem}") :: convert to %hymn
|
||||
++ hymn :: convert to %hymn
|
||||
|^ html
|
||||
++ div ;div:(h1:"*{hed}" div:"*{tal}")
|
||||
++ html ;html:(head:title:"snip" body:"+{div}")
|
||||
--
|
||||
++ html (crip (en-xml hymn)) :: convert to %html
|
||||
++ mime [/text/html (as-octs html)] :: convert to %mime
|
||||
--
|
||||
++ noun [hed tal]
|
||||
--
|
||||
++ grab |% :: convert from
|
||||
++ grab :: convert from
|
||||
|%
|
||||
++ noun ,[marl marl] :: clam from %noun
|
||||
++ elem |=(a=manx (hedtal +.a))
|
||||
-- --
|
||||
--
|
||||
--
|
||||
|
@ -1,12 +0,0 @@
|
||||
|_ a=manx
|
||||
++ grad %noun
|
||||
++ grab
|
||||
|%
|
||||
++ noun manx
|
||||
--
|
||||
++ grow
|
||||
|%
|
||||
++ htm a
|
||||
++ noun a
|
||||
--
|
||||
--
|
@ -1,11 +0,0 @@
|
||||
|_ a=manx
|
||||
++ grad %noun
|
||||
++ grab
|
||||
|%
|
||||
++ noun manx
|
||||
--
|
||||
++ grow
|
||||
|%
|
||||
++ noun a
|
||||
--
|
||||
--
|
1
pkg/arvo/mar/xhtml.hoon
Symbolic link
1
pkg/arvo/mar/xhtml.hoon
Symbolic link
@ -0,0 +1 @@
|
||||
../../base-dev/mar/xhtml.hoon
|
@ -585,6 +585,7 @@
|
||||
%'GET'
|
||||
%'HEAD'
|
||||
%'OPTIONS'
|
||||
%'PATCH'
|
||||
%'POST'
|
||||
%'PUT'
|
||||
%'TRACE'
|
||||
@ -2699,6 +2700,7 @@
|
||||
+$ gift :: outgoing result
|
||||
$% [%boon payload=*] :: ames response
|
||||
[%done error=(unit error:ames)] :: ames message (n)ack
|
||||
[%flub ~] :: not ready to handle plea
|
||||
[%unto p=unto] ::
|
||||
== ::
|
||||
+$ task :: incoming request
|
||||
|
@ -1253,7 +1253,7 @@
|
||||
$>(%wake gift:behn)
|
||||
==
|
||||
$: %gall
|
||||
$>(%unto gift:gall)
|
||||
$>(?(%flub %unto) gift:gall)
|
||||
==
|
||||
$: %jael
|
||||
$> $? %private-keys
|
||||
@ -1305,6 +1305,7 @@
|
||||
::
|
||||
+$ message-sink-task
|
||||
$% [%done ok=?]
|
||||
[%flub ~]
|
||||
[%drop =message-num]
|
||||
[%hear =lane =shut-packet ok=?]
|
||||
==
|
||||
@ -1815,6 +1816,31 @@
|
||||
+.u.ship-state
|
||||
::
|
||||
+| %tasks
|
||||
:: +on-take-flub: vane not ready to process message, pretend it
|
||||
:: was never delivered
|
||||
::
|
||||
++ on-take-flub
|
||||
|= =wire
|
||||
^+ event-core
|
||||
?~ parsed=(parse-bone-wire wire)
|
||||
:: no-op
|
||||
::
|
||||
~> %slog.0^leaf/"ames: dropping malformed wire: {(spud wire)}"
|
||||
event-core
|
||||
?> ?=([@ her=ship *] u.parsed)
|
||||
=* her her.u.parsed
|
||||
=/ peer-core (abed-got:pe her)
|
||||
?: ?& ?=([%new *] u.parsed)
|
||||
(lth rift.u.parsed rift.peer-state.peer-core)
|
||||
==
|
||||
:: ignore events from an old rift
|
||||
::
|
||||
%- %^ ev-trace odd.veb her
|
||||
|.("dropping old rift wire: {(spud wire)}")
|
||||
event-core
|
||||
=/ =bone
|
||||
?-(u.parsed [%new *] bone.u.parsed, [%old *] bone.u.parsed)
|
||||
abet:(on-flub:peer-core bone)
|
||||
:: +on-take-done: handle notice from vane that it processed a message
|
||||
::
|
||||
++ on-take-done
|
||||
@ -3056,6 +3082,11 @@
|
||||
:- >[bone=bone message-num=message-num meat=meat]:shut-packet<
|
||||
tang.u.dud
|
||||
abet:(call:(abed:mu bone) %hear [message-num +.meat]:shut-packet)
|
||||
::
|
||||
++ on-flub
|
||||
|= =bone
|
||||
^+ peer-core
|
||||
abet:(call:(abed:mi:peer-core bone) %flub ~)
|
||||
:: +on-memo: handle request to send message
|
||||
::
|
||||
++ on-memo
|
||||
@ -3995,6 +4026,11 @@
|
||||
?- -.task
|
||||
%drop sink(nax.state (~(del in nax.state) message-num.task))
|
||||
%done (done ok.task)
|
||||
%flub
|
||||
%= sink
|
||||
last-heard.state (dec last-heard.state)
|
||||
pending-vane-ack.state ~(nap to pending-vane-ack.state)
|
||||
==
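In other words, a %flub rewinds the sink as if the message had never been heard, so the sender will retransmit later. A minimal Python sketch of just that state change; the field names, and the assumption that the pending vane-ack queue is a FIFO such as collections.deque, are illustrative only:

def flub(sink):
    # roll back the last-heard counter and drop the head of the pending
    # vane-ack queue, so the message is treated as never delivered
    sink.last_heard -= 1
    sink.pending_vane_ack.popleft()
    return sink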
|
||||
::
|
||||
%hear
|
||||
|^ ?: ?| corked
|
||||
@ -4846,6 +4882,8 @@
|
||||
[@ %boon *] (on-take-boon:event-core wire payload.sign)
|
||||
::
|
||||
[%behn %wake *] (on-take-wake:event-core wire error.sign)
|
||||
::
|
||||
[%gall %flub ~] (on-take-flub:event-core wire)
|
||||
::
|
||||
[%jael %turf *] (on-take-turf:event-core turf.sign)
|
||||
[%jael %private-keys *] (on-priv:event-core [life vein]:sign)
|
||||
|
@ -865,6 +865,14 @@
|
||||
=. mo-core (mo-track-ship ship)
|
||||
=? mo-core ?=(%u -.ames-request) (mo-give %done ~)
|
||||
::
|
||||
=/ yok=(unit yoke) (~(get by yokes.state) agent-name)
|
||||
?~ yok
|
||||
(mo-give %flub ~)
|
||||
?: ?=(%nuke -.u.yok)
|
||||
(mo-give %flub ~)
|
||||
?: ?=(%.n -.agent.u.yok)
|
||||
(mo-give %flub ~)
|
||||
::
|
||||
=/ =wire /sys/req/(scot %p ship)/[agent-name]
|
||||
::
|
||||
=/ =deal
|
||||
|
@ -3319,6 +3319,7 @@
|
||||
~% %leer ..part ~
|
||||
|= txt=cord
|
||||
^- wain
|
||||
?~ txt ~
|
||||
=/ len=@ (met 3 txt)
|
||||
=/ cut =+(cut -(a 3, c 1, d txt))
|
||||
=/ sub sub
|
||||
|
@ -7,7 +7,4 @@
|
||||
^- form:m
|
||||
;< =bowl:spider bind:m get-bowl:strandio
|
||||
;< code=@p bind:m (scry:strandio @p /j/code/(scot %p our.bowl))
|
||||
%- pure:m
|
||||
!> ^- tape
|
||||
%+ slag 1
|
||||
(scow %p code)
|
||||
(pure:m !>(code))
|
||||
|
@ -9,4 +9,4 @@
|
||||
=/ [who=ship message=@t]
|
||||
?@(arg [who.arg ''] [who.arg (crip mez.arg)])
|
||||
;< ~ bind:m (poke:strandio [who %hood] %helm-hi !>(message))
|
||||
(pure:m !>("hi {<who>} successful"))
|
||||
(pure:m !>((crip "hi {<who>} successful")))
|
||||
|
@ -1,390 +0,0 @@
|
||||
:: naive-csv: produces csv file containing L2 transaction data
::
:: takes in the network to use and the ethereum node url to grab data from.
:: it starts with the azimuth snapshot and scries the logs from %azimuth.
:: it then produces a csv file containing the following data on L2
:: transactions:
::
:: - block number
:: - timestamp
:: - roller address
:: - roll hash
:: - tx hash
:: - sending ship
:: - sending proxy
:: - nonce
:: - gas price
:: - length of input data
:: - success or failure
:: - function name
:: - spawning ship (^sein:title)
::
:: A lot of the data-scrounging here is stuff that %roller already keeps track
:: of. We could just scry it from there, but then this thread needs to be run
:: on the roller ship. So we rebuild the list of historical transactions
:: ourselves so that this can run from any ship.
::
|
||||
/- dice,
|
||||
spider
|
||||
::
|
||||
/+ dice,
|
||||
ethereum,
|
||||
ethio,
|
||||
naive,
|
||||
naive-tx=naive-transactions,
|
||||
*strandio
|
||||
::
|
||||
=, strand=strand:spider
|
||||
=, jael
|
||||
::
|
||||
^- thread:spider
|
||||
=< process-logs
|
||||
=>
|
||||
|%
|
||||
:: imported logs is cast as $events
|
||||
+$ events (list event-log:rpc:ethereum)
|
||||
+$ address address:naive :: @ux
|
||||
+$ keccak @ux :: used for transaction and roll hashes
|
||||
+$ blocknum number:block :: @udblocknumber
|
||||
+$ net net:dice :: ?(%mainnet %goerli %local %default)
|
||||
+$ roll-dat :: all data required for each roll
|
||||
[[gas=@ud sender=address] =effects:naive]
|
||||
+$ block-dat :: all data required for each block
|
||||
[timestamp=@da rolls=(map keccak roll-dat)]
|
||||
+$ block-map (map blocknum block-dat)
|
||||
+$ rolls-map (map blocknum (map keccak effects:naive))
|
||||
::
|
||||
+$ action
|
||||
$? %transfer-point
|
||||
%spawn
|
||||
%configure-keys
|
||||
%escape
|
||||
%cancel-escape
|
||||
%adopt
|
||||
%reject
|
||||
%detach
|
||||
%set-management-proxy
|
||||
%set-spawn-proxy
|
||||
%set-transfer-proxy
|
||||
==
|
||||
::
|
||||
+$ tx-data
|
||||
$: =blocknum
|
||||
timestamp=@da
|
||||
roller=address
|
||||
roll-hash=keccak
|
||||
tx-hash=keccak
|
||||
sender=ship
|
||||
proxy=proxy:naive
|
||||
nonce=nonce:naive
|
||||
gas=@ud
|
||||
length=@ux
|
||||
suc=?
|
||||
=action
|
||||
parent=ship
|
||||
==
|
||||
--
|
||||
::
|
||||
|%
|
||||
:: +process-logs is the main process. it grabs the azimuth snapshot, runs
|
||||
:: +naive on the logs, grabs the timestamps and gas costs for each roll,
|
||||
:: then flattens them into a list of $tx-data and saves them to disk.
|
||||
::
|
||||
++ process-logs
|
||||
|= arg=vase
|
||||
=+ !<([~ =net node-url=@t] arg)
|
||||
=/ pax=path /naive-exports/csv :: data will be saved here
|
||||
=/ m (strand ,vase)
|
||||
^- form:m
|
||||
;< =events bind:m (scry events /gx/azimuth/logs/noun)
|
||||
=/ [naive-contract=address chain-id=@]
|
||||
[naive chain-id]:(get-network:dice net)
|
||||
;< =bowl:spider bind:m get-bowl
|
||||
=/ snap=snap-state:dice
|
||||
.^ snap-state:dice %gx
|
||||
/(scot %p our.bowl)/azimuth/(scot %da now.bowl)/last-snap/noun
|
||||
==
|
||||
::
|
||||
;< ~ bind:m
|
||||
%- flog-text %+ weld "naive-csv: processing {<net>} ethereum logs "
|
||||
"with {<(lent events)>} events"
|
||||
=/ =rolls-map
|
||||
(compute-effects nas.snap events net naive-contract chain-id)
|
||||
;< ~ bind:m (flog-text "naive-csv: getting timestamps")
|
||||
;< tim=thread-result bind:m
|
||||
%+ await-thread %eth-get-timestamps
|
||||
!>([node-url ~(tap in ~(key by rolls-map))])
|
||||
=/ timestamps %- ~(gas by *(map blocknum @da))
|
||||
?- tim
|
||||
[%.y *] ;;((list [@ud @da]) q.p.tim)
|
||||
[%.n *]
|
||||
=> (mean 'naive-csv: %eth-get-timestamps failed' p.tim)
|
||||
!!
|
||||
==
|
||||
;< ~ bind:m (flog-text "naive-csv: got timestamps")
|
||||
;< ~ bind:m (flog-text "naive-csv: getting tx receipts")
|
||||
;< gaz=thread-result bind:m
|
||||
%+ await-thread %eth-get-tx-receipts
|
||||
!>([node-url (get-roll-hashes rolls-map)])
|
||||
=/ gas-sender %- ~(gas by *(map keccak [gas=@ud sender=address]))
|
||||
?- gaz
|
||||
[%.y *] (parse-gas-sender ;;((list [@t json]) q.p.gaz))
|
||||
[%.n *]
|
||||
=> (mean 'naive-csv: %eth-tx-receipts failed' p.gaz)
|
||||
!!
|
||||
==
|
||||
;< ~ bind:m (flog-text "naive-csv: got tx receipts")
|
||||
=/ csv=(list cord)
|
||||
(make-csv (flatten (collate-roll-data rolls-map timestamps gas-sender)))
|
||||
;< ~ bind:m (export-csv csv pax)
|
||||
;< ~ bind:m (flog-text :(weld "naive-csv: csv saved to %" (spud pax) "/"))
|
||||
::
|
||||
(pure:m !>(~))
|
||||
:: +collate-roll-data throws naive:effects, timestamps, and gas costs into
|
||||
:: one $block-map
|
||||
::
|
||||
++ collate-roll-data
|
||||
|= $: =rolls-map
|
||||
timestamps=(map blocknum @da)
|
||||
roll-receipts=(map keccak [gas=@ud sender=address])
|
||||
==
|
||||
=/ blocknums=(list blocknum) ~(tap in ~(key by rolls-map))
|
||||
=| =block-map
|
||||
^+ block-map
|
||||
|-
|
||||
?~ blocknums block-map
|
||||
=/ =blocknum i.blocknums
|
||||
=/ rolls=(map keccak [[gas=@ud sender=address] =effects:naive])
|
||||
%- ~(gas by *(map keccak [[gas=@ud sender=address] =effects:naive]))
|
||||
%+ turn ~(tap in ~(key by (~(got by rolls-map) blocknum)))
|
||||
|= txh=keccak
|
||||
:+ txh
|
||||
(~(got by roll-receipts) txh)
|
||||
(~(got by (~(got by rolls-map) blocknum)) txh)
|
||||
%= $
|
||||
blocknums t.blocknums
|
||||
block-map %+ ~(put by block-map)
|
||||
blocknum
|
||||
[(~(got by timestamps) blocknum) rolls]
|
||||
==
|
||||
:: +flatten takes a $block-map and creates a $tx-data for every transaction
|
||||
:: in every roll, returned as a (list tx-data)
|
||||
::
|
||||
++ flatten
|
||||
|= =block-map
|
||||
=/ blocks=(list [blocknum block-dat]) ~(tap by block-map)
|
||||
=| tx-list=(list tx-data)
|
||||
^+ tx-list
|
||||
:: recurse through the list of blocks, getting the rolls submitted in that
|
||||
:: block, their timestamp, and the gas price of that roll
|
||||
::
|
||||
|-
|
||||
=* block-loop $
|
||||
?~ blocks tx-list
|
||||
=/ block=[=blocknum =block-dat] i.blocks
|
||||
=/ roll-list=(list [=keccak =roll-dat]) ~(tap by rolls.block-dat.block)
|
||||
=| block-tx-list=(list tx-data)
|
||||
:: recurse through each roll, getting the transaction data from the effects
|
||||
::
|
||||
|-
|
||||
=* roll-loop $
|
||||
?~ roll-list
|
||||
%= block-loop
|
||||
blocks t.blocks
|
||||
tx-list (welp tx-list block-tx-list)
|
||||
==
|
||||
=/ roll=[=keccak =roll-dat] i.roll-list
|
||||
:: recurse through the list of effects, building up transaction data as we
|
||||
:: go. there's a choice here to use the effects, or the submitted
|
||||
:: raw-tx. the effects include whether or not a transaction failed,
|
||||
:: which is important data that is not part of the submitted raw-tx. we
|
||||
:: could determine this ourselves, but we build the effects anyways when
|
||||
:: computing the state transitions, so we may as well use them.
|
||||
::
|
||||
:: an individual transaction results in up to 3 diffs: a %nonce, a %tx, and
|
||||
:: a %point. they always appear in this order. successful transactions
|
||||
:: always have all 3, while failed transactions only have %nonce and %tx.
|
||||
:: note that the nonce listed is always the expected nonce - we can't know
|
||||
:: what nonce was actually submitted without the private key of the signer.
|
||||
::
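The pairing rule described above (every transaction contributes a %nonce diff followed by a %tx diff, with %point and the other diffs ignored for this purpose) can be sketched in Python; the tuple layouts below are assumptions made for the example:

def group_txs(effects):
    # effects: sequence of diffs such as ("nonce", ship, proxy, n) and ("tx", raw, ok);
    # emit one record per transaction once both halves have been seen
    txs, current, seen = [], {}, set()
    for diff in effects:
        kind = diff[0]
        if kind == "nonce":
            current.update(ship=diff[1], proxy=diff[2], nonce=diff[3])
            seen.add("nonce")
        elif kind == "tx":
            current.update(raw_tx=diff[1], ok=diff[2])
            seen.add("tx")
        if seen == {"nonce", "tx"}:
            txs.append(current)
            current, seen = {}, set()
    return txs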
|
||||
=| roll-tx-list=(list tx-data)
|
||||
=| =tx-data
|
||||
=| nonce-and-tx=[_| _|]
|
||||
|-
|
||||
=* effect-loop $
|
||||
:: if we are processing a new transaction, initialize the parts of tx-data
|
||||
:: that are identical for every transaction in the roll
|
||||
=? tx-data =([| |] nonce-and-tx)
|
||||
:* blocknum.block timestamp.block-dat.block sender.roll-dat.roll
|
||||
keccak.roll *keccak *ship *proxy:naive *nonce:naive
|
||||
gas.roll-dat.roll *@ | *action *ship
|
||||
==
|
||||
:: if we've gotten both the %nonce and %tx diff from a transaction, add the
|
||||
:: tx-data to the list of tx for the roll
|
||||
::
|
||||
?: =([& &] nonce-and-tx)
|
||||
%= effect-loop
|
||||
nonce-and-tx [| |]
|
||||
roll-tx-list (snoc roll-tx-list tx-data)
|
||||
==
|
||||
:: if we've finished looping through the effects, add the tx list from the
|
||||
:: roll to the list of tx for the block
|
||||
::
|
||||
?~ effects.roll-dat.roll
|
||||
%= roll-loop
|
||||
roll-list t.roll-list
|
||||
block-tx-list (welp block-tx-list roll-tx-list)
|
||||
==
|
||||
::
|
||||
=/ =diff:naive i.effects.roll-dat.roll
|
||||
:: we ignore %operator, %dns, %point diffs
|
||||
::
|
||||
?+ diff
|
||||
$(effects.roll-dat.roll t.effects.roll-dat.roll)
|
||||
:: %nonce is always the first diff from a given transaction.
|
||||
::
|
||||
[%nonce *]
|
||||
%= effect-loop
|
||||
-.nonce-and-tx &
|
||||
sender.tx-data ship.diff
|
||||
nonce.tx-data nonce.diff
|
||||
proxy.tx-data proxy.diff
|
||||
parent.tx-data (^sein:title ship.diff)
|
||||
effects.roll-dat.roll t.effects.roll-dat.roll
|
||||
==
|
||||
:: %tx is always the second diff from a given transaction.
|
||||
::
|
||||
[%tx *]
|
||||
%= effect-loop
|
||||
+.nonce-and-tx &
|
||||
effects.roll-dat.roll t.effects.roll-dat.roll
|
||||
action.tx-data +<.tx.raw-tx.diff
|
||||
suc.tx-data ?~ err.diff & |
|
||||
length.tx-data `@`-.raw.raw-tx.diff
|
||||
tx-hash.tx-data (hash-raw-tx:naive-tx raw-tx.diff)
|
||||
==
|
||||
==
|
||||
::
|
||||
++ parse-gas-sender
|
||||
|= res=(list [@t json])
|
||||
^- (list [=keccak [gas=@ud sender=address]])
|
||||
%+ turn res
|
||||
|= [id=@t =json]
|
||||
^- [=keccak [gas=@ud sender=address]]
|
||||
:- (hex-to-num:ethereum id)
|
||||
:- %- parse-hex-result:rpc:ethereum
|
||||
~| json
|
||||
?> ?=(%o -.json)
|
||||
(~(got by p.json) 'effectiveGasPrice') :: gas used in wei
|
||||
%- parse-hex-result:rpc:ethereum
|
||||
~| json
|
||||
?> ?=(%o -.json)
|
||||
(~(got by p.json) 'from')
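Concretely, each receipt is reduced to its effective gas price and its sender; a Python sketch, assuming the receipts arrive as (tx-hash, JSON object) pairs with hex-encoded fields:

def parse_gas_sender(receipts):
    # receipts: iterable of (tx_hash_hex, receipt) where receipt carries
    # hex strings under "effectiveGasPrice" and "from"
    out = {}
    for tx_hash, receipt in receipts:
        gas = int(receipt["effectiveGasPrice"], 16)   # gas price in wei
        sender = int(receipt["from"], 16)
        out[int(tx_hash, 16)] = (gas, sender)
    return out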
|
||||
:: +get-roll-hashes makes a list of hashes of all transactions from $rolls-map
|
||||
::
|
||||
++ get-roll-hashes
|
||||
|= =rolls-map ^- (list keccak)
|
||||
%- zing
|
||||
%+ turn ~(val by rolls-map)
|
||||
|= a=(map keccak effects:naive)
|
||||
~(tap in ~(key by a))
|
||||
:: +compute-effects calls +naive to compute the state transitions for all
|
||||
:: logs, but it returns a map that only has the effects for L2 transactions,
|
||||
:: leaving out L1 transactions. we need to compute all of them in order to
|
||||
:: determine whether the transactions were valid.
|
||||
::
|
||||
++ compute-effects
|
||||
|= $: nas=^state:naive
|
||||
=events
|
||||
=net
|
||||
naive-contract=address
|
||||
chain-id=@ud
|
||||
==
|
||||
=| out=rolls-map
|
||||
^+ out
|
||||
::
|
||||
|-
|
||||
?~ events out
|
||||
=/ log=event-log:rpc:ethereum i.events
|
||||
?~ mined.log
|
||||
~& >> 'naive-csv: empty log'
|
||||
$(events t.events)
|
||||
=/ =blocknum block-number.u.mined.log
|
||||
=/ =^input:naive
|
||||
:- blocknum
|
||||
?. =(naive-contract address.log)
|
||||
:- %log
|
||||
[address.log (data-to-hex:dice data.log) topics.log]
|
||||
?~ input.u.mined.log
|
||||
~& >> 'naive-csv: empty L2 transaction'
|
||||
[%bat *@]
|
||||
[%bat u.input.u.mined.log]
|
||||
=^ =effects:naive nas
|
||||
(%*(. naive lac |) verifier:naive-tx chain-id nas input)
|
||||
%= $
|
||||
events t.events
|
||||
out ?. =(%bat +<.input)
|
||||
out :: skip L1 logs
|
||||
=/ cur (~(get by out) blocknum)
|
||||
?~ cur
|
||||
%+ ~(put by out) blocknum
|
||||
(my [[transaction-hash.u.mined.log effects]~])
|
||||
%+ ~(put by out) blocknum
|
||||
(~(put by u.cur) transaction-hash.u.mined.log effects)
|
||||
==
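A compact Python sketch of the behaviour described in the comment above +compute-effects: every log is fed through the state transition, but only L2 batches end up in the result map. `naive_step` and `is_l2_batch` are assumed stand-ins for +naive and the contract-address check.

def compute_effects(state, logs):
    out = {}
    for log in logs:
        effects, state = naive_step(state, log)   # assumed state-transition helper
        if is_l2_batch(log):                       # assumed: log targets the naive contract
            out.setdefault(log.block, {})[log.tx_hash] = effects
    return out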
|
||||
:: +export-csv writes a (list cord) as csv to disk at .pax
|
||||
::
|
||||
++ export-csv
|
||||
|= [in=(list cord) pax=path]
|
||||
=/ m (strand ,~)
|
||||
^- form:m
|
||||
;< =bowl:spider bind:m get-bowl
|
||||
=- (send-raw-card %pass / %arvo %c %info -)
|
||||
%+ foal:space:userlib
|
||||
;: weld
|
||||
/(scot %p our.bowl)/base/(scot %da now.bowl)
|
||||
pax
|
||||
/(scot %da now.bowl)/txt
|
||||
==
|
||||
[%txt !>(in)]
|
||||
:: +make-csv takes in a (list tx-data) and makes it into a (list cord) to be
|
||||
:: saved as a csv file
|
||||
::
|
||||
++ make-csv
|
||||
|= in=(list tx-data)
|
||||
^- (list cord)
|
||||
:- %- crip
|
||||
;: weld
|
||||
"block number,"
|
||||
"timestamp,"
|
||||
"roller address,"
|
||||
"roll hash,"
|
||||
"tx hash,"
|
||||
"sending ship,"
|
||||
"sending proxy,"
|
||||
"nonce,"
|
||||
"gas price,"
|
||||
"length of input data,"
|
||||
"success or failure,"
|
||||
"function name,"
|
||||
"parent"
|
||||
==
|
||||
%+ turn in
|
||||
|= =tx-data
|
||||
%- crip
|
||||
;: weld
|
||||
(scow %ud blocknum.tx-data) ","
|
||||
(scow %da timestamp.tx-data) ","
|
||||
(scow %ux roller.tx-data) ","
|
||||
(scow %ux roll-hash.tx-data) ","
|
||||
(scow %ux tx-hash.tx-data) ","
|
||||
(scow %p sender.tx-data) ","
|
||||
(scow %tas proxy.tx-data) ","
|
||||
(scow %ud nonce.tx-data) ","
|
||||
(scow %ud gas.tx-data) ","
|
||||
(scow %ux length.tx-data) ","
|
||||
(scow %f suc.tx-data) ","
|
||||
(scow %tas action.tx-data) ","
|
||||
(scow %p parent.tx-data)
|
||||
==
|
||||
--
|
@ -14,5 +14,5 @@
|
||||
;< =riot:clay bind:m
|
||||
(warp:strandio ship desk ~ %sing care case target-path)
|
||||
?~ riot
|
||||
(pure:m !>("nothing"))
|
||||
(pure:m !>('nothing'))
|
||||
(pure:m q.r.u.riot)
|
||||
|
@ -4,18 +4,15 @@
|
||||
|%
|
||||
+$ vere-update [cur=vere next=(unit vere)]
|
||||
::
|
||||
:: parse out the commit suffix for people on pre-release vere
|
||||
:: these revisions look like /vere/~.2.7-de2d39b
|
||||
:: we will have better pre-release (pace) handling later
|
||||
++ parse-current-pace
|
||||
|= current=vere
|
||||
^- @t
|
||||
(snag 1 rev.current)
|
||||
::
|
||||
++ parse-current-version
|
||||
|= current=vere
|
||||
^- @t
|
||||
=/ v
|
||||
%+ rush
|
||||
(slav %ta (rear rev.current))
|
||||
;~((glue hep) (star ;~(pose nud dot)) (star aln))
|
||||
?~ v (slav %ta (rear rev.current))
|
||||
(crip -.u.v)
|
||||
(rear rev.current)
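The new pace handling boils down to: read the runtime's release channel out of its revision path, fold the legacy 'once' channel into 'live', and fetch the latest version string for that channel from the URL the thread builds below. A Python sketch under those assumptions:

import urllib.request

def release_channel(rev):
    # rev is the runtime revision path, e.g. ("vere", "live", "2.11");
    # the pace (release channel) is assumed to be the second element
    pace = rev[1]
    return "live" if pace == "once" else pace

def latest_version(rev):
    url = "https://bootstrap.urbit.org/vere/{}/last".format(release_channel(rev))
    with urllib.request.urlopen(url) as resp:   # same endpoint the thread fetches
        return resp.read().decode().strip()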
|
||||
::
|
||||
++ is-equal-version
|
||||
|= [latest=@t current=vere]
|
||||
@ -25,14 +22,18 @@
|
||||
^- thread:spider
|
||||
|= arg=vase
|
||||
=/ m (strand ,vase)
|
||||
;< latest=cord bind:m
|
||||
(fetch-cord:strandio "https://bootstrap.urbit.org/vere/live/last")
|
||||
;< =bowl:spider bind:m get-bowl:strandio
|
||||
=/ cur=vere .^(vere %$ /(scot %p our.bowl)//(scot %da now.bowl)/zen/ver)
|
||||
=/ pace=tape
|
||||
?: =('once' (parse-current-pace cur))
|
||||
"live"
|
||||
(trip (parse-current-pace cur))
|
||||
;< latest=cord bind:m
|
||||
(fetch-cord:strandio "https://bootstrap.urbit.org/vere/{pace}/last")
|
||||
=/ =vere-update
|
||||
?: (is-equal-version latest cur)
|
||||
[cur ~]
|
||||
=| next=vere
|
||||
[cur `next(rev /vere/(scot %ta latest))]
|
||||
[cur `next(rev /vere/(crip pace)/(scot %ta latest))]
|
||||
%- pure:m
|
||||
!>(vere-update)
|
||||
|
@ -61,7 +61,7 @@
|
||||
?+ flow=(~(get by sub) which) `0/sub
|
||||
~ [~[(pine which)] 0/(~(put by sub) which ~)]
|
||||
[~ ~] [~[(pine which)] 0/sub]
|
||||
[~ ~ [* %& * *]] [~[(scry `+(aeon.u.u.flow) which)] 0/sub]
|
||||
[~ ~ [* %& * *]] [~[(pine which)] 0/sub]
|
||||
==
|
||||
++ quit (corl (lead %0) ~(del by sub)) :: Unsub from [ship dude path].
|
||||
++ read :: See current subscribed states.
|
||||
@ -95,7 +95,7 @@
|
||||
[~ ~ *]
|
||||
=. stale.u.u.flow &
|
||||
:_ 0/(~(put by sub) current u.flow)
|
||||
~[(on-rock-poke current u.u.flow ~)]
|
||||
~[(on-rock-poke fake=& current u.u.flow ~)]
|
||||
::
|
||||
[~ ~]
|
||||
:_ 0/(~(del by sub) current) :_ ~
|
||||
@ -105,77 +105,53 @@
|
||||
[path ship dude]:current
|
||||
==
|
||||
==
|
||||
:: :: Check if we're still interested
|
||||
:: :: in a wave. If no, no-op.
|
||||
:: :: If yes, scry.
|
||||
++ behn :: (See https://gist.github.com/belisarius222/7f8452bfea9b199c0ed717ab1778f35b)
|
||||
|= [ship=term =dude aeon=term path=paths]
|
||||
^- (list card:agent:gall)
|
||||
%- fall :_ ~ %- mole |.
|
||||
=/ ship (slav %p ship)
|
||||
=/ aeon (slav %ud aeon)
|
||||
?: (lte aeon aeon:(fall (~(got by sub) ship dude path) *flow)) ~
|
||||
~[(scry `aeon ship dude path)]
|
||||
::
|
||||
++ apply :: Handle response from publisher.
|
||||
|= res=(response:poke lake paths)
|
||||
^- (quip card:agent:gall subs)
|
||||
%- fall :_ `0/sub %- mole |.
|
||||
=* current [src.bowl dude.res path.res]
|
||||
=/ old=flow (fall (~(got by sub) current) *flow)
|
||||
?- type.res
|
||||
%tomb
|
||||
?: ?=(%tomb what.res)
|
||||
=/ =flow old(stale &)
|
||||
:_ 0/(~(put by sub) current `flow) :_ ~
|
||||
(on-rock-poke current flow ~)
|
||||
(on-rock-poke fake=& current flow ~)
|
||||
::
|
||||
%yore
|
||||
:_ 0/sub :_ ~
|
||||
(pine src.bowl dude.res path.res)
|
||||
::
|
||||
%nigh
|
||||
:_ 0/sub :_ ~
|
||||
(behn-s25 [dude aeon path]:res)
|
||||
::
|
||||
%scry
|
||||
=/ [wave=(unit wave:lake) =flow]
|
||||
=/ [wave=(unit wave:lake) new=(unit flow)]
|
||||
?- what.res
|
||||
%rock ?> (gte aeon.res aeon.old)
|
||||
[~ [aeon.res | | rock.res]]
|
||||
%wave ?> =(aeon.res +(aeon.old))
|
||||
[`wave.res [aeon.res | | (wash:lake rock.old wave.res)]]
|
||||
==
|
||||
:_ 0/(~(put by sub) current `flow)
|
||||
:~ (on-rock-poke current flow wave)
|
||||
(scry `+(aeon.res) src.bowl dude.res path.res)
|
||||
==
|
||||
%rock ?: (lte aeon.res aeon.old) [~ ~]
|
||||
[~ `[aeon.res | | rock.res]]
|
||||
%wave ?: (lte aeon.res aeon.old) [~ ~]
|
||||
?> =(aeon.res +(aeon.old))
|
||||
[`wave.res `[aeon.res | | (wash:lake rock.old wave.res)]]
|
||||
==
|
||||
?~ new `0/sub
|
||||
:_ 0/(~(put by sub) current new) :_ ~
|
||||
(on-rock-poke fake=& current u.new wave)
|
||||
::
|
||||
++ handle-fake-on-rock
|
||||
|= =(on-rock:poke lake paths)
|
||||
^- (list card:agent:gall)
|
||||
?~ flow=(~(get by sub) [src from path]:on-rock) ~
|
||||
?~ u.flow ~
|
||||
?. =([stale fail rock]:u.u.flow [stale fail rock]:on-rock) ~
|
||||
~[(on-rock-poke fake=| [src from path]:on-rock u.u.flow wave.on-rock)]
|
||||
::
|
||||
:: Non-public facing arms below
|
||||
::
|
||||
++ behn-s25
|
||||
|= [=dude =aeon path=noun]
|
||||
++ pine
|
||||
|= [who=ship which=dude where=paths]
|
||||
^- card:agent:gall
|
||||
:* %pass (zoom behn/(scot %p src.bowl)^dude^(scot %ud aeon)^path)
|
||||
%arvo %b %wait (add ~s25 now.bowl)
|
||||
==
|
||||
++ pine |= [ship dude paths] (scry ~ +<)
|
||||
++ scry
|
||||
|= [when=(unit aeon) who=ship which=dude where=paths]
|
||||
^- card:agent:gall
|
||||
=/ when ?~ when ~ (scot %ud u.when)
|
||||
:* %pass (zoom scry-request/(scot %p who)^which^when^where)
|
||||
:* %pass (zoom scry-request/(scot %p who)^which^where)
|
||||
%agent [who which]
|
||||
%poke %sss-to-pub :- result-type ^- result
|
||||
[where dap.bowl ^when]
|
||||
%poke sss-to-pub/[result-type `result`[where dap.bowl]]
|
||||
==
|
||||
++ on-rock-poke
|
||||
|= [[=ship =dude path=paths] flow wave=(unit wave:lake)]
|
||||
|= [fake=? [=ship =dude path=paths] flow wave=(unit wave:lake)]
|
||||
^- card:agent:gall
|
||||
:* %pass (zoom on-rock/(scot %ud aeon)^(scot %p ship)^dude^path)
|
||||
:* %pass %+ zoom ?:(fake %fake %on-rock)
|
||||
(scot %ud aeon)^(scot %p ship)^dude^path
|
||||
%agent [our dap]:bowl
|
||||
%poke %sss-on-rock on-rock-type ^- from
|
||||
[path ship dude stale fail rock wave]
|
||||
%poke ?:(fake %sss-fake-on-rock %sss-on-rock)
|
||||
on-rock-type `from`[path ship dude stale fail rock wave]
|
||||
==
|
||||
--
|
||||
++ du :: Manage publications.
|
||||
@ -184,28 +160,78 @@
|
||||
|%
|
||||
+$ into (request:poke paths)
|
||||
+$ result (response:poke lake paths)
|
||||
+$ rule [rocks=_1 waves=_5] :: Retention policy
|
||||
+$ rule $~ [`5 5]
|
||||
[horizon=(unit @ud) frequency=@ud] :: Retention policy
|
||||
+$ tide
|
||||
$: rok=((mop aeon rock:lake) gte)
|
||||
wav=((mop aeon wave:lake) lte)
|
||||
rul=rule
|
||||
mem=(mip ship dude @da)
|
||||
mem=(jug ship dude)
|
||||
==
|
||||
+$ buoy
|
||||
$: tid=$~(*tide $@(aeon tide))
|
||||
alo=(unit (set ship))
|
||||
==
|
||||
+$ pubs [%0 (map paths buoy)]
|
||||
++ pubs
|
||||
=< $>(%1 versioned)
|
||||
|%
|
||||
++ update
|
||||
|= =versioned
|
||||
^- pubs
|
||||
?- -.versioned
|
||||
%1 versioned
|
||||
%0
|
||||
:- %1
|
||||
%- ~(run by +.versioned)
|
||||
|= =buoy-0:^versioned
|
||||
^- buoy
|
||||
%= buoy-0
|
||||
tid
|
||||
?@ tid.buoy-0 tid.buoy-0
|
||||
^- tide
|
||||
%= tid.buoy-0
|
||||
rocks.rul
|
||||
?: =(waves.rul.tid.buoy-0 0) ~
|
||||
`(mul [+(rocks) waves]:rul.tid.buoy-0)
|
||||
::
|
||||
mem
|
||||
^- (jug ship dude)
|
||||
%- ~(run by mem.tid.buoy-0)
|
||||
|= =(map dude @da)
|
||||
^- (set dude)
|
||||
~(key by map)
|
||||
==
|
||||
==
|
||||
==
|
||||
++ versioned
|
||||
=< $% [%0 (map paths buoy-0)]
|
||||
[%1 (map paths buoy)]
|
||||
==
|
||||
|%
|
||||
+$ buoy-0
|
||||
$: tid=$~(*tide-0 $@(aeon tide-0))
|
||||
alo=(unit (set ship))
|
||||
==
|
||||
+$ tide-0
|
||||
$: rok=((mop aeon rock:lake) gte)
|
||||
wav=((mop aeon wave:lake) lte)
|
||||
rul=[rocks=@ud waves=@ud]
|
||||
mem=(mip ship dude @da)
|
||||
==
|
||||
--
|
||||
|= [pub=pubs =bowl:gall result-type=type]
|
||||
=> .(pub +.pub)
|
||||
--
|
||||
--
|
||||
|= [pub=versioned:pubs =bowl:gall result-type=type]
|
||||
=> .(pub +:(update:pubs pub))
|
||||
=* rok ((on aeon rock:lake) gte)
|
||||
=* wav ((on aeon wave:lake) lte)
|
||||
|%
|
||||
++ rule :: Set new retention policy.
|
||||
|= [path=paths =^rule]
|
||||
^- pubs
|
||||
:- %0
|
||||
:- %1
|
||||
%- fall :_ (~(put by pub) path %*(. *$<(aeon buoy) rul.tid rule))
|
||||
%- mole |.
|
||||
%+ ~(jab by pub) path
|
||||
|= =buoy
|
||||
?@ tid.buoy buoy
|
||||
@ -214,11 +240,11 @@
|
||||
++ wipe :: Create new rock and wipe rest.
|
||||
|= path=paths
|
||||
^- pubs
|
||||
:- %0
|
||||
:- %1
|
||||
%+ ~(jab by pub) path
|
||||
|= =buoy
|
||||
?@ tid.buoy buoy
|
||||
%* . buoy(tid (form tid.buoy(rul [0 1])))
|
||||
%* . buoy(tid (form tid.buoy(rul [`0 1])))
|
||||
rul.tid rul.tid.buoy
|
||||
wav.tid ~
|
||||
==
|
||||
@ -232,29 +258,31 @@
|
||||
?> ?=(^ tid.buoy)
|
||||
=* tide tid.buoy
|
||||
=/ next=aeon +((latest tide))
|
||||
:- %+ murn ~(tap bi mem.tide)
|
||||
|= [=ship =dude =@da]
|
||||
?: (lth da now.bowl) ~
|
||||
`(send scry/wave/wave ship dude next path)
|
||||
:- %0
|
||||
:- %- zing
|
||||
%+ turn ~(tap by mem.tide)
|
||||
|= [=ship =(set dude)]
|
||||
%+ turn ~(tap in set)
|
||||
|= =dude
|
||||
(send wave/[next wave] ship dude path)
|
||||
:- %1
|
||||
%+ ~(put by pub) path
|
||||
=/ last=[=aeon =rock:lake] (fall (pry:rok rok.tide) *[key val]:rok)
|
||||
=. wav.tide (put:wav wav.tide next wave)
|
||||
=. mem.tide ~
|
||||
?. =(next (add aeon.last waves.rul.tide)) buoy
|
||||
?. =(next (add aeon.last frequency.rul.tide)) buoy
|
||||
buoy(tid (form tide))
|
||||
::
|
||||
++ fork :: Fork a pub into an empty path.
|
||||
|= [from=paths to=paths]
|
||||
^- pubs
|
||||
:- %0
|
||||
:- %1
|
||||
?< (~(has by pub) to)
|
||||
(~(put by pub) to (~(got by pub) from))
|
||||
::
|
||||
++ copy :: Fork a sub into an empty path.
|
||||
|= [sub=_(mk-subs lake *) from=[ship dude *] to=paths]
|
||||
^- pubs
|
||||
:- %0
|
||||
:- %1
|
||||
?< (~(has by pub) to)
|
||||
%+ ~(put by pub) to
|
||||
%* . *$<(aeon buoy)
|
||||
@ -263,7 +291,7 @@
::
++ perm :: Change permissions with gate.
|= [where=(list paths) diff=$-((unit (set ship)) (unit (set ship)))]
^- pubs
^- (quip card:agent:gall pubs)
%+ edit where
|= =buoy
=/ new=_alo.buoy (diff alo.buoy)
@ -272,21 +300,21 @@
alo new
mem.tid ?~ new mem.tid.buoy
%. mem.tid.buoy
~(int by (malt (turn ~(tap in u.new) (late *(map @ @)))))
~(int by (malt (turn ~(tap in u.new) (late *(set @)))))
==
++ public (curr perm _~) :: Make list of paths public.
++ secret (curr perm _`~) :: Make list of paths secret.
:: :: Block ships from paths.
++ block :: No-ops on public paths.
|= [who=(list ship) whence=(list paths)]
^- pubs
^- (quip card:agent:gall pubs)
%+ perm whence
|= old=(unit (set ship))
?~ old ~ `(~(dif in u.old) (sy who))
:: :: Allow ships to paths.
++ allow :: Any public paths will no-op.
|= [who=(list ship) where=(list paths)]
^- pubs
^- (quip card:agent:gall pubs)
%+ perm where
|= old=(unit (set ship))
?~ old ~ `(~(gas in u.old) who)
@ -313,37 +341,51 @@
=/ =buoy (~(gut by pub) path.req *buoy)
?< &(?=(^ alo.buoy) !(~(has in u.alo.buoy) src.bowl))
?@ tid.buoy
:_ 0/pub :_ ~
(send tomb/~ src.bowl dude.req tid.buoy path.req)
?~ when.req
:_ 1/pub :_ ~
(send tomb/~ src.bowl dude.req path.req)
:_ 1/pub :_ ~
=/ last (fall (pry:rok rok.tid.buoy) *[=key =val]:rok)
:_ 0/pub :_ ~
(send scry/rock/val.last src.bowl dude.req key.last path.req)
?^ dat=(get:wav wav.tid.buoy u.when.req)
:_ 0/pub :_ ~
(send scry/wave/u.dat src.bowl [dude u.when path]:req)
?: %+ lte u.when.req
key::(fall (ram:wav wav.tid.buoy) (pry:rok rok.tid.buoy) [=key val]:wav)
:_ 0/pub :_ ~
(send yore/~ src.bowl [dude u.when path]:req)
?> =(u.when.req +((latest tid.buoy)))
:- ~[(send nigh/~ src.bowl [dude u.when path]:req)]
:- %0
%+ ~(put by pub) path.req
(send rock/last src.bowl dude.req path.req)
::
++ tell
|= [[ship=term =dude aeon=term path=paths] =sign:agent:gall]
^- (quip card:agent:gall pubs)
?> ?=(%poke-ack -.sign)
?^ p.sign `1/pub
=/ =buoy (~(gut by pub) path *buoy)
?< &(?=(^ alo.buoy) !(~(has in u.alo.buoy) src.bowl))
?@ tid.buoy
:_ 1/pub :_ ~
(send tomb/~ src.bowl dude path)
::
=> .(aeon +((slav %ud aeon)))
?^ dat=(get:wav wav.tid.buoy aeon)
:_ 1/pub :_ ~
(send wave/[aeon u.dat] src.bowl dude path)
=/ last (fall (pry:rok rok.tid.buoy) [=key =val]:rok)
?: (lte aeon key.last)
:_ 1/pub :_ ~
(send rock/last src.bowl dude path)
:- ~
:- %1
%+ ~(put by pub) path
%= buoy
mem.tid (~(put bi mem.tid.buoy) src.bowl dude.req (add ~s25 now.bowl))
mem.tid (~(put ju mem.tid.buoy) src.bowl dude)
==
::
:: Non-public facing arms below
::
++ send
|= [payload=_|3:*(response:poke lake paths) =ship =dude =aeon path=paths]
|= [payload=_|2:*(response:poke lake paths) =ship =dude path=paths]
^- card:agent:gall
=* mark (cat 3 %sss- name:lake)
:* %pass (zoom scry-response/(scot %p ship)^dude^(scot %ud aeon)^path)
=/ callback=^path
?: ?=(%tomb what.payload) (zoom tomb-response/(scot %p ship)^dude^path)
(zoom scry-response/(scot %p ship)^dude^(scot %ud aeon.payload)^path)
:* %pass callback
%agent [ship dude]
%poke mark result-type ^- (response:poke lake paths)
[path dap.bowl aeon payload]
[path dap.bowl payload]
==
++ latest
|= =$@(aeon tide)
@ -354,31 +396,49 @@
::
++ edit
|= [ps=(list paths) edit=$-(buoy buoy)]
^- pubs
:- %0
^- (quip card:agent:gall pubs)
%- ~(rep in (sy ps))
|= [path=paths =_pub]
%- fall :_ pub %- mole |.
(~(jab by pub) path edit)
|= [path=paths caz=(list card:agent:gall) %1 =_pub]
?~ old=(~(get by pub) path) [caz 1/pub]
=/ new=buoy (edit u.old)
:_ 1/(~(put by pub) path new)
%- weld :_ caz
^- (list card:agent:gall)
?@ tid.u.old ~
?@ tid.new
%- zing
%+ turn ~(tap by mem.tid.u.old)
|= [=ship =(set dude)]
(turn ~(tap in set) |=(=dude (send tomb/~ ship dude path)))
?~ alo.new ~
=/ new-alo=(jug ship dude)
(malt (turn ~(tap in u.alo.new) (late *(set @))))
%- zing
%+ turn ~(tap by (~(dif by mem.tid.u.old) new-alo))
|= [=ship =(set dude)]
(turn ~(tap in set) |=(=dude (send tomb/~ ship dude path)))
::
++ form
|= =tide
^+ tide
=/ max-rock=[=aeon =rock:lake] (fall (pry:rok rok.tide) *[key val]:rok)
=/ max-wave (fall (bind (ram:wav wav.tide) head) 0)
=. rok.tide
%+ gas:rok +<-:gas:rok
%- tab:rok :_ [~ +(rocks.rul.tide)]
?: ?| =(waves.rul.tide 0)
(lth max-wave (add aeon.max-rock waves.rul.tide))
=? rok.tide :: Create new rock.
?& !=(frequency.rul.tide 0)
(gte max-wave (add aeon.max-rock frequency.rul.tide))
==
rok.tide
%+ put:rok rok.tide
%+ roll (tab:wav wav.tide `aeon.max-rock max-wave)
|: [*[now=aeon =wave:lake] `[prev=aeon =rock:lake]`max-rock]
~| %aeon-awry
?> =(now +(prev))
[now (wash:lake rock wave)]
=. rok.tide
?~ horizon.rul.tide :: Only keep genesis and latest.
(gas:rok ~ (murn ~[(ram:rok rok.tide) (pry:rok rok.tide)] same))
%^ lot:rok rok.tide :: Delete beyond horizon.
~
(mole |.((sub max-wave (max [u.horizon frequency]:rul.tide))))
~| %rock-zero
tide(wav (lot:wav wav.tide (bind (ram:rok rok.tide) |=([r=@ *] (dec r))) ~))
--
@ -581,8 +581,9 @@
++ check-for-file
|= [[=ship =desk =case] =spur]
=/ m (strand ,?)
;< =riot:clay bind:m (warp ship desk ~ %sing %x case spur)
(pure:m ?=(^ riot))
;< =riot:clay bind:m (warp ship desk ~ %sing %u case spur)
?> ?=(^ riot)
(pure:m !<(? q.r.u.riot))
::
++ list-tree
|= [[=ship =desk =case] =spur]
@ -6,11 +6,15 @@
=, mimes:html
|_ mud=@t
++ grow :: convert to
|% ++ mime [/text/css (as-octs mud)] :: convert to %mime
++ elem ;style :: convert to %hymn
|%
++ mime [/text/css (as-octs mud)] :: convert to %mime
++ hymn :: convert to %hymn
|^ html
++ style ;style
;- (trip mud)
==
++ hymn ;html:(head:"{elem}" body)
++ html ;html:(head:"{style}" body)
--
--
++ grab
|% :: convert from
@ -8,20 +8,7 @@
++ grow :: convert to
|%
++ mime `^mime`[/text/x-hoon (as-octs:mimes:html own)] :: convert to %mime
++ elem :: convert to %html
;div:pre(urb_codemirror "", mode "hoon"):"{(trip own)}"
:: =+ gen-id="src-{<`@ui`(mug own)>}"
:: ;div
:: ;textarea(id "{gen-id}"):"{(trip own)}"
:: ;script:"""
:: CodeMirror.fromTextArea(
:: window[{<gen-id>}],
:: \{lineNumbers:true, readOnly:true}
:: )
:: """
:: ==
++ hymn
:: ;html:(head:title:"Source" "+{elem}")
;html
;head
;title:"Source"
@ -2,14 +2,5 @@
:::: /hoon/htm/mar
::
/? 310
|_ own=manx
::
++ grad %noun
++ grow :: convert to
|%
++ noun own
++ hymn own
--
++ grab |% :: convert from
++ noun manx :: clam from %noun
-- --
/= htm /mar/html
htm
@ -8,10 +8,13 @@
++ grow
|%
++ mime [/application/javascript (as-octs:mimes:html (@t mud))]
++ elem ;script
++ hymn :: convert to %hymn
|^ html
++ script ;script
;- (trip (@t mud))
==
++ hymn ;html:(head:"+{elem}" body)
++ html ;html:(head:"{script}" body)
--
--
++ grab
|% :: convert from
@ -16,11 +16,6 @@
%& a+p.result
%| a+[a+[%s '[[output rendering error]]']~]~
==
::
++ elem
=- ;pre:code:"{(of-wall -)}"
^- wall %- zing ^- (list wall)
(turn (flop tan) |=(a=tank (wash 0^160 a)))
--
++ grab :: convert from
|%
@ -18,7 +18,6 @@
=> v=.
|%
++ mime => v [/text/plain (as-octs (of-wain txt))]
++ elem => v ;pre: {(trip (of-wain txt))}
--
++ grad
|%
@ -9,10 +9,6 @@
++ mime [/text/x-unmark (as-octs:mimes:html mud)]
++ txt
(to-wain:format mud)
++ elem
^- manx
=, cram
elm:(static (ream mud))
++ front :: XX performance, types
^- (map term knot)
%- ~(run by inf:(static:cram (ream mud)))
@ -9,10 +9,6 @@
++ mime [/text/x-unmark (as-octs:mimes:html mud)]
++ txt
(to-wain:format mud)
++ elem
^- manx
=, cram
elm:(static (ream mud))
++ front :: XX performance, types
^- (map term knot)
%- ~(run by inf:(static:cram (ream mud)))
@ -1,5 +1,5 @@
::
:::: /hoon/elem/urb/mar
:::: /hoon/urb/mar
::
/? 310
=, mimes:html
5
pkg/base-dev/mar/xhtml.hoon
Normal file
5
pkg/base-dev/mar/xhtml.hoon
Normal file
@ -0,0 +1,5 @@
::
:::: /hoon/xhtml/mar
::
/= xhtml /mar/html
xhtml
@ -10,25 +10,21 @@
--
+$ aeon @ud
+$ dude dude:agent:gall
+$ what ?(%rock %wave)
++ poke
|%
++ request
|* paths=mold
$: path=paths
=dude
when=(unit aeon)
==
++ response
|* [=(lake) paths=mold]
$: path=paths
=dude
=aeon
$% [type=?(%nigh %yore %tomb) ~]
$: type=%scry
$% [what=%rock =rock:lake]
[what=%wave =wave:lake]
== == == ==
$% [what=%tomb ~]
[what=%rock =aeon =rock:lake]
[what=%wave =aeon =wave:lake]
== ==
++ on-rock
|* [=(lake) paths=mold]
$: path=paths
@ -1,21 +0,0 @@
::
:::: /hoon/css/mar
::
/? 310
=, eyre
=, mimes:html
|_ mud=@t
++ grow :: convert to
|% ++ mime [/text/css (as-octs mud)] :: convert to %mime
++ elem ;style :: convert to %hymn
;- (trip mud)
==
++ hymn ;html:(head:"{elem}" body)
--
++ grab
|% :: convert from
++ mime |=([p=mite q=octs] (@t q.q))
++ noun @t :: clam from %noun
--
++ grad %mime
--
1
pkg/landscape/mar/css.hoon
Symbolic link
1
pkg/landscape/mar/css.hoon
Symbolic link
@ -0,0 +1 @@
../../base-dev/mar/css.hoon
@ -1,18 +0,0 @@
::
:::: /hoon/elem/mar
::
/? 310
=, mimes:html
=, html
|_ own=manx
::
++ grad %noun
++ grow :: convert to
|%
++ hymn ;html:(head body:"+{own}") :: convert to %hymn
++ html (crip (en-xml hymn)) :: convert to %html
++ mime [/text/html (as-octs html)] :: convert to %mime
--
++ grab |% :: convert from
++ noun manx :: clam from %noun
-- --
@ -18,5 +18,4 @@
--
++ grab |% :: convert from
++ noun (map knot cord) :: clam from %noun
:: ++ elem ,~
-- --
@ -1,12 +0,0 @@
|_ a=manx
++ grad %noun
++ grab
|%
++ noun manx
--
++ grow
|%
++ htm a
++ noun a
--
--
1
pkg/landscape/mar/xhtml.hoon
Symbolic link
1
pkg/landscape/mar/xhtml.hoon
Symbolic link
@ -0,0 +1 @@
../../base-dev/mar/xhtml.hoon
21
sh/release
21
sh/release
@ -1,21 +0,0 @@
#!/usr/bin/env bash

# Usage: release

# Create a static tarball under ./release named with the current git revision.

set -xeuo pipefail

cd "${0%/*}/.."

rev="$(git rev-parse HEAD)"
sha="${rev:0:5}"
ver="$(cat pkg/urbit/version)"
tgz="release/urbit-v${ver}-${sha}.tar.gz"
out="$(nix-build -A tarball --arg enableStatic true --no-out-link)"

mkdir -p release

cp -f "$out" "$tgz"

chmod 0644 "$tgz"
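The removed sh/release script wrapped a legacy nix-build of the static tarball. A rough flake-era equivalent could look like the sketch below; the `tarball` flake output name is an assumption for illustration, not something defined by this change.

    #!/usr/bin/env bash
    # Hypothetical flake-based replacement for the removed sh/release script.
    # Assumes the flake exposes a `tarball` package output; that name is illustrative only.
    set -xeuo pipefail
    cd "${0%/*}/.."

    rev="$(git rev-parse HEAD)"
    sha="${rev:0:5}"
    ver="$(cat pkg/urbit/version)"
    tgz="release/urbit-v${ver}-${sha}.tar.gz"
    # nix build prints the built store path when --print-out-paths is given.
    out="$(nix build .#tarball --no-link --print-out-paths)"

    mkdir -p release
    cp -f "$out" "$tgz"
    chmod 0644 "$tgz"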
@ -4,4 +4,4 @@ set -xeuo pipefail

cd "${0%/*}/.."

cp "$(nix-build -A brass.build --no-out-link)" bin/brass.pill
cp "$(nix build .#brass --no-link --print-out-paths)" bin/brass.pill
@ -4,4 +4,4 @@ set -xeuo pipefail

cd "${0%/*}/.."

cp "$(nix-build -A ivory.build --no-out-link)" bin/ivory.pill
cp "$(nix build .#ivory --no-link --print-out-paths)" bin/ivory.pill
@ -4,4 +4,4 @@ set -xeuo pipefail

cd "${0%/*}/.."

cp "$(nix-build -A solid.build --no-out-link)" bin/solid.pill
cp "$(nix build .#solid --no-link --print-out-paths)" bin/solid.pill
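The three pill scripts above all apply the same substitution from the legacy Nix CLI to the flake-based one. A minimal sketch of that pattern, reusing the brass attribute names that appear in the hunks above:

    #!/usr/bin/env bash
    set -xeuo pipefail

    # Legacy, non-flake invocation: build the brass pill attribute and print its store path.
    old_out="$(nix-build -A brass.build --no-out-link)"

    # Flake-based equivalent: build the same output from the repository's flake.
    new_out="$(nix build .#brass --no-link --print-out-paths)"

    # Either path can then be copied into place.
    cp -f "$new_out" bin/brass.pill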
13
test-desk.bill
Normal file
13
test-desk.bill
Normal file
@ -0,0 +1,13 @@
:~ %acme
%azimuth
%dbug
%dojo
%eth-watcher
%hood
%herm
%lens
%ping
%spider
%test
==