Merge branch 'next/arvo' into yu/content-dist-merged

Includes patched versions of ames' and clay's +load arms.

In clay, we do a dumb ;; hack to get the state to adapt properly. This
shouldn't be needed ($case had an extra... case added to it, old ones
should still nest), and so we should revisit the logic there to make it
cleaner/better before release.
This commit is contained in:
fang 2022-06-30 16:27:02 +02:00
commit 292a65f64c
No known key found for this signature in database
GPG Key ID: EB035760C1BBA972
83 changed files with 51570 additions and 51948 deletions

View File

@ -1,40 +1,10 @@
# Notes:
#
# jobs.<job_id>
#
# A separate job id results in a lot of duplication of nix/cachix work.
# The build will have to download any substituted derivations from cachix
# for the steps with each distinct job id and upload built derivations to
# cachix after each job has completed, either successfully or on failure.
#
# jobs.<job_id>.steps.run
#
# build + test are distinct as each step entry results in a collapsible title
# within the log output, which makes it easier to view failing builds or
# tests independently.
#
# jobs.<job_id>.strategy.fail-fast
#
# Set to false so developers working on vere or king-haskell can have their
# respective builds proceed without the other causing failure.
#
# shell.nix
#
# mkShell doesn't allow you to build it - so instantiate all the subshells
# defined for the individual pkg/*/shell.nix as a sanity check and to create
# some artefacts suitable for developers to pull from cachix. The top-level
# shell.nix build time is dominated by Haskell dependencies so it's built as
# part of the haskell build steps.
#
# Syntax:
#
# https://docs.github.com/en/free-pro-team@latest/actions/reference/workflow-syntax-for-github-actions
name: build
on:
push:
paths:
- '.github/workflows/build.yml'
- '.github/workflows/vere.yml'
- 'pkg/arvo/**'
- 'pkg/docker-image/**'
- 'pkg/ent/**'
@ -44,9 +14,11 @@ on:
- 'pkg/urcrypt/**'
- 'bin/**'
- 'nix/**'
- default.nix
- 'default.nix'
pull_request:
paths:
- '.github/workflows/build.yml'
- '.github/workflows/vere.yml'
- 'pkg/arvo/**'
- 'pkg/docker-image/**'
- 'pkg/ent/**'
@ -56,64 +28,15 @@ on:
- 'pkg/urcrypt/**'
- 'bin/**'
- 'nix/**'
- default.nix
- 'default.nix'
jobs:
urbit:
strategy:
fail-fast: false
matrix:
include:
- { os: ubuntu-latest }
- { os: macos-latest }
runs-on: ${{ matrix.os }}
steps:
- uses: actions/checkout@v2
# We only want the extra nix config on linux, where it is necessary
# for the docker build. We don't want it on Mac, where it isn't needed, but
# it breaks the nix install. The two `if` clauses should be mutually
# exclusive
- uses: cachix/install-nix-action@v16
with:
extra_nix_config: |
system-features = nixos-test benchmark big-parallel kvm
if: ${{ matrix.os == 'ubuntu-latest' }}
- uses: cachix/install-nix-action@v16
if: ${{ matrix.os != 'ubuntu-latest' }}
- uses: cachix/cachix-action@v10
with:
name: ares
authToken: ${{ secrets.CACHIX_AUTH_TOKEN }}
- run: nix-build -A urbit --arg enableStatic true
- if: ${{ matrix.os == 'ubuntu-latest' }}
run: nix-build -A urbit-tests
- if: ${{ matrix.os == 'ubuntu-latest' }}
run: nix-build -A docker-image
mingw:
runs-on: windows-latest
defaults:
run:
shell: C:\msys64\msys2_shell.cmd -mingw64 -defterm -no-start -here -c ". <(cygpath '{0}')"
working-directory: ./pkg/urbit
steps:
- uses: actions/checkout@v2
with:
lfs: true
# echo suppresses pacman prompt
- run: echo|./configure
env:
CACHIX_CACHE: ares
CACHIX_AUTH_TOKEN: ${{ secrets.CACHIX_AUTH_TOKEN }}
- run: mingw32-make build/urbit
- run: build/urbit -l -d -B ../../bin/solid.pill -F bus && curl -f --data '{"source":{"dojo":"+hood/exit"},"sink":{"app":"hood"}}' http://localhost:12321
call-vere:
uses: ./.github/workflows/vere.yml
with:
pace: 'often'
upload: >-
${{
(github.ref_name == 'next/vere' && github.ref_type == 'branch')
}}
secrets: inherit

14
.github/workflows/pre-release.yml vendored Normal file
View File

@ -0,0 +1,14 @@
name: pre-release
on:
release: null
push:
tags: 'urbit-v[0-9]+.[0-9]+-rc[0-9]+'
jobs:
call-vere:
uses: ./.github/workflows/vere.yml
with:
pace: 'soon'
upload: true
secrets: inherit

View File

@ -3,42 +3,12 @@ name: release
on:
release: null
push:
tags: ['*']
tags: 'urbit-v[0-9]+.[0-9]+'
jobs:
upload:
strategy:
matrix:
include:
- { os: ubuntu-latest, system: x86_64-linux }
- { os: macos-latest, system: x86_64-darwin }
runs-on: ${{ matrix.os }}
steps:
- uses: actions/checkout@v2
- uses: cachix/install-nix-action@v16
- uses: cachix/cachix-action@v10
with:
name: ${{ secrets.CACHIX_NAME }}
authToken: ${{ secrets.CACHIX_AUTH_TOKEN }}
- uses: google-github-actions/setup-gcloud@v0.2.0
with:
version: '290.0.1'
service_account_key: ${{ secrets.GCS_SERVICE_ACCOUNT_KEY }}
project_id: ${{ secrets.GCS_PROJECT }}
export_default_credentials: true
- run: nix-build -A tarball --arg enableStatic true
- name: Run upload to bootstrap.urbit.org
run: |
version="$(cat ./pkg/urbit/version)"
system="$(nix-instantiate --eval --expr 'builtins.currentSystem')"
system=${system:1:${#system}-2}
target="gs://bootstrap.urbit.org/ci/urbit-v${version}-${system}-${GITHUB_SHA:0:9}.tgz"
gsutil cp -n ./result "$target"
echo "upload to $target complete."
call-vere:
uses: ./.github/workflows/vere.yml
with:
pace: 'live'
upload: true
secrets: inherit

44
.github/workflows/tarballs.yml vendored Normal file
View File

@ -0,0 +1,44 @@
name: tarballs
on:
release: null
push:
tags: ['*']
jobs:
upload:
strategy:
matrix:
include:
- { os: ubuntu-latest, system: x86_64-linux }
- { os: macos-latest, system: x86_64-darwin }
runs-on: ${{ matrix.os }}
steps:
- uses: actions/checkout@v2
- uses: cachix/install-nix-action@v16
- uses: cachix/cachix-action@v10
with:
name: ${{ secrets.CACHIX_NAME }}
authToken: ${{ secrets.CACHIX_AUTH_TOKEN }}
- uses: google-github-actions/setup-gcloud@v0.2.0
with:
version: '290.0.1'
service_account_key: ${{ secrets.GCS_SERVICE_ACCOUNT_KEY }}
project_id: ${{ secrets.GCS_PROJECT }}
export_default_credentials: true
- run: nix-build -A tarball --arg enableStatic true
- name: Run upload to bootstrap.urbit.org
run: |
version="$(cat ./pkg/urbit/version)"
system="$(nix-instantiate --eval --expr 'builtins.currentSystem')"
system=${system:1:${#system}-2}
target="gs://bootstrap.urbit.org/ci/urbit-v${version}-${system}-${GITHUB_SHA:0:9}.tgz"
gsutil cp -n ./result "$target"
echo "upload to $target complete."

263
.github/workflows/vere.yml vendored Normal file
View File

@ -0,0 +1,263 @@
name: vere
on:
workflow_call:
inputs:
upload:
description: 'upload binaries to gcp'
type: boolean
default: false
required: false
pace:
description: 'release pace'
type: string
default: 'often'
required: false
secrets:
CACHIX_AUTH_TOKEN:
required: false
GCS_SERVICE_ACCOUNT_KEY:
required: false
GCS_PROJECT:
required: false
workflow_dispatch:
inputs:
upload:
description: 'upload binaries to gcp'
type: boolean
default: false
required: false
pace:
description: 'release pace'
type: choice
options:
- often
- soon
- live
env:
UPLOAD_BASE: bootstrap.urbit.org/vere
VERE_PACE: ${{ inputs.pace }}
VERSION_TYPE: ${{ (inputs.pace == 'soon' || inputs.pace == 'live') && 'real' || 'hash' }}
jobs:
urbit:
strategy:
fail-fast: false
matrix:
include:
- { os: ubuntu-latest }
- { os: macos-latest }
runs-on: ${{ matrix.os }}
steps:
- uses: actions/checkout@v2
# We only want the extra nix config on linux, where it is necessary
# for the docker build. We don't want it on Mac, where it isn't needed, but
# it breaks the nix install. The two `if` clauses should be mutually
# exclusive
- uses: cachix/install-nix-action@v16
with:
extra_nix_config: |
system-features = nixos-test benchmark big-parallel kvm
if: ${{ matrix.os == 'ubuntu-latest' }}
- uses: cachix/install-nix-action@v16
if: ${{ matrix.os != 'ubuntu-latest' }}
- uses: cachix/cachix-action@v10
with:
name: ares
authToken: ${{ secrets.CACHIX_AUTH_TOKEN }}
- name: build static binary
run: |
nix-build -A urbit \
--arg enableStatic true \
--argstr verePace ${{ env.VERE_PACE }} > ./urbit-derivation
cat ./urbit-derivation
echo -n "urbit_static=" >> $GITHUB_ENV
cat ./urbit-derivation >> $GITHUB_ENV
cat ./urbit-derivation
- name: confirm binary is mostly static
if: matrix.os == 'macos-latest'
run: |
bin="${{ env.urbit_static }}/bin/urbit"
if [ ! -f "$bin" ]; then
echo "no binary at $bin"
exit 1;
fi
libs="$(otool -L "${{ env.urbit_static }}/bin/urbit" | tail -n +2)"
# XX CoreFoundation?
if [ -z "$(echo "$libs" | grep -v libSystem)" ]; then
echo "it's mostly static"
echo "$libs"
exit 0
else
echo "dynamic links found:"
echo "$libs"
exit 1
fi
- name: get version string
run: |
if [ "real" == "$VERSION_TYPE" ]; then
version="$(cat ./pkg/urbit/version)"
else
version="${GITHUB_SHA:0:9}"
fi
echo -n "$version" > ./version-string
- name: upload version string artifact
if: matrix.os == 'ubuntu-latest'
uses: actions/upload-artifact@v3
with:
name: version-string
path: version-string
- uses: google-github-actions/setup-gcloud@v0.2.0
if: inputs.upload
with:
version: '290.0.1'
service_account_key: ${{ secrets.GCS_SERVICE_ACCOUNT_KEY }}
project_id: ${{ secrets.GCS_PROJECT }}
export_default_credentials: true
- name: upload binary to bootstrap.urbit.org
if: inputs.upload
run: |
version="$(cat ./version-string)"
system="$(nix-instantiate --eval --expr 'builtins.currentSystem')"
system=${system:1:${#system}-2}
target="gs://${UPLOAD_BASE}/${VERE_PACE}/${version}/vere-v${version}-${system}"
gsutil cp -n "${{ env.urbit_static }}/bin/urbit" "$target"
exitcode=$?
test $exitcode -eq 0 &&
echo "upload to $target complete." ||
echo "upload to $target failed.";
exit $exitcode
- if: ${{ matrix.os == 'ubuntu-latest' }}
run: nix-build -A urbit-tests
- if: ${{ matrix.os == 'ubuntu-latest' }}
run: nix-build -A docker-image
mingw:
runs-on: windows-latest
defaults:
run:
shell: >
C:\msys64\msys2_shell.cmd -mingw64 -defterm -no-start -here -c
". <(cygpath '{0}')"
working-directory: ./pkg/urbit
steps:
- uses: actions/checkout@v2
with:
lfs: true
# echo suppresses pacman prompt
- run: echo|./configure
env:
CACHIX_CACHE: ares
CACHIX_AUTH_TOKEN: ${{ secrets.CACHIX_AUTH_TOKEN }}
- run: mingw32-make build/urbit
- run: >
build/urbit -l -d -B ../../bin/solid.pill -F bus &&
curl -f --data '{"source":{"dojo":"+hood/exit"},"sink":{"app":"hood"}}'
http://localhost:12321
# Verify the MinGW build produced a (mostly) statically linked binary:
# any dynamic dependency outside windows/system32 fails the build.
- name: confirm binary is mostly static
  run: |
    # Fix: the original read `[ -z "$(...)"]` — the missing space before
    # `]` is a syntax error in sh/bash `test`, so this step always failed.
    if [ -z "$(ldd build/urbit | grep -vi "windows/system32")" ]; then
      echo "it's mostly static"
      exit 0
    else
      echo "dynamic links found:"
      ldd build/urbit
      exit 1
    fi
- uses: actions/setup-python@v2
if: inputs.upload
with:
python-version: 3.7
- uses: google-github-actions/setup-gcloud@v0.6.0
if: inputs.upload
env:
# see https://github.com/google-github-actions/setup-gcloud/issues/100
CLOUDSDK_PYTHON: ${{env.pythonLocation}}\python.exe
with:
service_account_key: ${{ secrets.GCS_SERVICE_ACCOUNT_KEY }}
project_id: ${{ secrets.GCS_PROJECT }}
export_default_credentials: true
- name: upload binary to bootstrap.urbit.org
if: inputs.upload
env:
CLOUDSDK_PYTHON: ${{env.pythonLocation}}\python.exe
shell: bash
run: |
if [ "real" == "$VERSION_TYPE" ]; then
version="$(cat ./version)"
else
version="${GITHUB_SHA:0:9}"
fi
system="x86_64-windows"
target="gs://${UPLOAD_BASE}/${VERE_PACE}/${version}/vere-v${version}-${system}.exe"
gsutil cp -n ./build/urbit.exe "$target"
exitcode=$?
test $exitcode -eq 0 &&
echo "upload to $target complete." ||
echo "upload to $target failed.";
exit $exitcode
after:
runs-on: ubuntu-latest
needs: [urbit, mingw]
# XX disabled due to missing storage.objects.delete access
if: false
# if: inputs.upload
steps:
- uses: google-github-actions/setup-gcloud@v0.2.0
with:
version: '290.0.1'
service_account_key: ${{ secrets.GCS_SERVICE_ACCOUNT_KEY }}
project_id: ${{ secrets.GCS_PROJECT }}
export_default_credentials: true
- name: download version-string
uses: actions/download-artifact@v3
with:
name: version-string
- name: update latest deployed version
run: |
target="gs://${UPLOAD_BASE}/${VERE_PACE}/last"
# *not* -n, as we want to overwrite the latest version-string
#
gsutil cp ./version-string "$target"
exitcode=$?
test $exitcode -eq 0 &&
echo "upload to $target complete." ||
echo "upload to $target failed.";
exit $exitcode

View File

@ -1,3 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:204056f6c140a8d5329f78e149a318bc85190d2aaab73204249d39a12d0353e0
size 9296839
oid sha256:2f46209c31bc7be965b6ba32db92fb0746be15d9613b1c3c8d09ce7fa0e5e157
size 8280141

View File

@ -1,3 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:9003df4b4ec9743c6e5d92e07470fe5d93d16a8d84a8327eba2605ba69325483
size 8861772
oid sha256:2cf770948d0c377b417f7c93f06ad60ff69bbf0a3cc5cebc7adf982310ec41cb
size 6349089

View File

@ -40,7 +40,9 @@
, crossOverlays ? [ ]
# Whether to use pkgs.pkgsStatic.* to obtain statically linked package
# dependencies - ie. when building fully-static libraries or executables.
, enableStatic ? false }:
, enableStatic ? false
# release channel (when static)
, verePace ? "" }:
let
@ -95,7 +97,7 @@ let
marsSources = callPackage ./nix/pkgs/marsSources { };
urbit = callPackage ./nix/pkgs/urbit { inherit enableStatic; };
urbit = callPackage ./nix/pkgs/urbit { inherit enableStatic verePace; };
urcrypt = callPackage ./nix/pkgs/urcrypt { inherit enableStatic; };

View File

@ -27,6 +27,10 @@ in {
brotliSupport = false;
};
# lies, all lies
openssl-static-osx = prev.openssl;
zlib-static-osx = prev.zlib;
lmdb = prev.lmdb.overrideAttrs (attrs: {
patches =
optionalList attrs.patches ++ prev.lib.optional prev.stdenv.isDarwin [

View File

@ -16,12 +16,19 @@ let
in {
gmp = enableStatic prev.gmp;
curlUrbit = enableStatic prev.curlUrbit;
curlUrbit = enableStatic (prev.curlUrbit.override { openssl = final.openssl-static-osx; zlib = final.zlib-static-osx; });
libuv = enableStatic prev.libuv;
libffi = enableStatic prev.libffi;
openssl-static-osx = prev.openssl.override {
static = true;
withPerl = false;
};
zlib-static-osx = if final.stdenv.isDarwin then prev.zlib.static else prev.zlib;
secp256k1 = enableStatic prev.secp256k1;
lmdb = prev.lmdb.overrideAttrs (old:

View File

@ -1,9 +1,11 @@
{ lib, stdenv, coreutils, pkgconfig # build/env
, cacert, ca-bundle, ivory # codegen
, curlUrbit, ent, gmp, h2o, libsigsegv, libuv, lmdb # libs
, murmur3, openssl, softfloat3, urcrypt, zlib #
, curlUrbit, ent, gmp, h2o, libsigsegv, libuv, lmdb # libs
, murmur3, openssl, openssl-static-osx, softfloat3 #
, urcrypt, zlib, zlib-static-osx #
, enableStatic ? stdenv.hostPlatform.isStatic # opts
, enableDebug ? false
, verePace ? ""
, doCheck ? true
, enableParallelBuilding ? true
, dontStrip ? true }:
@ -40,10 +42,10 @@ in stdenv.mkDerivation {
libuv
lmdb
murmur3
openssl
(if stdenv.isDarwin && enableStatic then openssl-static-osx else openssl)
softfloat3
urcrypt
zlib
(if stdenv.isDarwin && enableStatic then zlib-static-osx else zlib)
];
# Ensure any `/usr/bin/env bash` shebang is patched.
@ -69,6 +71,7 @@ in stdenv.mkDerivation {
MEMORY_DEBUG = enableDebug;
CPU_DEBUG = enableDebug;
EVENT_TIME_DEBUG = false;
VERE_PACE = if enableStatic then verePace else "";
# See https://github.com/NixOS/nixpkgs/issues/18995
hardeningDisable = lib.optionals enableDebug [ "all" ];

View File

@ -1,5 +1,5 @@
{ stdenv, autoreconfHook, pkgconfig
, libaes_siv, openssl, secp256k1
, libaes_siv, openssl, openssl-static-osx, secp256k1
, enableStatic ? stdenv.hostPlatform.isStatic }:
stdenv.mkDerivation rec {

View File

@ -532,10 +532,11 @@
?: ?=([%show %3] -.mad)
(dy-rash %tan (dy-show-source q.mad) ~)
?: ?=(%brev -.mad)
?: ?=(?(%eny %now %our) p.mad)
(dy-rash %tan (cat 3 p.mad ' is immutable') ~)
=. var (~(del by var) p.mad)
=< dy-amok
?+ p.mad .
$?(%eny %now %our) !!
%lib .(lib ~)
%sur .(sur ~)
%dir .(dir [[our.hid %base ud+0] /])
@ -543,13 +544,12 @@
=+ cay=(~(got by rez) p.q.mad)
?- -.p.mad
%verb
?: ?=(?(%eny %now %our) p.p.mad)
(dy-rash %tan (cat 3 p.p.mad ' is immutable') ~)
=. var (~(put by var) p.p.mad cay)
~| bad-set+[p.p.mad p.q.cay]
=< dy-amok
?+ p.p.mad .
%eny ~|(%entropy-is-eternal !!)
%now ~|(%time-is-immutable !!)
%our ~|(%self-is-immutable !!)
%lib
%_ .
lib
@ -568,6 +568,8 @@
?: ?=([@ ~] pax) ~[i.pax %base '0']
?: ?=([@ @ ~] pax) ~[i.pax i.t.pax '0']
pax
?: =(~ .^((list path) %ct pax))
+(..dy (he-diff %tan 'dojo: dir does not exist' ~))
=. dir (need (de-beam pax))
=- +>(..dy (he-diff %tan - ~))
rose+[" " `~]^~[leaf+"=%" (smyt (en-beam he-beak s.dir))]
@ -735,9 +737,9 @@
^+ +>+>
=^ dat say (~(transceive sole say) cal)
?: |(?=(^ per) ?=(^ pux) ?=(~ pro))
~& %dy-edit-busy
=^ lic say (~(transmit sole say) dat)
(dy-diff %mor [%det lic] [%bel ~] ~)
=/ tip=@t 'dojo: busy (press backspace to abort)'
(dy-diff %mor [%det lic] [%bel ~] [%tan [tip ~]] ~)
=> .(per `dat)
=/ res (mule |.((slam u.pro !>((tufa buf.say)))))
?: ?=(%| -.res)

View File

@ -1,21 +0,0 @@
::
:::: /hoon/css/mar
::
/? 310
=, eyre
=, mimes:html
|_ mud=@t
++ grow :: convert to
|% ++ mime [/text/css (as-octs mud)] :: convert to %mime
++ elem ;style :: convert to %hymn
;- (trip mud)
==
++ hymn ;html:(head:"{elem}" body)
--
++ grab
|% :: convert from
++ mime |=([p=mite q=octs] (@t q.q))
++ noun @t :: clam from %noun
--
++ grad %mime
--

1
pkg/arvo/mar/css.hoon Symbolic link
View File

@ -0,0 +1 @@
../../base-dev/mar/css.hoon

View File

@ -8,5 +8,14 @@
++ grab
|%
+$ noun [ship binding]
++ json
=, dejs:format
|= jon=json
%. jon
%- ot
:~ [%ship |=(j=json ?>(?=([%s *] j) (rash +.j fed:ag)))]
[%address |=(j=json ?>(?=([%s *] j) [%if (rash +.j ip4:eyre)]))]
[%turf (ar so)]
==
--
--

View File

@ -0,0 +1,16 @@
/- *dns
|_ r=request
++ grad %noun
++ grow
|%
++ json
%- pairs:enjs:format
:~ ['ship' (ship:enjs:format ship.r)]
['address' s+(rsh 3 (scot %if +.address.r))]
==
--
++ grab
|%
++ noun request
--
--

View File

@ -1761,6 +1761,7 @@
[%park des=desk yok=yoki ran=rang] :: synchronous commit
[%perm des=desk pax=path rit=rite] :: change permissions
[%pork ~] :: resume commit
[%stir arg=*] :: debug
[%tomb =clue] :: tombstone specific
$>(%trim vane-task) :: trim state
$>(%vega vane-task) :: report upgrade

View File

@ -266,6 +266,20 @@
=. private-key (rsh 8 (rsh 3 private-key))
::
`@`(shar:ed:crypto public-key private-key)
:: +encode-keys-packet: create key request $packet
::
++ encode-keys-packet
~/ %encode-keys-packet
|= [sndr=ship rcvr=ship sndr-life=life]
^- shot
:* [sndr rcvr]
&
&
(mod sndr-life 16)
`@`1
origin=~
content=`@`%keys
==
::
++ response-size 13 :: 1kb
:: +sift-roar: assemble scry response fragments into full message
@ -460,7 +474,7 @@
$: peers=(map ship ship-state-5)
=unix=duct
=life
crypto-core=acru:ames
crypto-core=acru-6
=bug
==
::
@ -470,12 +484,6 @@
[%known peer-state-5]
==
::
+$ alien-agenda-6
$: messages=(list [=duct =plea])
packets=(set =blob)
heeds=(set duct)
==
::
+$ peer-state-5
$: $: =symmetric-key
=life
@ -496,6 +504,12 @@
[%known peer-state-6]
==
::
+$ alien-agenda-6
$: messages=(list [=duct =plea])
packets=(set =blob)
heeds=(set duct)
==
::
+$ peer-state-6
$: $: =symmetric-key
=life
@ -511,11 +525,36 @@
nax=(set [=bone =message-num])
heeds=(set duct)
==
::
++ acru-6 $_ ^?
|%
++ as ^?
|% ++ seal |~([a=pass b=@] *@)
++ sign |~(a=@ *@)
++ sure |~(a=@ *(unit @))
++ tear |~([a=pass b=@] *(unit @))
--
++ de |~([a=@ b=@] *(unit @))
++ dy |~([a=@ b=@] *@)
++ en |~([a=@ b=@] *@)
++ ex ^?
|% ++ fig *@uvH
++ pac *@uvG
++ pub *pass
++ sec *ring
--
++ nu ^?
|% ++ pit |~([a=@ b=@] ^?(..nu))
++ nol |~(a=ring ^?(..nu))
++ com |~(a=pass ^?(..nu))
--
--
::
+$ ames-state-6
$: peers=(map ship ship-state-6)
=unix=duct
=life
crypto-core=acru:ames
crypto-core=acru-6
=bug
==
::
@ -714,7 +753,7 @@
::
=< =* adult-gate .
=| queued-events=(qeu queued-event)
=| cached-state=(unit [%6 ames-state-6])
=| cached-state=(unit $%([%5 ames-state-5] [%6 ames-state-6]))
::
|= [now=@da eny=@ rof=roof]
=* larval-gate .
@ -739,7 +778,8 @@
~|(%ames-larval-call-dud (mean tang.u.dud))
::
?: &(?=(^ cached-state) ?=(~ queued-events))
(molt ~)
=^ moves adult-gate (call:adult-core duct dud task)
(molt moves)
:: %born: set .unix-duct and start draining .queued-events
::
?: ?=(%born -.task)
@ -869,23 +909,35 @@
[%4 %larva *]
=. state.old (state-4-to-5:load:adult-core state.old)
$(-.old %5)
::
:: [%5 %larva *]
:: ~> %slog.0^leaf/"ames: larva: load"
:: =. queued-events events.old
:: larval-gate
:: ::
:: [%5 %adult *]
:: ~> %slog.1^leaf/"ames: larva reload"
:: =. adult-gate (load:adult-core %5 state.old)
:: larval-gate
::
[%5 %adult *]
=. cached-state `[%5 state.old]
~> %slog.0^leaf/"ames: larva reload"
larval-gate
::
[%5 %larva *]
~> %slog.0^leaf/"ames: larva: load"
=. queued-events events.old
larval-gate
::
[%5 %adult *]
~> %slog.1^leaf/"ames: larva reload"
=. adult-gate (load:adult-core %5 state.old)
[%6 %adult *]
=. cached-state `[%6 state.old]
~> %slog.0^leaf/"ames: larva reload"
larval-gate
::
[%6 %adult *] (load:adult-core %6 state.old)
::
[%6 %larva *]
~> %slog.1^leaf/"ames: larva: load"
~> %slog.0^leaf/"ames: larva: load"
=. queued-events events.old
=. adult-gate (load:adult-core %6 state.old)
larval-gate
::
[%7 %adult *] (load:adult-core %7 state.old)
@ -901,12 +953,16 @@
++ molt
|= moves=(list move)
^- (quip move _adult-gate)
~& [%hmm now]
=. ames-state.adult-gate
?> ?=(^ cached-state)
=? u.cached-state ?=(%5 -.u.cached-state)
[%6 (state-5-to-6:load:adult-core +.u.cached-state)]
?> ?=(%6 -.u.cached-state)
(state-6-to-7:load:adult-core +.u.cached-state)
=. cached-state ~
~> %slog.0^leaf/"ames: metamorphosis reload"
[~ adult-gate]
[moves adult-gate]
--
:: adult ames, after metamorphosis from larva
::
@ -986,25 +1042,16 @@
:: +load: load in old state after reload
::
++ load
|^
|= $= old-state
$% [%4 ames-state-4]
[%5 ames-state-5]
[%6 ames-state-6]
[%7 ^ames-state]
==
^+ ames-gate
=? old-state ?=(%4 -.old-state) %5^(state-4-to-5 +.old-state)
=? old-state ?=(%5 -.old-state) %6^(state-5-to-6 +.old-state)
=? old-state ?=(%6 -.old-state) %7^(state-6-to-7 +.old-state)
::
?> ?=(%7 -.old-state)
ames-gate(ames-state +.old-state)
|^ |= old-state=[%7 ^ames-state]
^+ ames-gate
?> ?=(%7 -.old-state)
ames-gate(ames-state +.old-state)
::
++ state-6-to-7
|= old=ames-state-6
^- ^ames-state
=+ !< =rift
~& :- %uhh `beam`[[our %rift %da now] /(scot %p our)]
q:(need (need (rof ~ %j `beam`[[our %rift %da now] /(scot %p our)])))
:* peers=(~(run by peers.old) ship-state-6-to-7)
unix-duct.old
@ -1442,6 +1489,8 @@
~/ %on-hear-packet
|= [=lane =shot dud=(unit goof)]
^+ event-core
%- %^ trace odd.veb sndr.shot
|.("received packet")
::
?: =(our sndr.shot)
event-core
@ -1451,6 +1500,8 @@
?. =(our rcvr.shot)
on-hear-forward
::
?: =(%keys content.shot)
on-hear-keys
?: ?& ?=(%pawn (clan:title sndr.shot))
!?=([~ %known *] (~(get by peers.ames-state) sndr.shot))
==
@ -1480,12 +1531,24 @@
::
=/ =blob (etch-shot shot)
(send-blob & rcvr.shot blob)
:: +on-hear-keys: handle receipt of attestation request
::
++ on-hear-keys
~/ %on-hear-keys
|= [=lane =shot dud=(unit goof)]
=+ %^ trace msg.veb sndr.shot
|.("requested attestation")
?. =(%pawn (clan:title our))
event-core
(send-blob | sndr.shot (attestation-packet sndr.shot 1))
:: +on-hear-open: handle receipt of plaintext comet self-attestation
::
++ on-hear-open
~/ %on-hear-open
|= [=lane =shot dud=(unit goof)]
^+ event-core
=+ %^ trace msg.veb sndr.shot
|.("got attestation")
:: assert the comet can't pretend to be a moon or other address
::
?> ?=(%pawn (clan:title sndr.shot))
@ -1504,10 +1567,12 @@
::
=. event-core
=/ crypto-suite=@ud 1
=/ keys
(my [sndr-life.open-packet crypto-suite public-key.open-packet]~)
=/ =point
:* ^= rift 0
^= life sndr-life.open-packet
^= keys (my [sndr-life.open-packet crypto-suite public-key.open-packet]~)
^= keys keys
^= sponsor `(^sein:title sndr.shot)
==
(on-publ / [%full (my [sndr.shot point]~)])
@ -1526,9 +1591,10 @@
|= [=lane =shot dud=(unit goof)]
^+ event-core
=/ sndr-state (~(get by peers.ames-state) sndr.shot)
:: If we don't know them, ask Jael for their keys. On comets, this will
:: also cause us to send a self-attestation to the sender. The packet
:: itself is dropped; we can assume it will be resent.
:: If we don't know them, ask Jael for their keys. If they're a
:: comet, this will also cause us to request a self-attestation
:: from the sender. The packet itself is dropped; we can assume it
:: will be resent.
::
?. ?=([~ %known *] sndr-state)
(enqueue-alien-todo sndr.shot |=(alien-agenda +<))
@ -2007,19 +2073,13 @@
(emit duct %pass /public-keys %j %public-keys [n=ship ~ ~])
:: +request-attestation: helper to request attestation from comet
::
:: Comets will respond to any unknown peer with a self-attestation,
:: so we either send a sendkeys packet (a dummy shut packet) or, if
:: we're a comet, our own self-attestation, saving a roundtrip.
::
:: Also sets a timer to resend the request every 30s.
::
++ request-attestation
|= =ship
^+ event-core
=/ packet ?. =(%pawn (clan:title our))
(sendkeys-packet ship)
(attestation-packet ship 1)
=. event-core (send-blob | ship packet)
=+ (trace msg.veb ship |.("requesting attestion"))
=. event-core (send-blob | ship (sendkeys-packet ship))
=/ =wire /alien/(scot %p ship)
(emit duct %pass wire %b %wait (add now ~s30))
:: +send-blob: fire packet at .ship and maybe sponsors
@ -2116,14 +2176,7 @@
^- blob
?> ?=(%pawn (clan:title her))
%- etch-shot
%- etch-shut-packet
:* ^= shut-packet *shut-packet
^= symmetric-key *symmetric-key
^= sndr our
^= rcvr her
^= sndr-life 0
^= rcvr-life 0
==
(encode-keys-packet our her life.ames-state)
:: +get-peer-state: lookup .her state or ~
::
++ get-peer-state
@ -3097,7 +3150,7 @@
:: TODO no longer true
::NOTE we only send requests to ships we know,
:: so we should only get responses from ships we know.
:: below we assume sndr.packet is a known peer.
:: below we assume sndr.shot is a known peer.
=* from sndr.shot
=/ peer-core (need (pe-abed:fine-peer from))
pe-abet:(pe-hear:peer-core lane shot)

View File

@ -267,6 +267,7 @@
cez=(map @ta crew) :: permission groups
pud=(unit [=desk =yoki]) :: pending update
sad=(map ship @da) :: scry known broken
bug=[veb=@ mas=@] :: verbosity
== ::
::
:: Unvalidated response to a request.
@ -586,6 +587,7 @@
+$ args
$: files=(map path (each page lobe))
file-store=(map lobe page)
verb=@
cache=flow
flue
==
@ -613,7 +615,7 @@
%+ gain-leak vale+path
|= nob=state
=. nub nob
::~> %slog.0^leaf/"ford: read file {(spud path)}"
%- (trace 1 |.("read file {(spud path)}"))
=/ file
~| %file-not-found^path
(~(got by files) path)
@ -637,7 +639,7 @@
?: (~(has in cycle.nub) nave+mak)
~|(cycle+nave+mak^cycle.nub !!)
=. cycle.nub (~(put in cycle.nub) nave+mak)
:: ~> %slog.0^leaf/"ford: make mark {<mak>}"
%- (trace 1 |.("make mark {<mak>}"))
=^ cor=vase nub (build-fit %mar mak)
=/ gad=vase (slap cor limb/%grad)
?@ q.gad
@ -714,7 +716,7 @@
%+ gain-leak dais+mak
|= nob=state
=. nub nob
::~> %slog.0^leaf/"ford: make dais {<mak>}"
%- (trace 1 |.("make dais {<mak>}"))
:_ nub :- %dais
^- dais
=> [nav=nav ..zuse]
@ -763,7 +765,7 @@
:_(nub [%vase =>(..zuse !>(|=(m=mime q.q.m)))])
:: try +grow; is there a +grow core with a .b arm?
::
:: ~> %slog.0^leaf/"ford: make cast {<a>} -> {<b>}"
%- (trace 1 |.("make cast {<a>} -> {<b>}"))
=^ old=vase nub (build-fit %mar a)
?: =/ ram (mule |.((slap old !,(*hoon grow))))
?: ?=(%| -.ram) %.n
@ -831,7 +833,7 @@
%+ gain-leak tube+a^b
|= nob=state
=. nub nob
:: ~> %slog.0^leaf/"ford: make tube {<a>} -> {<b>}"
%- (trace 1 |.("make tube {<a>} -> {<b>}"))
:_(nub [%tube =>([gat=gat ..zuse] |=(v=vase (slam gat v)))])
::
++ validate-page
@ -899,7 +901,7 @@
%- soak-vase
%+ gain-sprig file+path |.
=. stack.nub [~ stack.nub]
:: ~> %slog.0^leaf/"ford: make file {(spud path)}"
%- (trace 1 |.("make file {(spud path)}"))
?: (~(has in cycle.nub) file+path)
~|(cycle+file+path^cycle.nub !!)
=. cycle.nub (~(put in cycle.nub) file+path)
@ -973,8 +975,14 @@
%- mean %- flop
=/ lyn p.hair
=/ col q.hair
^- (list tank)
:~ leaf+"syntax error at [{<lyn>} {<col>}] in {<pax>}"
leaf+(trip (snag (dec lyn) (to-wain:format (crip tex))))
::
=/ =wain (to-wain:format (crip tex))
?: (gth lyn (lent wain))
'<<end of file>>'
(snag (dec lyn) wain)
::
leaf+(runt [(dec col) '-'] "^")
==
::
@ -1157,6 +1165,10 @@
$(p +.p, pax +.pax)
== ==
::
++ trace
|= [pri=@ print=(trap tape)]
(^trace verb pri print)
::
++ mist-to-pour
|= =mist
^- pour
@ -1207,14 +1219,14 @@
=/ spilt (~(has in spill.nub) leak)
=^ =soak nub
?^ got=(~(get by cache.nub) leak)
:: %- =/ refs ?:(spilt 0 1)
:: =/ tape-1 "ford: cache {<pour.leak>}: adding {<refs>}"
:: =/ tape-2 ", giving {<(add refs refs.u.got)>}"
:: (slog leaf+(welp tape-1 tape-2) ~)
%- %+ trace 3 |.
=/ refs ?:(spilt 0 1)
%+ welp "cache {<pour.leak>}: adding {<refs>}, "
"giving {<(add refs refs.u.got)>}"
=? cache.nub !spilt
(~(put by cache.nub) leak [+(refs.u.got) soak.u.got])
[soak.u.got nub]
:: %- (slog leaf+"ford: cache {<pour.leak>}: creating" ~)
%- (trace 2 |.("cache {<pour.leak>}: creating"))
=^ =soak nub (next nub)
=. cache.nub (~(put by cache.nub) leak [1 soak])
:: If we're creating a cache entry, add refs to our dependencies
@ -1224,14 +1236,14 @@
?~ deps
[soak nub]
=/ got (~(got by cache.nub) i.deps)
:: %- =/ tape-1 "ford: cache {<pour.leak>} for {<pour.i.deps>}"
:: =/ tape-2 ": bumping to ref {<refs.got>}"
:: (slog leaf+(welp tape-1 tape-2) ~)
%- %+ trace 3 |.
%+ welp "cache {<pour.leak>} for {<pour.i.deps>}"
": bumping to ref {<refs.got>}"
=. cache.nub (~(put by cache.nub) i.deps got(refs +(refs.got)))
$(deps t.deps)
?: spilt
[soak nub]
:: %- (slog leaf+"ford: spilt: {<spilt>}" ~)
%- (trace 3 |.("spilt {<mist>}"))
=: spill.nub (~(put in spill.nub) leak)
sprig.nub (~(put by sprig.nub) mist leak soak)
==
@ -1239,19 +1251,18 @@
--
::
++ lose-leak
|= [fad=flow =leak]
|= [verb=@ fad=flow =leak]
^- flow
?~ got=(~(get by fad) leak)
:: %- (slog leaf+"ford: lose missing leak {<leak>}" ~)
%- (trace verb 0 |.("lose missing leak {<leak>}"))
fad
?: (lth 1 refs.u.got)
:: =/ tape "ford: cache {<pour.leak>}: decrementing from {<refs.u.got>}"
:: %- (slog leaf+tape ~)
%- (trace verb 3 |.("cache {<pour.leak>}: decrementing from {<refs.u.got>}"))
=. fad (~(put by fad) leak u.got(refs (dec refs.u.got)))
fad
=+ ?. =(0 refs.u.got) ~
~ :: ((slog leaf+"ford: lose zero leak {<leak>}" ~) ~)
:: %- (slog leaf+"ford: cache {<pour.leak>}: freeing" ~)
((trace verb 0 |.("lose zero leak {<leak>}")) ~)
%- (trace verb 2 |.("cache {<pour.leak>}: freeing"))
=. fad (~(del by fad) leak)
=/ leaks ~(tap in deps.leak)
|- ^- flow
@ -1261,13 +1272,19 @@
$(leaks t.leaks)
::
++ lose-leaks
|= [fad=flow leaks=(set leak)]
|= [verb=@ fad=flow leaks=(set leak)]
^- flow
=/ leaks ~(tap in leaks)
|-
?~ leaks
fad
$(fad (lose-leak fad i.leaks), leaks t.leaks)
$(fad (lose-leak verb fad i.leaks), leaks t.leaks)
::
++ trace
|= [verb=@ pri=@ print=(trap tape)]
?: (lth verb pri)
same
(slog leaf+"ford: {(print)}" ~)
--
::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
:: section 4cA, filesystem logic
@ -1477,8 +1494,8 @@
++ tako-ford
|= tak=tako
%- ford:fusion
=/ files (~(run by q:(tako-to-yaki:ze tak)) |=(=lobe |+lobe))
[files lat.ran fad ?:(=(tak (aeon-to-tako:ze let.dom)) fod.dom [~ ~])]
:- (~(run by q:(tako-to-yaki:ze tak)) |=(=lobe |+lobe))
[lat.ran veb.bug fad ?:(=(tak (aeon-to-tako:ze let.dom)) fod.dom [~ ~])]
:: Produce ford cache appropriate for the aeon
::
++ tako-flow
@ -1819,7 +1836,8 @@
=. fod.dom
?: updated [~ ~]
(promote-ford fod.dom invalid)
=. fad (lose-leaks:fusion fad (~(dif in spill.old-fod) spill.fod.dom))
=. fad
(lose-leaks:fusion veb.bug fad (~(dif in spill.old-fod) spill.fod.dom))
=? changes updated (changes-for-upgrade q.old-yaki deletes changes)
::
=/ files
@ -1828,7 +1846,7 @@
%- ~(dif by (~(uni by original) changes))
%- ~(gas by *(map path (each page lobe)))
(turn ~(tap in deletes) |=(=path [path |+*lobe]))
=/ =args:ford:fusion [files lat.ran fad fod.dom]
=/ =args:ford:fusion [files lat.ran veb.bug fad fod.dom]
::
=^ change-cages args (checkout-changes args changes)
=/ sane-continuation (sane-changes changes change-cages)
@ -2121,7 +2139,7 @@
=/ original=(map path (each page lobe))
(~(run by q.yaki) |=(=lobe |+lobe))
(~(uni by original) changes)
=/ =args:ford:fusion [all-changes lat.ran ~ ~ ~]
=/ =args:ford:fusion [all-changes lat.ran veb.bug ~ ~ ~]
=^ all-change-cages args (checkout-changes args all-changes)
=/ ccs=(list [=path =lobe =cage]) ~(tap by change-cages)
|- ^+ *sane-changes
@ -2955,7 +2973,7 @@
=/ =yaki (~(got by hut.ran) (~(got by hit.dom) u.yon))
=/ files (~(run by q.yaki) |=(=lobe |+lobe))
=/ =args:ford:fusion
[files lat.ran fad ?:(=(yon let.dom) fod.dom [~ ~])]
[files lat.ran veb.bug fad ?:(=(yon let.dom) fod.dom [~ ~])]
=^ mim args
(checkout-mime args ~ ~(key by files))
=. mim.dom (apply-changes-to-mim mim.dom mim)
@ -4322,7 +4340,7 @@
::
::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
=| :: instrument state
$: ver=%11 :: vane version
$: ver=%13 :: vane version
ruf=raft :: revision tree
== ::
|= [now=@da eny=@uvJ rof=roof] :: current invocation
@ -4511,6 +4529,12 @@
=/ den ((de now rof hen ruf) our des.req)
abet:(perm:den pax.req rit.req)
[mos ..^$]
::
%stir
?+ arg.req ~|(%strange-stir !!)
[%verb @] [~ ..^$(veb.bug.ruf +.arg.req)]
[%mass @] [~ ..^$(mas.bug.ruf +.arg.req)]
==
::
%tomb (tomb-clue:tomb hen clue.req)
%trim
@ -4599,14 +4623,37 @@
++ load
=> |%
+$ raft-any
$% [%11 raft-11]
$% [%13 raft-13]
[%12 *] ::raft-12] ::TODO see below
[%11 raft-11]
[%10 raft-10]
[%9 raft-9]
[%8 raft-8]
[%7 raft-7]
[%6 raft-6]
==
+$ raft-11 raft
+$ raft-13 raft
+$ raft-12
$: rom=room
hoy=(map ship rung)
ran=rang
fad=flow
mon=(map term beam)
hez=(unit duct)
cez=(map @ta crew)
pud=(unit [=desk =yoki])
bug=[veb=@ mas=@]
==
+$ raft-11
$: rom=room
hoy=(map ship rung)
ran=rang
fad=flow
mon=(map term beam)
hez=(unit duct)
cez=(map @ta crew)
pud=(unit [=desk =yoki])
==
+$ raft-10
$: rom=room-10
hoy=(map ship rung-10)
@ -4824,7 +4871,11 @@
=? old ?=(%8 -.old) 9+(raft-8-to-9 +.old)
=? old ?=(%9 -.old) 10+(raft-9-to-10 +.old)
=? old ?=(%10 -.old) 11+(raft-10-to-11 +.old)
?> ?=(%11 -.old)
=? old ?=(%11 -.old) 12+(raft-11-to-12 +.old)
::TODO ;; because $case changed, but that shouldn't matter?
:: figure out a better way to do this.
=? old ?=(%12 -.old) 13+(raft-12-to-13 ;;(raft-12 +.old))
?> ?=(%13 -.old)
..^^$(ruf +.old)
:: +raft-6-to-7: delete stale ford caches (they could all be invalid)
::
@ -4888,6 +4939,7 @@
rede-8(dom [ank.dom let.dom hit.dom lab.dom mim.dom *flow])
==
:: +raft-9-to-10: add .dist-upgraded
::
++ raft-9-to-10
|= raf=raft-9
^- raft-10
@ -4925,8 +4977,7 @@
~
*norm
mim.dom.dojo-10
~
~
[~ ~]
==
==
::
@ -4946,8 +4997,7 @@
~
*norm
mim.dom.rede-10
~
~
[~ ~]
==
::
ref
@ -4987,10 +5037,11 @@
==
::
|3
^+ |3:*raft-11
:- *flow
%= |3.raf
mon (~(run by mon.raf) |=(=beam beam(r ud+0)))
|3 [pud.raf ~]
|3 pud.raf
==
==
::
@ -5038,6 +5089,18 @@
^- (map [=care =path] cach)
(~(run by caches-10) cach-10-to-cach)
--
:: +raft-11-to-12: add bug
::
++ raft-11-to-12
|= raf=raft-11
^- raft-12
raf(pud [pud.raf [0 0]])
:: +raft-12-to-13: add bug
::
++ raft-12-to-13
|= raf=raft-12
^- raft-13
raf(bug [~ bug.raf])
--
::
++ scry :: inspect
@ -5463,7 +5526,7 @@
::
++ whey
^- (list mass)
?: & :: lac
?: (gth mas.bug.ruf 0)
=/ domestic
%+ turn (sort ~(tap by dos.rom.ruf) aor)
|= [=desk =dojo]
@ -5474,7 +5537,10 @@
==
:~ :+ %object-store %|
:~ commits+&+hut.ran.ruf
pages+&+lat.ran.ruf
:+ %pages %|
%+ turn ~(tap by lat.ran.ruf)
|= [=lobe =page]
[(scot %uv lobe) %& page]
==
domestic+|+domestic
foreign+&+hoy.ruf
@ -5490,10 +5556,7 @@
==
:~ :+ %object-store %|
:~ commits+&+hut.ran.ruf
:+ %pages %|
%+ turn ~(tap by lat.ran.ruf)
|= [=lobe =page]
[(scot %uv lobe) %& page]
pages+&+lat.ran.ruf
==
domestic+|+domestic
foreign+&+hoy.ruf

View File

@ -251,7 +251,7 @@
;meta(charset "utf-8");
;meta(name "viewport", content "width=device-width, initial-scale=1, shrink-to-fit=no");
;link(rel "icon", type "image/svg+xml", href (weld "data:image/svg+xml;utf8," favicon));
;title:"OS1"
;title:"Urbit"
;style:'''
@import url("https://rsms.me/inter/inter.css");
@font-face {

View File

@ -9,7 +9,8 @@
lib=naive-transactions,
ethereum,
dice
/* logs %eth-logs /app/azimuth/logs/eth-logs
:: /* logs %eth-logs /app/azimuth/logs/eth-logs
=/ logs ~
=, strand=strand:spider
::
=> |% +$ card card:agent:gall

View File

@ -25,7 +25,7 @@
|= if=@if
=/ m (strand ,~)
^- form:m
=/ collector-app `dock`[~zod %dns-collector]
=/ collector-app `dock`[~deg %dns-collector]
;< good=? bind:m (self-check-http:libdns |+if 2)
?. good
%+ strand-fail:strandio %bail-early-self-check

View File

@ -443,24 +443,25 @@
:: channel is now established; comet also emitted a duplicate
:: self-attestation, which we ignore
::
=^ moves3 comet2 (call comet2 ~[//unix] %hear (snag-packet 1 moves2))
=^ moves4 comet2 (take comet2 /bone/(scot %p our-comet)/0/1 ~[//unix] %g %done ~)
=^ moves5 comet (call comet ~[//unix] %hear (snag-packet 0 moves4))
=^ moves3 comet2 (call comet2 ~[//unix] %hear (snag-packet 0 moves2))
=^ moves4 comet2 (call comet2 ~[//unix] %hear (snag-packet 1 moves2))
=^ moves5 comet2 (take comet2 /bone/(scot %p our-comet)/0/1 ~[//unix] %g %done ~)
=^ moves6 comet2 (take comet2 /bone/(scot %p our-comet)/0/1 ~[//unix] %g %boon [%post 'first1!!'])
=^ moves7 comet (call comet ~[//unix] %hear (snag-packet 0 moves6))
=^ moves7 comet (call comet ~[//unix] %hear (snag-packet 0 moves5))
=^ moves8 comet (call comet ~[//unix] %hear (snag-packet 0 moves6))
::
;: weld
%+ expect-eq
!> [~[//unix] %pass /qos %d %flog %text "; {<our-comet>} is your neighbor"]
!> (snag 0 `(list move:ames)`moves3)
!> (snag 0 `(list move:ames)`moves4)
::
%+ expect-eq
!> [~[//unix] %pass /qos %d %flog %text "; {<our-comet2>} is your neighbor"]
!> (snag 0 `(list move:ames)`moves5)
!> (snag 0 `(list move:ames)`moves7)
::
%+ expect-eq
!> [~[/g/talk] %give %boon [%post 'first1!!']]
!> (snag 0 `(list move:ames)`moves7)
!> (snag 0 `(list move:ames)`moves8)
==
::
++ test-nack ^- tang

View File

@ -99,6 +99,7 @@
%: ford:fusion
files=(my [/lib/self/hoon &+hoon+source]~)
file-store=~
0
*flow:fusion
*flue:fusion
==
@ -113,6 +114,7 @@
%: ford:fusion
files=(my [/mar/mime/hoon &+hoon+mar-mime]~)
file-store=~
0
*flow:fusion
*flue:fusion
==
@ -139,6 +141,7 @@
[/mar/txt-diff/hoon &+hoon+mar-txt-diff]
==
file-store=~
0
*flow:fusion
*flue:fusion
==
@ -168,6 +171,7 @@
%: ford:fusion
files
file-store=~
0
*flow:fusion
*flue:fusion
==
@ -186,6 +190,7 @@
%: ford:fusion
files
file-store=~
0
*flow:fusion
*flue:fusion
==
@ -205,6 +210,7 @@
%: ford:fusion
files
file-store=~
0
*flow:fusion
*flue:fusion
==
@ -218,6 +224,7 @@
%: ford:fusion
files=(my [/gen/hello/hoon &+hoon+gen-hello]~)
file-store=~
0
*flow:fusion
*flue:fusion
==
@ -243,6 +250,7 @@
[/sur/spider/hoon &+hoon+sur-spider]
==
file-store=~
0
*flow:fusion
*flue:fusion
==

21
pkg/base-dev/mar/css.hoon Normal file
View File

@ -0,0 +1,21 @@
::
:::: /hoon/css/mar
::
/? 310
=, eyre
=, mimes:html
|_ mud=@t
++ grow :: convert to
|% ++ mime [/text/css (as-octs mud)] :: convert to %mime
++ elem ;style :: convert to %hymn
;- (trip mud)
==
++ hymn ;html:(head:"{elem}" body)
--
++ grab
|% :: convert from
++ mime |=([p=mite q=octs] (@t q.q))
++ noun @t :: clam from %noun
--
++ grad %mime
--

View File

@ -180,9 +180,8 @@
=rein
==
^- [jolt=(list dude) idle=(list dude)]
=/ all=(list dude) (read-bill local)
=/ want (get-apps-want local all rein)
=/ have (get-apps-live local)
=/ want (get-apps-want local have rein)
[want (skip have ~(has in (sy want)))]
::
++ get-remote-diff

File diff suppressed because it is too large Load Diff

View File

@ -1,7 +1,7 @@
:~ title+'System'
info+'An app launcher for Urbit.'
color+0xee.5432
glob-http+['https://bootstrap.urbit.org/glob-0v2.p3f6i.19q8d.lsgcb.mckg7.dtu8f.glob' 0v2.p3f6i.19q8d.lsgcb.mckg7.dtu8f]
glob-http+['https://bootstrap.urbit.org/glob-0v5.kgrq2.gp725.bo5bk.dmr7d.h41qk.glob' 0v5.kgrq2.gp725.bo5bk.dmr7d.h41qk]
::glob-ames+~zod^0v0
base+'grid'
version+[1 1 3]

1
pkg/garden/mar/css.hoon Symbolic link
View File

@ -0,0 +1 @@
../../base-dev/mar/css.hoon

View File

@ -18,7 +18,7 @@
"@radix-ui/react-toggle": "^0.0.10",
"@tlon/sigil-js": "^1.4.4",
"@types/lodash": "^4.14.172",
"@urbit/api": "^2.1.0",
"@urbit/api": "^2.1.1",
"@urbit/http-api": "^2.1.0",
"big-integer": "^1.6.48",
"classnames": "^2.3.1",
@ -1468,9 +1468,9 @@
}
},
"node_modules/@urbit/api": {
"version": "2.1.0",
"resolved": "https://registry.npmjs.org/@urbit/api/-/api-2.1.0.tgz",
"integrity": "sha512-2njwpvZ2s23gp5C4v9uDynU1wxrN33+vloqpvlErUhMR/4R6mXE4cMvsZzj/fCGOa0J3LRV/If7XmgG+kY69Tg==",
"version": "2.1.1",
"resolved": "https://registry.npmjs.org/@urbit/api/-/api-2.1.1.tgz",
"integrity": "sha512-QRlqhtJ73q+pgMdSwuOO62HlxA7/2c5ylCcOUT01LXkJ2LTVCl5u+QnejdDvUmqjOuN2PyZk7df30xJVg6rC2A==",
"dependencies": {
"@babel/runtime": "^7.16.0",
"big-integer": "^1.6.48",
@ -8257,9 +8257,9 @@
}
},
"@urbit/api": {
"version": "2.1.0",
"resolved": "https://registry.npmjs.org/@urbit/api/-/api-2.1.0.tgz",
"integrity": "sha512-2njwpvZ2s23gp5C4v9uDynU1wxrN33+vloqpvlErUhMR/4R6mXE4cMvsZzj/fCGOa0J3LRV/If7XmgG+kY69Tg==",
"version": "2.1.1",
"resolved": "https://registry.npmjs.org/@urbit/api/-/api-2.1.1.tgz",
"integrity": "sha512-QRlqhtJ73q+pgMdSwuOO62HlxA7/2c5ylCcOUT01LXkJ2LTVCl5u+QnejdDvUmqjOuN2PyZk7df30xJVg6rC2A==",
"requires": {
"@babel/runtime": "^7.16.0",
"big-integer": "^1.6.48",

View File

@ -15,8 +15,8 @@
"tsc": "tsc --noEmit"
},
"dependencies": {
"@radix-ui/react-checkbox": "^0.1.5",
"@fingerprintjs/fingerprintjs": "^3.3.3",
"@radix-ui/react-checkbox": "^0.1.5",
"@radix-ui/react-dialog": "^0.0.20",
"@radix-ui/react-dropdown-menu": "^0.0.23",
"@radix-ui/react-icons": "^1.1.0",
@ -25,7 +25,7 @@
"@radix-ui/react-toggle": "^0.0.10",
"@tlon/sigil-js": "^1.4.4",
"@types/lodash": "^4.14.172",
"@urbit/api": "^2.1.0",
"@urbit/api": "^2.1.1",
"@urbit/http-api": "^2.1.0",
"big-integer": "^1.6.48",
"classnames": "^2.3.1",

View File

@ -72,7 +72,6 @@ export const Leap = React.forwardRef(
useEffect(() => {
const newMatch = getMatch(rawInput);
if (newMatch && rawInput) {
useLeapStore.setState({ selectedMatch: newMatch });
}
@ -112,7 +111,10 @@ export const Leap = React.forwardRef(
const navigateByInput = useCallback(
(input: string) => {
const normalizedValue = input.trim().replace(/(~?[\w^_-]{3,13})\//, '$1/apps/');
const normalizedValue = input
.trim()
.replace('%', '')
.replace(/(~?[\w^_-]{3,13})\//, '$1/apps/$1/');
push(`/leap/${menu}/${normalizedValue}`);
},
[menu]

View File

@ -7,6 +7,7 @@ module.exports = {
// Because we use styled system, and use
// the convention of each prop on a new line
// we probably shouldn't keep this on
'max-lines-per-function': ['off', {}]
'max-lines-per-function': ['off', {}],
'max-lines': ['off', {}]
}
};

View File

@ -1 +1 @@
16.14.0
14.19.0

View File

@ -0,0 +1 @@
nodejs 14.19.0

View File

@ -1,66 +1,71 @@
const path = require('path');
const HtmlWebpackPlugin = require('html-webpack-plugin');
const { CleanWebpackPlugin } = require('clean-webpack-plugin');
const MomentLocalesPlugin = require('moment-locales-webpack-plugin');
const webpack = require('webpack');
const { execSync } = require('child_process');
const path = require("path");
const HtmlWebpackPlugin = require("html-webpack-plugin");
const { CleanWebpackPlugin } = require("clean-webpack-plugin");
const MomentLocalesPlugin = require("moment-locales-webpack-plugin");
const webpack = require("webpack");
const { execSync } = require("child_process");
const GIT_DESC = execSync('git describe --always', { encoding: 'utf8' }).trim();
const GIT_DESC = execSync("git describe --always", { encoding: "utf8" }).trim();
module.exports = {
mode: 'production',
mode: "production",
entry: {
app: './src/index.tsx',
serviceworker: './src/serviceworker.js'
app: "./src/index.tsx",
serviceworker: "./src/serviceworker.js",
},
module: {
rules: [
{
test: /\.(j|t)sx?$/,
use: {
loader: 'babel-loader',
loader: "babel-loader",
options: {
presets: ['@babel/preset-env', '@babel/typescript', '@babel/preset-react'],
presets: [
"@babel/preset-env",
"@babel/typescript",
"@babel/preset-react",
],
plugins: [
'lodash',
'@babel/transform-runtime',
'@babel/plugin-proposal-object-rest-spread',
'@babel/plugin-proposal-optional-chaining',
'@babel/plugin-proposal-class-properties'
]
}
"lodash",
"@babel/transform-runtime",
"@babel/plugin-proposal-object-rest-spread",
"@babel/plugin-proposal-optional-chaining",
"@babel/plugin-proposal-class-properties",
],
},
},
exclude: /node_modules\/(?!(@tlon\/indigo-dark|@tlon\/indigo-light|@tlon\/indigo-react|@urbit\/api)\/).*/
exclude:
/node_modules\/(?!(@tlon\/indigo-dark|@tlon\/indigo-light|@tlon\/indigo-react|@urbit\/api)\/).*/,
},
{
test: /\.css$/i,
test: /\.css$/i,
use: [
// Creates `style` nodes from JS strings
'style-loader',
"style-loader",
// Translates CSS into CommonJS
'css-loader',
"css-loader",
// Compiles Sass to CSS
'sass-loader'
]
"sass-loader",
],
},
{
test: /\.(woff(2)?|ttf|eot|svg)(\?v=\d+\.\d+\.\d+)?$/,
use: [
{
loader: 'file-loader',
loader: "file-loader",
options: {
name: '[name].[ext]',
outputPath: 'fonts/'
}
}
]
}
]
name: "[name].[ext]",
outputPath: "fonts/",
},
},
],
},
],
},
resolve: {
extensions: ['.js', '.ts', '.tsx']
extensions: [".js", ".ts", ".tsx"],
},
devtool: 'source-map',
devtool: "source-map",
// devServer: {
// contentBase: path.join(__dirname, './'),
// hot: true,
@ -71,26 +76,30 @@ module.exports = {
new MomentLocalesPlugin(),
new CleanWebpackPlugin(),
new webpack.DefinePlugin({
'process.env.LANDSCAPE_STREAM': JSON.stringify(process.env.LANDSCAPE_STREAM),
'process.env.LANDSCAPE_SHORTHASH': JSON.stringify(GIT_DESC),
'process.env.LANDSCAPE_STORAGE_VERSION': Date.now().toString(),
'process.env.LANDSCAPE_LAST_WIPE': '2021-10-20',
"process.env.LANDSCAPE_STREAM": JSON.stringify(
process.env.LANDSCAPE_STREAM
),
"process.env.LANDSCAPE_SHORTHASH": JSON.stringify(GIT_DESC),
"process.env.LANDSCAPE_STORAGE_VERSION": Date.now().toString(),
"process.env.LANDSCAPE_LAST_WIPE": "2021-10-20",
}),
new HtmlWebpackPlugin({
title: 'Groups',
template: './public/index.html',
favicon: './src/assets/img/Favicon.png'
})
title: "Groups",
template: "./public/index.html",
favicon: "./src/assets/img/favicon.png",
}),
],
output: {
filename: (pathData) => {
return pathData.chunk.name === 'app' ? 'index.[contenthash].js' : '[name].js';
return pathData.chunk.name === "app"
? "index.[contenthash].js"
: "[name].js";
},
path: path.resolve(__dirname, '../dist'),
publicPath: '/apps/landscape/'
path: path.resolve(__dirname, "../dist"),
publicPath: "/apps/landscape/",
},
optimization: {
minimize: true,
usedExports: true
}
usedExports: true,
},
};

File diff suppressed because it is too large Load Diff

View File

@ -5,21 +5,18 @@
"main": "index.js",
"private": true,
"engines": {
"node": "16.14.0"
"node": "14.19.0"
},
"dependencies": {
"@babel/runtime": "^7.12.5",
"@fingerprintjs/fingerprintjs": "^3.3.3",
"@radix-ui/react-dialog": "^0.1.0",
"@reach/disclosure": "^0.10.5",
"@reach/menu-button": "^0.10.5",
"@reach/tabs": "^0.10.5",
"@react-spring/web": "^9.1.1",
"@tlon/indigo-dark": "^1.0.6",
"@tlon/indigo-light": "^1.0.7",
"@tlon/indigo-react": "^1.2.27",
"@tlon/sigil-js": "^1.4.3",
"@urbit/api": "^2.1.0",
"@tlon/sigil-js": "^1.4.5",
"@urbit/api": "^2.1.1",
"@urbit/http-api": "^2.1.0",
"any-ascii": "^0.1.7",
"aws-sdk": "^2.830.0",
@ -36,15 +33,15 @@
"mousetrap": "^1.6.5",
"mousetrap-global-bind": "^1.1.0",
"normalize-wheel": "1.0.1",
"oembed-parser": "^1.4.5",
"oembed-parser": "^3.0.4",
"prop-types": "^15.7.2",
"querystring": "^0.2.0",
"react": "^17.0.2",
"react-codemirror2": "^6.0.1",
"react-codemirror2-react-17": "^1.0.0",
"react-dom": "^17.0.2",
"react-helmet": "^6.1.0",
"react-markdown": "^4.3.1",
"react-oembed-container": "^1.0.0",
"react-markdown": "^5.0.3",
"react-oembed-container": "^1.0.1",
"react-router-dom": "^5.2.0",
"react-use-gesture": "^9.1.3",
"react-virtuoso": "^0.20.3",
@ -107,6 +104,7 @@
"lint-staged": "^11.0.0",
"loki": "^0.28.1",
"moment-locales-webpack-plugin": "^1.2.0",
"patch-package": "^6.4.7",
"react-refresh": "^0.11.0",
"sass": "^1.32.5",
"sass-loader": "^8.0.2",
@ -131,7 +129,8 @@
"storybook": "start-storybook -p 6006",
"build-storybook": "build-storybook",
"chromatic": "chromatic --exit-zero-on-changes",
"hook-lint": "eslint --cache --fix"
"hook-lint": "eslint --cache --fix",
"postinstall": "patch-package"
},
"author": "",
"license": "MIT",

View File

@ -4,7 +4,7 @@ export interface Suspender<T> {
read: () => T;
}
export function suspend<T>(awaiting: Promise<T>): Suspender<T> {
export function suspend<T>(awaiting: Promise<T>, defaultValue?: any): Suspender<T> {
let state: SuspendState = 'pending';
let result: T | null = null;
@ -22,8 +22,10 @@ export function suspend<T>(awaiting: Promise<T>): Suspender<T> {
read: () => {
if (state === 'result') {
return result!;
} else if (state === 'error') {
} else if (state === 'error' && typeof defaultValue === 'undefined') {
throw result;
} else if (state === 'error' && typeof defaultValue !== 'undefined') {
return defaultValue;
} else {
throw promise;
}

View File

@ -34,7 +34,8 @@ const useStorage = ({ accept = '*' } = { accept: '*' }): IuseStorage => {
}
client.current = new S3Client({
credentials: s3.credentials,
endpoint: s3.credentials.endpoint
endpoint: s3.credentials.endpoint,
signatureVersion: 'v4'
});
}
}, [gcp.token, s3.credentials]);

View File

@ -11,8 +11,7 @@ export function getTitleFromWorkspace(
case 'messages':
return 'Messages';
case 'group':
const association = associations.groups[workspace.group];
return association?.metadata?.title || '';
return associations.groups[workspace.group]?.metadata?.title || 'Groups';
}
}

View File

@ -1,6 +1,6 @@
import { useCallback } from 'react';
import create from 'zustand';
import { suspend, Suspender , suspendWithResult } from '../lib/suspend';
import { suspend, Suspender } from '../lib/suspend';
import { jsonFetch } from '~/logic/lib/util';
export interface EmbedState {
@ -23,17 +23,19 @@ const useEmbedState = create<EmbedState>((set, get) => ({
const search = new URLSearchParams({
url
});
const embed = await jsonFetch(`${OEMBED_PROVIDER}?${search.toString()}`);
const { embeds: es } = get();
set({ embeds: { ...es, [url]: embed } });
return embed;
},
getEmbed: (url: string): Suspender<any> => {
const { fetch, embeds } = get();
if(url in embeds) {
return suspendWithResult(embeds[url]);
return embeds[url];
}
return suspend(fetch(url));
const { embeds: es } = get();
const embed = suspend(fetch(url), {});
set({ embeds: { ...es, [url]: embed } });
return embed;
}
}));

View File

@ -1,5 +1,6 @@
import { acceptDm, cite, Content, declineDm, deSig, Post } from '@urbit/api';
import React, { useCallback, useEffect } from 'react';
import Helmet from 'react-helmet';
import _ from 'lodash';
import bigInt from 'big-integer';
import { Box, Row, Col, Text, Center } from '@tlon/indigo-react';
@ -49,6 +50,21 @@ function quoteReply(post: Post) {
return `${reply}\n\n~${post.author}:`;
}
export function DmHelmet(props: DmHelmetProps) {
const { ship } = props;
const hark = useHarkDm(ship);
const unreadCount = hark.count;
const contact = useContact(ship);
const { hideNicknames } = useSettingsState(selectCalmState);
const showNickname = !hideNicknames && Boolean(contact);
const nickname = showNickname ? contact!.nickname : cite(ship) ?? ship;
return(
<Helmet defer={false}>
<title>{unreadCount ? `(${String(unreadCount)}) ` : ''}{ nickname }</title>
</Helmet>
);
}
export function DmResource(props: DmResourceProps) {
const { ship } = props;
const dm = useDM(ship);

View File

@ -5,7 +5,7 @@ import 'codemirror/addon/hint/show-hint';
import 'codemirror/lib/codemirror.css';
import 'codemirror/mode/markdown/markdown';
import React, { useRef, ClipboardEvent, useEffect, useImperativeHandle } from 'react';
import { Controlled as CodeEditor } from 'react-codemirror2';
import { Controlled as CodeEditor } from 'react-codemirror2-react-17';
import styled from 'styled-components';
import { MOBILE_BROWSER_REGEX } from '~/logic/lib/util';
import useSettingsState from '~/logic/state/settings';

View File

@ -1,18 +1,17 @@
import React from 'react';
import {
Icon,
Center,
Row,
Text,
Col,
Box,
CenterProps
} from '@tlon/indigo-react';
import { hasProvider } from 'oembed-parser';
import { AUDIO_REGEX, IMAGE_REGEX } from '~/views/components/RemoteContent';
import { AUDIO_REGEX, IMAGE_REGEX, validOembedCheck } from '~/views/components/RemoteContent';
import { AudioPlayer } from '~/views/components/AudioPlayer';
import { useHistory } from 'react-router';
import { useHovering } from '~/logic/lib/util';
import { useEmbed } from '~/logic/state/embed';
import Author from '~/views/components/Author';
import {
GraphNode,
@ -38,8 +37,32 @@ export interface LinkBlockItemProps {
summary?: boolean;
}
export function LinkBlockItem(props: LinkBlockItemProps & CenterProps) {
const { node, summary, size, m, border = 1, objectFit, ...rest } = props;
export const LinkBlockItem = (props: LinkBlockItemProps & CenterProps) => {
const { node, ...rest } = props;
const { post } = node;
const { contents } = post;
const [{ text: title }, ...content] = contents as [
TextContent,
UrlContent | ReferenceContent
];
let url = '';
if ('url' in content?.[0]) {
url = content[0].url;
}
return(
<AsyncFallback fallback={<RemoteContentEmbedFallback url={url} />}>
<LinkBlockItemInner
node={node}
{...rest}
/>
</AsyncFallback>
);
}
function LinkBlockItemInner(props: LinkBlockItemProps & CenterProps) {
const { node, summary, m, border = 1, objectFit, ...rest } = props;
const { post, children } = node;
const { contents, index, author } = post;
@ -56,8 +79,9 @@ export function LinkBlockItem(props: LinkBlockItemProps & CenterProps) {
const isImage = IMAGE_REGEX.test(url);
const isAudio = AUDIO_REGEX.test(url);
const oembed = useEmbed(url);
const isOembed = validOembedCheck(oembed, url);
const isOembed = hasProvider(url);
const history = useHistory();
const { hovering, bind } = useHovering();
const onClick = () => {
@ -65,70 +89,67 @@ export function LinkBlockItem(props: LinkBlockItemProps & CenterProps) {
history.push(`${pathname}/index${index}${search}`);
};
return (
<Center
<Box
onClick={onClick}
position="relative"
m={m}
border={border}
borderColor="lightGray"
position="relative"
borderRadius="1"
height={size}
width={size}
m={m}
maxHeight="100%"
{...rest}
{...bind}
>
<AsyncFallback fallback={<RemoteContentEmbedFallback url={url} />}>
{isReference ? (
summary ? (
<RemoteContentPermalinkEmbed
reference={content[0] as ReferenceContent}
/>
) : (
<PermalinkEmbed
link={referenceToPermalink(content[0] as ReferenceContent).link}
transcluded={0}
/>
)
) : isImage ? (
<RemoteContentImageEmbed
url={url}
tall
stretch
objectFit={objectFit ? objectFit : "cover"}
/>
) : isAudio ? (
<AudioPlayer title={title} url={url} />
) : isOembed ? (
<RemoteContentOembed tall={!summary} renderUrl={false} url={url} thumbnail={summary} />
) : (
<RemoteContentEmbedFallback url={url} />
)}
</AsyncFallback>
<Box
backgroundColor="white"
display={summary && hovering ? 'block' : 'none'}
width="100%"
height="64px"
position="absolute"
left="0"
bottom="0"
>
<Col width="100%" height="100%" p="2" justifyContent="space-between">
<Row justifyContent="space-between" width="100%">
<Text textOverflow="ellipsis" whiteSpace="nowrap" overflow="hidden">
{title}
</Text>
<Row gapX="1" alignItems="center">
<Icon icon="Chat" color="black" />
<Text>{children.size}</Text>
<Col height="100%" justifyContent="center" alignItems="center">
{isReference ? (
summary ? (
<RemoteContentPermalinkEmbed
reference={content[0] as ReferenceContent}
/>
) : (
<PermalinkEmbed
link={referenceToPermalink(content[0] as ReferenceContent).link}
transcluded={0}
/>
)
) : isImage ? (
<RemoteContentImageEmbed
url={url}
tall
stretch
objectFit={objectFit ? objectFit : "cover"}
/>
) : isAudio ? (
<AudioPlayer title={title} url={url} />
) : isOembed ? (
<RemoteContentOembed tall={!summary} renderUrl={false} url={url} thumbnail={summary} oembed={oembed} />
) : (
<RemoteContentEmbedFallback url={url} />
)}
<Box
backgroundColor="white"
display={summary && hovering ? 'block' : 'none'}
width="100%"
height="64px"
position="absolute"
left="0"
bottom="0"
>
<Col width="100%" height="100%" p="2" justifyContent="space-between">
<Row justifyContent="space-between" width="100%">
<Text textOverflow="ellipsis" whiteSpace="nowrap" overflow="hidden">
{title}
</Text>
<Row gapX="1" alignItems="center">
<Icon icon="Chat" color="black" />
<Text>{children.size}</Text>
</Row>
</Row>
</Row>
<Row width="100%">
<Author ship={author} date={post['time-sent']} showImage></Author>
</Row>
</Col>
</Box>
</Center>
<Row width="100%">
<Author ship={author} date={post['time-sent']} showImage></Author>
</Row>
</Col>
</Box>
</Col>
</Box>
);
}

View File

@ -1,4 +1,4 @@
import { Col, Row, RowProps } from '@tlon/indigo-react';
import { Center, Col, Row, RowProps } from '@tlon/indigo-react';
import { Association, GraphNode, markEachAsRead, TextContent, UrlContent } from '@urbit/api';
import React, { useEffect } from 'react';
import { useGroup } from '~/logic/state/group';
@ -27,21 +27,13 @@ export function LinkDetail(props: LinkDetailProps) {
return (
/* @ts-ignore indio props?? */
<Row height="100%" width="100%" flexDirection={['column', 'column', 'row']} {...rest}>
<LinkBlockItem
minWidth="0"
minHeight="0"
height={["50%", "50%", "100%"]}
width={["100%", "100%", "calc(100% - 350px)"]}
flexGrow={0}
border={0}
node={node}
objectFit="contain"
/>
<Center flex="3 1 75%" overflowY="scroll" >
<LinkBlockItem maxHeight="100%" border={0} node={node} objectFit="contain" />
</Center>
<Col
minHeight="0"
flexShrink={1}
width={['100%', '100%', '350px']}
flexGrow={0}
flex="1 25%"
maxWidth={['auto', 'auto', '45ch']}
maxHeight={['50%', '50%', 'unset']}
gapY={[2,4]}
borderLeft={[0, 0, 1]}
borderTop={[1, 1, 0]}

View File

@ -90,33 +90,33 @@ export function EditProfile(props: any): ReactElement {
const onSubmit = async (values: any, actions: any) => {
try {
Object.keys(values).forEach((key) => {
for (const key in values) {
const newValue = key !== 'color' ? values[key] : uxToHex(values[key]);
if (newValue !== contact[key]) {
if (key === 'isPublic') {
airlock.poke(setPublic(newValue));
return;
} else if (key === 'groups') {
const toRemove: string[] = _.difference(
contact?.groups || [],
newValue
);
const toAdd: string[] = _.difference(
newValue,
contact?.groups || []
);
toRemove.forEach(e =>
airlock.poke(editContact(ship, { 'remove-group': resourceFromPath(e) }))
);
toAdd.forEach(e =>
airlock.poke(editContact(ship, { 'add-group': resourceFromPath(e) }))
);
} else if (key !== 'last-updated' && key !== 'isPublic') {
airlock.poke(editContact(ship, { [key]: newValue }));
return;
if (newValue === contact[key] || key === 'last-updated') {
continue;
} else if (key === 'isPublic') {
await airlock.poke(setPublic(newValue));
} else if (key === 'groups') {
const toRemove: string[] = _.difference(
contact?.groups || [],
newValue
);
const toAdd: string[] = _.difference(
newValue,
contact?.groups || []
);
for (const i in toRemove) {
const group = resourceFromPath(toRemove[i]);
await airlock.poke(editContact(ship, { 'remove-group': group }));
}
for (const i in toAdd) {
const group = resourceFromPath(toAdd[i]);
await airlock.poke(editContact(ship, { 'add-group': group }));
}
} else {
await airlock.poke(editContact(ship, { [key]: newValue }));
}
});
}
history.push(`/~profile/${ship}`);
} catch (e) {
console.error(e);

View File

@ -6,7 +6,7 @@ import 'codemirror/lib/codemirror.css';
import 'codemirror/mode/markdown/markdown';
import { useFormikContext } from 'formik';
import React, { useCallback, useRef } from 'react';
import { UnControlled as CodeEditor } from 'react-codemirror2';
import { UnControlled as CodeEditor } from 'react-codemirror2-react-17';
import { Prompt } from 'react-router-dom';
import { useFileUpload } from '~/logic/lib/useFileUpload';
import { IuseStorage } from '~/logic/lib/useStorage';
@ -23,6 +23,7 @@ interface MarkdownEditorProps {
value: string;
onChange: (s: string) => void;
onBlur?: (e: any) => void;
disabled?: boolean;
}
const PromptIfDirty = () => {
@ -39,7 +40,7 @@ const PromptIfDirty = () => {
export function MarkdownEditor(
props: MarkdownEditorProps & PropFunc<typeof Box>
) {
const { onBlur, placeholder, value, onChange, ...boxProps } = props;
const { onBlur, placeholder, value, onChange, disabled, ...boxProps } = props;
const options = {
mode: MARKDOWN_CONFIG,
@ -56,7 +57,9 @@ export function MarkdownEditor(
const handleChange = useCallback(
(_e, _d, v: string) => {
onChange(v);
if (!disabled) {
onChange(v);
}
},
[onChange]
);
@ -93,6 +96,7 @@ export function MarkdownEditor(
p={1}
border={1}
borderColor="lightGray"
backgroundColor={disabled ? 'lightGray' : 'white'}
borderRadius={2}
height={['calc(100% - 22vh)', '100%']}
{...boxProps}

View File

@ -6,14 +6,17 @@ import { MarkdownEditor } from './MarkdownEditor';
export const MarkdownField = ({
id,
disabled,
...rest
}: { id: string } & Parameters<typeof Box>[0]) => {
}: { id: string; disabled?: boolean } & Parameters<typeof Box>[0]) => {
const [{ value, onBlur }, { error, touched }, { setValue }] = useField(id);
const handleBlur = useCallback(
(e: any) => {
_.set(e, 'target.id', id);
onBlur && onBlur(e);
if (!disabled) {
_.set(e, 'target.id', id);
onBlur && onBlur(e);
}
},
[onBlur, id]
);
@ -23,7 +26,7 @@ export const MarkdownField = ({
return (
<Box
overflowY="hidden"
height='100%'
height="100%"
width="100%"
display="flex"
flexDirection="column"
@ -35,6 +38,7 @@ export const MarkdownField = ({
onBlur={handleBlur}
value={value}
onChange={setValue}
disabled={disabled}
/>
<ErrorLabel mt={2} hasError={Boolean(error && touched)}>
{error}

View File

@ -1,5 +1,7 @@
import {
Button, Col, ManagedTextInputField as Input,
Button,
Col,
ManagedTextInputField as Input,
Row
} from '@tlon/indigo-react';
import { Form, Formik, FormikHelpers } from 'formik';
@ -31,7 +33,8 @@ export interface PostFormSchema {
}
export function PostForm(props: PostFormProps) {
const { initial, onSubmit, submitLabel, loadingText, cancel, history } = props;
const { initial, onSubmit, submitLabel, loadingText, cancel, history } =
props;
return (
<Col width="100%" height="100%" p={[2, 4]}>
@ -40,30 +43,49 @@ export function PostForm(props: PostFormProps) {
initialValues={initial}
onSubmit={onSubmit}
>
<Form style={{ display: 'contents' }}>
<Row flexShrink={0} flexDirection={['column-reverse', 'row']} mb={4} gapX={4} justifyContent='space-between'>
<Input maxWidth='40rem' width='100%' flexShrink={[0, 1]} placeholder="Post Title" id="title" />
<Row flexDirection={['column', 'row']} mb={[4,0]}>
<AsyncButton
ml={[0,2]}
flexShrink={0}
primary
loadingText={loadingText}
>
{submitLabel}
</AsyncButton>
{cancel && <Button
ml={[0,2]}
mt={[2,0]}
onClick={() => {
history.goBack();
}}
type="button"
>Cancel</Button>}
{({ isSubmitting }) => (
<Form style={{ display: 'contents' }}>
<Row
flexShrink={0}
flexDirection={['column-reverse', 'row']}
mb={4}
gapX={4}
justifyContent="space-between"
>
<Input
maxWidth="40rem"
width="100%"
flexShrink={[0, 1]}
placeholder="Post Title"
id="title"
disabled={isSubmitting}
/>
<Row flexDirection={['column', 'row']} mb={[4, 0]}>
<AsyncButton
ml={[0, 2]}
flexShrink={0}
primary
loadingText={loadingText}
>
{submitLabel}
</AsyncButton>
{cancel && (
<Button
ml={[0, 2]}
mt={[2, 0]}
onClick={() => {
history.goBack();
}}
type="button"
>
Cancel
</Button>
)}
</Row>
</Row>
</Row>
<MarkdownField flexGrow={1} id="body" />
</Form>
<MarkdownField flexGrow={1} id="body" disabled={isSubmitting} />
</Form>
)}
</Formik>
</Col>
);

View File

@ -3,7 +3,7 @@ import moment from 'moment';
import React, { ReactElement, ReactNode } from 'react';
import { Sigil } from '~/logic/lib/sigil';
import { useCopy } from '~/logic/lib/useCopy';
import { cite, uxToHex } from '~/logic/lib/util';
import { cite, deSig, uxToHex } from '~/logic/lib/util';
import { useContact } from '~/logic/state/contact';
import { useDark } from '~/logic/state/join';
import useSettingsState, { selectCalmState, useShowNickname } from '~/logic/state/settings';
@ -52,7 +52,7 @@ function Author(props: AuthorProps & PropFunc<typeof Box>): ReactElement {
const { hideAvatars } = useSettingsState(selectCalmState);
const name = showNickname && contact ? contact.nickname : cite(ship);
const stamp = moment(date);
const { copyDisplay, doCopy } = useCopy(`~${ship}`, name);
const { copyDisplay, doCopy } = useCopy(`~${deSig(ship)}`, name);
const sigil = fullNotIcon ? (
<Sigil ship={ship} size={size} color={color} padding={sigilPadding} />

View File

@ -28,7 +28,7 @@ import { Link } from 'react-router-dom';
import { AppPermalink, referenceToPermalink } from '~/logic/lib/permalinks';
import useMetadataState from '~/logic/state/metadata';
import { RemoteContentWrapper } from './wrapper';
import { useEmbed } from '~/logic/state/embed';
import { Suspender } from '~/logic/lib/suspend';
import { IS_SAFARI } from '~/logic/lib/platform';
import useDocketState, { useTreaty } from '~/logic/state/docket';
import { AppTile } from '~/views/apps/permalinks/embed';
@ -97,6 +97,7 @@ export function RemoteContentImageEmbed(
objectFit="cover"
borderRadius={2}
onError={onError}
style={{ imageRendering: '-webkit-optimize-contrast' }}
{...props}
/>
</Box>
@ -319,6 +320,7 @@ type RemoteContentOembedProps = {
renderUrl?: boolean;
thumbnail?: boolean;
tall?: boolean;
oembed: Suspender<any>;
} & RemoteContentEmbedProps &
PropFunc<typeof Box>;
@ -332,10 +334,9 @@ export const RemoteContentOembed = React.forwardRef<
HTMLDivElement,
RemoteContentOembedProps
>((props, ref) => {
const { url, renderUrl = false, thumbnail = false, ...rest } = props;
const oembed = useEmbed(url);
const { url, oembed, renderUrl = false, thumbnail = false, ...rest } = props;
const embed = oembed.read();
const fallbackError = new Error('fallback');
const [aspect, width, height] = useMemo(() => {
if(!('height' in embed && typeof embed.height === 'number'
@ -373,11 +374,9 @@ export const RemoteContentOembed = React.forwardRef<
dangerouslySetInnerHTML={{ __html: embed.html }}
></EmbedBox>
</EmbedContainer>
) : renderUrl ? (
) : (
<RemoteContentEmbedFallback url={url} />
) : (() => {
throw fallbackError;
})()
)
}
</Col>
);

View File

@ -1,12 +1,17 @@
import { hasProvider } from 'oembed-parser';
import {
Box,
} from '@tlon/indigo-react';
import React from 'react';
import useSettingsState from '~/logic/state/settings';
import {
RemoteContentAudioEmbed,
RemoteContentImageEmbed,
RemoteContentOembed,
RemoteContentVideoEmbed
RemoteContentVideoEmbed,
RemoteContentEmbedFallback
} from './embed';
import { useEmbed } from '~/logic/state/embed';
import { Suspender } from '~/logic/lib/suspend';
import { TruncatedText } from '~/views/components/TruncatedText';
import { RemoteContentWrapper } from './wrapper';
import AsyncFallback from '../AsyncFallback';
@ -43,8 +48,34 @@ export const IMAGE_REGEX = new RegExp(
export const AUDIO_REGEX = new RegExp(/(\.mp3|\.wav|\.ogg|\.m4a)$/i);
export const VIDEO_REGEX = new RegExp(/(\.mov|\.mp4|\.ogv)$/i);
// This is used to prevent our oembed parser from
// trying to embed facebook/instagram links, which require an API key
const isFacebookGraphDependent = (url: string) => {
const caseDesensitizedURL = url.toLowerCase()
return (caseDesensitizedURL.includes('facebook.com') || caseDesensitizedURL.includes('instagram.com'))
}
export const validOembedCheck = (embed: Suspender<any>, url: string) => {
if (!isFacebookGraphDependent(url)) {
if (!embed.read().hasOwnProperty("error")) {
return true
}
}
return false
}
export const RemoteContent = (props: RemoteContentProps) => {
const {url, ...rest} = props
return(
<AsyncFallback fallback={<RemoteContentEmbedFallback url={url} />}>
<RemoteContentInner url={url} {...rest}/>
</AsyncFallback>
)
}
const emptyRef = () => {};
export function RemoteContent(props: RemoteContentProps) {
function RemoteContentInner(props: RemoteContentProps) {
const {
url,
embedRef = emptyRef,
@ -57,45 +88,51 @@ export function RemoteContent(props: RemoteContentProps) {
const isImage = IMAGE_REGEX.test(url);
const isAudio = AUDIO_REGEX.test(url);
const isVideo = VIDEO_REGEX.test(url);
const isOembed = hasProvider(url);
const oembed = useEmbed(url);
const isOembed = validOembedCheck(oembed, url);
const wrapperProps = {
url,
tall,
embedOnly: !renderUrl || tall
};
const fallback = !renderUrl ? null : (
<RemoteContentWrapper {...wrapperProps}>
<TruncatedText>{url}</TruncatedText>
</RemoteContentWrapper>
);
const fallback = null;
if (isImage && remoteContentPolicy.imageShown) {
return (
<RemoteContentWrapper {...wrapperProps} noOp={transcluded} replaced>
<RemoteContentImageEmbed url={url} />
</RemoteContentWrapper>
<Box mt={1} mb={2} flexShrink={0}>
<RemoteContentWrapper {...wrapperProps} noOp={transcluded} replaced>
<RemoteContentImageEmbed url={url} />
</RemoteContentWrapper>
</Box>
);
} else if (isAudio && remoteContentPolicy.audioShown) {
return (
<RemoteContentWrapper {...wrapperProps}>
<RemoteContentAudioEmbed url={url} />
</RemoteContentWrapper>
<Box mt={1} mb={2} flexShrink={0}>
<RemoteContentWrapper {...wrapperProps}>
<RemoteContentAudioEmbed url={url} />
</RemoteContentWrapper>
</Box>
);
} else if (isVideo && remoteContentPolicy.videoShown) {
return (
<RemoteContentWrapper
{...wrapperProps}
detail={<RemoteContentVideoEmbed url={url} />}
>
<TruncatedText>{url}</TruncatedText>
</RemoteContentWrapper>
<Box mt={1} mb={2} flexShrink={0}>
<RemoteContentWrapper
{...wrapperProps}
detail={<RemoteContentVideoEmbed url={url} />}
>
<TruncatedText>{url}</TruncatedText>
</RemoteContentWrapper>
</Box>
);
} else if (isOembed && remoteContentPolicy.oembedShown) {
return (
<AsyncFallback fallback={fallback}>
<RemoteContentOembed ref={embedRef} url={url} renderUrl={renderUrl} />
</AsyncFallback>
<Box mt={1} mb={2} flexShrink={0}>
<AsyncFallback fallback={fallback}>
<RemoteContentOembed ref={embedRef} url={url} renderUrl={renderUrl} oembed={oembed} />
</AsyncFallback>
</Box>
);
}
return fallback;

View File

@ -2,26 +2,12 @@ import { Anchor, Text } from '@tlon/indigo-react';
import { Contact, Group } from '@urbit/api';
import React from 'react';
import ReactMarkdown, { ReactMarkdownProps } from 'react-markdown';
import RemarkDisableTokenizers from 'remark-disable-tokenizers';
import { isValidPatp } from 'urbit-ob';
import { deSig } from '~/logic/lib/util';
import { PermalinkEmbed } from '~/views/apps/permalinks/embed';
import { Mention } from '~/views/components/MentionText';
import RemoteContent from '~/views/components/RemoteContent';
const DISABLED_BLOCK_TOKENS = [
'indentedCode',
'atxHeading',
'thematicBreak',
'list',
'setextHeading',
'html',
'definition',
'table'
];
const DISABLED_INLINE_TOKENS = [];
type RichTextProps = ReactMarkdownProps & {
disableRemoteContent?: boolean;
contact?: Contact;
@ -110,10 +96,6 @@ const RichText = React.memo(({ disableRemoteContent = false, ...props }: RichTex
return <Text display={props.inline ? 'inline' : 'block'} mb={2} {...props}>{paraProps.children}</Text>;
}
}}
plugins={[[
RemarkDisableTokenizers,
{ block: DISABLED_BLOCK_TOKENS, inline: DISABLED_INLINE_TOKENS }
]]}
/>
));

View File

@ -74,7 +74,7 @@ export function GraphPermissions(props: GraphPermissionsProps) {
const writers = _.get(
group?.tags,
['graph', association.resource, 'writers'],
new Set()
[]
);
let [, , hostShip] = association.resource.split('/');
@ -91,7 +91,7 @@ export function GraphPermissions(props: GraphPermissionsProps) {
const initialValues = {
writePerms,
writers: Array.from(writers)
writers: [...writers]
.filter(x => x !== hostShip),
readerComments: association.metadata.vip === 'reader-comments'
};
@ -104,7 +104,7 @@ export function GraphPermissions(props: GraphPermissionsProps) {
resource: association.resource,
tag: 'writers'
};
const allWriters = Array.from(writers).map(w => `~${w}`);
const allWriters = [...writers].map(w => `~${w}`);
if (values.readerComments !== readerComments) {
await airlock.poke(metadataEdit(association, {
vip: values.readerComments ? 'reader-comments' : ''
@ -170,7 +170,7 @@ export function GraphPermissions(props: GraphPermissionsProps) {
<Col>
<Label mb={2}>Permissions Summary</Label>
<PermissionsSummary
writersSize={writers.size}
writersSize={writers.length}
vip={association.metadata.vip}
/>
</Col>

View File

@ -34,26 +34,30 @@ interface GraphMentionNode {
ship: string;
}
const addEmphasisToMention = (contents: Content[], content: Content, index: number) => {
const addEmphasisToMention = (
contents: Content[],
content: Content,
index: number
) => {
const prevContent = contents[index - 1];
const nextContent = contents[index + 1];
if (
'text' in content &&
(content.text.trim() === '**' || content.text.trim() === '*' )
) {
(content.text.trim() === '**' || content.text.trim() === '*')
) {
return {
text: ''
};
}
if(
if (
'text' in content &&
content.text.endsWith('*') &&
!content.text.startsWith('*') &&
nextContent !== undefined &&
'mention' in nextContent
) {
if (content.text.charAt((content.text.length - 2)) === '*') {
if (content.text.charAt(content.text.length - 2) === '*') {
return { text: content.text.slice(0, content.text.length - 2) };
}
return { text: content.text.slice(0, content.text.length - 1) };
@ -116,78 +120,99 @@ const codeToMdAst = (content: CodeContent) => {
};
};
const contentToMdAst = (tall: boolean) => (
content: Content
): [StitchMode, any] => {
if ('text' in content) {
if (content.text.toString().trim().length === 0) {
const contentToMdAst =
(tall: boolean) =>
(content: Content): [StitchMode, any] => {
if ('text' in content) {
if (content.text.toString().trim().length === 0) {
return [
'merge',
{ type: 'root', children: [{ type: 'paragraph', children: [] }] }
];
}
return [
'merge',
{ type: 'root', children: [{ type: 'paragraph', children: [] }] }
tall ? parseTall(content.text) : parseWide(content.text)
] as [StitchMode, any];
} else if ('code' in content) {
return ['block', codeToMdAst(content)];
} else if ('reference' in content) {
return [
'block',
{
type: 'root',
children: [
{
type: 'graph-reference',
reference: content.reference
}
]
}
];
} else if ('url' in content) {
const images = ['.jpg', '.jpeg', '.png', '.gif', '.webp'];
return [
'inline',
{
type: 'root',
children: [
{
type: 'link',
url: content.url,
children: [
{
type: 'text',
value: !images.some(i => content.url.includes(i))
? content.url
: ''
}
]
}
]
}
];
} else if ('mention' in content) {
return [
'inline',
{
type: 'root',
children: [
{
type: 'graph-mention',
ship: content.mention,
emphasis: content.emphasis
}
]
}
];
}
return [
'merge',
tall ? parseTall(content.text) : parseWide(content.text)
] as [StitchMode, any];
} else if ('code' in content) {
return ['block', codeToMdAst(content)];
} else if ('reference' in content) {
return [
'block',
{
type: 'root',
children: [
{
type: 'graph-reference',
reference: content.reference
}
]
}
];
} else if ('url' in content) {
return [
'block',
{
type: 'root',
children: [
{
type: 'graph-url',
url: content.url
}
]
}
];
} else if ('mention' in content) {
return [
'inline',
{
type: 'root',
children: [
{
type: 'graph-mention',
ship: content.mention,
emphasis: content.emphasis
}
]
children: []
}
];
}
return [
'inline',
{
type: 'root',
children: []
}
];
};
};
function stitchInline(a: any, b: any) {
if (!a?.children) {
throw new Error('Bad stitchInline call: missing root');
}
const lastParaIdx = a.children.length - 1;
const last = a.children[lastParaIdx];
// wrap bare link in list-item inside a p node
// for better typography consistency
if (last?.type === 'listItem') {
if (last?.children.length === 0) {
last.children.push({
type: 'paragraph',
children: []
});
}
}
if (last?.children) {
const ros = {
...a,
@ -217,9 +242,14 @@ function getChildren<T extends unknown>(node: T): AstContent[] {
}
export function asParent<T extends BlockContent>(node: T): Parent | undefined {
return ['paragraph', 'heading', 'list', 'listItem', 'table'].includes(
node.type
)
return [
'paragraph',
'heading',
'list',
'listItem',
'table',
'blockquote'
].includes(node.type)
? (node as Parent)
: undefined;
}
@ -241,6 +271,7 @@ function stitchMerge(a: Root, b: Root) {
children: [...aChildren.slice(0, -1), mergedPara, ...bChildren.slice(1)]
};
}
return { ...a, children: [...aChildren, ...bChildren] };
}
@ -256,10 +287,10 @@ function stitchInlineAfterBlock(a: Root, b: GraphMentionNode[]) {
}
function stitchAsts(asts: [StitchMode, GraphAstNode][]) {
return _.reduce(
const t = _.reduce(
asts,
([prevMode, ast], [mode, val]): [StitchMode, GraphAstNode] => {
if (prevMode === 'block') {
if (prevMode === 'block' || prevMode === 'inline') {
if (mode === 'inline') {
return [mode, stitchInlineAfterBlock(ast, val?.children ?? [])];
}
@ -283,6 +314,46 @@ function stitchAsts(asts: [StitchMode, GraphAstNode][]) {
},
['block', { type: 'root', children: [] }] as [StitchMode, GraphAstNode]
);
t[1].children.map((c, idx) => {
const links = [];
function addRichEmbedURL(nodes) {
if (nodes?.children) {
nodes.children.filter(k => {
if (k.type === 'link') {
links.push({
type: 'root',
children: [
{
type: 'graph-url',
url: k.url
}
]
});
} else if (k?.children) {
k.children.filter(o => {
if (o.type === 'link') {
links.push({
type: 'root',
children: [
{
type: 'graph-url',
url: o.url
}
]
});
}
});
}
});
nodes.children.push(...links);
}
}
addRichEmbedURL(c);
});
return t;
}
const header = ({ children, depth, ...rest }) => {
const level = depth;
@ -392,7 +463,7 @@ const renderers = {
);
return tall ? <Box mb={2}>{inner}</Box> : inner;
},
link: (props) => {
link: props => {
return (
<Anchor
display="inline"
@ -408,9 +479,13 @@ const renderers = {
);
},
list: ({ depth, ordered, children }) => {
return ordered ? <Ol>{children}</Ol> : <Ul>{children}</Ul>;
return ordered ? (
<Ol fontSize="1">{children}</Ol>
) : (
<Ul fontSize="1">{children}</Ul>
);
},
'graph-mention': (obj) => {
'graph-mention': obj => {
return <Mention ship={obj.ship} emphasis={obj.emphasis} />;
},
image: ({ url, tall }) => (
@ -419,19 +494,13 @@ const renderers = {
</Box>
),
'graph-url': ({ url, tall }) => (
<Box mt={1} mb={2} flexShrink={0}>
<RemoteContent key={url} url={url} tall={tall} />
</Box>
<RemoteContent key={url} url={url} tall={tall} />
),
'graph-reference': ({ reference, transcluded }) => {
const { link } = referenceToPermalink({ reference });
return (
<Box my={2} flexShrink={0}>
<PermalinkEmbed
link={link}
transcluded={transcluded}
showOurContact
/>
<PermalinkEmbed link={link} transcluded={transcluded} showOurContact />
</Box>
);
},
@ -499,19 +568,13 @@ export type GraphContentProps = PropFunc<typeof Box> & {
showOurContact: boolean;
};
export const GraphContent = React.memo((
props: GraphContentProps
) => {
const {
contents,
tall = false,
transcluded = 0,
...rest
} = props;
export const GraphContent = React.memo((props: GraphContentProps) => {
const { contents, tall = false, transcluded = 0, ...rest } = props;
const [, ast] = stitchAsts(
contents
.map((content, index) => addEmphasisToMention(contents, content, index))
.map(contentToMdAst(tall)));
.map((content, index) => addEmphasisToMention(contents, content, index))
.map(contentToMdAst(tall))
);
return (
<Box {...rest}>
<Graphdown transcluded={transcluded} ast={ast} tall={tall} />

View File

@ -1,128 +1,126 @@
/* eslint-disable */
/** pulled from remark-parse
*
*
* critical change is that blockquotes require a newline to be continued, see
* the `if(!prefixed) conditional
*/
'use strict'
"use strict";
var trim = require('trim')
var interrupt = require('remark-parse/lib/util/interrupt')
var trim = require("trim");
module.exports = blockquote
module.exports = blockquote;
var lineFeed = '\n'
var tab = '\t'
var space = ' '
var greaterThan = '>'
var lineFeed = "\n";
var tab = "\t";
var space = " ";
var greaterThan = ">";
function blockquote(eat, value, silent) {
var self = this
var offsets = self.offset
var tokenizers = self.blockTokenizers
var interruptors = self.interruptBlockquote
var now = eat.now()
var currentLine = now.line
var length = value.length
var values = []
var contents = []
var indents = []
var add
var index = 0
var character
var rest
var nextIndex
var content
var line
var startIndex
var prefixed
var exit
var self = this;
var offsets = self.offset;
var tokenizers = self.blockTokenizers;
var interruptors = self.interruptBlockquote;
var now = eat.now();
var currentLine = now.line;
var length = value.length;
var values = [];
var contents = [];
var indents = [];
var add;
var index = 0;
var character;
var rest;
var nextIndex;
var content;
var line;
var startIndex;
var prefixed;
var exit;
while (index < length) {
character = value.charAt(index)
character = value.charAt(index);
if (character !== space && character !== tab) {
break
break;
}
index++
index++;
}
if (value.charAt(index) !== greaterThan) {
return
return;
}
if (silent) {
return true
return true;
}
index = 0
index = 0;
while (index < length) {
nextIndex = value.indexOf(lineFeed, index)
startIndex = index
prefixed = false
nextIndex = value.indexOf(lineFeed, index);
startIndex = index;
prefixed = false;
if (nextIndex === -1) {
nextIndex = length
nextIndex = length;
}
while (index < length) {
character = value.charAt(index)
character = value.charAt(index);
if (character !== space && character !== tab) {
break
break;
}
index++
index++;
}
if (value.charAt(index) === greaterThan) {
index++
prefixed = true
index++;
prefixed = true;
if (value.charAt(index) === space) {
index++
index++;
}
} else {
index = startIndex
index = startIndex;
}
content = value.slice(index, nextIndex)
content = value.slice(index, nextIndex);
if (!prefixed && !trim(content)) {
index = startIndex
break
index = startIndex;
break;
}
if (!prefixed) {
break;
}
line = startIndex === index ? content : value.slice(startIndex, nextIndex)
line = startIndex === index ? content : value.slice(startIndex, nextIndex);
indents.push(index - startIndex)
values.push(line)
contents.push(content)
indents.push(index - startIndex);
values.push(line);
contents.push(content);
index = nextIndex + 1
index = nextIndex + 1;
}
const trailingNewline = value.charAt(nextIndex) === '\n';
const trailingNewline = value.charAt(nextIndex) === "\n";
index = -1
length = indents.length
add = eat(values.join(lineFeed))
index = -1;
length = indents.length;
add = eat(values.join(lineFeed));
while (++index < length) {
offsets[currentLine] = (offsets[currentLine] || 0) + indents[index]
currentLine++
offsets[currentLine] = (offsets[currentLine] || 0) + indents[index];
currentLine++;
}
exit = self.enterBlock()
contents = self.tokenizeBlock(contents.join(lineFeed), now)
console.log(values);
exit()
exit = self.enterBlock();
contents = self.tokenizeBlock(contents.join(lineFeed), now);
exit();
const added = add({type: 'blockquote', children: contents})
return trailingNewline ? add({ type: 'paragraph', children: [] }) : added;
const added = add({ type: "blockquote", children: contents });
return trailingNewline ? add({ type: "paragraph", children: [] }) : added;
}

View File

@ -8,7 +8,7 @@ import {
} from 'react-router-dom';
import { useShortcut } from '~/logic/state/settings';
import { useLocalStorageState } from '~/logic/lib/useLocalStorageState';
import { getGroupFromWorkspace } from '~/logic/lib/workspace';
import { getGroupFromWorkspace, getTitleFromWorkspace } from '~/logic/lib/workspace';
import useGroupState from '~/logic/state/group';
import useHarkState from '~/logic/state/hark';
import useMetadataState from '~/logic/state/metadata';
@ -22,7 +22,7 @@ import { Skeleton } from './Skeleton';
import { EmptyGroupHome } from './Home/EmptyGroupHome';
import { Join } from './Join/Join';
import { Resource } from './Resource';
import { DmResource } from '~/views/apps/chat/DmResource';
import { DmResource, DmHelmet } from '~/views/apps/chat/DmResource';
import { UnjoinedResource } from '~/views/components/UnjoinedResource';
import { NewChannel } from './NewChannel';
import { GroupHome } from './Home/GroupHome';
@ -126,16 +126,18 @@ export function GroupsPane(props: GroupsPaneProps) {
const { ship } = match.params as Record<string, string>;
return (
<Skeleton
mobileHide
recentGroups={recentGroups}
selected={ship}
{...props}
baseUrl={match.path}
> <DmResource ship={ship} />
</Skeleton>
<>
<DmHelmet ship={ship} />
<Skeleton
mobileHide
recentGroups={recentGroups}
selected={ship}
{...props}
baseUrl={match.path}
> <DmResource ship={ship} />
</Skeleton>
</>
);
}}
/>
@ -180,7 +182,7 @@ export function GroupsPane(props: GroupsPaneProps) {
const appPath = `/ship/${host}/${name}`;
const association = associations.graph[appPath];
const resourceUrl = `${baseUrl}/join/${app}${appPath}`;
let title = groupAssociation?.metadata?.title ?? 'Groups';
let title = getTitleFromWorkspace(associations, workspace);
if (!association) {
return <Loading />;
@ -252,7 +254,7 @@ export function GroupsPane(props: GroupsPaneProps) {
render={(routeProps) => {
const shouldHideSidebar =
routeProps.location.pathname.includes('/feed');
const title = groupAssociation?.metadata?.title ?? 'Groups';
const title = getTitleFromWorkspace(associations, workspace);
return (
<>
<Helmet defer={false}>

View File

@ -352,7 +352,7 @@ function Participant(props: {
</Link>
</Action>
<Action bg="transparent">
<Link to={`/~landscape/dm/${contact.patp}`}>
<Link to={`/~landscape/messages/dm/~${contact.patp}`}>
<Text color="green">Send Message</Text>
</Link>
</Action>

View File

@ -190,6 +190,11 @@
|%
++ pull-action pull-hook-action+!>([%add ship rid])
::
++ listen-hark
|= gr=resource
%+ poke-our:pass:io %hark-graph-hook
hark-graph-hook-action+!>([%listen gr /])
::
++ watch-md (watch-our:(jn-pass-io /md) %metadata-store /updates)
++ watch-groups (watch-our:(jn-pass-io /groups) %group-store /groups)
++ watch-md-nacks (watch-our:(jn-pass-io /md-nacks) %metadata-pull-hook /nack)
@ -436,6 +441,9 @@
=? jn-core |(hidden autojoin.request)
%- emit-many
(turn graphs pull-gra:pass)
=? jn-core hidden
%- emit-many
(turn graphs listen-hark:pass)
jn-core
::
++ feed-rid

View File

@ -1,7 +1,7 @@
:~ title+'Groups'
info+'A suite of applications to communicate on Urbit'
color+0xee.5432
glob-http+['https://bootstrap.urbit.org/glob-0v7.bmftr.90ktq.cma0h.da190.bs8b1.glob' 0v7.bmftr.90ktq.cma0h.da190.bs8b1]
glob-http+['https://bootstrap.urbit.org/glob-0v4.2se6m.fvv67.nn5e8.vfrv9.mmi88.glob' 0v4.2se6m.fvv67.nn5e8.vfrv9.mmi88]
base+'landscape'
version+[1 0 11]

View File

@ -35,6 +35,8 @@
(poke-our %group-store group-update-0+!>([%add-members rid (sy our.bowl ~)]))
;< ~ bind:m
(poke-our %group-push-hook push-hook-act)
;< ~ bind:m
(poke-our %hark-graph-hook hark-graph-hook-action+!>([%listen rid /]))
(pure:m rid)
--
::

View File

@ -38,4 +38,6 @@
(raw-poke-our %contact-pull-hook pull-hook-act)
;< ~ bind:m
(raw-poke-our %group-store remove)
;< ~ bind:m
(raw-poke-our %group-view group-view-action+!>([%done rid]))
(pure:m !>(~))

View File

@ -8,8 +8,12 @@
"directory": "pkg/npm/api"
},
"type": "module",
"main": "dist/cjs/index.js",
"main": "dist/cjs/index.cjs",
"module": "dist/esm/index.js",
"exports": {
"require": "./dist/cjs/index.cjs",
"import": "./dist/esm/index.js"
},
"jsdelivr": "dist/urbit-api.min.js",
"unpkg": "dist/urbit-api.min.js",
"types": "dist/index.d.ts",

View File

@ -60,13 +60,13 @@ export default [
],
output: [
{
dir: 'dist/esm',
file: 'dist/esm/index.js',
format: 'esm',
exports: 'named',
sourcemap: true
sourcemap: true,
},
{
dir: 'dist/cjs',
file: 'dist/cjs/index.cjs',
format: 'cjs',
exports: 'named',
sourcemap: true

View File

@ -1,6 +1,6 @@
{
"name": "@urbit/http-api",
"version": "2.1.0",
"version": "2.1.3",
"license": "MIT",
"description": "Library to interact with an Urbit ship over HTTP",
"repository": {
@ -9,8 +9,12 @@
"directory": "pkg/npm/http-api"
},
"type": "module",
"main": "dist/cjs/index.js",
"main": "dist/cjs/index.cjs",
"module": "dist/esm/index.js",
"exports": {
"require": "./dist/cjs/index.cjs",
"import": "./dist/esm/index.js"
},
"jsdelivr": "dist/urbit-http-api.min.js",
"unpkg": "dist/urbit-http-api.min.js",
"types": "dist/index.d.ts",

View File

@ -56,13 +56,13 @@ export default [
],
output: [
{
dir: 'dist/esm',
file: 'dist/esm/index.js',
format: 'esm',
exports: 'named',
sourcemap: true,
},
{
dir: 'dist/cjs',
file: 'dist/cjs/index.cjs',
format: 'cjs',
exports: 'named',
sourcemap: true,

View File

@ -235,9 +235,9 @@ export class Urbit {
console.log('Received SSE: ', event);
}
if (!event.id) return;
this.lastEventId = parseInt(event.id, 10);
if (this.lastEventId - this.lastAcknowledgedEventId > 20) {
this.ack(this.lastEventId);
const eventId = parseInt(event.id, 10);
if (eventId - this.lastAcknowledgedEventId > 20) {
this.ack(eventId);
}
if (event.data && JSON.parse(event.data)) {
@ -312,7 +312,7 @@ export class Urbit {
*
*/
reset() {
if(this.verbose) {
if (this.verbose) {
console.log('resetting');
}
this.delete();

View File

@ -134,6 +134,16 @@ int err_win_to_posix(DWORD winerr)
return error;
}
int link(const char *path1, const char *path2)
{
if ( CreateHardLinkA(path2, path1, NULL) ) {
return 0;
}
errno = err_win_to_posix(GetLastError());
return -1;
}
// from msys2 mingw-packages-dev patches
// -----------------------------------------------------------------------

View File

@ -3,6 +3,7 @@
#define mkdir(A, B) mkdir(A)
int link(const char *path1, const char *path2);
char *realpath(const char *path, char *resolved_path);
int fdatasync(int fd);
int utimes(const char *path, const struct timeval times[2]);

1
pkg/urbit/configure vendored
View File

@ -31,6 +31,7 @@ defmacro () {
}
defmacro URBIT_VERSION "\"$URBIT_VERSION\""
defmacro U3_VERE_PACE "\"${VERE_PACE:-once}\""
opt_debug=
opt_static=

View File

@ -18,8 +18,10 @@
#include <curl/curl.h>
#include <vere/db/lmdb.h>
#include <getopt.h>
#include <libgen.h>
#include "ca-bundle.h"
#include "whereami.h"
// serf module state
//
@ -36,6 +38,31 @@ static u3_cue_xeno* sil_u; // cue handle
STATIC_ASSERT(( 0 == CHAR_MIN && UCHAR_MAX == CHAR_MAX ),
"unsigned char required");
/* _main_self_path(): get binary self-path.
*/
static void
_main_self_path(void)
{
c3_c* pat_c;
c3_i len_i, pat_i;
if ( 0 < (len_i = wai_getExecutablePath(NULL, 0, &pat_i)) ) {
pat_c = c3_malloc( 1 + len_i );
wai_getExecutablePath(pat_c, len_i, &pat_i);
pat_c[len_i] = 0;
u3_Host.dem_c = pat_c;
}
else {
fprintf(stderr, "unable to get binary self path\r\n");
exit(1);
// XX continue?
//
// u3_Host.dem_c = strdup(bin_c);
}
}
/* _main_readw(): parse a word from a string.
*/
static u3_noun
@ -105,13 +132,13 @@ _main_repath(c3_c* pax_c)
return rel_c;
}
/* _main_getopt(): extract option map from command line.
/* _main_init(): initialize globals
*/
static u3_noun
_main_getopt(c3_i argc, c3_c** argv)
static void
_main_init(void)
{
c3_i ch_i, lid_i;
c3_w arg_w;
u3_Host.nex_o = c3n;
u3_Host.pep_o = c3n;
u3_Host.ops_u.abo = c3n;
u3_Host.ops_u.dem = c3n;
@ -137,6 +164,37 @@ _main_getopt(c3_i argc, c3_c** argv)
u3_Host.ops_u.puf_c = "jam";
u3_Host.ops_u.hap_w = 50000;
u3_Host.ops_u.kno_w = DefaultKernel;
}
/* _main_pier_run(): get pier from binary path (argv[0]), if appropriate
*/
static c3_c*
_main_pier_run(c3_c* bin_c)
{
c3_c* dir_c = 0;
c3_w bin_w = strlen(bin_c);
c3_w len_w = strlen(U3_BIN_ALIAS);
// no args, argv[0] == $pier/.run
//
if ( (len_w <= bin_w)
&& (0 == strcmp(bin_c + (bin_w - len_w), U3_BIN_ALIAS)) )
{
bin_c = strdup(bin_c); // dirname can modify
dir_c = _main_repath(dirname(bin_c));
c3_free(bin_c);
}
return dir_c;
}
/* _main_getopt(): extract option map from command line.
*/
static u3_noun
_main_getopt(c3_i argc, c3_c** argv)
{
c3_i ch_i, lid_i;
c3_w arg_w;
static struct option lop_u[] = {
{ "arvo", required_argument, NULL, 'A' },
@ -166,6 +224,7 @@ _main_getopt(c3_i argc, c3_c** argv)
{ "http-port", required_argument, NULL, c3__http },
{ "https-port", required_argument, NULL, c3__htls },
{ "no-conn", no_argument, NULL, c3__noco },
{ "no-dock", no_argument, NULL, c3__nodo },
{ "quiet", no_argument, NULL, 'q' },
{ "versions", no_argument, NULL, 'R' },
{ "replay-from", required_argument, NULL, 'r' },
@ -291,6 +350,10 @@ _main_getopt(c3_i argc, c3_c** argv)
u3_Host.ops_u.con = c3n;
break;
}
case c3__nodo: {
u3_Host.ops_u.doc = c3n;
break;
}
case 'R': {
u3_Host.ops_u.rep = c3y;
return c3y;
@ -346,8 +409,9 @@ _main_getopt(c3_i argc, c3_c** argv)
if ( u3_Host.ops_u.who_c != 0 ) {
u3_Host.dir_c = strdup(1 + u3_Host.ops_u.who_c);
}
else {
// XX not sure how this might be reachable
// no trailing positional arg, argv[0] != $pier/.run, invalid command
//
else if ( !(u3_Host.dir_c = _main_pier_run(argv[0])) ) {
return c3n;
}
}
@ -530,23 +594,37 @@ _setup_ssl_curl(void* arg)
/* _cw_usage(): print utility usage.
*/
static void
_cw_usage(c3_c* s)
_cw_usage(c3_c* bin_c)
{
fprintf(stderr,
"\nutilities:\n"
" %s cram <pier> jam state:\n"
" %s grab <pier> measure memory usage:\n"
" %s info <pier> print pier info:\n"
" %s meld <pier> deduplicate snapshot:\n"
" %s pack <pier> defragment snapshot:\n"
" %s queu <pier> <at-event> cue state:\n"
"\n run as a 'serf':\n"
c3_c *use_c[] = {
"utilities:\n",
" %s cram %.*s jam state:\n",
" %s dock %.*s copy binary:\n",
" %s grab %.*s measure memory usage:\n",
" %s info %.*s print pier info:\n",
" %s meld %.*s deduplicate snapshot:\n",
" %s pack %.*s defragment snapshot:\n",
" %s prep %.*s prepare for upgrade:\n",
" %s next %.*s request upgrade:\n",
" %s queu %.*s<at-event> cue state:\n",
" %s vere ARGS <output dir> download binary:\n",
"\n run as a 'serf':\n",
" %s serf <pier> <key> <flags> <cache-size> <at-event>"
#ifdef U3_OS_mingw
" <ctrlc-handle>"
#endif
"\n",
s, s, s, s, s, s, s);
0
};
c3_c* d = _main_pier_run(bin_c);
c3_i i;
for ( i=0; use_c[i]; i++ ) {
fprintf(stderr, use_c[i], bin_c, d ? 0 : 7, "<pier> ");
}
c3_free(d);
}
/* u3_ve_usage(): print usage and exit.
@ -912,7 +990,7 @@ _cw_intr_win(c3_c* han_c)
}
#endif
/* _cw_serf_commence(); initialize and run serf
/* _cw_serf_commence(): initialize and run serf
*/
static void
_cw_serf_commence(c3_i argc, c3_c* argv[])
@ -1029,7 +1107,7 @@ _cw_serf_commence(c3_i argc, c3_c* argv[])
u3m_stop();
}
/* _cw_disk_init(); open event log
/* _cw_disk_init(): open event log
*/
static u3_disk*
_cw_disk_init(c3_c* dir_c)
@ -1045,18 +1123,62 @@ _cw_disk_init(c3_c* dir_c)
return log_u;
}
/* _cw_info(); print pier info
/* _cw_dock(): copy binary into pier
*/
static void
_cw_dock(c3_i argc, c3_c* argv[])
{
switch ( argc ) {
case 2: {
if ( !(u3_Host.dir_c = _main_pier_run(argv[0])) ) {
fprintf(stderr, "unable to find pier\r\n");
exit (1);
}
} break;
case 3: {
u3_Host.dir_c = argv[2];
} break;
default: {
fprintf(stderr, "invalid command\r\n");
exit(1);
} break;
}
_main_self_path();
u3_king_dock(U3_VERE_PACE);
}
/* _cw_info(): print pier info
*/
static void
_cw_info(c3_i argc, c3_c* argv[])
{
c3_assert( 3 <= argc );
switch ( argc ) {
case 2: {
if ( !(u3_Host.dir_c = _main_pier_run(argv[0])) ) {
fprintf(stderr, "unable to find pier\r\n");
exit (1);
}
} break;
c3_c* dir_c = argv[2];
c3_d eve_d = u3m_boot(dir_c);
u3_disk* log_u = _cw_disk_init(dir_c);
case 3: {
u3_Host.dir_c = argv[2];
} break;
fprintf(stderr, "\r\nurbit: %s at event %" PRIu64 "\r\n", dir_c, eve_d);
default: {
fprintf(stderr, "invalid command\r\n");
exit(1);
} break;
}
c3_d eve_d = u3m_boot(u3_Host.dir_c);
u3_disk* log_u = _cw_disk_init(u3_Host.dir_c);
fprintf(stderr, "\r\nurbit: %s at event %" PRIu64 "\r\n",
u3_Host.dir_c, eve_d);
u3_disk_slog(log_u);
printf("\n");
@ -1066,35 +1188,65 @@ _cw_info(c3_i argc, c3_c* argv[])
u3m_stop();
}
/* _cw_grab(); gc pier.
/* _cw_grab(): gc pier.
*/
static void
_cw_grab(c3_i argc, c3_c* argv[])
{
c3_assert( 3 <= argc );
switch ( argc ) {
case 2: {
if ( !(u3_Host.dir_c = _main_pier_run(argv[0])) ) {
fprintf(stderr, "unable to find pier\r\n");
exit (1);
}
} break;
c3_c* dir_c = argv[2];
u3m_boot(dir_c);
case 3: {
u3_Host.dir_c = argv[2];
} break;
default: {
fprintf(stderr, "invalid command\r\n");
exit(1);
} break;
}
u3m_boot(u3_Host.dir_c);
u3C.wag_w |= u3o_hashless;
u3_serf_grab();
u3m_stop();
}
/* _cw_cram(); jam persistent state (rock), and exit.
/* _cw_cram(): jam persistent state (rock), and exit.
*/
static void
_cw_cram(c3_i argc, c3_c* argv[])
{
c3_assert( 3 <= argc );
switch ( argc ) {
case 2: {
if ( !(u3_Host.dir_c = _main_pier_run(argv[0])) ) {
fprintf(stderr, "unable to find pier\r\n");
exit (1);
}
} break;
c3_c* dir_c = argv[2];
c3_d eve_d = u3m_boot(dir_c);
u3_disk* log_u = _cw_disk_init(dir_c); // XX s/b try_aquire lock
case 3: {
u3_Host.dir_c = argv[2];
} break;
default: {
fprintf(stderr, "invalid command\r\n");
exit(1);
} break;
}
c3_d eve_d = u3m_boot(u3_Host.dir_c);
u3_disk* log_u = _cw_disk_init(u3_Host.dir_c); // XX s/b try_aquire lock
c3_o ret_o;
fprintf(stderr, "urbit: cram: preparing\r\n");
if ( c3n == (ret_o = u3u_cram(dir_c, eve_d)) ) {
if ( c3n == (ret_o = u3u_cram(u3_Host.dir_c, eve_d)) ) {
fprintf(stderr, "urbit: cram: unable to jam state\r\n");
}
else {
@ -1113,33 +1265,50 @@ _cw_cram(c3_i argc, c3_c* argv[])
u3m_stop();
}
/* _cw_queu(); cue rock, save, and exit.
/* _cw_queu(): cue rock, save, and exit.
*/
static void
_cw_queu(c3_i argc, c3_c* argv[])
{
c3_assert( 4 <= argc );
c3_c* dir_c = argv[2];
c3_c* eve_c = argv[3];
c3_c* eve_c;
c3_d eve_d;
switch ( argc ) {
case 3: {
if ( !(u3_Host.dir_c = _main_pier_run(argv[0])) ) {
fprintf(stderr, "unable to find pier\r\n");
exit (1);
}
eve_c = argv[2];
} break;
case 4: {
u3_Host.dir_c = argv[2];
eve_c = argv[3];
} break;
default: {
fprintf(stderr, "invalid command\r\n");
exit(1);
} break;
}
if ( 1 != sscanf(eve_c, "%" PRIu64 "", &eve_d) ) {
fprintf(stderr, "urbit: queu: invalid number '%s'\r\n", eve_c);
exit(1);
}
else {
u3_disk* log_u = _cw_disk_init(dir_c); // XX s/b try_aquire lock
u3_disk* log_u = _cw_disk_init(u3_Host.dir_c); // XX s/b try_aquire lock
fprintf(stderr, "urbit: queu: preparing\r\n");
u3m_boot(dir_c);
u3m_boot(u3_Host.dir_c);
// XX can spuriously fail do to corrupt memory-image checkpoint,
// need a u3m_half_boot equivalent
// workaround is to delete/move the checkpoint in case of corruption
//
if ( c3n == u3u_uncram(dir_c, eve_d) ) {
if ( c3n == u3u_uncram(u3_Host.dir_c, eve_d) ) {
fprintf(stderr, "urbit: queu: failed\r\n");
exit(1);
}
@ -1152,19 +1321,34 @@ _cw_queu(c3_i argc, c3_c* argv[])
}
}
/* _cw_uniq(); deduplicate persistent nouns
/* _cw_uniq(): deduplicate persistent nouns
*/
static void
_cw_meld(c3_i argc, c3_c* argv[])
{
c3_assert( 3 <= argc );
switch ( argc ) {
case 2: {
if ( !(u3_Host.dir_c = _main_pier_run(argv[0])) ) {
fprintf(stderr, "unable to find pier\r\n");
exit (1);
}
} break;
c3_c* dir_c = argv[2];
u3_disk* log_u = _cw_disk_init(dir_c); // XX s/b try_aquire lock
case 3: {
u3_Host.dir_c = argv[2];
} break;
default: {
fprintf(stderr, "invalid command\r\n");
exit(1);
} break;
}
u3_disk* log_u = _cw_disk_init(u3_Host.dir_c); // XX s/b try_aquire lock
c3_w pre_w;
u3C.wag_w |= u3o_hashless;
u3m_boot(dir_c);
u3m_boot(u3_Host.dir_c);
pre_w = u3a_open(u3R);
u3u_meld();
@ -1175,17 +1359,83 @@ _cw_meld(c3_i argc, c3_c* argv[])
u3m_stop();
}
/* _cw_pack(); compact memory, save, and exit.
/* _cw_next(): parse the `next`/`upgrade` subcommand; request upgrade.
**
**   sets u3_Host.dir_c (pier path, from --arch-style positional or by
**   inferring the pier from the invocation path), u3_Host.arc_c from
**   --arch if given, and flags u3_Host.pep_o/nex_o so main() continues
**   into the upgrade path instead of a normal boot.
*/
static void
_cw_next(c3_i argc, c3_c* argv[])
{
  c3_i ch_i, lid_i;

  static struct option lop_u[] = {
    { "arch", required_argument, NULL, 'a' },
    { NULL, 0, NULL, 0 }
  };

  //  if invoked from inside a pier, default to that pier
  //
  u3_Host.dir_c = _main_pier_run(argv[0]);

  while ( -1 != (ch_i=getopt_long(argc, argv, "a:", lop_u, &lid_i)) ) {
    switch ( ch_i ) {
      case 'a': {
        u3_Host.arc_c = strdup(optarg);
      } break;

      case '?': {
        exit(1);
      } break;
    }
  }

  //  argv[optind] is always "next"
  //
  if ( !u3_Host.dir_c ) {
    if ( optind + 1 < argc ) {
      u3_Host.dir_c = argv[optind + 1];
    }
    else {
      fprintf(stderr, "invalid command, pier required\r\n");
      exit(1);
    }

    optind++;
  }

  //  no trailing arguments allowed
  //
  if ( optind + 1 != argc ) {
    fprintf(stderr, "invalid command\r\n");
    exit(1);
  }

  u3_Host.pep_o = c3y;
  u3_Host.nex_o = c3y;
}
/* _cw_pack(): compact memory, save, and exit.
*/
static void
_cw_pack(c3_i argc, c3_c* argv[])
{
c3_assert( 3 <= argc );
switch ( argc ) {
case 2: {
if ( !(u3_Host.dir_c = _main_pier_run(argv[0])) ) {
fprintf(stderr, "unable to find pier\r\n");
exit (1);
}
} break;
c3_c* dir_c = argv[2];
u3_disk* log_u = _cw_disk_init(dir_c); // XX s/b try_aquire lock
case 3: {
u3_Host.dir_c = argv[2];
} break;
u3m_boot(dir_c);
default: {
fprintf(stderr, "invalid command\r\n");
exit(1);
} break;
}
u3_disk* log_u = _cw_disk_init(u3_Host.dir_c); // XX s/b try_aquire lock
u3m_boot(u3_Host.dir_c);
u3a_print_memory(stderr, "urbit: pack: gained", u3m_pack());
u3e_save();
@ -1193,6 +1443,145 @@ _cw_pack(c3_i argc, c3_c* argv[])
u3m_stop();
}
/* _cw_prep(): flag the runtime for upgrade preparation.
**
**   usage: prep [<pier>]; with no pier argument, infers the pier from
**   the invocation path (if run from inside one). sets u3_Host.pep_o
**   so main() continues into the prep path.
*/
static void
_cw_prep(c3_i argc, c3_c* argv[])
{
  if ( 2 == argc ) {
    //  no pier argument: infer from how we were invoked
    //
    if ( !(u3_Host.dir_c = _main_pier_run(argv[0])) ) {
      fprintf(stderr, "unable to find pier\r\n");
      exit(1);
    }
  }
  else if ( 3 == argc ) {
    u3_Host.dir_c = argv[2];
  }
  else {
    fprintf(stderr, "invalid command\r\n");
    exit(1);
  }

  u3_Host.pep_o = c3y;
}
/* _cw_vere(): download a vere binary and exit.
**
**   usage: vere [--arch ARCH] [--pace PACE] [--version VER] <output dir>
**
**   with no --version, queries the release host for the next version on
**   the given pace (release channel, default "live") and downloads it.
**   exits nonzero on any failure; exits 0 early if already up to date.
*/
static void
_cw_vere(c3_i argc, c3_c* argv[])
{
  c3_c* pac_c = "live";
  c3_c* arc_c = 0;
  c3_c* ver_c = 0;
  c3_c* dir_c;

  c3_i ch_i, lid_i;

  static struct option lop_u[] = {
    { "arch", required_argument, NULL, 'a' },
    { "pace", required_argument, NULL, 'p' },
    { "version", required_argument, NULL, 'v' },
    { NULL, 0, NULL, 0 }
  };

  while ( -1 != (ch_i=getopt_long(argc, argv, "a:p:v:", lop_u, &lid_i)) ) {
    switch ( ch_i ) {
      case 'a': {
        arc_c = strdup(optarg);
      } break;

      case 'p': {
        pac_c = strdup(optarg);
      } break;

      case 'v': {
        ver_c = strdup(optarg);
      } break;

      case '?': {
        exit(1);
      } break;
    }
  }

  //  argv[optind] is always "vere"/"fetch-vere"
  //
  if ( optind + 1 < argc ) {
    dir_c = argv[optind + 1];
    optind++;
  }
  else {
    fprintf(stderr, "invalid command, output directory required\r\n");
    exit(1);
  }

  if ( optind + 1 != argc ) {
    fprintf(stderr, "invalid command\r\n");
    exit(1);
  }

  //  default the architecture to the compile-time platform, if known
  //
  if ( !arc_c ) {
#ifdef U3_OS_ARCH
    arc_c = U3_OS_ARCH;
#else
    fprintf(stderr, "unknown architecture, --arch required\r\n");
    exit(1);
#endif
  }

  //  Initialize OpenSSL for client and server
  //
  {
    SSL_library_init();
    SSL_load_error_strings();
  }

  //  initialize curl
  //
  if ( 0 != curl_global_init(CURL_GLOBAL_DEFAULT) ) {
    u3l_log("boot: curl initialization failed\r\n");
    exit(1);
  }

  _setup_cert_store();
  u3K.ssl_curl_f = _setup_ssl_curl;
  u3K.ssl_x509_f = _setup_ssl_x509;

  //  resolve the version to fetch, unless given explicitly
  //
  if ( !ver_c ) {
    switch ( u3_king_next(pac_c, &ver_c) ) {
      case -2: {
        fprintf(stderr, "vere: unable to check for next version\n");
        exit(1);
      } break;

      case -1: {
        fprintf(stderr, "you're already running it!\n");
        exit(0);
      } break;

      case 0: {
        fprintf(stderr, "vere: next (%%%s): %s\n", pac_c, ver_c);
      } break;

      default: c3_assert(0);
    }
  }

  if ( u3_king_vere(pac_c, ver_c, arc_c, dir_c, 0) ) {
    u3l_log("vere: download failed\r\n");
    exit(1);
  }

  u3l_log("vere: download succeeded\r\n");
}
/* _cw_utils(): "worker" utilities and "serf" entrypoint
*/
static c3_i
@ -1202,11 +1591,15 @@ _cw_utils(c3_i argc, c3_c* argv[])
//
// $@ ~ :: usage
// $% [%cram dir=@t] :: jam state
// [%grab dir=@t] :: gc
// [%dock dir=@t] :: copy binary
// [?(%grab %mass) dir=@t] :: gc
// [%info dir=@t] :: print
// [%meld dir=@t] :: deduplicate
// [?(%next %upgrade) dir=@t] :: upgrade
// [%pack dir=@t] :: defragment
// [%prep dir=@t] :: prep upgrade
// [%queu dir=@t eve=@ud] :: cue state
// [?(%vere %fetch-vere) dir=@t] :: download vere
// :: :: ipc:
// [%serf dir=@t key=@t wag=@t hap=@ud eve=@ud] :: compute
// ==
@ -1214,22 +1607,37 @@ _cw_utils(c3_i argc, c3_c* argv[])
// NB: don't print to anything other than stderr;
// other streams may be used for ipc.
//
if ( (2 < argc) && 4 == strlen(argv[1]) ) {
c3_m mot_m;
{
c3_c* s = argv[1]; mot_m = c3_s4(s[0], s[1], s[2], s[3]);
}
c3_m mot_m = 0;
switch ( mot_m ) {
case c3__cram: _cw_cram(argc, argv); return 1;
case c3__grab: _cw_grab(argc, argv); return 1;
case c3__info: _cw_info(argc, argv); return 1;
case c3__meld: _cw_meld(argc, argv); return 1;
case c3__pack: _cw_pack(argc, argv); return 1;
case c3__queu: _cw_queu(argc, argv); return 1;
case c3__serf: _cw_serf_commence(argc, argv); return 1;
if ( 2 <= argc ) {
if ( 4 == strlen(argv[1]) ) {
c3_c* s = argv[1];
mot_m = c3_s4(s[0], s[1], s[2], s[3]);
}
else if ( 0 == strcmp(argv[1], "upgrade") ) {
mot_m = c3__next;
}
else if ( 0 == strcmp(argv[1], "fetch-vere") ) {
mot_m = c3__vere;
}
}
switch ( mot_m ) {
case c3__cram: _cw_cram(argc, argv); return 1;
case c3__dock: _cw_dock(argc, argv); return 1;
case c3__mass:
case c3__grab: _cw_grab(argc, argv); return 1;
case c3__info: _cw_info(argc, argv); return 1;
case c3__meld: _cw_meld(argc, argv); return 1;
case c3__next: _cw_next(argc, argv); return 2; // continue on
case c3__pack: _cw_pack(argc, argv); return 1;
case c3__prep: _cw_prep(argc, argv); return 2; // continue on
case c3__queu: _cw_queu(argc, argv); return 1;
case c3__vere: _cw_vere(argc, argv); return 1;
case c3__serf: _cw_serf_commence(argc, argv); return 1;
}
return 0;
@ -1239,18 +1647,48 @@ c3_i
main(c3_i argc,
c3_c** argv)
{
// Parse options.
//
if ( _cw_utils(argc, argv) ) {
return 0;
}
else if ( c3n == _main_getopt(argc, argv) ) {
u3_ve_usage(argc, argv);
return 1;
if ( argc <= 0 ) {
fprintf(stderr, "nice try, fbi\r\n");
exit(1);
}
_main_init();
c3_c* bin_c = strdup(argv[0]);
// parse for subcommands
//
switch ( _cw_utils(argc, argv) ) {
default: c3_assert(0);
// no matching subcommand, parse arguments
//
case 0: {
if ( c3n == _main_getopt(argc, argv) ) {
u3_ve_usage(argc, argv);
return 1;
}
} break;
// ran subcommand
case 1: {
return 0;
}
// found subcommand, continue
//
case 2: break;
}
_main_self_path();
// XX add argument
//
if ( !u3_Host.wrk_c ) {
u3_Host.wrk_c = strdup(argv[0]);
u3_Host.wrk_c = bin_c;
}
else {
c3_free(bin_c);
}
if ( c3y == u3_Host.ops_u.dem ) {

673
pkg/urbit/daemon/whereami.c Normal file
View File

@ -0,0 +1,673 @@
// (‑●‑●)> released under the WTFPL v2 license, by Gregory Pakosz (@gpakosz)
// https://github.com/gpakosz/whereami
// in case you want to #include "whereami.c" in a larger compilation unit
#if !defined(WHEREAMI_H)
#include "whereami.h"
#endif
#ifdef __cplusplus
extern "C" {
#endif
#if !defined(WAI_MALLOC) || !defined(WAI_FREE) || !defined(WAI_REALLOC)
#include <stdlib.h>
#endif
#if !defined(WAI_MALLOC)
#define WAI_MALLOC(size) malloc(size)
#endif
#if !defined(WAI_FREE)
#define WAI_FREE(p) free(p)
#endif
#if !defined(WAI_REALLOC)
#define WAI_REALLOC(p, size) realloc(p, size)
#endif
#ifndef WAI_NOINLINE
#if defined(_MSC_VER)
#define WAI_NOINLINE __declspec(noinline)
#elif defined(__GNUC__)
#define WAI_NOINLINE __attribute__((noinline))
#else
#error unsupported compiler
#endif
#endif
#if defined(_MSC_VER)
#define WAI_RETURN_ADDRESS() _ReturnAddress()
#elif defined(__GNUC__)
#define WAI_RETURN_ADDRESS() __builtin_extract_return_addr(__builtin_return_address(0))
#else
#error unsupported compiler
#endif
#if defined(_WIN32)
#define WIN32_LEAN_AND_MEAN
#if defined(_MSC_VER)
#pragma warning(push, 3)
#endif
#include <windows.h>
#include <intrin.h>
#if defined(_MSC_VER)
#pragma warning(pop)
#endif
/* getModulePath_(): resolve the on-disk path of [module] into [out].
**
**   returns the UTF-8 path length (no NUL terminator written beyond the
**   path) or -1 on failure; if the path fits in [capacity] and
**   [dirname_length] is non-NULL, also reports the directory-prefix
**   length (index of the last '\\').
*/
static int WAI_PREFIX(getModulePath_)(HMODULE module, char* out, int capacity, int* dirname_length)
{
  wchar_t buffer1[MAX_PATH];
  wchar_t buffer2[MAX_PATH];
  wchar_t* path = NULL;
  int length = -1;

  for (;;)  //  single-pass loop: break == early exit
  {
    DWORD size;
    int length_, length__;

    size = GetModuleFileNameW(module, buffer1, sizeof(buffer1) / sizeof(buffer1[0]));

    if (size == 0)
      break;
    else if (size == (DWORD)(sizeof(buffer1) / sizeof(buffer1[0])))
    {
      //  stack buffer truncated: grow a heap buffer by doubling until
      //  GetModuleFileNameW no longer fills it completely
      //
      DWORD size_ = size;
      do
      {
        wchar_t* path_;

        path_ = (wchar_t*)WAI_REALLOC(path, sizeof(wchar_t) * size_ * 2);
        if (!path_)
          break;
        size_ *= 2;
        path = path_;
        size = GetModuleFileNameW(module, path, size_);
      }
      while (size == size_);

      if (size == size_)
        break;
    }
    else
      path = buffer1;

    //  canonicalize, then convert UTF-16 -> UTF-8; if the first
    //  conversion fails (e.g. capacity too small), query the required
    //  length with a NULL output buffer
    //
    if (!_wfullpath(buffer2, path, MAX_PATH))
      break;
    length_ = (int)wcslen(buffer2);
    length__ = WideCharToMultiByte(CP_UTF8, 0, buffer2, length_ , out, capacity, NULL, NULL);

    if (length__ == 0)
      length__ = WideCharToMultiByte(CP_UTF8, 0, buffer2, length_, NULL, 0, NULL, NULL);
    if (length__ == 0)
      break;

    if (length__ <= capacity && dirname_length)
    {
      //  scan backwards for the last path separator
      //
      int i;

      for (i = length__ - 1; i >= 0; --i)
      {
        if (out[i] == '\\')
        {
          *dirname_length = i;
          break;
        }
      }
    }

    length = length__;
    break;
  }

  //  free only if the doubling path allocated; buffer1 is stack memory
  //
  if (path != buffer1)
    WAI_FREE(path);

  return length;
}
/* getExecutablePath() [win32]: path of the current executable.
** a NULL module selects the process's executable image.
*/
WAI_NOINLINE WAI_FUNCSPEC
int WAI_PREFIX(getExecutablePath)(char* out, int capacity, int* dirname_length)
{
  return WAI_PREFIX(getModulePath_)(NULL, out, capacity, dirname_length);
}
/* getModulePath() [win32]: path of the module (DLL or EXE) that
** contains the caller's return address.
*/
WAI_NOINLINE WAI_FUNCSPEC
int WAI_PREFIX(getModulePath)(char* out, int capacity, int* dirname_length)
{
  HMODULE module;
  int length = -1;

#if defined(_MSC_VER)
#pragma warning(push)
#pragma warning(disable: 4054)
#endif
  //  look up the module mapping the caller's return address;
  //  UNCHANGED_REFCOUNT means no matching FreeLibrary is needed
  //
  if (GetModuleHandleEx(GET_MODULE_HANDLE_EX_FLAG_FROM_ADDRESS | GET_MODULE_HANDLE_EX_FLAG_UNCHANGED_REFCOUNT, (LPCTSTR)WAI_RETURN_ADDRESS(), &module))
#if defined(_MSC_VER)
#pragma warning(pop)
#endif
  {
    length = WAI_PREFIX(getModulePath_)(module, out, capacity, dirname_length);
  }

  return length;
}
#elif defined(__linux__) || defined(__CYGWIN__) || defined(__sun)
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#if defined(__linux__)
#include <linux/limits.h>
#else
#include <limits.h>
#endif
#ifndef __STDC_FORMAT_MACROS
#define __STDC_FORMAT_MACROS
#endif
#include <inttypes.h>
#if !defined(WAI_PROC_SELF_EXE)
#if defined(__sun)
#define WAI_PROC_SELF_EXE "/proc/self/path/a.out"
#else
#define WAI_PROC_SELF_EXE "/proc/self/exe"
#endif
#endif
/* getExecutablePath() [linux/cygwin/sun]: resolve WAI_PROC_SELF_EXE
** (/proc/self/exe, or /proc/self/path/a.out on Solaris) to the
** executable's real path. returns the length (no NUL written) or -1;
** the path is only copied into [out] if it fits in [capacity].
*/
WAI_FUNCSPEC
int WAI_PREFIX(getExecutablePath)(char* out, int capacity, int* dirname_length)
{
  char buffer[PATH_MAX];
  char* resolved = NULL;
  int length = -1;

  for (;;)  //  single-pass loop: break == early exit
  {
    resolved = realpath(WAI_PROC_SELF_EXE, buffer);
    if (!resolved)
      break;

    length = (int)strlen(resolved);
    if (length <= capacity)
    {
      memcpy(out, resolved, length);

      if (dirname_length)
      {
        //  scan backwards for the last path separator
        //
        int i;

        for (i = length - 1; i >= 0; --i)
        {
          if (out[i] == '/')
          {
            *dirname_length = i;
            break;
          }
        }
      }
    }

    break;
  }

  return length;
}
#if !defined(WAI_PROC_SELF_MAPS_RETRY)
#define WAI_PROC_SELF_MAPS_RETRY 5
#endif
#if !defined(WAI_PROC_SELF_MAPS)
#if defined(__sun)
#define WAI_PROC_SELF_MAPS "/proc/self/map"
#else
#define WAI_PROC_SELF_MAPS "/proc/self/maps"
#endif
#endif
#if defined(__ANDROID__) || defined(ANDROID)
#include <fcntl.h>
#include <sys/mman.h>
#include <unistd.h>
#endif
/* getModulePath() [linux/cygwin/sun]: scan /proc/self/maps for the
** mapping containing the caller's return address and resolve its path.
** retries the whole scan up to WAI_PROC_SELF_MAPS_RETRY times; on
** Android, paths ending in ".apk" are extended with "!/<entry>" by
** scanning the zip backwards from the mapping offset for a local file
** header.
*/
WAI_NOINLINE WAI_FUNCSPEC
int WAI_PREFIX(getModulePath)(char* out, int capacity, int* dirname_length)
{
  int length = -1;
  FILE* maps = NULL;

  for (int r = 0; r < WAI_PROC_SELF_MAPS_RETRY; ++r)
  {
    maps = fopen(WAI_PROC_SELF_MAPS, "r");
    if (!maps)
      break;

    for (;;)
    {
      char buffer[PATH_MAX < 1024 ? 1024 : PATH_MAX];
      uint64_t low, high;
      char perms[5];
      uint64_t offset;
      uint32_t major, minor;
      char path[PATH_MAX];
      uint32_t inode;

      if (!fgets(buffer, sizeof(buffer), maps))
        break;

      //  parse one maps line: "low-high perms offset dev:dev inode path"
      //
      if (sscanf(buffer, "%" PRIx64 "-%" PRIx64 " %s %" PRIx64 " %x:%x %u %s\n", &low, &high, perms, &offset, &major, &minor, &inode, path) == 8)
      {
        uint64_t addr = (uintptr_t)WAI_RETURN_ADDRESS();
        if (low <= addr && addr <= high)
        {
          char* resolved;

          resolved = realpath(path, buffer);
          if (!resolved)
            break;

          length = (int)strlen(resolved);
#if defined(__ANDROID__) || defined(ANDROID)
          //  mapping backed by an .apk: append "!/<zip entry name>"
          //  NOTE(review): open() and mmap() results are not checked
          //  here before use — upstream vendored code; confirm against
          //  gpakosz/whereami before changing
          //
          if (length > 4
              &&buffer[length - 1] == 'k'
              &&buffer[length - 2] == 'p'
              &&buffer[length - 3] == 'a'
              &&buffer[length - 4] == '.')
          {
            int fd = open(path, O_RDONLY);
            char* begin;
            char* p;

            begin = (char*)mmap(0, offset, PROT_READ, MAP_SHARED, fd, 0);
            p = begin + offset;

            while (p >= begin) // scan backwards
            {
              if (*((uint32_t*)p) == 0x04034b50UL) // local file header found
              {
                uint16_t length_ = *((uint16_t*)(p + 26));

                if (length + 2 + length_ < (int)sizeof(buffer))
                {
                  memcpy(&buffer[length], "!/", 2);
                  memcpy(&buffer[length + 2], p + 30, length_);
                  length += 2 + length_;
                }

                break;
              }
              p -= 4;
            }

            munmap(begin, offset);
            close(fd);
          }
#endif
          if (length <= capacity)
          {
            memcpy(out, resolved, length);

            if (dirname_length)
            {
              //  scan backwards for the last path separator
              //
              int i;

              for (i = length - 1; i >= 0; --i)
              {
                if (out[i] == '/')
                {
                  *dirname_length = i;
                  break;
                }
              }
            }
          }

          break;
        }
      }
    }

    fclose(maps);
    maps = NULL;

    if (length != -1)
      break;
  }

  if (maps)
    fclose(maps);

  return length;
}
#elif defined(__APPLE__)
#define _DARWIN_BETTER_REALPATH
#include <mach-o/dyld.h>
#include <limits.h>
#include <stdlib.h>
#include <string.h>
#include <dlfcn.h>
/* getExecutablePath() [darwin]: query _NSGetExecutablePath and resolve
** the result via realpath. returns the length (no NUL written) or -1.
*/
WAI_FUNCSPEC
int WAI_PREFIX(getExecutablePath)(char* out, int capacity, int* dirname_length)
{
  char buffer1[PATH_MAX];
  char buffer2[PATH_MAX];
  char* path = buffer1;
  char* resolved = NULL;
  int length = -1;

  for (;;)  //  single-pass loop: break == early exit
  {
    uint32_t size = (uint32_t)sizeof(buffer1);
    if (_NSGetExecutablePath(path, &size) == -1)
    {
      //  buffer too small; retry with a heap buffer of the reported size.
      //  NOTE(review): _NSGetExecutablePath returns 0 on success, so a
      //  *successful* retry takes this break and the function returns -1
      //  — looks inverted; confirm against upstream gpakosz/whereami
      //  before changing vendored code
      //
      path = (char*)WAI_MALLOC(size);
      if (!_NSGetExecutablePath(path, &size))
        break;
    }

    resolved = realpath(path, buffer2);
    if (!resolved)
      break;

    length = (int)strlen(resolved);
    if (length <= capacity)
    {
      memcpy(out, resolved, length);

      if (dirname_length)
      {
        //  scan backwards for the last path separator
        //
        int i;

        for (i = length - 1; i >= 0; --i)
        {
          if (out[i] == '/')
          {
            *dirname_length = i;
            break;
          }
        }
      }
    }

    break;
  }

  //  free only if the retry path allocated; buffer1 is stack memory
  //
  if (path != buffer1)
    WAI_FREE(path);

  return length;
}
/* getModulePath() [darwin]: use dladdr() on the caller's return address
** to find the containing module, then resolve its real path.
*/
WAI_NOINLINE WAI_FUNCSPEC
int WAI_PREFIX(getModulePath)(char* out, int capacity, int* dirname_length)
{
  char buffer[PATH_MAX];
  char* resolved = NULL;
  int length = -1;

  for(;;)  //  single-pass loop: break == early exit
  {
    Dl_info info;

    if (dladdr(WAI_RETURN_ADDRESS(), &info))
    {
      resolved = realpath(info.dli_fname, buffer);
      if (!resolved)
        break;

      length = (int)strlen(resolved);
      if (length <= capacity)
      {
        memcpy(out, resolved, length);

        if (dirname_length)
        {
          //  scan backwards for the last path separator
          //
          int i;

          for (i = length - 1; i >= 0; --i)
          {
            if (out[i] == '/')
            {
              *dirname_length = i;
              break;
            }
          }
        }
      }
    }

    break;
  }

  return length;
}
#elif defined(__QNXNTO__)
#include <limits.h>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <dlfcn.h>
#if !defined(WAI_PROC_SELF_EXE)
#define WAI_PROC_SELF_EXE "/proc/self/exefile"
#endif
/* getExecutablePath() [QNX]: read the executable path from
** /proc/self/exefile, resolve it via realpath, and copy it into [out]
** (no NUL written). returns the path length or -1 on failure.
*/
WAI_FUNCSPEC
int WAI_PREFIX(getExecutablePath)(char* out, int capacity, int* dirname_length)
{
  char buffer1[PATH_MAX];
  char buffer2[PATH_MAX];
  char* resolved = NULL;
  FILE* self_exe = NULL;
  int length = -1;

  for (;;)  //  single-pass loop: break == early exit
  {
    self_exe = fopen(WAI_PROC_SELF_EXE, "r");
    if (!self_exe)
      break;

    if (!fgets(buffer1, sizeof(buffer1), self_exe))
      break;

    resolved = realpath(buffer1, buffer2);
    if (!resolved)
      break;

    length = (int)strlen(resolved);
    if (length <= capacity)
    {
      memcpy(out, resolved, length);

      if (dirname_length)
      {
        //  scan backwards for the last path separator
        //
        int i;

        for (i = length - 1; i >= 0; --i)
        {
          if (out[i] == '/')
          {
            *dirname_length = i;
            break;
          }
        }
      }
    }

    break;
  }

  //  fclose(NULL) is undefined behavior; the original called fclose
  //  unconditionally, which is reached with self_exe == NULL whenever
  //  fopen fails — only close if the stream was actually opened
  //
  if (self_exe)
    fclose(self_exe);

  return length;
}
/* getModulePath() [QNX]: use dladdr() on the caller's return address
** to find the containing module, then resolve its real path.
*/
WAI_FUNCSPEC
int WAI_PREFIX(getModulePath)(char* out, int capacity, int* dirname_length)
{
  char buffer[PATH_MAX];
  char* resolved = NULL;
  int length = -1;

  for(;;)  //  single-pass loop: break == early exit
  {
    Dl_info info;

    if (dladdr(WAI_RETURN_ADDRESS(), &info))
    {
      resolved = realpath(info.dli_fname, buffer);
      if (!resolved)
        break;

      length = (int)strlen(resolved);
      if (length <= capacity)
      {
        memcpy(out, resolved, length);

        if (dirname_length)
        {
          //  scan backwards for the last path separator
          //
          int i;

          for (i = length - 1; i >= 0; --i)
          {
            if (out[i] == '/')
            {
              *dirname_length = i;
              break;
            }
          }
        }
      }
    }

    break;
  }

  return length;
}
#elif defined(__DragonFly__) || defined(__FreeBSD__) || \
defined(__FreeBSD_kernel__) || defined(__NetBSD__)
#include <limits.h>
#include <stdlib.h>
#include <string.h>
#include <sys/types.h>
#include <sys/sysctl.h>
#include <dlfcn.h>
/* getExecutablePath() [BSD]: query the executable path via
** sysctl(KERN_PROC_PATHNAME) for the current process (-1), resolve it,
** and copy into [out] (no NUL written). returns the length or -1.
*/
WAI_FUNCSPEC
int WAI_PREFIX(getExecutablePath)(char* out, int capacity, int* dirname_length)
{
  char buffer1[PATH_MAX];
  char buffer2[PATH_MAX];
  char* path = buffer1;
  char* resolved = NULL;
  int length = -1;

  for (;;)  //  single-pass loop: break == early exit
  {
    //  pid -1 selects the calling process
    //
    int mib[4] = { CTL_KERN, KERN_PROC, KERN_PROC_PATHNAME, -1 };
    size_t size = sizeof(buffer1);

    if (sysctl(mib, (u_int)(sizeof(mib) / sizeof(mib[0])), path, &size, NULL, 0) != 0)
        break;

    resolved = realpath(path, buffer2);
    if (!resolved)
      break;

    length = (int)strlen(resolved);
    if (length <= capacity)
    {
      memcpy(out, resolved, length);

      if (dirname_length)
      {
        //  scan backwards for the last path separator
        //
        int i;

        for (i = length - 1; i >= 0; --i)
        {
          if (out[i] == '/')
          {
            *dirname_length = i;
            break;
          }
        }
      }
    }

    break;
  }

  //  path is never reallocated here, so this free is a no-op guard
  //
  if (path != buffer1)
    WAI_FREE(path);

  return length;
}
/* getModulePath() [BSD]: use dladdr() on the caller's return address
** to find the containing module, then resolve its real path.
*/
WAI_NOINLINE WAI_FUNCSPEC
int WAI_PREFIX(getModulePath)(char* out, int capacity, int* dirname_length)
{
  char buffer[PATH_MAX];
  char* resolved = NULL;
  int length = -1;

  for(;;)  //  single-pass loop: break == early exit
  {
    Dl_info info;

    if (dladdr(WAI_RETURN_ADDRESS(), &info))
    {
      resolved = realpath(info.dli_fname, buffer);
      if (!resolved)
        break;

      length = (int)strlen(resolved);
      if (length <= capacity)
      {
        memcpy(out, resolved, length);

        if (dirname_length)
        {
          //  scan backwards for the last path separator
          //
          int i;

          for (i = length - 1; i >= 0; --i)
          {
            if (out[i] == '/')
            {
              *dirname_length = i;
              break;
            }
          }
        }
      }
    }

    break;
  }

  return length;
}
#else
#error unsupported platform
#endif
#ifdef __cplusplus
}
#endif

View File

@ -0,0 +1,65 @@
// (‑●‑●)> released under the WTFPL v2 license, by Gregory Pakosz (@gpakosz)
// https://github.com/gpakosz/whereami
#ifndef WHEREAMI_H
#define WHEREAMI_H
#ifdef __cplusplus
extern "C" {
#endif
#ifndef WAI_FUNCSPEC
#define WAI_FUNCSPEC
#endif
#ifndef WAI_PREFIX
#define WAI_PREFIX(function) wai_##function
#endif
/**
* Returns the path to the current executable.
*
* Usage:
* - first call `int length = wai_getExecutablePath(NULL, 0, NULL);` to
* retrieve the length of the path
* - allocate the destination buffer with `path = (char*)malloc(length + 1);`
* - call `wai_getExecutablePath(path, length, NULL)` again to retrieve the
* path
* - add a terminal NUL character with `path[length] = '\0';`
*
* @param out destination buffer, optional
* @param capacity destination buffer capacity
* @param dirname_length optional recipient for the length of the dirname part
* of the path.
*
* @return the length of the executable path on success (without a terminal NUL
* character), otherwise `-1`
*/
WAI_FUNCSPEC
int WAI_PREFIX(getExecutablePath)(char* out, int capacity, int* dirname_length);
/**
* Returns the path to the current module
*
* Usage:
* - first call `int length = wai_getModulePath(NULL, 0, NULL);` to retrieve
* the length of the path
* - allocate the destination buffer with `path = (char*)malloc(length + 1);`
* - call `wai_getModulePath(path, length, NULL)` again to retrieve the path
* - add a terminal NUL character with `path[length] = '\0';`
*
* @param out destination buffer, optional
* @param capacity destination buffer capacity
* @param dirname_length optional recipient for the length of the dirname part
* of the path.
*
* @return the length of the module path on success (without a terminal NUL
* character), otherwise `-1`
*/
WAI_FUNCSPEC
int WAI_PREFIX(getModulePath)(char* out, int capacity, int* dirname_length);
#ifdef __cplusplus
}
#endif
#endif // #ifndef WHEREAMI_H

View File

@ -135,22 +135,16 @@
// defined in vere/io/unix.c.
c3_t u3_unix_cane(const c3_c* pax_c);
# define c3_open(a, ...) ({ \
c3_assert(u3_unix_cane(a)); \
open(a, __VA_ARGS__);})
# define c3_opendir(a) ({ \
c3_assert(u3_unix_cane(a)); \
opendir(a);})
# define c3_mkdir(a, b) ({ \
c3_assert(u3_unix_cane(a)); \
mkdir(a, b);})
# define c3_rmdir(a) ({ \
c3_assert(u3_unix_cane(a)); \
rmdir(a);})
# define c3_unlink(a) ({ \
c3_assert(u3_unix_cane(a)); \
unlink(a);})
# define c3_fopen(a, b) ({ \
c3_assert(u3_unix_cane(a)); \
fopen(a, b);})
#endif /* ifndef C3_DEFS_H */

View File

@ -328,6 +328,7 @@
# define c3__dmal c3_s4('d','m','a','l')
# define c3__do c3_s2('d','o')
# define c3__doc c3_s3('d','o','c')
# define c3__dock c3_s4('d','o','c','k')
# define c3__docs c3_s4('d','o','c','s')
# define c3__dogo c3_s4('d','o','g','o')
# define c3__dojo c3_s4('d','o','j','o')
@ -798,6 +799,7 @@
# define c3__noah c3_s4('n','o','a','h')
# define c3__nock c3_s4('n','o','c','k')
# define c3__noco c3_s4('n','o','c','o')
# define c3__nodo c3_s4('n','o','d','o')
# define c3__none c3_s4('n','o','n','e')
# define c3__noop c3_s4('n','o','o','p')
# define c3__nop c3_s3('n','o','p')
@ -914,6 +916,7 @@
# define c3__post c3_s4('p','o','s','t')
# define c3__pray c3_s4('p','r','a','y')
# define c3__prec c3_s4('p','r','e','c')
# define c3__prep c3_s4('p','r','e','p')
# define c3__pret c3_s4('p','r','e','t')
# define c3__prex c3_s4('p','r','e','x')
# define c3__pril c3_s4('p','r','i','l')
@ -1234,6 +1237,7 @@
# define c3__velt c3_s4('v','e','l','t')
# define c3__vent c3_s4('v','e','n','t')
# define c3__verb c3_s4('v','e','r','b')
# define c3__vere c3_s4('v','e','r','e')
# define c3__vern c3_s4('v','e','r','n')
# define c3__very c3_s4('v','e','r','y')
# define c3__view c3_s4('v','i','e','w')

View File

@ -21,6 +21,7 @@
# ifndef _XOPEN_SOURCE
# define _XOPEN_SOURCE 700
# endif
# include <ctype.h>
# include <inttypes.h>
# include <stdlib.h>
# include <string.h>
@ -35,8 +36,10 @@
# include <sys/time.h>
# include <sys/resource.h>
# include <sys/mman.h>
# include <sys/sendfile.h>
# elif defined(U3_OS_osx)
# include <ctype.h>
# include <inttypes.h>
# include <stdlib.h>
# include <string.h>
@ -53,8 +56,11 @@
# include <sys/resource.h>
# include <sys/syslimits.h>
# include <sys/mman.h>
# include <sys/clonefile.h>
# include <copyfile.h>
# elif defined(U3_OS_bsd)
# include <ctype.h>
# include <inttypes.h>
# include <stdlib.h>
# include <string.h>
@ -74,6 +80,7 @@
# define signal mingw_has_no_usable_signal
# define raise mingw_has_no_usable_raise
# define _POSIX
# include <ctype.h>
# include <inttypes.h>
# include <stdlib.h>
# include <string.h>
@ -101,6 +108,42 @@
# define ASAN_ENABLED
# endif
/** Platform string.
**/
# if defined(U3_OS_linux)
# ifdef __LP64__
# ifdef U3_CPU_aarch64
// XX not yet
//# define U3_OS_ARCH "aarch64-linux"
# else
# define U3_OS_ARCH "x86_64-linux"
# endif
# endif
# elif defined(U3_OS_mingw)
# define U3_OS_ARCH "x86_64-windows"
# elif defined(U3_OS_osx)
# ifdef __LP64__
# ifdef U3_CPU_aarch64
// XX not yet
//# define U3_OS_ARCH "aarch64-darwin"
# else
# define U3_OS_ARCH "x86_64-darwin"
# endif
# endif
# endif
/** Binary alias.
**/
# ifdef U3_OS_mingw
# define U3_BIN_SUFFIX ".exe"
# else
# define U3_BIN_SUFFIX ""
# endif
# define U3_BIN_ALIAS ".run" U3_BIN_SUFFIX
/** Address space layout.
***
*** NB: 2^29 words == 2GB

View File

@ -310,6 +310,7 @@
c3_c* puk_c; // -Y, scry result filename
c3_c* puf_c; // -Z, scry result format
c3_o con; // run conn
c3_o doc; // dock binary in pier
} u3_opts;
/* u3_host: entire host.
@ -317,15 +318,19 @@
typedef struct _u3_host {
c3_w kno_w; // current executing stage
c3_c* dir_c; // pier path (no trailing /)
c3_c* dem_c; // daemon executable path
c3_c* wrk_c; // worker executable path
c3_d now_d; // event tick
uv_loop_t* lup_u; // libuv event loop
u3_usig* sig_u; // signal list
#if defined(U3_OS_mingw)
#if defined(U3_OS_mingw)
HANDLE cev_u; // Ctrl-C event handle
#endif
#endif
u3_utty* uty_u; // linked terminal list
c3_o nex_o; // upgrade requested
c3_c* arc_c; // upgrade to arch
u3_opts ops_u; // commandline options
c3_o pep_o; // prep for upgrade
c3_i xit_i; // exit code for shutdown
u3_trac tra_u; // tracing information
void (*bot_f)(); // call when chis is up
@ -1409,6 +1414,11 @@
void
u3_king_slog(void);
/* u3_king_dock(): copy binary into pier on boot.
*/
void
u3_king_dock(c3_c* pac_c);
/* u3_king_done(): all piers closed
*/
void
@ -1429,6 +1439,21 @@
void
u3_king_grab(void* ptr_v);
/* u3_king_next(): get next vere version string, if it exists.
** return: 0 is success, -1 is no-op (same version), -2 is error
*/
c3_i
u3_king_next(c3_c* pac_c, c3_c** out_c);
/* u3_king_vere(): download binary as specified.
*/
c3_i
u3_king_vere(c3_c* pac_c, // pace
c3_c* ver_c, // version
c3_c* arc_c, // architecture
c3_c* dir_c, // output directory
c3_t lin_t); // link to $pier/.run
/* u3_daemon_init(): platform-specific daemon mode initialization.
*/
void

View File

@ -42,8 +42,6 @@ u3_lmdb_init(const c3_c* pax_c, size_t siz_i)
MDB_env* env_u;
c3_w ret_w;
c3_assert(u3_unix_cane(pax_c));
if ( (ret_w = mdb_env_create(&env_u)) ) {
mdb_logerror(stderr, ret_w, "lmdb: init fail");
return 0;

View File

@ -84,8 +84,6 @@ u3_foil_folder(const c3_c* pax_c)
uv_dirent_t den_u;
c3_i err_i;
c3_assert(u3_unix_cane(pax_c));
/* open directory, synchronously
*/
{

View File

@ -9,6 +9,8 @@
#include <curl/curl.h>
#include <uv.h>
static const c3_c* ver_hos_c = "https://bootstrap.urbit.org/vere";
// stash config flags for worker
//
static c3_w sag_w;
@ -231,54 +233,167 @@ _king_curl_alloc(void* dat_v, size_t uni_t, size_t mem_t, void* buf_v)
return siz_t;
}
/* _king_get_atom(): HTTP GET url_c, produce the response body as an atom.
/* _king_curl_bytes(): HTTP GET url_c, produce response body bytes.
** XX deduplicate with dawn.c
*/
static u3_noun
_king_get_atom(c3_c* url_c)
static c3_i
_king_curl_bytes(c3_c* url_c, c3_w* len_w, c3_y** hun_y, c3_t veb_t)
{
CURL *curl;
CURLcode result;
long cod_l;
c3_i ret_i = 0;
CURL *cul_u;
CURLcode res_i;
long cod_i;
uv_buf_t buf_u = uv_buf_init(c3_malloc(1), 0);
if ( !(curl = curl_easy_init()) ) {
if ( !(cul_u = curl_easy_init()) ) {
u3l_log("failed to initialize libcurl\n");
exit(1);
}
u3K.ssl_curl_f(curl);
curl_easy_setopt(curl, CURLOPT_URL, url_c);
curl_easy_setopt(curl, CURLOPT_WRITEFUNCTION, _king_curl_alloc);
curl_easy_setopt(curl, CURLOPT_WRITEDATA, (void*)&buf_u);
u3K.ssl_curl_f(cul_u);
curl_easy_setopt(cul_u, CURLOPT_URL, url_c);
curl_easy_setopt(cul_u, CURLOPT_WRITEFUNCTION, _king_curl_alloc);
curl_easy_setopt(cul_u, CURLOPT_WRITEDATA, (void*)&buf_u);
result = curl_easy_perform(curl);
curl_easy_getinfo(curl, CURLINFO_RESPONSE_CODE, &cod_l);
res_i = curl_easy_perform(cul_u);
curl_easy_getinfo(cul_u, CURLINFO_RESPONSE_CODE, &cod_i);
// XX retry?
//
if ( CURLE_OK != result ) {
u3l_log("failed to fetch %s: %s\n",
url_c, curl_easy_strerror(result));
u3_king_bail();
exit(1);
if ( CURLE_OK != res_i ) {
if ( veb_t ) {
u3l_log("curl: failed %s: %s\n", url_c, curl_easy_strerror(res_i));
}
ret_i = -1;
}
if ( 300 <= cod_l ) {
u3l_log("error fetching %s: HTTP %ld\n", url_c, cod_l);
if ( 300 <= cod_i ) {
if ( veb_t ) {
u3l_log("curl: error %s: HTTP %ld\n", url_c, cod_i);
}
ret_i = -2;
}
curl_easy_cleanup(cul_u);
*len_w = buf_u.len;
*hun_y = (c3_y*)buf_u.base;
return ret_i;
}
/* _king_get_atom(): HTTP GET url_c, produce response body as atom.
*/
static u3_noun
_king_get_atom(c3_c* url_c)
{
c3_w len_w;
c3_y* hun_y;
u3_noun pro;
if ( _king_curl_bytes(url_c, &len_w, &hun_y, 1) ) {
u3_king_bail();
exit(1);
}
curl_easy_cleanup(curl);
pro = u3i_bytes(len_w, hun_y);
c3_free(hun_y);
return pro;
}
{
u3_noun pro = u3i_bytes(buf_u.len, (const c3_y*)buf_u.base);
/* _king_get_pace(): get "pace" (release channel name).
*/
static c3_c*
_king_get_pace(void)
{
struct stat buf_u;
c3_c* pat_c;
c3_w red_w, len_w;
c3_i ret_i, fid_i;
c3_free(buf_u.base);
ret_i = asprintf(&pat_c, "%s/.bin/pace", u3_Host.dir_c);
c3_assert( ret_i > 0 );
return pro;
fid_i = c3_open(pat_c, O_RDONLY, 0644);
if ( (fid_i < 0) || (fstat(fid_i, &buf_u) < 0) ) {
c3_free(pat_c);
return strdup("live");
}
c3_free(pat_c);
len_w = buf_u.st_size;
pat_c = c3_malloc(len_w + 1);
red_w = read(fid_i, pat_c, len_w);
close(fid_i);
if ( len_w != red_w ) {
c3_free(pat_c);
u3l_log("unable to read pace file, "
"falling back to default (\"live\")\n");
return strdup("live");
}
pat_c[len_w] = 0;
while ( len_w-- && isspace(pat_c[len_w]) ) {
pat_c[len_w] = 0;
}
return pat_c;
}
/* u3_king_next(): get next vere version string, if it exists.
**
**   queries "<host>/<pace>/<current-version>/next" first; if that 404s
**   (or otherwise fails), falls back to "<host>/<pace>/last". on
**   success, the heap-allocated version string is returned through
**   [out_c] and the caller owns it.
**
** return: 0 is success, -1 is no-op (same version), -2 is error
*/
c3_i
u3_king_next(c3_c* pac_c, c3_c** out_c)
{
  c3_c* ver_c;
  c3_c* url_c;
  c3_w  len_w;
  c3_y* hun_y;
  c3_i  ret_i;

  ret_i = asprintf(&url_c, "%s/%s/%s/next", ver_hos_c, pac_c, URBIT_VERSION);
  c3_assert( ret_i > 0 );

  //  skip printfs on failed requests (/next is usually not present)
  //
  if ( _king_curl_bytes(url_c, &len_w, &hun_y, 0) ) {
    c3_free(url_c);

    ret_i = asprintf(&url_c, "%s/%s/last", ver_hos_c, pac_c);
    c3_assert( ret_i > 0 );

    //  enable printfs on failed requests (/last must be present)
    //  XX support channel redirections
    //
    if ( _king_curl_bytes(url_c, &len_w, &hun_y, 1) )
    {
      c3_free(url_c);
      return -2;
    }
  }

  c3_free(url_c);

  //  null-terminate the response body so it can be used as a C string
  //
  hun_y = c3_realloc(hun_y, 1 + len_w);
  hun_y[len_w] = 0;
  ver_c = (c3_c*)hun_y;

  //  XX trim ver_c ?
  //
  if ( 0 == strcmp(ver_c, URBIT_VERSION) ) {
    c3_free(ver_c);
    return -1;
  }

  *out_c = ver_c;
  return 0;
}
/* _get_cmd_output(): Run a shell command and capture its output.
@ -848,6 +963,527 @@ _king_forall_unlink(void (*pir_f)(u3_pier*))
}
}
/* _king_save_file(): HTTP GET [url_c], write response body to [fil_u].
**
**   no CURLOPT_WRITEFUNCTION is set, so libcurl's default write
**   callback (fwrite) streams the body directly into [fil_u].
**   returns 0 on success, -1 on transport failure, -2 on HTTP >= 300.
*/
static c3_i
_king_save_file(c3_c* url_c, FILE* fil_u)
{
  c3_i     ret_i = 0;
  CURL    *cul_u;
  CURLcode res_i;
  long     cod_i;

  if ( !(cul_u = curl_easy_init()) ) {
    u3l_log("failed to initialize libcurl\n");
    exit(1);
  }

  u3K.ssl_curl_f(cul_u);
  curl_easy_setopt(cul_u, CURLOPT_URL, url_c);
  curl_easy_setopt(cul_u, CURLOPT_WRITEDATA, (void*)fil_u);

  res_i = curl_easy_perform(cul_u);
  curl_easy_getinfo(cul_u, CURLINFO_RESPONSE_CODE, &cod_i);

  //  XX retry?
  //
  if ( CURLE_OK != res_i ) {
    u3l_log("curl: failed %s: %s\n", url_c, curl_easy_strerror(res_i));
    ret_i = -1;
  }
  if ( 300 <= cod_i ) {
    u3l_log("curl: error %s: HTTP %ld\n", url_c, cod_i);
    ret_i = -2;
  }

  curl_easy_cleanup(cul_u);
  return ret_i;
}
/* _king_make_pace(): mkdir -p $pier/.bin/[pac_c]
**
**   creates $pier/.bin and then $pier/.bin/[pac_c]/; an already-existing
**   directory is not an error. returns 0 on success, -1 on failure.
*/
static c3_i
_king_make_pace(c3_c* pac_c)
{
  c3_c* pax_c;
  c3_i  ret_i;

  ret_i = asprintf(&pax_c, "%s/.bin", u3_Host.dir_c);
  c3_assert( ret_i > 0 );

  if ( (0 != c3_mkdir(pax_c, 0700)) && (EEXIST != errno) ) {
    fprintf(stderr, "vere: mkdir %s failed: %s\n", pax_c, strerror(errno));
    c3_free(pax_c);
    return -1;
  }

  c3_free(pax_c);

  ret_i = asprintf(&pax_c, "%s/.bin/%s/", u3_Host.dir_c, pac_c);
  c3_assert( ret_i > 0 );

  //  XX asserting wrapper conflicts here (and is bypassed for .urb)
  //
  if ( (0 != mkdir(pax_c, 0700)) && (EEXIST != errno) ) {
    fprintf(stderr, "vere: mkdir %s failed: %s\n", pax_c, strerror(errno));
    c3_free(pax_c);
    return -1;
  }

  c3_free(pax_c);
  return 0;
}
static c3_i
_king_write_raw(c3_i fid_i, c3_y* buf_y, size_t len_i);
/* _king_init_pace(): save pace file if not present.
**
**   writes [pac_c] to $pier/.bin/pace; a no-op if the file already
**   exists. returns 0 on success (or pre-existing file), nonzero on
**   failure — callers treat this as best-effort.
*/
static c3_i
_king_init_pace(c3_c* pac_c)
{
  c3_c* bin_c;
  c3_i fid_i, ret_i = asprintf(&bin_c, "%s/.bin/pace", u3_Host.dir_c);
  c3_assert( ret_i > 0 );

  //  O_EXCL: never clobber an existing pace file
  //
  if ( (-1 == (fid_i = open(bin_c, O_WRONLY | O_CREAT | O_EXCL, 0644))) ) {
    if ( EEXIST == errno ) {
      c3_free(bin_c);
      //  XX print something here?
      //
      return 0;
    }
    else {
      u3l_log("dock: init pace (%s): open %s\n", pac_c, strerror(errno));
      c3_free(bin_c);
      return -1;
    }
  }

  if ( _king_write_raw(fid_i, (c3_y*)pac_c, strlen(pac_c)) ) {
    u3l_log("dock: init pace (%s): write %s\n", pac_c, strerror(errno));
    close(fid_i);
    c3_free(bin_c);
    return -1;
  }
  //  XX sync first?
  //
  else if ( close(fid_i) ) {
    u3l_log("dock: init pace (%s): close %s\n", pac_c, strerror(errno));
    c3_free(bin_c);
    return 1;
  }

  //  fix: [bin_c] was leaked on this (success) path
  //
  c3_free(bin_c);

  u3l_log("dock: pace (%s): configured at %s/.bin/pace\r\n",
          pac_c, u3_Host.dir_c);
  return 0;
}
/* _king_link_run(): ln [bin_c] $pier/.run
**
**   replaces any existing alias with a hard link to [bin_c].
**   returns 0 on success, -1 on failure.
*/
static c3_i
_king_link_run(c3_c* bin_c)
{
  c3_c* run_c;
  c3_i  res_i;

  res_i = asprintf(&run_c, "%s/%s", u3_Host.dir_c, U3_BIN_ALIAS);
  c3_assert( res_i > 0 );

  //  drop a stale alias first; a missing one is fine
  //
  if ( unlink(run_c) && (ENOENT != errno) ) {
    fprintf(stderr, "vere: unlink %s failed: %s\n", run_c, strerror(errno));
    c3_free(run_c);
    return -1;
  }

  if ( link(bin_c, run_c) ) {
    fprintf(stderr, "vere: link %s -> %s failed: %s\n",
                    run_c, bin_c, strerror(errno));
    c3_free(run_c);
    return -1;
  }

  c3_free(run_c);
  return 0;
}
/* u3_king_vere(): download binary as specified.
**
**   fetches vere-v[ver_c]-[arc_c] from the [pac_c] release channel
**   into [dir_c], optionally hard-linking it to $pier/.run.
**   returns 0 on success (or if already installed), -1 on failure.
*/
c3_i
u3_king_vere(c3_c* pac_c,  // pace
             c3_c* ver_c,  // version
             c3_c* arc_c,  // architecture
             c3_c* dir_c,  // output directory
             c3_t  lin_t)  // link to $pier/.run
{
  c3_c* bin_c;
  c3_c* url_c;
  FILE* fil_u;
  c3_i  fid_i, ret_i;

  ret_i = asprintf(&bin_c, "%s/vere-v%s-%s" U3_BIN_SUFFIX,
                   dir_c, ver_c, arc_c);
  c3_assert( ret_i > 0 );

  //  O_EXCL: an existing binary of this version counts as installed
  //
  if ( -1 == (fid_i = open(bin_c, O_WRONLY | O_CREAT | O_EXCL, 0755)) ) {
    if ( EEXIST == errno ) {
      u3l_log("already installed\n");
      c3_free(bin_c);
      return 0;
    }
    else {
      u3l_log("unable to open %s: %s\r\n", bin_c, strerror(errno));
      c3_free(bin_c);
      return -1;
    }
  }

  //  fix: handle fdopen() failure separately; previously [fid_i]
  //  (and the empty file just created) leaked on this path
  //
  if ( !(fil_u = fdopen(fid_i, "wb")) ) {
    u3l_log("unable to open %s: %s\r\n", bin_c, strerror(errno));
    close(fid_i);
    unlink(bin_c);
    c3_free(bin_c);
    return -1;
  }

  ret_i = asprintf(&url_c, "%s/%s/%s/vere-v%s-%s",
                   ver_hos_c, pac_c, ver_c, ver_c, arc_c);
  c3_assert( ret_i > 0 );

  if ( (ret_i = _king_save_file(url_c, fil_u)) ) {
    u3l_log("unable to save %s to %s: %d\r\n", url_c, bin_c, ret_i);
    c3_free(url_c);
    fclose(fil_u);
    unlink(bin_c);
    c3_free(bin_c);
    return -1; // XX
  }

  //  XX sync unnecessary here?
  //
  if ( fflush(fil_u) || c3_sync(fid_i) ) {
    fprintf(stderr, "vere: sync %s failed: %s\n", bin_c, strerror(errno));
    c3_free(url_c);
    fclose(fil_u);
    unlink(bin_c);
    c3_free(bin_c);
    return -1;
  }

  fclose(fil_u);

  //  XX if link fails wat do?
  //  XX set via cli option
  //
  if ( lin_t ) {
    if ( _king_link_run(bin_c) ) {
      fprintf(stderr, "vere: link %s/%s failed\n", u3_Host.dir_c, U3_BIN_ALIAS);
      c3_free(url_c);
      c3_free(bin_c);
      return -1;
    }
  }

  u3l_log("vere: saved to %s\n", bin_c);
  c3_free(url_c);
  c3_free(bin_c);
  return 0;
}
/* _king_do_upgrade(): get arch-appropriate binary at [ver_c].
**
**   downloads the binary into $pier/.bin/[pac_c]/ and links it to
**   $pier/.run; bails and exits the process on failure.
*/
static void
_king_do_upgrade(c3_c* pac_c, c3_c* ver_c)
{
  c3_c* dir_c;
  c3_c* arc_c;

#ifdef U3_OS_ARCH
  arc_c = U3_OS_ARCH;
#else
  //  no compiled-in architecture: require --arch on the command line
  //
  if ( !u3_Host.arc_c ) {
    u3l_log("vere: --arch required\r\n");
    return;
  }
  arc_c = u3_Host.arc_c;
#endif

  if ( _king_make_pace(pac_c) ) {
    u3l_log("vere: unable to make pace (%s) directory in pier\n", pac_c);
    u3_king_bail();
    exit(1);
  }

  {
    c3_i res_i = asprintf(&dir_c, "%s/.bin/%s", u3_Host.dir_c, pac_c);
    c3_assert( res_i > 0 );
  }

  //  XX get link option
  //
  if ( u3_king_vere(pac_c, ver_c, arc_c, dir_c, 1) ) {
    u3l_log("vere: upgrade failed\r\n");
    u3_king_bail();
    exit(1);
  }

  c3_free(dir_c);
  u3l_log("vere: upgrade succeeded\r\n");
  //  XX print restart instructions
}
/* _king_read_raw(): read (up to) [len_i] from [fid_i] to [buf_y],
** retrying reads interrupted by a signal.
*/
static ssize_t
_king_read_raw(c3_i fid_i, c3_y* buf_y, size_t len_i)
{
  ssize_t red_i = read(fid_i, buf_y, len_i);

  while ( (red_i < 0) && (EINTR == errno) ) {
    red_i = read(fid_i, buf_y, len_i);
  }

  return red_i;
}
/* _king_write_raw(): write [len_i] from [buf_y] to [fid_i].
**
**   retries on EINTR and on short writes until all bytes are
**   written; returns 0 on success, -1 on any other write error.
*/
static c3_i
_king_write_raw(c3_i fid_i, c3_y* buf_y, size_t len_i)
{
  ssize_t ret_i;
  while ( len_i ) {
    //  retry writes interrupted by a signal
    //
    do {
      ret_i = write(fid_i, buf_y, len_i);
    }
    while ( (ret_i < 0) && (errno == EINTR) );
    if ( ret_i < 0 ) {
      return -1;
    }
    else {
      //  short write: advance past what was written and continue
      //
      len_i -= ret_i;
      buf_y += ret_i;
    }
  }
  return 0;
}
/* _king_copy_raw(): copy all of [src_i] to [dst_i] through [buf_y],
** [pag_i] bytes at a time, until EOF.
*/
static c3_i
_king_copy_raw(c3_i src_i, c3_i dst_i, c3_y* buf_y, size_t pag_i)
{
  while ( 1 ) {
    ssize_t red_i = _king_read_raw(src_i, buf_y, pag_i);

    if ( red_i < 0 ) {
      return -1;
    }
    if ( 0 == red_i ) {
      //  EOF: everything copied
      //
      return 0;
    }
    if ( _king_write_raw(dst_i, buf_y, (size_t)red_i) ) {
      return -1;
    }
  }
}
#if defined(U3_OS_mingw)
int err_win_to_posix(DWORD winerr);
#endif
/* _king_copy_file(): copy file [src_c] to [dst_c].
**
**   uses filesystem-level cloning where available (CopyFileA on
**   windows, clonefile/fcopyfile on macos, FICLONE/sendfile64 on
**   linux), falling back to copying bytes. returns 0 on success,
**   nonzero with errno set on failure.
*/
static c3_i
_king_copy_file(c3_c* src_c, c3_c* dst_c)
{
#if defined(U3_OS_mingw)
  //  XX try FSCTL_DUPLICATE_EXTENTS_TO_FILE
  //
  if ( CopyFileA(src_c, dst_c, TRUE) ) {
    return 0;
  }

  //  XX fallback on any?
  //
  errno = err_win_to_posix(GetLastError());
  return -1;
#elif defined(U3_OS_osx)
  if ( !clonefile(src_c, dst_c, 0) ) {
    return 0;
  }
  //  fallthru to copying bytes on some errors
  //
  else if ( (ENOTSUP != errno) && (EXDEV != errno) ) {
    return -1;
  }
#endif
  {
    c3_i src_i, dst_i, ret_i = 0, err_i = 0;

    if ( -1 == (src_i = open(src_c, O_RDONLY, 0644)) ) {
      err_i = errno;
      ret_i = -1;
      goto done1;
    }
    if ( -1 == (dst_i = open(dst_c, O_RDWR | O_CREAT, 0755)) ) {
      err_i = errno;
      ret_i = -1;
      goto done2;
    }

    //  XX try clone_file_range ?
    //
#if defined(U3_OS_linux)
#if defined(FICLONE)
    if ( !ioctl(dst_i, FICLONE, src_i) ) {
      ret_i = 0;
      goto done3;
    }
    //  fallthru to copying bytes on some errors
    //
    else if ( (EOPNOTSUPP != errno) && (EXDEV != errno) ) {
      err_i = errno;
      ret_i = -1;
      goto done3;
    }
#endif
    {
      off_t   off_i = 0;
      ssize_t sen_i;
      size_t  len_i;

      {
        struct stat sat_u;
        if ( -1 == fstat(src_i, &sat_u) ) {
          err_i = errno;
          ret_i = -1;
          goto done3;
        }
        len_i = sat_u.st_size;
      }

      do {
        //  XX fallback on any errors?
        //
        if ( 0 > (sen_i = sendfile64(dst_i, src_i, &off_i, len_i)) ) {
          err_i = errno;
          ret_i = -1;
          goto done3;
        }
        //  fix: decrement by the bytes written in *this* call.
        //  [off_i] is the cumulative file offset updated by
        //  sendfile64, so subtracting it over-counts on every
        //  iteration after the first (size_t underflow)
        //
        len_i -= sen_i;
      }
      while ( len_i );

      ret_i = 0;
      goto done3;
    }
#elif defined(U3_OS_osx)
    if ( !fcopyfile(src_i, dst_i, NULL, COPYFILE_ALL) ) {
      ret_i = 0;
      goto done3;
    }
    //  XX fallback on any errors?
    //
#endif
    {
      size_t pag_i = 1 << 14;
      c3_y*  buf_y = c3_malloc(pag_i);
      ret_i = _king_copy_raw(src_i, dst_i, buf_y, pag_i);
      err_i = errno;
      c3_free(buf_y);
    }

done3:
    close(dst_i);
done2:
    close(src_i);
done1:
    errno = err_i;
    return ret_i;
  }
}
/* _king_copy_vere(): copy current binary into $pier/.bin (COW if possible)
**
**   copies the running executable ([u3_Host.dem_c]) to
**   $pier/.bin/[pac_c]/vere-v[ver_c]-[arc_c], optionally linking it
**   to $pier/.run. returns 0 on success, -1 on failure.
*/
static c3_i
_king_copy_vere(c3_c* pac_c, c3_c* ver_c, c3_c* arc_c, c3_t lin_t)
{
  c3_c* bin_c;
  c3_i  ret_i;

  if ( _king_make_pace(pac_c) ) {
    return -1; // XX
  }

  ret_i = asprintf(&bin_c, "%s/.bin/%s/vere-v%s-%s" U3_BIN_SUFFIX,
                   u3_Host.dir_c, pac_c, ver_c, arc_c);
  c3_assert( ret_i > 0 );

  ret_i = _king_copy_file(u3_Host.dem_c, bin_c);

  if ( ret_i ) {
    //  fix: report source -> destination in that order; the
    //  arguments were previously swapped relative to the copy
    //
    fprintf(stderr, "vere: copy %s -> %s failed: %s\r\n",
                    u3_Host.dem_c, bin_c, strerror(errno));
    c3_free(bin_c);
    return -1;
  }

  //  XX option
  //
  if ( lin_t ) {
    if ( _king_link_run(bin_c) ) {
      fprintf(stderr, "vere: link %s/%s failed\n", u3_Host.dir_c, U3_BIN_ALIAS);
      c3_free(bin_c);
      return -1;
    }
  }

  c3_free(bin_c);
  return 0;
}
/* u3_king_dock(): copy binary into pier on boot.
**
**   copies the running binary into the pier and records the pace;
**   bails and exits the process if the copy fails.
*/
void
u3_king_dock(c3_c* pac_c)
{
#ifdef U3_OS_ARCH
  c3_c* arc_c = U3_OS_ARCH;
#else
  c3_c* arc_c = "unknown";
#endif

  //  XX get link option
  //
  if ( _king_copy_vere(pac_c, URBIT_VERSION, arc_c, 1) ) {
    u3l_log("vere: binary copy failed\r\n");
    u3_king_bail();
    exit(1);
  }

  //  NB: failure ignored
  //
  _king_init_pace(pac_c);
  u3l_log("vere: binary copy succeeded\r\n");
  //  XX print restart instructions
}
/* _king_done_cb():
*/
static void
@ -871,6 +1507,53 @@ u3_king_done(void)
{
uv_handle_t* han_u = (uv_handle_t*)&u3K.tim_u;
// get next binary
//
if ( c3y == u3_Host.nex_o ) {
c3_c* pac_c;
c3_c* ver_c;
// hack to ensure we only try once
//
u3_Host.nex_o = c3n;
pac_c = _king_get_pace();
switch ( u3_king_next(pac_c, &ver_c) ) {
case -2: {
u3l_log("vere: unable to check for next version\n");
} break;
case -1: {
u3l_log("vere: up to date\n");
} break;
case 0: {
u3l_log("vere: next (%%%s): %s\n", pac_c, ver_c);
_king_do_upgrade(pac_c, ver_c);
c3_free(ver_c);
} break;
default: c3_assert(0);
}
c3_free(pac_c);
}
else if ( c3y == u3_Host.pep_o ) {
u3l_log("vere: ready for upgrade\n");
}
// copy binary into pier on boot
//
if ( (c3y == u3_Host.ops_u.nuu)
&& (c3y == u3_Host.ops_u.doc) )
{
// hack to ensure we only try once
//
u3_Host.ops_u.nuu = c3n;
u3_king_dock(U3_VERE_PACE);
}
// XX hack, if pier's are still linked, we're not actually done
//
if ( !u3K.pir_u && !uv_is_closing(han_u) ) {
@ -880,6 +1563,10 @@ u3_king_done(void)
u3_term_log_exit();
fflush(stdout);
}
// XX remove move
//
exit(0);
}
/* u3_king_exit(): shutdown gracefully

View File

@ -469,14 +469,14 @@ static void
_pier_on_scry_done(void* ptr_v, u3_noun nun)
{
u3_pier* pir_u = ptr_v;
u3_weak res = u3r_at(7, nun);
u3_weak res = u3r_at(7, nun);
if (u3_none == res) {
u3l_log("pier: scry failed\n");
}
else {
u3_weak out, pad;
c3_c *ext_c, *pac_c;
u3_weak out;
c3_c *ext_c, *pac_c;
u3l_log("pier: scry succeeded\n");
@ -506,30 +506,13 @@ _pier_on_scry_done(void* ptr_v, u3_noun nun)
u3z(puf);
}
// try to build export target path
//
{
u3_noun pro = u3m_soft(0, _pier_stab, u3i_string(pac_c));
if ( 0 == u3h(pro) ) {
c3_w len_w = u3kb_lent(u3k(u3t(pro)));
pad = u3nt(c3_s4('.', 'u', 'r', 'b'),
c3_s3('p', 'u', 't'),
u3qb_scag(len_w - 1, u3t(pro)));
}
else {
u3l_log("pier: invalid export path %s\n", pac_c);
pad = u3_none;
}
u3z(pro);
}
// if serialization and export path succeeded, write to disk
//
if ( (u3_none != out) && (u3_none != pad) ) {
if ( u3_none != out ) {
c3_c fil_c[256];
snprintf(fil_c, 256, "%s.%s", pac_c + 1, ext_c);
u3_unix_save(fil_c, pad);
u3_unix_save(fil_c, out);
u3l_log("pier: scry result in %s/.urb/put/%s\n", u3_Host.dir_c, fil_c);
}
}
@ -1049,7 +1032,17 @@ _pier_play(u3_play* pay_u)
}
else if ( pay_u->eve_d == log_u->dun_d ) {
u3_lord_save(pir_u->god_u);
_pier_wyrd_init(pir_u);
// early exit, preparing for upgrade
//
// XX check kelvins?
//
if ( c3y == u3_Host.pep_o ) {
u3_pier_exit(pir_u);
}
else {
_pier_wyrd_init(pir_u);
}
}
}
else {
@ -1431,7 +1424,16 @@ _pier_on_lord_live(void* ptr_v)
_pier_play_init(pir_u, eve_d);
}
else {
_pier_wyrd_init(pir_u);
// early exit, preparing for upgrade
//
// XX check kelvins?
//
if ( c3y == u3_Host.pep_o ) {
u3_pier_exit(pir_u);
}
else {
_pier_wyrd_init(pir_u);
}
}
}
}

View File

@ -1 +1 @@
1.8
1.9