diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml new file mode 100644 index 000000000..83c4bbbd9 --- /dev/null +++ b/.github/workflows/build.yml @@ -0,0 +1,120 @@ +# Notes: +# +# jobs.<job_id> +# +# A separate job id results in a lot of duplication of nix/cachix work. +# The build will have to download any substituted derivations from cachix +# for the steps with each distinct job id and upload built derivations to +# cachix after each job has completed, either successfully or on failure. +# +# jobs.<job_id>.steps.run +# +# build + test are distinct as each step entry results in a collapsible title +# within the log output, which makes it easier to view failing builds or +# tests independently. +# +# jobs.<job_id>.strategy.fail-fast +# +# Set to false so developers working on vere or king-haskell can have their +# respective builds proceed without the other causing failure. +# +# shell.nix +# +# mkShell doesn't allow you to build it - so instantiate all the subshells +# defined for the individual pkg/*/shell.nix as a sanity check and to create +# some artefacts suitable for developers to pull from cachix. The top-level +# shell.nix build time is dominated by Haskell dependencies so it's built as +# part of the haskell build steps. +# +# Syntax: +# +# https://docs.github.com/en/free-pro-team@latest/actions/reference/workflow-syntax-for-github-actions + +name: Build, test, and upload urbit release tarball + +on: + push: null + pull_request: null + +jobs: + urbit: + strategy: + fail-fast: false + matrix: + include: + - { os: ubuntu-latest } + - { os: macos-latest } + + runs-on: ${{ matrix.os }} + + steps: + - uses: actions/checkout@v2 + - uses: cachix/install-nix-action@v12 + - uses: cachix/cachix-action@v8 + with: + name: mars + authToken: ${{ secrets.CACHIX_AUTH_TOKEN }} + + - run: nix-build -A urbit --arg enableStatic true + + - if: ${{ matrix.os == 'ubuntu-latest' }} + run: nix-build -A urbit-tests + + haskell: + strategy: + fail-fast: false + matrix: + include: + - { os: ubuntu-latest } + - { os: macos-latest } + + runs-on: ${{ matrix.os }} + + steps: + - uses: actions/checkout@v2 + - uses: cachix/install-nix-action@v12 + - uses: cachix/cachix-action@v8 + with: + name: mars + authToken: ${{ secrets.CACHIX_AUTH_TOKEN }} + + - run: nix-build -A hs.urbit-king.components.exes.urbit-king --arg enableStatic true + - run: nix-build -A hs-checks + - run: nix-build shell.nix + + upload: + needs: [urbit, haskell] + strategy: + matrix: + include: + - { os: ubuntu-latest, system: x86_64-linux } + - { os: macos-latest, system: x86_64-darwin } + + runs-on: ${{ matrix.os }} + + steps: + - uses: actions/checkout@v2 + - uses: cachix/install-nix-action@v12 + - uses: cachix/cachix-action@v8 + with: + name: mars + authToken: ${{ secrets.CACHIX_AUTH_TOKEN }} + + - uses: GoogleCloudPlatform/github-actions/setup-gcloud@0.1.2 + with: + version: '290.0.1' + service_account_key: ${{ secrets.GCS_SERVICE_ACCOUNT_KEY }} + project_id: ${{ secrets.GCS_PROJECT }} + export_default_credentials: true + + - run: nix-build -A tarball + + - name: Run upload to bootstrap.urbit.org + run: | + version="$(cat ./pkg/urbit/version)" + system="$(nix eval --raw '(builtins.currentSystem)')" + target="gs://bootstrap.urbit.org/ci/urbit-v${version}-${system}-${GITHUB_SHA:0:9}.tgz" + + gsutil cp -n ./result "$target" + + echo "upload to $target complete."
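For reference, the CI steps above can also be reproduced locally. A minimal sketch, assuming a working Nix installation and, optionally, the cachix CLI to pull from the same `mars` binary cache the workflow uses:

    # optionally substitute prebuilt derivations from the workflow's cache
    cachix use mars

    # the builds run by the urbit and haskell jobs
    nix-build -A urbit --arg enableStatic true
    nix-build -A urbit-tests
    nix-build -A hs.urbit-king.components.exes.urbit-king --arg enableStatic true
    nix-build -A hs-checks

    # the release artefact produced by the upload job
    nix-build -A tarball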
diff --git a/.gitignore b/.gitignore index 74b439d22..e79759dc6 100644 --- a/.gitignore +++ b/.gitignore @@ -1,46 +1,79 @@ -# nix symlink artifacts -# +# OSX +.DS_Store + +# Editors +*~ +*# +.#* +\#*# +.*.sw[a-z] +*.un~ +*.org +.projectile +s/* +cscope.* + +# Tags +.tags +.etags +TAGS +GPATH +GRTAGS +GTAGS + +# Nix result result-* -# common dev piers -# +# Common Piers /zod /bus /nec /fakezod -# package manager caches -# -.stack-work +# NodeJS node_modules -# build and release artifacts -# +# Haskell +.stack-work +stack.yaml.lock +dist-newstyle +.ghc* + +# Profiling +*.prof +*.aux +*.hp +*.ps +*.pdf + +# Build/Release Artifacts +build/ cross/ release/ -dist -/out -/work +dist/ +out/ +work/ +*.o -# landscape dev -# +# Landscape Dev urbitrc *-min.js pkg/interface/link-webext/web-ext-artifacts -# catchall editor and OS stuff -# -.tags -.etags -tags -TAGS -GPATH -GRTAGS -GTAGS -.DS_Store -*.swp -*.swo -\#*\# -s/* +# Certificates +*.pem +*.key +*.secret +*.sec +*.asc +# Archives +*.zip +*.gz +*.tar +*.bzip2 +*.xz + +# Logs +*.log \ No newline at end of file diff --git a/.travis.yml b/.travis.yml deleted file mode 100644 index e9f7b659f..000000000 --- a/.travis.yml +++ /dev/null @@ -1,80 +0,0 @@ -stages: - - compile - # Don't run the combine stage in pull requests, because deploy is disabled there. - - name: combine - if: type != pull_request - -jobs: - include: - - stage: compile - os: linux - language: nix - nix: 2.3.6 - before_install: - - git lfs pull - - echo "trusted-users = root travis" | sudo tee -a /etc/nix/nix.conf && sudo pkill nix-daemon - install: - - nix-env -iA cachix -f https://cachix.org/api/v1/install - script: - - cachix use urbit2 - - ./sh/cachix - - make build - - make release - - sh/ci-tests - - - stage: compile - os: linux - language: generic - env: STACK_YAML=pkg/hs/stack.yaml - cache: - directories: - - $HOME/.ghc - - $HOME/.cabal - - $HOME/.stack - - $TRAVIS_BUILD_DIR/.stack-work - before_install: - - sh/travis-install-stack - install: - - stack --no-terminal --install-ghc build urbit-king --only-dependencies - script: - - stack test - - sh/release-king-linux64-dynamic - - - stage: compile - os: osx - language: generic - sudo: required - env: STACK_YAML=pkg/hs/stack.yaml - cache: - directories: - - $HOME/.ghc - - $HOME/.cabal - - $HOME/.stack - - $TRAVIS_BUILD_DIR/.stack-work - before_install: - - sh/travis-install-stack - install: - - stack --no-terminal --install-ghc build urbit-king --only-dependencies - script: - - stack test - - sh/release-king-darwin-dynamic - - - stage: combine - os: linux - language: generic - script: - - sh/combine-release-builds - -deploy: - - skip_cleanup: true - provider: gcs - access_key_id: GOOGTADOPP55X5ZTH3IKAXQW - secret_access_key: - secure: rSPif0VHX3Q3QpWM9iUt/Z9sicCY8ABuwVFPeT3YUnAAmSXM46PIHKieoGs79kx9IelFyQsM8xS0XWyt4S/haR7VaTJY+zHJjf0AnA1sr5ZIV70t3K0SXzq4+E1ffOZRiC0VmHatPz10wjaIpHxpjcROqQV4M1dBCq2H/rpccIE= - bucket: bootstrap.urbit.org - local-dir: release/ - acl: public-read - on: - condition: "-d release/" - repo: urbit/urbit - all_branches: true diff --git a/Makefile b/Makefile index 434706970..9efd4c135 100644 --- a/Makefile +++ b/Makefile @@ -3,20 +3,14 @@ build: nix-build -A urbit -A herb --no-out-link -build-all: - nix-build --no-out-link - install: nix-env -f . 
-iA urbit -iA urbit-debug -iA herb -cross: - sh/cross - release: sh/release test: - sh/test + nix-build -A urbit-tests --no-out-link pills: sh/update-solid-pill @@ -32,3 +26,6 @@ interface: clean: rm -rf ./out ./work rm -f result result-* + +fmt: + sh/fmt diff --git a/bin/brass.pill b/bin/brass.pill index 8e713c7d5..b5f47de1b 100644 --- a/bin/brass.pill +++ b/bin/brass.pill @@ -1,3 +1,3 @@ version https://git-lfs.github.com/spec/v1 -oid sha256:35d8930b9b35364605196d99766ec713154af9105ce7b9fabfaa50e8ca29a5fd -size 4448128 +oid sha256:f4a6a782b2193a16dc10340e40787522ec34df50eb517fef849bc32b87a3e512 +size 4508418 diff --git a/bin/ivory.pill b/bin/ivory.pill index 29eeabbc5..40004edc4 100644 --- a/bin/ivory.pill +++ b/bin/ivory.pill @@ -1,3 +1,3 @@ version https://git-lfs.github.com/spec/v1 -oid sha256:e5c82dea80aa7c5593f43fa4294db7974211abceedd907663da73889857642e7 -size 1309381 +oid sha256:db42476704efdbb5c2cbf81ea67f15603da5b848850b538081e7b108f969fa19 +size 1315294 diff --git a/bin/solid.pill b/bin/solid.pill index 17d47dca6..bbbae340e 100644 --- a/bin/solid.pill +++ b/bin/solid.pill @@ -1,3 +1,3 @@ version https://git-lfs.github.com/spec/v1 -oid sha256:d18233ffa40c24e0e3d43b201788cf5d880ea4517fb33f12f6850af2b6a8100c -size 6436148 +oid sha256:d8ac4b3bf78d7c8ef62f668bfe14b243cffe5784a97c87486e1ae31c4b58e1f6 +size 6312584 diff --git a/default.nix b/default.nix index a21725f73..439899c3c 100644 --- a/default.nix +++ b/default.nix @@ -1,8 +1,188 @@ +/* Examples + + Shared urbit and urbit-worker binaries: + + $ nix-build -A urbit + + Static urbit and urbit-worker binaries: + + $ nix-build -A urbit --arg enableStatic true + + Note that on linux the previous command is equivalent to: + + $ nix-build -A urbit --argstr crossSystem x86_64-unknown-linux-musl \ + --arg enableStatic true + + Static urbit-king binary: + + $ nix-build -A hs.urbit-king.components.exes.urbit-king --arg enableStatic true + + Static release tarball: + + $ nix-build -A tarball --arg enableStatic true + + Build a pill: + + $ nix-build -A ivory.build + $ nix-build -A brass.build + $ nix-build -A solid.build + + Run the king-haskell checks (.tests _builds_ the test code, .checks _runs_ it): + + $ nix-build -A hs.urbit-king.checks.urbit-king-tests + + Build a specific Haskell package from ./pkg/hs: + + $ nix-build -A hs.urbit-noun.components.library + $ nix-build -A hs.urbit-atom.components.benchmarks.urbit-atom-bench + $ nix-build -A hs.urbit-atom.components.tests.urbit-atom-tests +*/ + +# The build system where packages will be _built_. +{ system ? builtins.currentSystem + # The host system where packages will _run_. +, crossSystem ? null + # Additional sources.json overrides. +, sources ? { } + # Additional nixpkgs.config overrides. +, config ? { } + # Additional nixpkgs.overlays. +, overlays ? [ ] + # Overlays to apply to the last package set in cross compilation. +, crossOverlays ? [ ] + # Whether to use pkgs.pkgsStatic.* to obtain statically linked package + # dependencies - i.e. when building fully-static libraries or executables. +, enableStatic ? false }: + let - pkgs = import ./nix/pkgs {}; - deps = import ./nix/deps {}; + pkgsNative = import ./nix/default.nix { inherit system; }; -in + pkgsCross = import ./nix/default.nix { + inherit system sources config overlays crossOverlays; - deps // pkgs + # If we're running on linux and crossSystem is unspecified but + # enableStatic = true - set the crossSystem to musl64.
+ crossSystem = + if system == "x86_64-linux" && crossSystem == null && enableStatic then + "x86_64-unknown-linux-musl" + else + crossSystem; + }; + + # Use nixpkgs' top-level/static overlay if enableStatic = true. + pkgsStatic = if enableStatic then pkgsCross.pkgsStatic else pkgsCross; + + # Enrich the global package set with our local functions and packages. + # Cross vs static build dependencies can be selectively overridden for + # inputs like python and haskell-nix + callPackage = + pkgsNative.lib.callPackageWith (pkgsStatic // libLocal // pkgsLocal); + + # Local library import-from-derivation functions such as fetchGitHubLFS, etc. + libLocal = pkgsNative.callPackage ./nix/lib { }; + + # Local vendored packages defined in ./pkg. + # For non-vendored nixpkgs specific package overrides, see ./nix/overlays. + pkgsLocal = { + argon2u = callPackage ./nix/pkgs/argon2u { }; + + ca-bundle = callPackage ./nix/pkgs/ca-bundle { }; + + ed25519 = callPackage ./nix/pkgs/ed25519 { }; + + ent = callPackage ./nix/pkgs/ent { }; + + ge-additions = callPackage ./nix/pkgs/ge-additions { }; + + libaes_siv = callPackage ./nix/pkgs/libaes_siv { }; + + libscrypt = callPackage ./nix/pkgs/libscrypt { }; + + murmur3 = callPackage ./nix/pkgs/murmur3 { }; + + softfloat3 = callPackage ./nix/pkgs/softfloat3 { }; + + herb = callPackage ./nix/pkgs/herb { inherit (pkgsCross) python; }; + + arvo = callPackage ./nix/pkgs/arvo { }; + + ivory = callPackage ./nix/pkgs/pill/ivory.nix { }; + + brass = callPackage ./nix/pkgs/pill/brass.nix { }; + + solid = callPackage ./nix/pkgs/pill/solid.nix { }; + + urbit = callPackage ./nix/pkgs/urbit { inherit enableStatic; }; + + hs = callPackage ./nix/pkgs/hs { + inherit enableStatic; + inherit (pkgsCross) haskell-nix; + }; + }; + + # Additional top-level packages and attributes exposed for convenience. + pkgsExtra = with pkgsLocal; rec { + # Expose packages with local customisations (like patches) for dev access. + inherit (pkgsCross) libsigsegv; + + # Collect haskell check (aka "run the tests") attributes so we can run every + # test for our local haskell packages, similar to the urbit-tests attribute. + hs-checks = (pkgsNative.recurseIntoAttrs + (libLocal.collectHaskellComponents pkgsLocal.hs)).checks; + + urbit-debug = urbit.override { enableDebug = true; }; + urbit-tests = libLocal.testFakeShip { + inherit herb; + + urbit = urbit-debug; + pill = solid.lfs; + }; + + ivory-ropsten = ivory.override { arvo = arvo.ropsten; }; + brass-ropsten = brass.override { arvo = arvo.ropsten; }; + + # Create a .tgz of the primary binaries. + tarball = let + name = "urbit-v${urbit.version}-${urbit.system}"; + urbit-king = hs.urbit-king.components.exes.urbit-king; + in libLocal.makeReleaseTarball { + inherit name; + + contents = { + "${name}/urbit" = "${urbit}/bin/urbit"; + "${name}/urbit-worker" = "${urbit}/bin/urbit-worker"; + "${name}/urbit-king" = "${urbit-king}/bin/urbit-king"; + }; + }; + + # A convenience function for constructing a shell.nix for any of the + # pkgsLocal derivations by automatically propagating any dependencies + # to the nix-shell. + # + # Example: + # + # let + # pkgs = import ./default.nix { }; + # in pkgs.shellFor { + # packages = ps: [ + # ps.urbit + # ps.herb + # ]; + # } + # + shellFor = { name, packages, ... }@attrs: + pkgsNative.mkShell ({ + inputsFrom = packages pkgsLocal; + } // builtins.removeAttrs attrs [ "packages" ]); + }; + + # Ensure that in the case of cross-compilation we're not statically linking + # against glibc. 
This is typically a sign that crossSystem is misconfigured. + checkPlatform = + if enableStatic && pkgsCross.stdenv.hostPlatform.libc == "glibc" then + builtins.trace "warning: statically linking against glibc." + else + pkgsNative.lib.id; + +in checkPlatform (pkgsLocal // pkgsExtra) diff --git a/nix/cachix/local.nix b/nix/cachix/local.nix deleted file mode 100644 index 753a00a0c..000000000 --- a/nix/cachix/local.nix +++ /dev/null @@ -1,22 +0,0 @@ -# All the non-release builds that should be cached in `cachix`. - -let - - pkgs = import ../pkgs {}; - deps = import ../deps {}; - - # Cache the result of cloning source repos. - repos = { - argon2-src = deps.argon2.src; - ed25519-src = deps.ed25519.src; - h2o-src = deps.h2o.src; - murmur3-src = deps.murmur3.src; - scrypt-src = deps.scrypt.src; - secp256k1-src = deps.secp256k1.src; - softfloat3-src = deps.softfloat3.src; - uv-src = deps.uv.src; - }; - -in - - deps // pkgs // repos diff --git a/nix/cachix/release.nix b/nix/cachix/release.nix deleted file mode 100644 index bc63f052a..000000000 --- a/nix/cachix/release.nix +++ /dev/null @@ -1,11 +0,0 @@ -let - - util = import ./util.nix; - nixcrpkgs = import ../nixcrpkgs.nix; - release = import ../release.nix; - all_releases = util.flattenSetPrefix release; - crosstools = { inherit (nixcrpkgs.native) pkgconf; }; - -in - - crosstools // all_releases diff --git a/nix/cachix/tests.nix b/nix/cachix/tests.nix deleted file mode 100644 index 2248a5cea..000000000 --- a/nix/cachix/tests.nix +++ /dev/null @@ -1,7 +0,0 @@ -let - ops = import ../ops/default.nix {}; -in - { - results = ops.test; - fakebus = ops.bus; - } diff --git a/nix/cachix/util.nix b/nix/cachix/util.nix deleted file mode 100644 index 6eada20f4..000000000 --- a/nix/cachix/util.nix +++ /dev/null @@ -1,26 +0,0 @@ -# Some utility functions: - -rec { - - # The inverse of builtins.listToAttrs - attrsToList = o: - map (a: { name=a; value=builtins.getAttr a o; }) - (builtins.attrNames o); - - # ∀o,x,y. produce o' such that o'.y == o.x.y (assuming no conflicts) - flattenSet = o: - builtins.foldl' (acc: v: acc // v) {} - (builtins.attrValues o); - - prefixSetAttrs = prefix: o: - builtins.listToAttrs - (map ({name, value}: { name=prefix + name; value=value; }) - (attrsToList o)); - - # ∀o,x,y. produce o' such that o'.x-y == o.x.y - flattenSetPrefix = o: - (builtins.foldl' (acc: o: acc // o) {} - (map ({name, value}: prefixSetAttrs name value) - (attrsToList o))); - -} diff --git a/nix/crossdeps.nix b/nix/crossdeps.nix deleted file mode 100644 index e942b842b..000000000 --- a/nix/crossdeps.nix +++ /dev/null @@ -1,14 +0,0 @@ -crossenv: - -rec { - argon2 = import ./deps/argon2/cross.nix { inherit crossenv; }; - murmur3 = import ./deps/murmur3/cross.nix { inherit crossenv; }; - uv = import ./deps/uv/cross.nix { inherit crossenv; }; - ed25519 = import ./deps/ed25519/cross.nix { inherit crossenv; }; - scrypt = import ./deps/scrypt/cross.nix { inherit crossenv; }; - softfloat3 = import ./deps/softfloat3/cross.nix { inherit crossenv; }; - secp256k1 = import ./deps/secp256k1/cross.nix { inherit crossenv; }; - h2o = import ./deps/h2o/cross.nix { inherit crossenv uv; }; - ivory-header = import ./deps/ivory-header/cross.nix { inherit crossenv; }; - ca-header = import ./deps/ca-header/cross.nix { inherit crossenv; }; -} diff --git a/nix/default.nix b/nix/default.nix new file mode 100644 index 000000000..38611d9f7 --- /dev/null +++ b/nix/default.nix @@ -0,0 +1,56 @@ +# The build system where packages will be _built_. +{ system ? 
builtins.currentSystem + # The host system where packages will _run_. +, crossSystem ? null + # Additional sources.json overrides. +, sources ? { } + # Additional nixpkgs.config overrides. +, config ? { } + # Additional nixpkgs.overlays. +, overlays ? [ ] + # Overlays to apply to the last package set in cross compilation. +, crossOverlays ? [ ] }: + +let + + sourcesFinal = import ./sources.nix { inherit pkgs; } // sources; + + haskellNix = import sourcesFinal."haskell.nix" { + sourcesOverride = { + hackage = sourcesFinal."hackage.nix"; + stackage = sourcesFinal."stackage.nix"; + }; + }; + + configFinal = haskellNix.config // config; + + overlaysFinal = haskellNix.overlays ++ [ + (_final: prev: { + # Add top-level .sources attribute for other overlays to access sources. + sources = sourcesFinal; + + # Additional non-convential package/exe mappings for shellFor.tools. + haskell-nix = prev.haskell-nix // { + toolPackageName = prev.haskell-nix.toolPackageName // { + shellcheck = "ShellCheck"; + }; + }; + }) + + # General unguarded (native) overrides for nixpkgs. + (import ./overlays/native.nix) + + # Specific overrides guarded by the host platform. + (import ./overlays/musl.nix) + ] ++ overlays; + + pkgs = import sourcesFinal.nixpkgs { + inherit system crossSystem crossOverlays; + + config = configFinal; + overlays = overlaysFinal; + }; + +in pkgs // { + pkgsStatic = pkgs.pkgsStatic.extend (import ./overlays/static.nix); +} diff --git a/nix/deps-env.nix b/nix/deps-env.nix deleted file mode 100644 index 6f7357b4f..000000000 --- a/nix/deps-env.nix +++ /dev/null @@ -1,31 +0,0 @@ -let - - pkgs = import ./nixpkgs.nix; - tlon = import ./pkgs { pkgs=pkgs; }; - deps = import ./deps { pkgs=pkgs; }; - - tools = - with pkgs; - [ cargo rustc meson ninja pkgconfig libtool gdb ]; - - libs = - with pkgs; - [ openssl curl gmp scrypt libsigsegv openssl zlib lmdb ]; - - osx = - with pkgs; - lib.optionals stdenv.isDarwin ( - with darwin.apple_sdk.frameworks; - [ Cocoa CoreServices ]); - - vendor = - with deps; - [ argon2 ed25519 h2o murmur3 scrypt secp256k1 softfloat3 uv ent ge-additions ivory-header ca-header ]; - -in - -pkgs.stdenv.mkDerivation rec { - name = "urbit-deps-env"; - env = pkgs.buildEnv { name = name; paths = buildInputs; }; - buildInputs = tools ++ libs ++ osx ++ vendor; -} diff --git a/nix/deps/argon2/builder.sh b/nix/deps/argon2/builder.sh deleted file mode 100644 index 9430b939f..000000000 --- a/nix/deps/argon2/builder.sh +++ /dev/null @@ -1,14 +0,0 @@ -source $stdenv/setup - -cp -r $src ./src -chmod -R a+w ./src -cd ./src - -sed -i 's|ar rcs|${AR} rcs|' Makefile - -make libargon2.a -j4 - -mkdir -p $out/{lib,include} -cp libargon2.a $out/lib -cp include/argon2.h $out/include -cp ./src/blake2/*.h $out/include diff --git a/nix/deps/argon2/cross.nix b/nix/deps/argon2/cross.nix deleted file mode 100644 index f7776d03e..000000000 --- a/nix/deps/argon2/cross.nix +++ /dev/null @@ -1,17 +0,0 @@ -{ crossenv }: - -crossenv.make_derivation rec { - name = "argon2-4da94"; - builder = ./builder.sh; - - CC = "${crossenv.host}-gcc"; - AR = "${crossenv.host}-ar"; - NO_THREADS = true; - - src = crossenv.nixpkgs.fetchFromGitHub { - owner = "urbit"; - repo = "argon2"; - rev = "4da94a611ee62bad87ab2b131ffda3bcc0723d9c"; - sha256 = "0bqq1hg367l4jkb6cqhxlblpvdbwz3l586qsfakwzfd9wdvnm3yc"; - }; -} diff --git a/nix/deps/argon2/default.nix b/nix/deps/argon2/default.nix deleted file mode 100644 index e101ca696..000000000 --- a/nix/deps/argon2/default.nix +++ /dev/null @@ -1,13 +0,0 @@ -{ pkgs }: - 
-pkgs.stdenv.mkDerivation rec { - name = "argon2-4da94"; - builder = ./builder.sh; - NO_THREADS = true; - src = pkgs.fetchFromGitHub { - owner = "urbit"; - repo = "argon2"; - rev = "4da94a611ee62bad87ab2b131ffda3bcc0723d9c"; - sha256 = "0bqq1hg367l4jkb6cqhxlblpvdbwz3l586qsfakwzfd9wdvnm3yc"; - }; -} diff --git a/nix/deps/ca-header/builder.sh b/nix/deps/ca-header/builder.sh deleted file mode 100755 index 413a515ec..000000000 --- a/nix/deps/ca-header/builder.sh +++ /dev/null @@ -1,27 +0,0 @@ -source $stdenv/setup - -set -ex - -cleanup () { - echo "done" -} - -trap cleanup EXIT - - -if ! [ -f "$SSL_CERT_FILE" ]; then - echo "$SSL_CERT_FILE doesn't exist" - exit 1 -fi - -mkdir -p ./include - -cat $SSL_CERT_FILE > include/ca-bundle.crt -xxd -i include/ca-bundle.crt > ca-bundle.h - -mkdir -p $out/include - -mv ca-bundle.h $out/include -rm -rf ./include - -set +x diff --git a/nix/deps/ca-header/cross.nix b/nix/deps/ca-header/cross.nix deleted file mode 100644 index 2595eb01a..000000000 --- a/nix/deps/ca-header/cross.nix +++ /dev/null @@ -1,8 +0,0 @@ -{ crossenv }: - -crossenv.make_derivation rec { - name = "ca-bundle.h"; - builder = ./builder.sh; - native_inputs = with crossenv.nixpkgs; [ cacert xxd ]; - SSL_CERT_FILE = "${crossenv.nixpkgs.cacert}/etc/ssl/certs/ca-bundle.crt"; -} diff --git a/nix/deps/ca-header/default.nix b/nix/deps/ca-header/default.nix deleted file mode 100644 index 6e65f1454..000000000 --- a/nix/deps/ca-header/default.nix +++ /dev/null @@ -1,7 +0,0 @@ -{ pkgs }: - -pkgs.stdenv.mkDerivation { - name = "ca-bundle.h"; - builder = ./builder.sh; - nativeBuildInputs = with pkgs; [ cacert xxd ]; -} diff --git a/nix/deps/default.nix b/nix/deps/default.nix deleted file mode 100644 index 8f44e7501..000000000 --- a/nix/deps/default.nix +++ /dev/null @@ -1,14 +0,0 @@ -{ pkgs ? 
import ../nixpkgs.nix }: - -rec { - argon2 = import ./argon2 { inherit pkgs; }; - murmur3 = import ./murmur3 { inherit pkgs; }; - uv = import ./uv { inherit pkgs; }; - ed25519 = import ./ed25519 { inherit pkgs; }; - scrypt = import ./scrypt { inherit pkgs; }; - softfloat3 = import ./softfloat3 { inherit pkgs; }; - secp256k1 = import ./secp256k1 { inherit pkgs; }; - h2o = import ./h2o { inherit pkgs uv; }; - ivory-header = import ./ivory-header { inherit pkgs; }; - ca-header = import ./ca-header { inherit pkgs; }; -} diff --git a/nix/deps/ed25519/builder.sh b/nix/deps/ed25519/builder.sh deleted file mode 100644 index e37423356..000000000 --- a/nix/deps/ed25519/builder.sh +++ /dev/null @@ -1,29 +0,0 @@ -source $stdenv/setup - -sources=" \ - $src/src/add_scalar.c \ - $src/src/seed.c \ - $src/src/verify.c \ - $src/src/add_scalar.c \ - $src/src/sha512.c \ - $src/src/ge.c \ - $src/src/fe.c \ - $src/src/keypair.c \ - $src/src/sign.c \ - $src/src/sc.c \ - $src/src/key_exchange.c \ -" - -CFLAGS="-O3 -Wall -I$src/src" - -for fn in $sources -do echo $CC $CFLAGS -c $fn -o $(basename $fn).o - $CC -O3 -Wall -I$src/src -c $fn -o $(basename $fn).o -done - -mkdir -p $out/{lib,include} - -$AR rcs $out/lib/libed25519.a *.o -echo $AR rcs $out/lib/libed25519.a *.o - -cp $src/src/*.h $out/include diff --git a/nix/deps/ed25519/cross.nix b/nix/deps/ed25519/cross.nix deleted file mode 100644 index 76ae2a5d5..000000000 --- a/nix/deps/ed25519/cross.nix +++ /dev/null @@ -1,16 +0,0 @@ -{ crossenv }: - -crossenv.make_derivation rec { - name = "ed25519-76385"; - builder = ./builder.sh; - - CC = "${crossenv.host}-gcc"; - AR = "${crossenv.host}-ar"; - - src = crossenv.nixpkgs.fetchFromGitHub { - owner = "urbit"; - repo = "ed25519"; - rev = "76385f2ebbbc9580a9c236952d68d11d73a6135c"; - sha256 = "0s1spif4s9lgcwcny3fl2fvpbw6acqn3s8r6qxnrmkd9icgyw4cp"; - }; -} diff --git a/nix/deps/ed25519/default.nix b/nix/deps/ed25519/default.nix deleted file mode 100644 index 4d4ee42c8..000000000 --- a/nix/deps/ed25519/default.nix +++ /dev/null @@ -1,12 +0,0 @@ -{ pkgs }: - -pkgs.stdenv.mkDerivation rec { - name = "ed25519-76385"; - builder = ./builder.sh; - src = pkgs.fetchFromGitHub { - owner = "urbit"; - repo = "ed25519"; - rev = "76385f2ebbbc9580a9c236952d68d11d73a6135c"; - sha256 = "0s1spif4s9lgcwcny3fl2fvpbw6acqn3s8r6qxnrmkd9icgyw4cp"; - }; -} diff --git a/nix/deps/h2o/builder.sh b/nix/deps/h2o/builder.sh deleted file mode 100644 index 4c5c71723..000000000 --- a/nix/deps/h2o/builder.sh +++ /dev/null @@ -1,109 +0,0 @@ -source $stdenv/setup - -sources=" \ - deps/cloexec/cloexec.c \ - deps/libgkc/gkc.c \ - deps/libyrmcds/close.c \ - deps/libyrmcds/connect.c \ - deps/libyrmcds/recv.c \ - deps/libyrmcds/send.c \ - deps/libyrmcds/send_text.c \ - deps/libyrmcds/socket.c \ - deps/libyrmcds/strerror.c \ - deps/libyrmcds/text_mode.c \ - deps/picohttpparser/picohttpparser.c \ - lib/common/cache.c \ - lib/common/file.c \ - lib/common/filecache.c \ - lib/common/hostinfo.c \ - lib/common/http1client.c \ - lib/common/memcached.c \ - lib/common/memory.c \ - lib/common/multithread.c \ - lib/common/serverutil.c \ - lib/common/socket.c \ - lib/common/socketpool.c \ - lib/common/string.c \ - lib/common/time.c \ - lib/common/timeout.c \ - lib/common/url.c \ - lib/core/config.c \ - lib/core/configurator.c \ - lib/core/context.c \ - lib/core/headers.c \ - lib/core/logconf.c \ - lib/core/proxy.c \ - lib/core/request.c \ - lib/core/token.c \ - lib/core/util.c \ - lib/handler/access_log.c \ - lib/handler/chunked.c \ - lib/handler/compress.c \ - 
lib/handler/compress/gzip.c \ - lib/handler/errordoc.c \ - lib/handler/expires.c \ - lib/handler/fastcgi.c \ - lib/handler/file.c \ - lib/handler/headers.c \ - lib/handler/mimemap.c \ - lib/handler/proxy.c \ - lib/handler/redirect.c \ - lib/handler/reproxy.c \ - lib/handler/throttle_resp.c \ - lib/handler/status.c \ - lib/handler/headers_util.c \ - lib/handler/status/events.c \ - lib/handler/status/requests.c \ - lib/handler/http2_debug_state.c \ - lib/handler/status/durations.c \ - lib/handler/configurator/access_log.c \ - lib/handler/configurator/compress.c \ - lib/handler/configurator/errordoc.c \ - lib/handler/configurator/expires.c \ - lib/handler/configurator/fastcgi.c \ - lib/handler/configurator/file.c \ - lib/handler/configurator/headers.c \ - lib/handler/configurator/proxy.c \ - lib/handler/configurator/redirect.c \ - lib/handler/configurator/reproxy.c \ - lib/handler/configurator/throttle_resp.c \ - lib/handler/configurator/status.c \ - lib/handler/configurator/http2_debug_state.c \ - lib/handler/configurator/headers_util.c \ - lib/http1.c \ - lib/tunnel.c \ - lib/http2/cache_digests.c \ - lib/http2/casper.c \ - lib/http2/connection.c \ - lib/http2/frame.c \ - lib/http2/hpack.c \ - lib/http2/scheduler.c \ - lib/http2/stream.c \ - lib/http2/http2_debug_state.c \ -" - -CFLAGS=" \ - -O3 \ - -Wall -Wno-unused-value -Wno-unused-function \ - -I$src/include \ - -I$src/deps/cloexec \ - -I$src/deps/brotli/enc \ - -I$src/deps/golombset \ - -I$src/deps/libgkc \ - -I$src/deps/libyrmcds \ - -I$src/deps/klib \ - -I$src/deps/neverbleed \ - -I$src/deps/picohttpparser \ - -I$src/deps/picotest \ - -I$src/deps/yaml/include \ - -I$src/deps/yoml -" - -for s in $sources -do cc $CFLAGS -c $src/$s -o $(sed 's|/|_|g; s/.c$/.o/' <<< $s) -done - -mkdir -p $out/{lib,include} -ar rcs $out/lib/libh2o.a *.o -cp -r $src/include/* $out/include -cp $src/deps/picohttpparser/picohttpparser.h $out/include diff --git a/nix/deps/h2o/cross.nix b/nix/deps/h2o/cross.nix deleted file mode 100644 index 49b6ce3de..000000000 --- a/nix/deps/h2o/cross.nix +++ /dev/null @@ -1,17 +0,0 @@ -{ crossenv, uv }: - -crossenv.make_derivation rec { - inherit (crossenv) openssl zlib; - inherit uv; - - name = "h2o-0ed9a"; - cross_inputs = [ uv crossenv.openssl crossenv.zlib ]; - builder = ./cross.sh; - - src = crossenv.nixpkgs.fetchFromGitHub { - owner = "urbit"; - repo = "h2o"; - rev = "0ed9ac70757a16ec45f91b8a347850d9699c3fb1"; - sha256 = "16b5zbwdq371hhqga76dh7x4c0qr3xb5ah9r8hnm6rip460p6xpm"; - }; -} diff --git a/nix/deps/h2o/cross.sh b/nix/deps/h2o/cross.sh deleted file mode 100644 index e91314996..000000000 --- a/nix/deps/h2o/cross.sh +++ /dev/null @@ -1,26 +0,0 @@ -source $stdenv/setup - -cp -r $src src -chmod -R u+w src -cd src - -cmake-cross . 
\ - -DZLIB_LIBRARY=$zlib/lib/libz.a \ - -DZLIB_INCLUDE_DIR=$zlib/include \ - -DCMAKE_INSTALL_PREFIX=$out \ - -DBUILD_SHARED_LIBS=off \ - -DWITH_MRUBY=off \ - -DWITH_BUNDLED_SSL=off \ - -DWITH_PICOTLS=on - -make libh2o - -mkdir -p $out/{lib,lib/pkgconfig,include} - -cp ./libh2o.a $out/lib - -cp ./libh2o.pc $out/lib/pkgconfig - -cp -r include/* $out/include - -cp deps/picohttpparser/picohttpparser.h $out/include diff --git a/nix/deps/h2o/default.nix b/nix/deps/h2o/default.nix deleted file mode 100644 index 101edbeb7..000000000 --- a/nix/deps/h2o/default.nix +++ /dev/null @@ -1,13 +0,0 @@ -{ pkgs, uv }: - -pkgs.stdenv.mkDerivation rec { - name = "h2o-0ed9a"; - buildInputs = [ uv pkgs.openssl pkgs.zlib ]; - builder = ./builder.sh; - src = pkgs.fetchFromGitHub { - owner = "urbit"; - repo = "h2o"; - rev = "0ed9ac70757a16ec45f91b8a347850d9699c3fb1"; - sha256 = "16b5zbwdq371hhqga76dh7x4c0qr3xb5ah9r8hnm6rip460p6xpm"; - }; -} diff --git a/nix/deps/ivory-header/builder.sh b/nix/deps/ivory-header/builder.sh deleted file mode 100755 index 225ed8a60..000000000 --- a/nix/deps/ivory-header/builder.sh +++ /dev/null @@ -1,35 +0,0 @@ -source $stdenv/setup - -set -e - -if ! [ -f "$IVORY" ]; then - echo "$IVORY doesn't exist" - exit 1 -fi - -# -# heuristics to confirm the ivory pill is valid -# - -# first 7 bytes != "version" (start of an lfs pointer) -# -if [ "$(head -c 7 "$IVORY")" = "version" ]; then - echo "$IVORY is an LFS pointer (it starts with 'version')" - echo "to fix, run: git lfs install" - exit 1 -fi - -# greater than 10KB -# -if ! [ $(du -k "$IVORY" | cut -f1) -gt 10 ]; then - echo "$IVORY is less than 10KB" - exit 1 -fi - -cat $IVORY > u3_Ivory.pill -xxd -i u3_Ivory.pill > ivory.h - -mkdir -p $out/include - -mv ivory.h $out/include -rm u3_Ivory.pill diff --git a/nix/deps/ivory-header/cross.nix b/nix/deps/ivory-header/cross.nix deleted file mode 100644 index 6939a84de..000000000 --- a/nix/deps/ivory-header/cross.nix +++ /dev/null @@ -1,11 +0,0 @@ -{ - crossenv, - ivory ? ../../../bin/ivory.pill -}: - -crossenv.make_derivation rec { - name = "ivory.h"; - builder = ./builder.sh; - native_inputs = with crossenv.nixpkgs; [ xxd ]; - IVORY = ivory; -} diff --git a/nix/deps/ivory-header/default.nix b/nix/deps/ivory-header/default.nix deleted file mode 100644 index 0e461a7ca..000000000 --- a/nix/deps/ivory-header/default.nix +++ /dev/null @@ -1,11 +0,0 @@ -{ - pkgs, - ivory ? 
../../../bin/ivory.pill -}: - -pkgs.stdenv.mkDerivation { - name = "ivory.h"; - builder = ./builder.sh; - nativeBuildInputs = with pkgs; [ xxd ]; - IVORY = ivory; -} diff --git a/nix/deps/murmur3/builder.sh b/nix/deps/murmur3/builder.sh deleted file mode 100644 index 9afd6918a..000000000 --- a/nix/deps/murmur3/builder.sh +++ /dev/null @@ -1,11 +0,0 @@ -source $stdenv/setup - -echo $CC -fPIC -O3 -o murmur3.o -c $src/murmur3.c -$CC -fPIC -O3 -o murmur3.o -c $src/murmur3.c - -mkdir -p $out/{lib,include} - -echo $AR rcs $out/lib/libmurmur3.a murmur3.o -$AR rcs $out/lib/libmurmur3.a murmur3.o - -cp $src/murmur3.h $out/include diff --git a/nix/deps/murmur3/cross.nix b/nix/deps/murmur3/cross.nix deleted file mode 100644 index 024f58656..000000000 --- a/nix/deps/murmur3/cross.nix +++ /dev/null @@ -1,16 +0,0 @@ -{ crossenv }: - -crossenv.make_derivation rec { - name = "murmur3-71a75"; - builder = ./builder.sh; - - CC = "${crossenv.host}-gcc"; - AR = "${crossenv.host}-ar"; - - src = crossenv.nixpkgs.fetchFromGitHub { - owner = "urbit"; - repo = "murmur3"; - rev = "71a75d57ca4e7ca0f7fc2fd84abd93595b0624ca"; - sha256 = "0k7jq2nb4ad9ajkr6wc4w2yy2f2hkwm3nkbj2pklqgwsg6flxzwg"; - }; -} diff --git a/nix/deps/murmur3/default.nix b/nix/deps/murmur3/default.nix deleted file mode 100644 index 01c0ae9cf..000000000 --- a/nix/deps/murmur3/default.nix +++ /dev/null @@ -1,12 +0,0 @@ -{ pkgs }: - -pkgs.stdenv.mkDerivation rec { - name = "murmur3-71a75"; - builder = ./builder.sh; - src = pkgs.fetchFromGitHub { - owner = "urbit"; - repo = "murmur3"; - rev = "71a75d57ca4e7ca0f7fc2fd84abd93595b0624ca"; - sha256 = "0k7jq2nb4ad9ajkr6wc4w2yy2f2hkwm3nkbj2pklqgwsg6flxzwg"; - }; -} diff --git a/nix/deps/scrypt/builder.sh b/nix/deps/scrypt/builder.sh deleted file mode 100644 index 94e03d031..000000000 --- a/nix/deps/scrypt/builder.sh +++ /dev/null @@ -1,27 +0,0 @@ -source $stdenv/setup - -sources=" \ - crypto_scrypt-check \ - crypto_scrypt-hash \ - crypto_scrypt-hexconvert \ - crypto_scrypt-nosse \ - crypto-mcf \ - crypto-scrypt-saltgen \ - slowequals \ - sha256 \ - b64 \ -" - -CFLAGS="-I$src -Wall -ffast-math -O3 -D_FORTIFY_SOURCE=2 -fstack-protector" - -for s in $sources -do echo $CC $CFLAGS -c $src/$s.c -o $s.o - $CC $CFLAGS -c $src/$s.c -o $s.o -done - -echo $AR rcs libscrypt.a *.o -$AR rcs libscrypt.a *.o - -mkdir -p $out/{lib,include} -cp libscrypt.a $out/lib -cp $src/*.h $out/include diff --git a/nix/deps/scrypt/cross.nix b/nix/deps/scrypt/cross.nix deleted file mode 100644 index 9a74e5b65..000000000 --- a/nix/deps/scrypt/cross.nix +++ /dev/null @@ -1,16 +0,0 @@ -{ crossenv }: - -crossenv.make_derivation rec { - name = "scrypt-02969"; - builder = ./builder.sh; - - CC = "${crossenv.host}-gcc"; - AR = "${crossenv.host}-ar"; - - src = crossenv.nixpkgs.fetchFromGitHub { - owner = "urbit"; - repo = "libscrypt"; - rev = "029693ff1cbe4f69d3a2da87d0f4f034f92cc0c2"; - sha256 = "17pcxypzjmmrvacw45cacvibm6mlr9ip30hy30l1appsnywx679n"; - }; -} diff --git a/nix/deps/scrypt/default.nix b/nix/deps/scrypt/default.nix deleted file mode 100644 index a0740f9e1..000000000 --- a/nix/deps/scrypt/default.nix +++ /dev/null @@ -1,12 +0,0 @@ -{ pkgs }: - -pkgs.stdenv.mkDerivation rec { - name = "scrypt-02969"; - builder = ./builder.sh; - src = pkgs.fetchFromGitHub { - owner = "urbit"; - repo = "libscrypt"; - rev = "029693ff1cbe4f69d3a2da87d0f4f034f92cc0c2"; - sha256 = "17pcxypzjmmrvacw45cacvibm6mlr9ip30hy30l1appsnywx679n"; - }; -} diff --git a/nix/deps/secp256k1/builder.sh b/nix/deps/secp256k1/builder.sh deleted file mode 100644 index 
9e5a11415..000000000 --- a/nix/deps/secp256k1/builder.sh +++ /dev/null @@ -1,11 +0,0 @@ -source $stdenv/setup - -cp -r $src ./src -chmod -R u+w ./src -cd src - -libtoolize -bash ./autogen.sh -bash ./configure --prefix=$out --host=$host $configureFlags -make -make install diff --git a/nix/deps/secp256k1/cross.nix b/nix/deps/secp256k1/cross.nix deleted file mode 100644 index 67ac2379d..000000000 --- a/nix/deps/secp256k1/cross.nix +++ /dev/null @@ -1,25 +0,0 @@ -{ crossenv }: - -crossenv.make_derivation rec { - name = "secp256k1-b4e87"; - builder = ./builder.sh; - - CFLAGS = "-fPIC"; - - configureFlags = [ - "--disable-shared" - "--enable-module-recovery" - ]; - - cross_inputs = [ crossenv.libgmp ]; - native_inputs = - with crossenv.nixpkgs; - [ autoconf automake libtool m4 ]; - - src = crossenv.nixpkgs.fetchFromGitHub { - owner = "bitcoin-core"; - repo = "secp256k1"; - rev = "e34ceb333b1c0e6f4115ecbb80c632ac1042fa49"; - sha256 = "0as78s179hcr3ysk3fw98k5wzabgnwri7vkkc17wg31lyz6ids6c"; - }; -} diff --git a/nix/deps/secp256k1/default.nix b/nix/deps/secp256k1/default.nix deleted file mode 100644 index 438b2dc25..000000000 --- a/nix/deps/secp256k1/default.nix +++ /dev/null @@ -1,25 +0,0 @@ -{ pkgs }: - -pkgs.stdenv.mkDerivation rec { - name = "secp256k1-b4e87"; - builder = ./builder.sh; - - CFLAGS = "-fPIC"; - - configureFlags = [ - "--disable-shared" - "--enable-module-recovery" - ]; - - buildInputs = [ pkgs.gmp ]; - nativeBuildInputs = - with pkgs; - [ autoconf automake libtool m4 ]; - - src = pkgs.fetchFromGitHub { - owner = "bitcoin-core"; - repo = "secp256k1"; - rev = "e34ceb333b1c0e6f4115ecbb80c632ac1042fa49"; - sha256 = "0as78s179hcr3ysk3fw98k5wzabgnwri7vkkc17wg31lyz6ids6c"; - }; -} diff --git a/nix/deps/softfloat3/builder.sh b/nix/deps/softfloat3/builder.sh deleted file mode 100644 index 6a8efbaf8..000000000 --- a/nix/deps/softfloat3/builder.sh +++ /dev/null @@ -1,15 +0,0 @@ -source $stdenv/setup - -cp -r $src $TMP/$name -chmod -R u+w $TMP/$name -cd $TMP/$name - -cd ./build/Linux-386-SSE2-GCC -sed -i 's|gcc|$(CC)|' Makefile -sed -i 's/ar crs/$(AR) crs/' Makefile - -make -j4 - -mkdir -p $out/{lib,include} -cp $src/source/include/*.h $out/include -cp softfloat.a $out/lib/libsoftfloat3.a diff --git a/nix/deps/softfloat3/cross.nix b/nix/deps/softfloat3/cross.nix deleted file mode 100644 index 9d76c7e69..000000000 --- a/nix/deps/softfloat3/cross.nix +++ /dev/null @@ -1,16 +0,0 @@ -{ crossenv }: - -crossenv.make_derivation rec { - name = "softfloat3-ec4c7"; - builder = ./builder.sh; - - CC = "${crossenv.host}-gcc"; - AR = "${crossenv.host}-ar"; - - src = crossenv.nixpkgs.fetchFromGitHub { - owner = "urbit"; - repo = "berkeley-softfloat-3"; - rev = "ec4c7e31b32e07aad80e52f65ff46ac6d6aad986"; - sha256 = "1lz4bazbf7lns1xh8aam19c814a4n4czq5xsq5rmi9sgqw910339"; - }; -} diff --git a/nix/deps/softfloat3/default.nix b/nix/deps/softfloat3/default.nix deleted file mode 100644 index c101b28c2..000000000 --- a/nix/deps/softfloat3/default.nix +++ /dev/null @@ -1,12 +0,0 @@ -{ pkgs }: - -pkgs.stdenv.mkDerivation rec { - name = "softfloat3-ec4c7"; - builder = ./builder.sh; - src = pkgs.fetchFromGitHub { - owner = "urbit"; - repo = "berkeley-softfloat-3"; - rev = "ec4c7e31b32e07aad80e52f65ff46ac6d6aad986"; - sha256 = "1lz4bazbf7lns1xh8aam19c814a4n4czq5xsq5rmi9sgqw910339"; - }; -} diff --git a/nix/deps/uv/builder.sh b/nix/deps/uv/builder.sh deleted file mode 100644 index d9f0ff42c..000000000 --- a/nix/deps/uv/builder.sh +++ /dev/null @@ -1,9 +0,0 @@ -source $stdenv/setup - -cp -r $src ./src -chmod -R a+w ./src 
-cd ./src - -LIBTOOLIZE=libtoolize ./autogen.sh -bash ./configure --prefix=$out --host=$host $configureFlags -make install diff --git a/nix/deps/uv/cross.nix b/nix/deps/uv/cross.nix deleted file mode 100644 index 4e81e4bd7..000000000 --- a/nix/deps/uv/cross.nix +++ /dev/null @@ -1,17 +0,0 @@ -{ crossenv }: - -crossenv.make_derivation rec { - name = "uv-64294"; - native_inputs = with crossenv.nixpkgs; [ autoconf automake libtool m4 ]; - builder = ./builder.sh; - - configureFlags = [ "--disable-shared" ]; - CFLAGS = "-fPIC"; - - src = crossenv.nixpkgs.fetchFromGitHub { - owner = "urbit"; - repo = "libuv"; - rev = "6429495dc9a80aaf1c243038b381451f12bc7dcf"; - sha256 = "07m2m4v9mds0wihzjxjwswwfj3rnk2ycr3vgwfcrvnb5xjz7rs15"; - }; -} diff --git a/nix/deps/uv/default.nix b/nix/deps/uv/default.nix deleted file mode 100644 index ecf8383d2..000000000 --- a/nix/deps/uv/default.nix +++ /dev/null @@ -1,27 +0,0 @@ -{ pkgs }: - -let - - osx = - with pkgs; - lib.optionals stdenv.isDarwin ( - with darwin.apple_sdk.frameworks; - [ Cocoa CoreServices ]); - -in - -pkgs.stdenv.mkDerivation rec { - name = "uv-64294"; - buildInputs = osx ++ (with pkgs; [ autoconf automake libtool m4 ]); - builder = ./builder.sh; - - CFLAGS = "-fPIC"; - configureFlags = [ "--disable-shared" ]; - - src = pkgs.fetchFromGitHub { - owner = "urbit"; - repo = "libuv"; - rev = "6429495dc9a80aaf1c243038b381451f12bc7dcf"; - sha256 = "07m2m4v9mds0wihzjxjwswwfj3rnk2ycr3vgwfcrvnb5xjz7rs15"; - }; -} diff --git a/nix/lib/boot-fake-ship.nix b/nix/lib/boot-fake-ship.nix new file mode 100644 index 000000000..610f9f0e7 --- /dev/null +++ b/nix/lib/boot-fake-ship.nix @@ -0,0 +1,54 @@ +{ lib, stdenvNoCC, cacert }: + +{ urbit, herb, arvo ? null, pill, ship, arguments ? [ "-l" ] }: + +let + + args = arguments ++ [ "-d" "-F" "${ship}" "-B" "${pill}" ] + ++ lib.optionals (arvo != null) [ "-A" "${arvo}" ]; + +in stdenvNoCC.mkDerivation { + name = "fake-${ship}"; + + buildInputs = [ cacert urbit herb ]; + + phases = [ "buildPhase" "installPhase " ]; + + buildPhase = '' + if ! [ -f "$SSL_CERT_FILE" ]; then + header "$SSL_CERT_FILE doesn't exist" + exit 1 + fi + + set -xeuo pipefail + + urbit ${lib.concatStringsSep " " args} ./pier + + cleanup () { + if [ -f ./pier/.vere.lock ]; then + kill $(< ./pier/.vere.lock) || true + fi + + set +x + } + + trap cleanup EXIT + + check () { + [ 3 -eq "$(herb ./pier -d 3)" ] + } + + if check && sleep 10 && check; then + header "boot success" + herb ./pier -p hood -d '+hood/exit' + else + header "boot failure" + kill $(< ./pier/.vere.lock) || true + exit 1 + fi + ''; + + installPhase = '' + mv ./pier $out + ''; +} diff --git a/nix/lib/default.nix b/nix/lib/default.nix new file mode 100644 index 000000000..3d70c745c --- /dev/null +++ b/nix/lib/default.nix @@ -0,0 +1,37 @@ +# Functions that are expected run on the native (non-cross) system. + +{ lib, recurseIntoAttrs, haskell-nix, callPackage }: + +rec { + bootFakeShip = callPackage ./boot-fake-ship.nix { }; + + testFakeShip = callPackage ./test-fake-ship.nix { inherit bootFakeShip; }; + + fetchGitHubLFS = callPackage ./fetch-github-lfs.nix { }; + + makeReleaseTarball = callPackage ./make-release-tarball.nix { }; + + collectHaskellComponents = project: + let + + # These functions pull out from the Haskell project either all the + # components of a particular type, or all the checks. 
+ + pkgs = haskell-nix.haskellLib.selectProjectPackages project; + + collectChecks = _: + recurseIntoAttrs (builtins.mapAttrs (_: p: p.checks) pkgs); + + collectComponents = type: + haskell-nix.haskellLib.collectComponents' type pkgs; + + # Recompute the Haskell package set sliced by component type + in builtins.mapAttrs (type: f: f type) { + # These names must match the subcomponent: components..<...> + "library" = collectComponents; + "tests" = collectComponents; + "benchmarks" = collectComponents; + "exes" = collectComponents; + "checks" = collectChecks; + }; +} diff --git a/nix/lib/fetch-github-lfs.nix b/nix/lib/fetch-github-lfs.nix new file mode 100644 index 000000000..ecffd76b9 --- /dev/null +++ b/nix/lib/fetch-github-lfs.nix @@ -0,0 +1,107 @@ +{ lib, stdenvNoCC, runCommandLocal, cacert, curl, jq }: + +{ src +# `name` shouldn't use `baseNameOf` otherwise we'll +# get `is not allowed to refer to a store path` errors. +, name ? baseNameOf src, owner ? "urbit", repo ? "urbit" +, preferLocalBuild ? true }: + +assert builtins.isPath src; + +let + + # Parse the first 7 characters of the supplied `src` path for the required + # `version` key as defined by the lfs specification: + # https://github.com/git-lfs/git-lfs/blob/master/docs/spec.md + # + # If `version` exists we assume we're dealing with an lfs pointer and parse + # the `oid` and `size` from the pointer and write these into a JSON object. + # + # If the first 7 characters are unrecognised we assume the path is a binary + # file and set both `oid` and `size` to `null`. + # + # The `oid` and `size` are then JSON decoded into an expression to use + # as the fixed-output derivation's `sha256 = oid`, and to form a download + # operation payload to request the actual lfs blob's real url. + pointer = builtins.fromJSON (builtins.readFile + (runCommandLocal "lfs-pointer-${name}" { } '' + oid="null" + size="null" + + if [[ "$(head -c 7 "${src}")" != "version" ]]; then + header "lfs ${src} is a binary blob, skipping" + else + header "reading lfs pointer from ${src}" + + contents=($(awk '{print $2}' "${src}")) + oid="''${contents[1]#sha256:}" + size="''${contents[2]}" + fi + + cat <<EOF > "$out" + {"oid": "$oid", "size": $size} + EOF + '')); + + downloadUrl = + "https://github.com/${owner}/${repo}.git/info/lfs/objects/batch"; + + # Encode `oid` and `size` into a download operation per: + # https://github.com/git-lfs/git-lfs/blob/master/docs/api/batch.md + # + # This is done using toJSON to avoid bash quotation issues. + downloadPayload = builtins.toJSON { + operation = "download"; + objects = [ pointer ]; + }; + + # Define a fixed-output derivation using the lfs pointer's `oid` as the + # expected sha256 output hash, if `oid` is not null. + # + + # 1. Request the actual url of the binary file from the lfs batch api. + # 2. Download the binary file contents to `$out`. + download = stdenvNoCC.mkDerivation { + name = "lfs-blob-${name}"; + nativeBuildInputs = [ curl jq ]; + phases = [ "installPhase" ]; + installPhase = '' + curl=( + curl + --location + --max-redirs 20 + --retry 3 + --disable-epsv + --cookie-jar cookies + $NIX_CURL_FLAGS + ) + + header "reading lfs metadata from ${downloadUrl}" + + href=$("''${curl[@]}" \ + -d '${downloadPayload}' \ + -H 'Accept: application/vnd.git-lfs+json' \ + '${downloadUrl}' \ + | jq -r '.objects[0].actions.download.href') + + header "download lfs data from remote" + + # Warning: the href contains credential and signature information, + # so we avoid echoing it to stdout/err.
+ "''${curl[@]}" -s --output "$out" "$href" + ''; + + impureEnvVars = stdenvNoCC.lib.fetchers.proxyImpureEnvVars; + + SSL_CERT_FILE = "${cacert}/etc/ssl/certs/ca-bundle.crt"; + + outputHashAlgo = "sha256"; + outputHashMode = "flat"; + outputHash = pointer.oid; + + inherit preferLocalBuild; + }; + + # If `pointer.oid` is null then supplied the `src` must be a binary + # blob and can be returned directly. +in if pointer.oid == null || pointer.size == null then src else download diff --git a/nix/lib/make-release-tarball.nix b/nix/lib/make-release-tarball.nix new file mode 100644 index 000000000..48ba1865d --- /dev/null +++ b/nix/lib/make-release-tarball.nix @@ -0,0 +1,30 @@ +{ lib, stdenvNoCC, coreutils }: + +{ name, extension ? "tgz", contents # { target = source, ... } +}: + +let + + transforms = builtins.concatStringsSep " " (lib.mapAttrsToList + (target: source: ''--transform "s,${source},${target},"'') contents); + + sources = builtins.concatStringsSep " " + (lib.mapAttrsToList (_target: source: "${source}") contents); + +in stdenvNoCC.mkDerivation { + name = "${name}.${extension}"; + phases = [ "buildPhase" ]; + + nativeBuildInputs = [ coreutils ]; + + buildPhase = '' + tar -vczf $out \ + --owner=0 --group=0 --mode=u+rw,uga+r \ + --absolute-names \ + --hard-dereference \ + ${transforms} \ + ${sources} + ''; + + preferLocalBuild = true; +} diff --git a/nix/lib/test-fake-ship.nix b/nix/lib/test-fake-ship.nix new file mode 100644 index 000000000..133c041b8 --- /dev/null +++ b/nix/lib/test-fake-ship.nix @@ -0,0 +1,173 @@ +{ lib, stdenvNoCC, cacert, python3, bootFakeShip }: + +{ urbit, herb, arvo ? null, pill, ship ? "bus", arguments ? urbit.meta.arguments +, doCheck ? true }: + +stdenvNoCC.mkDerivation { + name = "test-${ship}"; + + src = bootFakeShip { inherit urbit herb arvo pill ship; }; + + phases = [ "unpackPhase" "buildPhase" "checkPhase" ]; + + buildInputs = [ cacert urbit herb python3 ]; + + unpackPhase = '' + cp -R $src ./pier + chmod -R u+rw ./pier + ''; + + buildPhase = '' + set -x + + urbit ${lib.concatStringsSep " " arguments} -d ./pier 2> urbit-output + + # Sledge Hammer! + # See: https://github.com/travis-ci/travis-ci/issues/4704#issuecomment-348435959 + python3 -c $'import os\n[os.set_blocking(i, True) for i in range(3)]\n' + + tail -F urbit-output >&2 & + + tailproc=$! 
+ + cleanup () { + kill $(cat ./pier/.vere.lock) || true + kill "$tailproc" 2>/dev/null || true + + set +x + } + + trap cleanup EXIT + + # measure initial memory usage + # + herb ./pier -d '~& ~ ~& %init-mass-start ~' + herb ./pier -p hood -d '+hood/mass' + herb ./pier -d '~& ~ ~& %init-mass-end ~' + + # run the unit tests + # + herb ./pier -d '~& ~ ~& %test-unit-start ~' + herb ./pier -d '####-test %/tests' + herb ./pier -d '~& ~ ~& %test-unit-end ~' + + # use the :test app to build all agents, generators, and marks + # + herb ./pier -p hood -d '+hood/start %test' + + herb ./pier -d '~& ~ ~& %test-agents-start ~' + herb ./pier -p test -d '%agents' + herb ./pier -d '~& ~ ~& %test-agents-end ~' + + herb ./pier -d '~& ~ ~& %test-generators-start ~' + herb ./pier -p test -d '%generators' + herb ./pier -d '~& ~ ~& %test-generators-end ~' + + herb ./pier -d '~& ~ ~& %test-marks-start ~' + herb ./pier -p test -d '%marks' + herb ./pier -d '~& ~ ~& %test-marks-end ~' + + # measure memory usage post tests + # + herb ./pier -d '~& ~ ~& %test-mass-start ~' + herb ./pier -p hood -d '+hood/mass' + herb ./pier -d '~& ~ ~& %test-mass-end ~' + + # defragment the loom + # + herb ./pier -d '~& ~ ~& %pack-start ~' + herb ./pier -p hood -d '+hood/pack' + herb ./pier -d '~& ~ ~& %pack-end ~' + + # reclaim space within arvo + # + herb ./pier -d '~& ~ ~& %trim-start ~' + herb ./pier -p hood -d '+hood/trim' + herb ./pier -d '~& ~ ~& %trim-end ~' + + # measure memory usage pre |meld + # + herb ./pier -d '~& ~ ~& %trim-mass-start ~' + herb ./pier -p hood -d '+hood/mass' + herb ./pier -d '~& ~ ~& %trim-mass-end ~' + + # globally deduplicate + # + herb ./pier -d '~& ~ ~& %meld-start ~' + herb ./pier -p hood -d '+hood/meld' + herb ./pier -d '~& ~ ~& %meld-end ~' + + # measure memory usage post |meld + # + herb ./pier -d '~& ~ ~& %meld-mass-start ~' + herb ./pier -p hood -d '+hood/mass' + herb ./pier -d '~& ~ ~& %meld-mass-end ~' + + herb ./pier -p hood -d '+hood/exit' + + cleanup + + # Collect output + cp urbit-output test-output-unit + cp urbit-output test-output-agents + cp urbit-output test-output-generators + cp urbit-output test-output-marks + + sed -i '0,/test-unit-start/d' test-output-unit + sed -i '/test-unit-end/,$d' test-output-unit + + sed -i '0,/test-agents-start/d' test-output-agents + sed -i '/test-agents-end/,$d' test-output-agents + + sed -i '0,/test-generators-start/d' test-output-generators + sed -i '/test-generators-end/,$d' test-output-generators + + sed -i '0,/test-marks-start/d' test-output-marks + sed -i '/test-marks-end/,$d' test-output-marks + + mkdir -p $out + + cp test-output-* $out/ + ''; + + checkPhase = '' + hdr () { + echo =====$(sed 's/./=/g' <<< "$1")===== + echo ==== $1 ==== + echo =====$(sed 's/./=/g' <<< "$1")===== + } + + for f in $(find "$out/" -type f); do + hdr "$(basename $f)" + cat "$f" + done + + fail=0 + + for f in $(find "$out/" -type f); do + if egrep "((FAILED|CRASHED)|(ford|warn):) " $f >/dev/null; then + if [[ $fail -eq 0 ]]; then + hdr "Test Failures" + fi + + echo "ERROR Test failure in $(basename $f)" + + ((fail++)) + fi + done + + if [[ $fail -eq 0 ]]; then + hdr "Success" + fi + + exit "$fail" + ''; + + inherit doCheck; + + # Fix 'bind: operation not permitted' when nix.useSandbox = true on darwin. 
+ # See https://github.com/NixOS/nix/blob/5f6840fbb49ae5b534423bd8a4360646ee93dbaf/src/libstore/build.cc#L2961 + __darwinAllowLocalNetworking = true; + + meta = { platforms = [ "x86_64-linux" ]; }; +} diff --git a/nix/nixcrpkgs.nix b/nix/nixcrpkgs.nix deleted file mode 100644 index c65bdc3d6..000000000 --- a/nix/nixcrpkgs.nix +++ /dev/null @@ -1,12 +0,0 @@ -let - - nixpkgs = import ./nixpkgs.nix; - - osx_sdk = builtins.fetchurl { - sha256 = "89aa34dfe5bcbc7d53d3c55a84b35ac810ecfbcdd16a64c9667992b0c36c60c4"; - url = "https://github.com/phracker/MacOSX-SDKs/releases/download/10.13/MacOSX10.11.sdk.tar.xz"; - }; - -in - -import ./nixcrpkgs/top.nix { inherit osx_sdk nixpkgs; } diff --git a/nix/nixcrpkgs/.gitignore b/nix/nixcrpkgs/.gitignore deleted file mode 100644 index fed8256b1..000000000 --- a/nix/nixcrpkgs/.gitignore +++ /dev/null @@ -1,4 +0,0 @@ -/result* -/support/results -macos/MacOSX10.12.sdk.tar.xz -macos/MacOSX10.13.sdk.tar.xz diff --git a/nix/nixcrpkgs/LICENSE.nixpkgs b/nix/nixcrpkgs/LICENSE.nixpkgs deleted file mode 100644 index a632d6f58..000000000 --- a/nix/nixcrpkgs/LICENSE.nixpkgs +++ /dev/null @@ -1,31 +0,0 @@ -Copyright (c) 2003-2017 Eelco Dolstra and the Nixpkgs/NixOS contributors - -Permission is hereby granted, free of charge, to any person obtaining -a copy of this software and associated documentation files (the -"Software"), to deal in the Software without restriction, including -without limitation the rights to use, copy, modify, merge, publish, -distribute, sublicense, and/or sell copies of the Software, and to -permit persons to whom the Software is furnished to do so, subject to -the following conditions: - -The above copyright notice and this permission notice shall be -included in all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. - -====================================================================== - -Note: the license above does not apply to the packages built by the -Nix Packages collection, merely to the package descriptions (i.e., Nix -expressions, build scripts, etc.). Also, the license does not apply -to some of the binaries used for bootstrapping Nixpkgs (e.g., -pkgs/stdenv/linux/tools/bash). It also might not apply to patches -included in Nixpkgs, which may be derivative works of the packages to -which they apply. The aforementioned artifacts are all covered by the -licenses of the respective packages. diff --git a/nix/nixcrpkgs/LICENSE.txt b/nix/nixcrpkgs/LICENSE.txt deleted file mode 100644 index 714d2d141..000000000 --- a/nix/nixcrpkgs/LICENSE.txt +++ /dev/null @@ -1,25 +0,0 @@ -Copyright (c) 2017 Pololu Corporation. 
For more information, see - -http://www.pololu.com/ -http://forum.pololu.com/ - -Permission is hereby granted, free of charge, to any person -obtaining a copy of this software and associated documentation -files (the "Software"), to deal in the Software without -restriction, including without limitation the rights to use, -copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the -Software is furnished to do so, subject to the following -conditions: - -The above copyright notice and this permission notice shall be -included in all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES -OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT -HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, -WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING -FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR -OTHER DEALINGS IN THE SOFTWARE. diff --git a/nix/nixcrpkgs/README.md b/nix/nixcrpkgs/README.md deleted file mode 100644 index 4e0f1f6f8..000000000 --- a/nix/nixcrpkgs/README.md +++ /dev/null @@ -1,175 +0,0 @@ -# nixcrpkgs - -[www.pololu.com](https://www.pololu.com/) - -*nixcrpkgs* is a collection of tools for cross-compiling statically-linked, -standalone software applications. With nixcrpkgs, you can specify what -platforms you want to target, what libraries and build tools you depend on, and -the commands that build your software. When you build your software, nixcrpkgs -will automatically take care of building or retrieving everything you need, -including cross-compilers and libraries. - -nixcrpkgs primarily consists of *Nix expressions*, which are recipes for -building software with [Nix, the purely functional package -manager][nix]. These recipes build on top of the [Nix -Packages collection (Nixpkgs)][nixpkgs]. - -## Features - -- Supported target platforms: - - Windows (32-bit or 64-bit) using [mingw-w64](https://mingw-w64.org/) and [GCC](https://gcc.gnu.org/) 6.3.0 - - Linux (32-bit, 64-bit, and ARM) using [musl](https://www.musl-libc.org/) and [GCC](https://gcc.gnu.org/) 6.3.0 - - macOS using [Clang](https://clang.llvm.org/) 5.0.0 -- Supported languages for cross-compiling: - - C - - C++ -- Supported build platforms: - - Linux -- Supported build tools: - - [CMake](https://cmake.org/) - - [GNU Make](https://www.gnu.org/software/make/) - - [Ninja](https://ninja-build.org/) - - pkg-config (as implemented by [pkgconf](https://github.com/pkgconf/pkgconf)) - - [GNU Bash](https://www.gnu.org/software/bash/) - - [Ruby](https://www.ruby-lang.org/) -- Notable supported libraries: - - [Qt](https://www.qt.io/) 5.9.6 - - [libusb](https://libusb.info/) - - [libusbp](https://github.com/pololu/libusbp) - - [Windows API](https://en.wikipedia.org/wiki/Windows_API) (thanks to mingw-w64) - - -## Getting started - -To get started, you should first install Nix on a Linux machine by following the -instructions on the [Nix website][nix]. - -Next, run `df -h` to make sure you have enough disk space. - -- The filesystem that holds `/nix` should have several gigabytes of free -space. Each GCC cross-compiler takes about 300 MB while each Qt installation -takes about 800 MB. -- The filesystem that holds `/tmp` should have at least 4 gigabytes of free -space, which will be needed while building cross-compilers. 
If that is not the -case on your system, you can set the `TMPDIR` environment variable to tell -`nix-build` to perform its builds in a different directory on a filesystem with -more free space. - -Next, clone or download this repository and use `cd` to change into the -top-level directory. - -To build a simple "Hello, World!" program for Windows, run: - - nix-build -A win32.hello - -The first time you run this command, it will take a while because Nix has to -build a cross-compiling toolchain. When `nix-build` is done, it will print the -name of a directory in `/nix/store` that holds the resulting program, and it -will create a symbolic link in the current directory named `result` that points -to that directory. - -If you copy `result/bin/hello.exe` to a Windows machine and run it, you should -see a message box appear that says "Hello, World!". - -If you run `nix-build -A win32.hello` a second time, Nix will detect that -nothing about the build recipes has changed, so it will simply print the -directory name and remake the symbolic link. - -To see how the `hello` package is specified in nixcrpkgs, you can look in -`pkgs.nix` and the `pkgs/hello` directory. To see how the GCC cross-compiler -for Windows was specified, you can look in the `mingw-w64` directory. If you -change any of the build recipes for `hello` or its dependencies and then run the -`nix-build` command again, Nix will automatically rebuild those dependencies and -anything that depends on them, ensuring that you always get a consistent build. - - -### Obtaining the macOS SDK - -If you are trying to build software for macOS, you will need to get a -macOS SDK tarball and put it in the the right place. Otherwise, you -will get an error like this: - - error: getting attributes of path '/home/yourname/nixcrpkgs/macos/MacOSX.sdk.tar.xz': No such file or directory - -To generate the tarball, follow these steps: - -1. On a macOS machine, install [Xcode](https://developer.apple.com/xcode/). -2. Download this repository to the machine. -3. In a Terminal window, run the `macos/gen_sdk_package.sh` script from this repository. -4. After several minutes, the current directory should have a tarball with a name like - `MacOSX10.12.sdk.tar.xz` and a size of about 25 MB. -5. Copy the SDK tarball file to the machine where you will be building software, - and put it in the `macos` directory. -6. The nixcrpkgs build recipe for the SDK is hardcoded to look for a file named - `MacOSX.sdk.tar.xz`, so rename the tarball to that. -7. Consider keeping a backup of the tarball so you can always rebuild any software you - made with it. - -Now you should be able to build your software for macOS. - - -## Integrating nixcrpkgs into your project - -The instructions above show how to cross-compile a "Hello, World!" program that -is included with nixcrpkgs. Instead of including your project in nixcrpkgs like -the hello program, you will probably want to just use nixcrpkgs as a tool in -your project. To get an idea of how to do that, you can look at other projects -that have done the same. In the projects listed below, you should look for a -file named `default.nix` in the top-level directory and look for build -instructions that explain what `nix-build` commands to run. - -* The [Pololu Tic Stepper Motor Controller software](https://github.com/pololu/pololu-tic-software) is a C/C++ project that uses CMake and nixcrpkgs. 
-* The [Pololu USB AVR Programmer v2 software](https://github.com/pololu/pololu-usb-avr-programmer-v2) is a C++ project that uses CMake and nixcrpkgs. -* The [Pololu USB Bootloader Utility (p-load)](https://github.com/pololu/p-load) is a C++ project that uses CMake and nixcrpkgs. - -[nix]: http://nixos.org/nix/ -[nixpkgs]: http://nixos.org/nixpkgs/ - -## Updating package versions - -Each build recipe in nixcrpkgs specifies a version number for the software that it builds. It is relatively easy to update the recipes even if you have not worked with Nix before. The general procedure is: - -1) Find the build recipe you want to update. For example, if you wanted to update the version of GCC used to build Linux programs, you would update the build recipe in `linux/gcc/default.nix`. -2) Find the part of the build recipe where the software sources are downloaded from the internet. It is usually a `fetchurl` command with two parameters: `url` and `sha256`. The `url` parameter usually refers to a version string defined nearby, so update that version string and/or the `url` parameter as desired. -3) In a shell, run `nix-prefetch-url URL`, where URL is the new URL specified in your modified build recipe with all version variables fully expanded). This command will download the URL you specified, store it in the Nix store, and output the hash of it in the proper format for Nix build recipes. -3) Update the `sha256` hash string in the build recipe by replacing it with the hash that was printed in the output of `nix-prefetch-url`. Updating the hash in the build recipe is important: Nix uses it to determine whether you already downloaded the right file, so if you don't update the hash then Nix might use the wrong file (e.g. an older version of the software that you downloaded earlier). -4) Run the usual `nix-build` command that you use to build your software. For example, you could go to the top-level directory of nixcrpkgs and run `nix-build -A rpi.hello` to build a "Hello world" program for the Raspberry Pi, or you could run `nix-build -A rpi.gcc` to just build the cross-compiler. -5) Fix any error messages that happen, one at a time. (Tip: to make a `.patch` file, run `diff -ur old new` where `old` and `new` are directories that contain the unpatched and patched versions of the source code, respectively.) -6) Once things are working, consider publishing your work on Github so others can benefit from what you figured out. - - -## Maintaining the nixcrpkgs system - -You should occasionally run `nix-collect-garbage` to remove items that are no -longer needed and reclaim your disk space. However, note that Nix will -typically remove all of your cross compilers and libraries when you run this -command, so be prepared to do a lengthy mass rebuild. The Nix manual has more -information about [Nix garbage -collection](http://nixos.org/nix/manual/#sec-garbage-collection). - -You should occasionally run `nix-channel --update` to update to the latest -version of Nixpkgs. However, when doing this, be aware that the new version of -Nixpkgs might require you to do a mass rebuild. - -You should occasionally update your checkout of the nixcrpkgs repository to get -the latest versions of build tools, new features, and bug fixes. Once again, -this might require a mass rebuild. - -If you want your builds to be very stable and reliable, you could make forks of -nixcrpkgs and/or Nixpkgs and update them at your own pace, carefully considering -any changes made by others before merging them in. 
That's one of the beauties -of Nix when compared to other package management systems: you will never be -forced to upgrade your build tools, and using old tools is just as easy as using -new ones. You can use the `NIX_PATH` environment variable to tell `nix-build` -to use your forked versions. - - -## Related projects - -* [osxcross]: Cross-compiling toolchain targeting macOS. -* [musl-cross-make]: Makefile-based build tool for creating cross-compilers targeting musl. -* [musl_nix_arm]: A fork of nixcrpkgs with a focus on building Docker images for ARM Linux. - -[osxcross]: https://github.com/tpoechtrager/osxcross -[musl-cross-make]: https://github.com/richfelker/musl-cross-make -[musl_nix_arm]: https://github.com/filleduchaos/musl_nix_arm diff --git a/nix/nixcrpkgs/bundle_builder.sh b/nix/nixcrpkgs/bundle_builder.sh deleted file mode 100644 index 9cb37fda5..000000000 --- a/nix/nixcrpkgs/bundle_builder.sh +++ /dev/null @@ -1,8 +0,0 @@ -source $setup -names=($names) -dirs=($dirs) -mkdir $out -cd $out -for ((i=0;i<${#names[@]};i++)); do - ln -s "${dirs[i]}" "${names[i]}" -done diff --git a/nix/nixcrpkgs/cmake_toolchain/builder.sh b/nix/nixcrpkgs/cmake_toolchain/builder.sh deleted file mode 100644 index 6d422fcc7..000000000 --- a/nix/nixcrpkgs/cmake_toolchain/builder.sh +++ /dev/null @@ -1,8 +0,0 @@ -source $stdenv/setup - -cat > $out < {}; - osx_sdk = ./macos/MacOSX.sdk.tar.xz; -} diff --git a/nix/nixcrpkgs/linux/README.md b/nix/nixcrpkgs/linux/README.md deleted file mode 100644 index 14a41d067..000000000 --- a/nix/nixcrpkgs/linux/README.md +++ /dev/null @@ -1,14 +0,0 @@ -## Linux GCC toolchain - -The files in this directory define how we build our GCC cross-compiler that -targets Linux, using the musl libc. - -### A note about `-rdynamic` - -Do not pass `-rdynamic` to GCC when building an executable; it will cause the compiled executable to depend on a musl libc dynamic loader in `/lib` that probably doesn't exist, and defeats the point of static linking. The `-static` option overrides `-rdynamic`, so adding`-static` to the linker flags of a project using `-rdynamic` is one way to fix the issue. - -CMake will pass `-rdynamic` unless you set [CMP0065](https://cmake.org/cmake/help/v3.8/policy/CMP0065.html) to new as shown below, or set your [CMake policy version](https://cmake.org/cmake/help/v3.8/command/cmake_policy.html) to 3.4 or later. - - # Don't use -rdynamic since it causes Musl static linking to not work. - cmake_policy(SET CMP0065 NEW) - diff --git a/nix/nixcrpkgs/linux/binutils/builder.sh b/nix/nixcrpkgs/linux/binutils/builder.sh deleted file mode 100644 index 8795ea0ce..000000000 --- a/nix/nixcrpkgs/linux/binutils/builder.sh +++ /dev/null @@ -1,26 +0,0 @@ -source $stdenv/setup - -unset CC CXX CFLAGS LDFLAGS LD AR AS RANLIB SIZE STRINGS NM STRIP OBJCOPY - -tar -xf $src - -cd binutils-$version -for patch in $patches; do - echo applying patch $patch - patch -p1 -i $patch -done - -# Clear the default library search path (noSysDirs) -echo 'NATIVE_LIB_DIRS=' >> ld/configure.tgt - -cd .. 
- -mkdir build -cd build - -../binutils-$version/configure --prefix=$out $configure_flags - -make - -make install - diff --git a/nix/nixcrpkgs/linux/binutils/default.nix b/nix/nixcrpkgs/linux/binutils/default.nix deleted file mode 100644 index 824b3d395..000000000 --- a/nix/nixcrpkgs/linux/binutils/default.nix +++ /dev/null @@ -1,26 +0,0 @@ -{ native, host }: - -native.make_derivation rec { - name = "binutils-${version}-${host}"; - - version = "2.27"; - - src = native.nixpkgs.fetchurl { - url = "mirror://gnu/binutils/binutils-${version}.tar.bz2"; - sha256 = "125clslv17xh1sab74343fg6v31msavpmaa1c1394zsqa773g5rn"; - }; - - patches = [ - ./deterministic.patch - ]; - - native_inputs = [ native.nixpkgs.bison native.nixpkgs.zlib ]; - - configure_flags = - "--target=${host} " + - "--enable-shared " + - "--enable-deterministic-archives " + - "--disable-werror "; - - builder = ./builder.sh; -} diff --git a/nix/nixcrpkgs/linux/binutils/deterministic.patch b/nix/nixcrpkgs/linux/binutils/deterministic.patch deleted file mode 100644 index 0a264b35c..000000000 --- a/nix/nixcrpkgs/linux/binutils/deterministic.patch +++ /dev/null @@ -1,12 +0,0 @@ -Make binutils output deterministic by default. ---- orig/ld/ldlang.c -+++ new/ld/ldlang.c -@@ -3095,6 +3095,8 @@ - ldfile_output_machine)) - einfo (_("%P%F:%s: can not set architecture: %E\n"), name); - -+ link_info.output_bfd->flags |= BFD_DETERMINISTIC_OUTPUT; -+ - link_info.hash = bfd_link_hash_table_create (link_info.output_bfd); - if (link_info.hash == NULL) - einfo (_("%P%F: can not create hash table: %E\n")); diff --git a/nix/nixcrpkgs/linux/default.nix b/nix/nixcrpkgs/linux/default.nix deleted file mode 100644 index 188cd0261..000000000 --- a/nix/nixcrpkgs/linux/default.nix +++ /dev/null @@ -1,78 +0,0 @@ -{ native, arch, gcc_options ? "" }: -let - nixpkgs = native.nixpkgs; - - host = "${arch}-linux-musleabi"; - - os = "linux"; - - compiler = "gcc"; - - exe_suffix = ""; - - binutils = import ./binutils { inherit native host; }; - - linux_arch = - if arch == "i686" || arch == "x86_64" then "x86" - else if arch == "armv6" || arch == "armv7" then "arm" - else throw "not sure what Linux architecture code to use"; - - headers = native.make_derivation rec { - name = "linux-headers-${linux_arch}-${version}"; - inherit linux_arch; - version = "4.4.10"; - src = nixpkgs.fetchurl { - url = "https://cdn.kernel.org/pub/linux/kernel/v4.x/linux-${version}.tar.xz"; - sha256 = "1kpjvvd9q9wwr3314q5ymvxii4dv2d27295bzly225wlc552xhja"; - }; - builder = ./headers_builder.sh; - }; - - gcc = import ./gcc { - inherit native host binutils headers gcc_options; - }; - - license = native.make_derivation { - name = "linux-license"; - inherit (gcc) musl_src gcc_src; - linux_src = headers.src; - builder = ./license_builder.sh; - }; - - global_license_set = { _global = license; }; - - cmake_toolchain = import ../cmake_toolchain { - cmake_system_name = "Linux"; - inherit nixpkgs host; - }; - - crossenv = { - is_cross = true; - - # Build tools available on the PATH for every derivation. - default_native_inputs = native.default_native_inputs ++ - [ gcc binutils native.pkgconf native.wrappers ]; - - # Target info environment variables. - inherit host arch os compiler exe_suffix; - - # CMake toolchain file. - inherit cmake_toolchain; - - # A wide variety of programs and build tools. - inherit nixpkgs; - - # Some native build tools made by nixcrpkgs. - inherit native; - - # License information that should be shipped with any software - # compiled by this environment. 
- inherit global_license_set; - - # Make it easy to refer to the build tools. - inherit headers gcc binutils; - - make_derivation = import ../make_derivation.nix crossenv; - }; -in - crossenv diff --git a/nix/nixcrpkgs/linux/gcc/builder.sh b/nix/nixcrpkgs/linux/gcc/builder.sh deleted file mode 100644 index f134826cc..000000000 --- a/nix/nixcrpkgs/linux/gcc/builder.sh +++ /dev/null @@ -1,34 +0,0 @@ -source $setup - -tar -xf $gcc_src -mv gcc-* gcc -cd gcc -for patch in $gcc_patches; do - echo applying patch $patch - patch -p1 -i $patch -done -cd .. - -tar -xf $musl_src -mv musl-* musl - -mkdir -p $out/$host -cp -r --no-preserve=mode $headers/include $out/$host - -mkdir build_gcc -cd build_gcc -../gcc/configure --prefix=$out $gcc_conf -cd .. -make -C build_gcc all-gcc -mkdir build_musl -cd build_musl -../musl/configure --prefix=$out/$host $musl_conf \ - CC="../build_gcc/gcc/xgcc -B ../build_gcc/gcc" \ - LIBCC=../build_gcc/$host/libgcc/libgcc.a -cd .. -make -C build_musl install-headers -make -C build_gcc all-target-libgcc -make -C build_musl -make -C build_musl install -make -C build_gcc -make -C build_gcc install diff --git a/nix/nixcrpkgs/linux/gcc/default.nix b/nix/nixcrpkgs/linux/gcc/default.nix deleted file mode 100644 index 8500cde1a..000000000 --- a/nix/nixcrpkgs/linux/gcc/default.nix +++ /dev/null @@ -1,81 +0,0 @@ -{ native, host, binutils, headers, gcc_options }: - -let - nixpkgs = native.nixpkgs; - isl = nixpkgs.isl_0_14; - inherit (nixpkgs) stdenv lib fetchurl; - inherit (nixpkgs) gmp libmpc libelf mpfr zlib; -in - -native.make_derivation rec { - name = "gcc-${gcc_version}-${host}"; - - gcc_version = "6.3.0"; - gcc_src = fetchurl { - url = "mirror://gnu/gcc/gcc-${gcc_version}/gcc-${gcc_version}.tar.bz2"; - sha256 = "17xjz30jb65hcf714vn9gcxvrrji8j20xm7n33qg1ywhyzryfsph"; - }; - - musl_version = "1.1.16"; - musl_src = nixpkgs.fetchurl { - url = "https://www.musl-libc.org/releases/musl-${musl_version}.tar.gz"; - sha256 = "048h0w4yjyza4h05bkc6dpwg3hq6l03na46g0q1ha8fpwnjqawck"; - }; - - inherit host headers; - - builder = ./builder.sh; - - gcc_patches = [ - # These patches are from nixpkgs. - ./use-source-date-epoch.patch - ./libstdc++-target.patch - - # Without this, we cannot build a simple hello world program for ARM. - # See https://gcc.gnu.org/bugzilla/show_bug.cgi?id=31798 - ./link_gcc_c_sequence_spec.patch - - # Fix a compiler error in GCC's ubsan.c: ISO C++ forbids comparison - # between pointer and integer. 
- ./ubsan.patch - ]; - - native_inputs = [ binutils ]; - - gcc_conf = - "--target=${host} " + - gcc_options + - "--with-gnu-as " + - "--with-gnu-ld " + - "--with-as=${binutils}/bin/${host}-as " + - "--with-ld=${binutils}/bin/${host}-ld " + - "--with-isl=${isl} " + - "--with-gmp-include=${gmp.dev}/include " + - "--with-gmp-lib=${gmp.out}/lib " + - "--with-libelf=${libelf}" + - "--with-mpfr=${mpfr.dev} " + - "--with-mpfr-include=${mpfr.dev}/include " + - "--with-mpfr-lib=${mpfr.out}/lib " + - "--with-mpc=${libmpc.out} " + - "--with-zlib-include=${zlib.dev}/include " + - "--with-zlib-lib=${zlib.out}/lib " + - "--enable-deterministic-archives " + - "--enable-languages=c,c++ " + - "--enable-libstdcxx-time " + - "--enable-static " + - "--enable-tls " + - "--disable-gnu-indirect-function " + - "--disable-libmudflap " + - "--disable-libmpx " + - "--disable-libsanitizer " + - "--disable-multilib " + - "--disable-shared " + - "--disable-werror"; - - musl_conf = - "--target=${host} " + - "--disable-shared"; - - hardeningDisable = [ "format" ]; -} - diff --git a/nix/nixcrpkgs/linux/gcc/libstdc++-target.patch b/nix/nixcrpkgs/linux/gcc/libstdc++-target.patch deleted file mode 100644 index fb622b395..000000000 --- a/nix/nixcrpkgs/linux/gcc/libstdc++-target.patch +++ /dev/null @@ -1,32 +0,0 @@ -Patch to make the target libraries 'configure' scripts find the proper CPP. -I noticed that building the mingw32 cross compiler. -Looking at the build script for mingw in archlinux, I think that only nixos -needs this patch. I don't know why. -diff --git a/Makefile.in b/Makefile.in -index 93f66b6..d691917 100644 ---- a/Makefile.in -+++ b/Makefile.in -@@ -266,6 +266,7 @@ BASE_TARGET_EXPORTS = \ - AR="$(AR_FOR_TARGET)"; export AR; \ - AS="$(COMPILER_AS_FOR_TARGET)"; export AS; \ - CC="$(CC_FOR_TARGET) $(XGCC_FLAGS_FOR_TARGET) $$TFLAGS"; export CC; \ -+ CPP="$(CC_FOR_TARGET) $(XGCC_FLAGS_FOR_TARGET) $$TFLAGS -E"; export CC; \ - CFLAGS="$(CFLAGS_FOR_TARGET)"; export CFLAGS; \ - CONFIG_SHELL="$(SHELL)"; export CONFIG_SHELL; \ - CPPFLAGS="$(CPPFLAGS_FOR_TARGET)"; export CPPFLAGS; \ -@@ -291,11 +292,13 @@ BASE_TARGET_EXPORTS = \ - RAW_CXX_TARGET_EXPORTS = \ - $(BASE_TARGET_EXPORTS) \ - CXX_FOR_TARGET="$(RAW_CXX_FOR_TARGET)"; export CXX_FOR_TARGET; \ -- CXX="$(RAW_CXX_FOR_TARGET) $(XGCC_FLAGS_FOR_TARGET) $$TFLAGS"; export CXX; -+ CXX="$(RAW_CXX_FOR_TARGET) $(XGCC_FLAGS_FOR_TARGET) $$TFLAGS"; export CXX; \ -+ CXXCPP="$(RAW_CXX_FOR_TARGET) $(XGCC_FLAGS_FOR_TARGET) $$TFLAGS -E"; export CXX; - - NORMAL_TARGET_EXPORTS = \ - $(BASE_TARGET_EXPORTS) \ -- CXX="$(CXX_FOR_TARGET) $(XGCC_FLAGS_FOR_TARGET) $$TFLAGS"; export CXX; -+ CXX="$(CXX_FOR_TARGET) $(XGCC_FLAGS_FOR_TARGET) $$TFLAGS"; export CXX; \ -+ CXXCPP="$(CXX_FOR_TARGET) $(XGCC_FLAGS_FOR_TARGET) $$TFLAGS -E"; export CXX; - - # Where to find GMP - HOST_GMPLIBS = @gmplibs@ diff --git a/nix/nixcrpkgs/linux/gcc/link_gcc_c_sequence_spec.patch b/nix/nixcrpkgs/linux/gcc/link_gcc_c_sequence_spec.patch deleted file mode 100644 index 5c976bcb8..000000000 --- a/nix/nixcrpkgs/linux/gcc/link_gcc_c_sequence_spec.patch +++ /dev/null @@ -1,12 +0,0 @@ -diff -ur gcc-6.3.0-orig/gcc/config/gnu-user.h gcc-6.3.0/gcc/config/gnu-user.h ---- gcc-6.3.0-orig/gcc/config/gnu-user.h 2017-08-13 19:03:08.671572528 -0700 -+++ gcc-6.3.0/gcc/config/gnu-user.h 2017-08-13 19:15:00.768588499 -0700 -@@ -123,7 +123,7 @@ - - #undef LINK_GCC_C_SEQUENCE_SPEC - #define LINK_GCC_C_SEQUENCE_SPEC \ -- "%{static:--start-group} %G %L %{static:--end-group}%{!static:%G}" -+ "--start-group %G %L --end-group" - - /* Use 
--as-needed -lgcc_s for eh support. */ - #ifdef HAVE_LD_AS_NEEDED diff --git a/nix/nixcrpkgs/linux/gcc/ubsan.patch b/nix/nixcrpkgs/linux/gcc/ubsan.patch deleted file mode 100644 index 0ad3b7991..000000000 --- a/nix/nixcrpkgs/linux/gcc/ubsan.patch +++ /dev/null @@ -1,10 +0,0 @@ ---- gcc-6.3.0-orig/gcc/ubsan.c -+++ gcc-6.3.0/gcc/ubsan.c -@@ -1471,7 +1471,7 @@ - - expanded_location xloc = expand_location (loc); - if (xloc.file == NULL || strncmp (xloc.file, "\1", 2) == 0 -- || xloc.file == '\0' || xloc.file[0] == '\xff' -+ || xloc.file == NULL || xloc.file[0] == '\xff' - || xloc.file[1] == '\xff') - return false; diff --git a/nix/nixcrpkgs/linux/gcc/use-source-date-epoch.patch b/nix/nixcrpkgs/linux/gcc/use-source-date-epoch.patch deleted file mode 100644 index 65a5ab028..000000000 --- a/nix/nixcrpkgs/linux/gcc/use-source-date-epoch.patch +++ /dev/null @@ -1,52 +0,0 @@ -https://gcc.gnu.org/ml/gcc-patches/2015-06/msg02210.html - -diff --git a/libcpp/macro.c b/libcpp/macro.c -index 1e0a0b5..a52e3cb 100644 ---- a/libcpp/macro.c -+++ b/libcpp/macro.c -@@ -349,14 +349,38 @@ _cpp_builtin_macro_text (cpp_reader *pfile, cpp_hashnode *node) - slow on some systems. */ - time_t tt; - struct tm *tb = NULL; -+ char *source_date_epoch; - -- /* (time_t) -1 is a legitimate value for "number of seconds -- since the Epoch", so we have to do a little dance to -- distinguish that from a genuine error. */ -- errno = 0; -- tt = time(NULL); -- if (tt != (time_t)-1 || errno == 0) -- tb = localtime (&tt); -+ /* Allow the date and time to be set externally by an exported -+ environment variable to enable reproducible builds. */ -+ source_date_epoch = getenv ("SOURCE_DATE_EPOCH"); -+ if (source_date_epoch) -+ { -+ errno = 0; -+ tt = (time_t) strtol (source_date_epoch, NULL, 10); -+ if (errno == 0) -+ { -+ tb = gmtime (&tt); -+ if (tb == NULL) -+ cpp_error (pfile, CPP_DL_ERROR, -+ "SOURCE_DATE_EPOCH=\"%s\" is not a valid date", -+ source_date_epoch); -+ } -+ else -+ cpp_error (pfile, CPP_DL_ERROR, -+ "SOURCE_DATE_EPOCH=\"%s\" is not a valid number", -+ source_date_epoch); -+ } -+ else -+ { -+ /* (time_t) -1 is a legitimate value for "number of seconds -+ since the Epoch", so we have to do a little dance to -+ distinguish that from a genuine error. */ -+ errno = 0; -+ tt = time(NULL); -+ if (tt != (time_t)-1 || errno == 0) -+ tb = localtime (&tt); -+ } - - if (tb) - { diff --git a/nix/nixcrpkgs/linux/headers_builder.sh b/nix/nixcrpkgs/linux/headers_builder.sh deleted file mode 100644 index 9f4a5db09..000000000 --- a/nix/nixcrpkgs/linux/headers_builder.sh +++ /dev/null @@ -1,13 +0,0 @@ -source $setup -shopt -u nullglob - -tar -xf $src -mv linux-$version linux - -mkdir -p obj/staged -make -C linux headers_install \ - ARCH=$linux_arch \ - O=$(pwd)/obj \ - INSTALL_HDR_PATH=$out - -find $out '(' -name .install -o -name ..install.cmd ')' -exec rm {} + diff --git a/nix/nixcrpkgs/linux/license_builder.sh b/nix/nixcrpkgs/linux/license_builder.sh deleted file mode 100644 index 0a7d95073..000000000 --- a/nix/nixcrpkgs/linux/license_builder.sh +++ /dev/null @@ -1,45 +0,0 @@ -source $setup - -tar -xf $gcc_src -mv gcc-* gcc - -tar -xf $musl_src -mv musl-* musl - -tar -xf $linux_src -mv linux-* linux - -license_gcc=$(cat gcc/COPYING3.LIB) -license_musl=$(cat musl/COPYRIGHT) -license_linux=$(cat linux/COPYING) - -cat > $out < - The third-party software included with this software may - have been patched or otherwise modified. -

-
-GCC run-time libraries
-
- The GCC run-time libraries libgcc and libstdc++ are licensed under the GNU
- General Public License Version 3 (GPLv3) as shown below.
-
-$license_gcc
-
-musl libc
-
-$license_musl
-
-Linux headers
-
-$license_linux
-
- -EOF diff --git a/nix/nixcrpkgs/macos/.gitignore b/nix/nixcrpkgs/macos/.gitignore deleted file mode 100644 index 40af15962..000000000 --- a/nix/nixcrpkgs/macos/.gitignore +++ /dev/null @@ -1 +0,0 @@ -/MacOSX.sdk.tar.xz diff --git a/nix/nixcrpkgs/macos/ar_builder.sh b/nix/nixcrpkgs/macos/ar_builder.sh deleted file mode 100644 index 14f1e438c..000000000 --- a/nix/nixcrpkgs/macos/ar_builder.sh +++ /dev/null @@ -1,46 +0,0 @@ -source $setup - -tar -xf $src -mv cctools-port-* cctools-port - -cd cctools-port - -for patch in $patches; do - echo applying patch $patch - patch -p1 -i $patch -done - -# Similar to but not the same as the other _structs.h. -rm cctools/include/foreign/mach/i386/_structs.h - -# Causes a troublesome undefined reference. -rm cctools/libstuff/vm_flush_cache.c - -cd .. - -mv cctools-port/cctools/ar . -mv cctools-port/cctools/include . -mv cctools-port/cctools/libstuff . -rm -r cctools-port - -mkdir build -cd build - -CFLAGS="-Wno-deprecated -Wno-deprecated-declarations -Wno-unused-result -Werror -Wfatal-errors -O2 -g -I../include -I../include/foreign -DPROGRAM_PREFIX=\\\"$host-\\\" -D__LITTLE_ENDIAN__ -D__private_extern__= -D__DARWIN_UNIX03 -DPACKAGE_NAME=\\\"cctools\\\" -DPACKAGE_VERSION=\\\"$apple_version\\\" -DEMULATED_HOST_CPU_TYPE=16777223 -DEMULATED_HOST_CPU_SUBTYPE=3" - -CXXFLAGS="-std=gnu++11 $CFLAGS" - -LDFLAGS="-ldl -lpthread" - -for f in ../ar/*.c ../libstuff/*.c; do - echo "compiling $f" - eval "gcc -c $CFLAGS $f -o $(basename $f).o" -done - -gcc *.o $LDFLAGS -o $host-ar - -mkdir -p $out/bin -cp $host-ar $out/bin/ - -# ar looks for ranlib in this directory -ln -s $ranlib/bin/$host-ranlib $out/bin/ diff --git a/nix/nixcrpkgs/macos/cctools-bytesex.patch b/nix/nixcrpkgs/macos/cctools-bytesex.patch deleted file mode 100644 index 19ac3845f..000000000 --- a/nix/nixcrpkgs/macos/cctools-bytesex.patch +++ /dev/null @@ -1,11 +0,0 @@ -diff -ur cctools-port-c1cc758/cctools/include/stuff/bytesex.h cctools-port-bytesex/cctools/include/stuff/bytesex.h ---- cctools-port-c1cc758/cctools/include/stuff/bytesex.h 2017-10-01 13:47:04.000000000 -0700 -+++ cctools-port-bytesex/cctools/include/stuff/bytesex.h 2017-11-10 19:07:26.338161875 -0800 -@@ -48,6 +48,7 @@ - #include - #include - #include -+#include - #include - #include - /* cctools-port: need to undef these to avoid warnings */ diff --git a/nix/nixcrpkgs/macos/cctools-format.patch b/nix/nixcrpkgs/macos/cctools-format.patch deleted file mode 100644 index f38ffe7ef..000000000 --- a/nix/nixcrpkgs/macos/cctools-format.patch +++ /dev/null @@ -1,2661 +0,0 @@ -diff -ur cctools-port-c1cc758/cctools/ar/archive.h cctools-port-format/cctools/ar/archive.h ---- cctools-port-c1cc758/cctools/ar/archive.h 2017-10-01 13:47:04.000000000 -0700 -+++ cctools-port-format/cctools/ar/archive.h 2017-11-10 21:42:36.315713255 -0800 -@@ -112,11 +112,11 @@ - } CHDR; - - /* Header format strings. 
*/ --#define HDR1 "%s%-13d%-12ld%-6u%-6u%-8o%-10qd%2s" --#define HDR2 "%-16.16s%-12ld%-6u%-6u%-8o%-10qd%2s" -+#define HDR1 "%s%-13d%-12ld%-6u%-6u%-8o%-10ld%2s" -+#define HDR2 "%-16.16s%-12ld%-6u%-6u%-8o%-10ld%2s" - - #define OLDARMAXNAME 15 --#define HDR3 "%-16.15s%-12ld%-6u%-6u%-8o%-10qd%2s" -+#define HDR3 "%-16.15s%-12ld%-6u%-6u%-8o%-10ld%2s" - - - #include -diff -ur cctools-port-c1cc758/cctools/ar/contents.c cctools-port-format/cctools/ar/contents.c ---- cctools-port-c1cc758/cctools/ar/contents.c 2017-10-01 13:47:04.000000000 -0700 -+++ cctools-port-format/cctools/ar/contents.c 2017-11-10 21:43:06.022707300 -0800 -@@ -107,7 +107,7 @@ - goto next; - if (options & AR_V) { - (void)strmode(chdr.mode, buf); -- (void)printf("%s %6d/%-6d %8qd ", -+ (void)printf("%s %6d/%-6d %8ld ", - buf + 1, chdr.uid, chdr.gid, chdr.size); - tp = localtime(&chdr.date); - (void)strftime(buf, sizeof(buf), "%b %e %H:%M %Y", tp); -diff -ur cctools-port-c1cc758/cctools/ld64/src/ld/debugline.c cctools-port-format/cctools/ld64/src/ld/debugline.c ---- cctools-port-c1cc758/cctools/ld64/src/ld/debugline.c 2017-10-01 13:47:04.000000000 -0700 -+++ cctools-port-format/cctools/ld64/src/ld/debugline.c 2017-11-10 19:13:05.914926185 -0800 -@@ -396,11 +396,11 @@ - case DW_LNE_set_address: - if (sz == 9) { - lnd->cur.pc = read_64 (eop); -- if (verbose) fprintf(stderr, "DW_LNE_set_address(0x%08llX)\n", lnd->cur.pc); -+ if (verbose) fprintf(stderr, "DW_LNE_set_address(0x%08lX)\n", lnd->cur.pc); - } - else if (sz == 5) { - lnd->cur.pc = read_32 (eop); -- if (verbose) fprintf(stderr, "DW_LNE_set_address(0x%08llX)\n", lnd->cur.pc); -+ if (verbose) fprintf(stderr, "DW_LNE_set_address(0x%08lX)\n", lnd->cur.pc); - } - else - return false; -@@ -442,11 +442,11 @@ - if (tmp == (uint64_t) -1) - return false; - lnd->cur.pc += tmp * lnd->minimum_instruction_length; -- if (verbose) fprintf(stderr, "DW_LNS_advance_pc(0x%08llX)\n", lnd->cur.pc); -+ if (verbose) fprintf(stderr, "DW_LNS_advance_pc(0x%08lX)\n", lnd->cur.pc); - break; - case DW_LNS_advance_line: - lnd->cur.line += read_sleb128 (lnd); -- if (verbose) fprintf(stderr, "DW_LNS_advance_line(%lld)\n", lnd->cur.line); -+ if (verbose) fprintf(stderr, "DW_LNS_advance_line(%ld)\n", lnd->cur.line); - break; - case DW_LNS_set_file: - if (verbose) fprintf(stderr, "DW_LNS_set_file\n"); -diff -ur cctools-port-c1cc758/cctools/ld64/src/ld/InputFiles.cpp cctools-port-format/cctools/ld64/src/ld/InputFiles.cpp ---- cctools-port-c1cc758/cctools/ld64/src/ld/InputFiles.cpp 2017-10-01 13:47:04.000000000 -0700 -+++ cctools-port-format/cctools/ld64/src/ld/InputFiles.cpp 2017-11-10 19:13:05.914926185 -0800 -@@ -226,7 +226,7 @@ - if ( fd == -1 ) - throwf("can't open file, errno=%d", errno); - if ( info.fileLen < 20 ) -- throwf("file too small (length=%llu)", info.fileLen); -+ throwf("file too small (length=%lu)", info.fileLen); - - uint8_t* p = (uint8_t*)::mmap(NULL, info.fileLen, PROT_READ, MAP_FILE | MAP_PRIVATE, fd, 0); - if ( p == (uint8_t*)(-1) ) -@@ -276,7 +276,7 @@ - newFileLen = statBuffer.st_size; - } - if ( fileOffset+len > newFileLen ) { -- throwf("truncated fat file. Slice from %u to %llu is past end of file with length %llu", -+ throwf("truncated fat file. 
Slice from %u to %lu is past end of file with length %lu", - fileOffset, fileOffset+len, info.fileLen); - } - } -diff -ur cctools-port-c1cc758/cctools/ld64/src/ld/ld.cpp cctools-port-format/cctools/ld64/src/ld/ld.cpp ---- cctools-port-c1cc758/cctools/ld64/src/ld/ld.cpp 2017-10-01 13:47:04.000000000 -0700 -+++ cctools-port-format/cctools/ld64/src/ld/ld.cpp 2017-11-10 19:13:05.914926185 -0800 -@@ -1026,10 +1026,10 @@ - // sanity check size - if ( ((address + sect->size) > _options.maxAddress()) && (_options.outputKind() != Options::kObjectFile) - && (_options.outputKind() != Options::kStaticExecutable) ) -- throwf("section %s (address=0x%08llX, size=%llu) would make the output executable exceed available address range", -+ throwf("section %s (address=0x%08lX, size=%lu) would make the output executable exceed available address range", - sect->sectionName(), address, sect->size); - -- if ( log ) fprintf(stderr, " address=0x%08llX, hidden=%d, alignment=%02d, section=%s,%s\n", -+ if ( log ) fprintf(stderr, " address=0x%08lX, hidden=%d, alignment=%02d, section=%s,%s\n", - sect->address, sect->isSectionHidden(), sect->alignment, sect->segmentName(), sect->sectionName()); - // update running totals - if ( !sect->isSectionHidden() || hiddenSectionsOccupyAddressSpace ) -@@ -1060,7 +1060,7 @@ - address = ( (unalignedAddress+alignment-1) & (-alignment) ); - sect->alignmentPaddingBytes = (address - unalignedAddress); - sect->address = address; -- if ( log ) fprintf(stderr, " address=0x%08llX, hidden=%d, alignment=%02d, section=%s,%s\n", -+ if ( log ) fprintf(stderr, " address=0x%08lX, hidden=%d, alignment=%02d, section=%s,%s\n", - sect->address, sect->isSectionHidden(), sect->alignment, sect->segmentName(), sect->sectionName()); - // update running totals - if ( !sect->isSectionHidden() || hiddenSectionsOccupyAddressSpace ) -@@ -1119,7 +1119,7 @@ - // sanity check size - if ( ((address + sect->size) > _options.maxAddress()) && (_options.outputKind() != Options::kObjectFile) - && (_options.outputKind() != Options::kStaticExecutable) ) -- throwf("section %s (address=0x%08llX, size=%llu) would make the output executable exceed available address range", -+ throwf("section %s (address=0x%08lX, size=%lu) would make the output executable exceed available address range", - sect->sectionName(), address, sect->size); - - // sanity check it does not overlap a fixed address segment -@@ -1145,7 +1145,7 @@ - } - } - -- if ( log ) fprintf(stderr, " address=0x%08llX, size=0x%08llX, hidden=%d, alignment=%02d, padBytes=%d, section=%s,%s\n", -+ if ( log ) fprintf(stderr, " address=0x%08lX, size=0x%08lX, hidden=%d, alignment=%02d, padBytes=%d, section=%s,%s\n", - sect->address, sect->size, sect->isSectionHidden(), sect->alignment, sect->alignmentPaddingBytes, - sect->segmentName(), sect->sectionName()); - // update running totals -@@ -1159,7 +1159,7 @@ - ld::Internal::FinalSection* sect = *it; - //if ( sect->isSectionHidden() ) - // continue; -- fprintf(stderr, " address:0x%08llX, alignment:2^%d, size:0x%08llX, padBytes:%d, section:%s/%s\n", -+ fprintf(stderr, " address:0x%08lX, alignment:2^%d, size:0x%08lX, padBytes:%d, section:%s/%s\n", - sect->address, sect->alignment, sect->size, sect->alignmentPaddingBytes, - sect->segmentName(), sect->sectionName()); - -@@ -1200,7 +1200,7 @@ - fileOffset += sect->size; - } - -- if ( log ) fprintf(stderr, " fileoffset=0x%08llX, address=0x%08llX, hidden=%d, size=%lld, alignment=%02d, section=%s,%s\n", -+ if ( log ) fprintf(stderr, " fileoffset=0x%08lX, address=0x%08lX, hidden=%d, 
size=%ld, alignment=%02d, section=%s,%s\n", - sect->fileOffset, sect->address, sect->isSectionHidden(), sect->size, sect->alignment, - sect->segmentName(), sect->sectionName()); - } -@@ -1226,7 +1226,7 @@ - { - char* result = out; - char rawNum[30]; -- sprintf(rawNum, "%llu", in); -+ sprintf(rawNum, "%lu", in); - const int rawNumLen = strlen(rawNum); - for(int i=0; i < rawNumLen-1; ++i) { - *out++ = rawNum[i]; -diff -ur cctools-port-c1cc758/cctools/ld64/src/ld/LinkEdit.hpp cctools-port-format/cctools/ld64/src/ld/LinkEdit.hpp ---- cctools-port-c1cc758/cctools/ld64/src/ld/LinkEdit.hpp 2017-10-01 13:47:04.000000000 -0700 -+++ cctools-port-format/cctools/ld64/src/ld/LinkEdit.hpp 2017-11-10 19:13:05.914926185 -0800 -@@ -324,39 +324,39 @@ - done = true; - break; - case REBASE_OPCODE_SET_TYPE_IMM: -- if ( log ) fprintf(stderr, "REBASE_OPCODE_SET_TYPE_IMM(%lld)\n", it->operand1); -+ if ( log ) fprintf(stderr, "REBASE_OPCODE_SET_TYPE_IMM(%ld)\n", it->operand1); - this->_encodedData.append_byte(REBASE_OPCODE_SET_TYPE_IMM | it->operand1); - break; - case REBASE_OPCODE_SET_SEGMENT_AND_OFFSET_ULEB: -- if ( log ) fprintf(stderr, "REBASE_OPCODE_SET_SEGMENT_AND_OFFSET_ULEB(%lld, 0x%llX)\n", it->operand1, it->operand2); -+ if ( log ) fprintf(stderr, "REBASE_OPCODE_SET_SEGMENT_AND_OFFSET_ULEB(%ld, 0x%lX)\n", it->operand1, it->operand2); - this->_encodedData.append_byte(REBASE_OPCODE_SET_SEGMENT_AND_OFFSET_ULEB | it->operand1); - this->_encodedData.append_uleb128(it->operand2); - break; - case REBASE_OPCODE_ADD_ADDR_ULEB: -- if ( log ) fprintf(stderr, "REBASE_OPCODE_ADD_ADDR_ULEB(0x%llX)\n", it->operand1); -+ if ( log ) fprintf(stderr, "REBASE_OPCODE_ADD_ADDR_ULEB(0x%lX)\n", it->operand1); - this->_encodedData.append_byte(REBASE_OPCODE_ADD_ADDR_ULEB); - this->_encodedData.append_uleb128(it->operand1); - break; - case REBASE_OPCODE_ADD_ADDR_IMM_SCALED: -- if ( log ) fprintf(stderr, "REBASE_OPCODE_ADD_ADDR_IMM_SCALED(%lld=0x%llX)\n", it->operand1, it->operand1*sizeof(pint_t)); -+ if ( log ) fprintf(stderr, "REBASE_OPCODE_ADD_ADDR_IMM_SCALED(%ld=0x%lX)\n", it->operand1, it->operand1*sizeof(pint_t)); - this->_encodedData.append_byte(REBASE_OPCODE_ADD_ADDR_IMM_SCALED | it->operand1 ); - break; - case REBASE_OPCODE_DO_REBASE_IMM_TIMES: -- if ( log ) fprintf(stderr, "REBASE_OPCODE_DO_REBASE_IMM_TIMES(%lld)\n", it->operand1); -+ if ( log ) fprintf(stderr, "REBASE_OPCODE_DO_REBASE_IMM_TIMES(%ld)\n", it->operand1); - this->_encodedData.append_byte(REBASE_OPCODE_DO_REBASE_IMM_TIMES | it->operand1); - break; - case REBASE_OPCODE_DO_REBASE_ULEB_TIMES: -- if ( log ) fprintf(stderr, "REBASE_OPCODE_DO_REBASE_ULEB_TIMES(%lld)\n", it->operand1); -+ if ( log ) fprintf(stderr, "REBASE_OPCODE_DO_REBASE_ULEB_TIMES(%ld)\n", it->operand1); - this->_encodedData.append_byte(REBASE_OPCODE_DO_REBASE_ULEB_TIMES); - this->_encodedData.append_uleb128(it->operand1); - break; - case REBASE_OPCODE_DO_REBASE_ADD_ADDR_ULEB: -- if ( log ) fprintf(stderr, "REBASE_OPCODE_DO_REBASE_ADD_ADDR_ULEB(0x%llX)\n", it->operand1); -+ if ( log ) fprintf(stderr, "REBASE_OPCODE_DO_REBASE_ADD_ADDR_ULEB(0x%lX)\n", it->operand1); - this->_encodedData.append_byte(REBASE_OPCODE_DO_REBASE_ADD_ADDR_ULEB); - this->_encodedData.append_uleb128(it->operand1); - break; - case REBASE_OPCODE_DO_REBASE_ULEB_TIMES_SKIPPING_ULEB: -- if ( log ) fprintf(stderr, "REBASE_OPCODE_DO_REBASE_ULEB_TIMES_SKIPPING_ULEB(%lld, %lld)\n", it->operand1, it->operand2); -+ if ( log ) fprintf(stderr, "REBASE_OPCODE_DO_REBASE_ULEB_TIMES_SKIPPING_ULEB(%ld, %ld)\n", it->operand1, it->operand2); - 
this->_encodedData.append_byte(REBASE_OPCODE_DO_REBASE_ULEB_TIMES_SKIPPING_ULEB); - this->_encodedData.append_uleb128(it->operand1); - this->_encodedData.append_uleb128(it->operand2); -@@ -534,39 +534,39 @@ - done = true; - break; - case BIND_OPCODE_SET_DYLIB_ORDINAL_IMM: -- if ( log ) fprintf(stderr, "BIND_OPCODE_SET_DYLIB_ORDINAL_IMM(%lld)\n", it->operand1); -+ if ( log ) fprintf(stderr, "BIND_OPCODE_SET_DYLIB_ORDINAL_IMM(%ld)\n", it->operand1); - this->_encodedData.append_byte(BIND_OPCODE_SET_DYLIB_ORDINAL_IMM | it->operand1); - break; - case BIND_OPCODE_SET_DYLIB_ORDINAL_ULEB: -- if ( log ) fprintf(stderr, "BIND_OPCODE_SET_DYLIB_ORDINAL_ULEB(%lld)\n", it->operand1); -+ if ( log ) fprintf(stderr, "BIND_OPCODE_SET_DYLIB_ORDINAL_ULEB(%ld)\n", it->operand1); - this->_encodedData.append_byte(BIND_OPCODE_SET_DYLIB_ORDINAL_ULEB); - this->_encodedData.append_uleb128(it->operand1); - break; - case BIND_OPCODE_SET_DYLIB_SPECIAL_IMM: -- if ( log ) fprintf(stderr, "BIND_OPCODE_SET_DYLIB_SPECIAL_IMM(%lld)\n", it->operand1); -+ if ( log ) fprintf(stderr, "BIND_OPCODE_SET_DYLIB_SPECIAL_IMM(%ld)\n", it->operand1); - this->_encodedData.append_byte(BIND_OPCODE_SET_DYLIB_SPECIAL_IMM | (it->operand1 & BIND_IMMEDIATE_MASK)); - break; - case BIND_OPCODE_SET_SYMBOL_TRAILING_FLAGS_IMM: -- if ( log ) fprintf(stderr, "BIND_OPCODE_SET_SYMBOL_TRAILING_FLAGS_IMM(0x%0llX, %s)\n", it->operand1, it->name); -+ if ( log ) fprintf(stderr, "BIND_OPCODE_SET_SYMBOL_TRAILING_FLAGS_IMM(0x%0lX, %s)\n", it->operand1, it->name); - this->_encodedData.append_byte(BIND_OPCODE_SET_SYMBOL_TRAILING_FLAGS_IMM | it->operand1); - this->_encodedData.append_string(it->name); - break; - case BIND_OPCODE_SET_TYPE_IMM: -- if ( log ) fprintf(stderr, "BIND_OPCODE_SET_TYPE_IMM(%lld)\n", it->operand1); -+ if ( log ) fprintf(stderr, "BIND_OPCODE_SET_TYPE_IMM(%ld)\n", it->operand1); - this->_encodedData.append_byte(BIND_OPCODE_SET_TYPE_IMM | it->operand1); - break; - case BIND_OPCODE_SET_ADDEND_SLEB: -- if ( log ) fprintf(stderr, "BIND_OPCODE_SET_ADDEND_SLEB(%lld)\n", it->operand1); -+ if ( log ) fprintf(stderr, "BIND_OPCODE_SET_ADDEND_SLEB(%ld)\n", it->operand1); - this->_encodedData.append_byte(BIND_OPCODE_SET_ADDEND_SLEB); - this->_encodedData.append_sleb128(it->operand1); - break; - case BIND_OPCODE_SET_SEGMENT_AND_OFFSET_ULEB: -- if ( log ) fprintf(stderr, "BIND_OPCODE_SET_SEGMENT_AND_OFFSET_ULEB(%lld, 0x%llX)\n", it->operand1, it->operand2); -+ if ( log ) fprintf(stderr, "BIND_OPCODE_SET_SEGMENT_AND_OFFSET_ULEB(%ld, 0x%lX)\n", it->operand1, it->operand2); - this->_encodedData.append_byte(BIND_OPCODE_SET_SEGMENT_AND_OFFSET_ULEB | it->operand1); - this->_encodedData.append_uleb128(it->operand2); - break; - case BIND_OPCODE_ADD_ADDR_ULEB: -- if ( log ) fprintf(stderr, "BIND_OPCODE_ADD_ADDR_ULEB(0x%llX)\n", it->operand1); -+ if ( log ) fprintf(stderr, "BIND_OPCODE_ADD_ADDR_ULEB(0x%lX)\n", it->operand1); - this->_encodedData.append_byte(BIND_OPCODE_ADD_ADDR_ULEB); - this->_encodedData.append_uleb128(it->operand1); - break; -@@ -575,16 +575,16 @@ - this->_encodedData.append_byte(BIND_OPCODE_DO_BIND); - break; - case BIND_OPCODE_DO_BIND_ADD_ADDR_ULEB: -- if ( log ) fprintf(stderr, "BIND_OPCODE_DO_BIND_ADD_ADDR_ULEB(0x%llX)\n", it->operand1); -+ if ( log ) fprintf(stderr, "BIND_OPCODE_DO_BIND_ADD_ADDR_ULEB(0x%lX)\n", it->operand1); - this->_encodedData.append_byte(BIND_OPCODE_DO_BIND_ADD_ADDR_ULEB); - this->_encodedData.append_uleb128(it->operand1); - break; - case BIND_OPCODE_DO_BIND_ADD_ADDR_IMM_SCALED: -- if ( log ) fprintf(stderr, 
"BIND_OPCODE_DO_BIND_ADD_ADDR_IMM_SCALED(%lld=0x%llX)\n", it->operand1, it->operand1*sizeof(pint_t)); -+ if ( log ) fprintf(stderr, "BIND_OPCODE_DO_BIND_ADD_ADDR_IMM_SCALED(%ld=0x%lX)\n", it->operand1, it->operand1*sizeof(pint_t)); - this->_encodedData.append_byte(BIND_OPCODE_DO_BIND_ADD_ADDR_IMM_SCALED | it->operand1 ); - break; - case BIND_OPCODE_DO_BIND_ULEB_TIMES_SKIPPING_ULEB: -- if ( log ) fprintf(stderr, "BIND_OPCODE_DO_BIND_ULEB_TIMES_SKIPPING_ULEB(%lld, %lld)\n", it->operand1, it->operand2); -+ if ( log ) fprintf(stderr, "BIND_OPCODE_DO_BIND_ULEB_TIMES_SKIPPING_ULEB(%ld, %ld)\n", it->operand1, it->operand2); - this->_encodedData.append_byte(BIND_OPCODE_DO_BIND_ULEB_TIMES_SKIPPING_ULEB); - this->_encodedData.append_uleb128(it->operand1); - this->_encodedData.append_uleb128(it->operand2); -@@ -772,39 +772,39 @@ - done = true; - break; - case BIND_OPCODE_SET_DYLIB_ORDINAL_IMM: -- if ( log ) fprintf(stderr, "BIND_OPCODE_SET_DYLIB_ORDINAL_IMM(%lld)\n", it->operand1); -+ if ( log ) fprintf(stderr, "BIND_OPCODE_SET_DYLIB_ORDINAL_IMM(%ld)\n", it->operand1); - this->_encodedData.append_byte(BIND_OPCODE_SET_DYLIB_ORDINAL_IMM | it->operand1); - break; - case BIND_OPCODE_SET_DYLIB_ORDINAL_ULEB: -- if ( log ) fprintf(stderr, "BIND_OPCODE_SET_DYLIB_ORDINAL_ULEB(%lld)\n", it->operand1); -+ if ( log ) fprintf(stderr, "BIND_OPCODE_SET_DYLIB_ORDINAL_ULEB(%ld)\n", it->operand1); - this->_encodedData.append_byte(BIND_OPCODE_SET_DYLIB_ORDINAL_ULEB); - this->_encodedData.append_uleb128(it->operand1); - break; - case BIND_OPCODE_SET_DYLIB_SPECIAL_IMM: -- if ( log ) fprintf(stderr, "BIND_OPCODE_SET_DYLIB_SPECIAL_IMM(%lld)\n", it->operand1); -+ if ( log ) fprintf(stderr, "BIND_OPCODE_SET_DYLIB_SPECIAL_IMM(%ld)\n", it->operand1); - this->_encodedData.append_byte(BIND_OPCODE_SET_DYLIB_SPECIAL_IMM | (it->operand1 & BIND_IMMEDIATE_MASK)); - break; - case BIND_OPCODE_SET_SYMBOL_TRAILING_FLAGS_IMM: -- if ( log ) fprintf(stderr, "BIND_OPCODE_SET_SYMBOL_TRAILING_FLAGS_IMM(0x%0llX, %s)\n", it->operand1, it->name); -+ if ( log ) fprintf(stderr, "BIND_OPCODE_SET_SYMBOL_TRAILING_FLAGS_IMM(0x%0lX, %s)\n", it->operand1, it->name); - this->_encodedData.append_byte(BIND_OPCODE_SET_SYMBOL_TRAILING_FLAGS_IMM | it->operand1); - this->_encodedData.append_string(it->name); - break; - case BIND_OPCODE_SET_TYPE_IMM: -- if ( log ) fprintf(stderr, "BIND_OPCODE_SET_TYPE_IMM(%lld)\n", it->operand1); -+ if ( log ) fprintf(stderr, "BIND_OPCODE_SET_TYPE_IMM(%ld)\n", it->operand1); - this->_encodedData.append_byte(BIND_OPCODE_SET_TYPE_IMM | it->operand1); - break; - case BIND_OPCODE_SET_ADDEND_SLEB: -- if ( log ) fprintf(stderr, "BIND_OPCODE_SET_ADDEND_SLEB(%lld)\n", it->operand1); -+ if ( log ) fprintf(stderr, "BIND_OPCODE_SET_ADDEND_SLEB(%ld)\n", it->operand1); - this->_encodedData.append_byte(BIND_OPCODE_SET_ADDEND_SLEB); - this->_encodedData.append_sleb128(it->operand1); - break; - case BIND_OPCODE_SET_SEGMENT_AND_OFFSET_ULEB: -- if ( log ) fprintf(stderr, "BIND_OPCODE_SET_SEGMENT_AND_OFFSET_ULEB(%lld, 0x%llX)\n", it->operand1, it->operand2); -+ if ( log ) fprintf(stderr, "BIND_OPCODE_SET_SEGMENT_AND_OFFSET_ULEB(%ld, 0x%lX)\n", it->operand1, it->operand2); - this->_encodedData.append_byte(BIND_OPCODE_SET_SEGMENT_AND_OFFSET_ULEB | it->operand1); - this->_encodedData.append_uleb128(it->operand2); - break; - case BIND_OPCODE_ADD_ADDR_ULEB: -- if ( log ) fprintf(stderr, "BIND_OPCODE_ADD_ADDR_ULEB(0x%llX)\n", it->operand1); -+ if ( log ) fprintf(stderr, "BIND_OPCODE_ADD_ADDR_ULEB(0x%lX)\n", it->operand1); - 
this->_encodedData.append_byte(BIND_OPCODE_ADD_ADDR_ULEB); - this->_encodedData.append_uleb128(it->operand1); - break; -@@ -813,16 +813,16 @@ - this->_encodedData.append_byte(BIND_OPCODE_DO_BIND); - break; - case BIND_OPCODE_DO_BIND_ADD_ADDR_ULEB: -- if ( log ) fprintf(stderr, "BIND_OPCODE_DO_BIND_ADD_ADDR_ULEB(0x%llX)\n", it->operand1); -+ if ( log ) fprintf(stderr, "BIND_OPCODE_DO_BIND_ADD_ADDR_ULEB(0x%lX)\n", it->operand1); - this->_encodedData.append_byte(BIND_OPCODE_DO_BIND_ADD_ADDR_ULEB); - this->_encodedData.append_uleb128(it->operand1); - break; - case BIND_OPCODE_DO_BIND_ADD_ADDR_IMM_SCALED: -- if ( log ) fprintf(stderr, "BIND_OPCODE_DO_BIND_ADD_ADDR_IMM_SCALED(%lld=0x%llX)\n", it->operand1, it->operand1*sizeof(pint_t)); -+ if ( log ) fprintf(stderr, "BIND_OPCODE_DO_BIND_ADD_ADDR_IMM_SCALED(%ld=0x%lX)\n", it->operand1, it->operand1*sizeof(pint_t)); - this->_encodedData.append_byte(BIND_OPCODE_DO_BIND_ADD_ADDR_IMM_SCALED | it->operand1 ); - break; - case BIND_OPCODE_DO_BIND_ULEB_TIMES_SKIPPING_ULEB: -- if ( log ) fprintf(stderr, "BIND_OPCODE_DO_BIND_ULEB_TIMES_SKIPPING_ULEB(%lld, %lld)\n", it->operand1, it->operand2); -+ if ( log ) fprintf(stderr, "BIND_OPCODE_DO_BIND_ULEB_TIMES_SKIPPING_ULEB(%ld, %ld)\n", it->operand1, it->operand2); - this->_encodedData.append_byte(BIND_OPCODE_DO_BIND_ULEB_TIMES_SKIPPING_ULEB); - this->_encodedData.append_uleb128(it->operand1); - this->_encodedData.append_uleb128(it->operand2); -@@ -1018,7 +1018,7 @@ - entry.importName = atom->name(); - } - entries.push_back(entry); -- //fprintf(stderr, "re-export %s from lib %llu as %s\n", entry.importName, entry.other, entry.name); -+ //fprintf(stderr, "re-export %s from lib %lu as %s\n", entry.importName, entry.other, entry.name); - } - else if ( atom->definition() == ld::Atom::definitionAbsolute ) { - entry.name = atom->name(); -@@ -1136,7 +1136,7 @@ - _64bitPointerLocations.push_back(address); - break; - default: -- warning("codegen at address 0x%08llX prevents image from working in dyld shared cache", address); -+ warning("codegen at address 0x%08lX prevents image from working in dyld shared cache", address); - break; - } - } -@@ -1152,7 +1152,7 @@ - _32bitPointerLocations.push_back(address); - break; - default: -- warning("codegen at address 0x%08llX prevents image from working in dyld shared cache", address); -+ warning("codegen at address 0x%08lX prevents image from working in dyld shared cache", address); - break; - } - } -@@ -1179,7 +1179,7 @@ - _thumbHi16Locations[extra].push_back(address); - break; - default: -- warning("codegen at address 0x%08llX prevents image from working in dyld shared cache", address); -+ warning("codegen at address 0x%08lX prevents image from working in dyld shared cache", address); - break; - } - } -@@ -1207,7 +1207,7 @@ - _64bitPointerLocations.push_back(address); - break; - default: -- warning("codegen at address 0x%08llX prevents image from working in dyld shared cache", address); -+ warning("codegen at address 0x%08lX prevents image from working in dyld shared cache", address); - break; - } - } -@@ -1219,9 +1219,9 @@ - pint_t addr = this->_options.baseAddress(); - for(typename std::vector::const_iterator it = locations.begin(); it != locations.end(); ++it) { - pint_t nextAddr = *it; -- //fprintf(stderr, "nextAddr=0x%0llX\n", (uint64_t)nextAddr); -+ //fprintf(stderr, "nextAddr=0x%0lX\n", (uint64_t)nextAddr); - uint64_t delta = nextAddr - addr; -- //fprintf(stderr, "delta=0x%0llX\n", delta); -+ //fprintf(stderr, "delta=0x%0lX\n", delta); - if ( delta == 0 ) - throw "double 
split seg info for same address"; - // uleb128 encode -diff -ur cctools-port-c1cc758/cctools/ld64/src/ld/Options.cpp cctools-port-format/cctools/ld64/src/ld/Options.cpp ---- cctools-port-c1cc758/cctools/ld64/src/ld/Options.cpp 2017-10-01 13:47:04.000000000 -0700 -+++ cctools-port-format/cctools/ld64/src/ld/Options.cpp 2017-11-10 19:13:05.914926185 -0800 -@@ -2622,7 +2622,7 @@ - fBaseAddress = parseAddress(address); - uint64_t temp = ((fBaseAddress+fSegmentAlignment-1) & (-fSegmentAlignment)); - if ( fBaseAddress != temp ) { -- warning("-seg1addr not %lld byte aligned, rounding up", fSegmentAlignment); -+ warning("-seg1addr not %ld byte aligned, rounding up", fSegmentAlignment); - fBaseAddress = temp; - } - cannotBeUsedWithBitcode(arg); -@@ -2886,7 +2886,7 @@ - seg.address = parseAddress(argv[++i]); - uint64_t temp = ((seg.address+fSegmentAlignment-1) & (-fSegmentAlignment)); - if ( seg.address != temp ) -- warning("-segaddr %s not %lld byte aligned", seg.name, fSegmentAlignment); -+ warning("-segaddr %s not %ld byte aligned", seg.name, fSegmentAlignment); - fCustomSegmentAddresses.push_back(seg); - cannotBeUsedWithBitcode(arg); - } -@@ -4572,7 +4572,7 @@ - } - // range check -seg1addr for ARM - if ( fBaseAddress > fMaxAddress ) { -- warning("ignoring -seg1addr 0x%08llX. Address out of range.", fBaseAddress); -+ warning("ignoring -seg1addr 0x%08lX. Address out of range.", fBaseAddress); - fBaseAddress = 0; - } - break; -@@ -5207,7 +5207,7 @@ - // Make sure -image_base matches alignment - uint64_t alignedBaseAddress = (fBaseAddress+fSegmentAlignment-1) & (-fSegmentAlignment); - if ( alignedBaseAddress != fBaseAddress ) { -- warning("base address 0x%llX is not properly aligned. Changing it to 0x%llX", fBaseAddress, alignedBaseAddress); -+ warning("base address 0x%lX is not properly aligned. 
Changing it to 0x%lX", fBaseAddress, alignedBaseAddress); - fBaseAddress = alignedBaseAddress; - } - -@@ -5401,7 +5401,7 @@ - break; - } - if ( (fStackSize & (-fSegmentAlignment)) != fStackSize ) -- throwf("-stack_size must be multiple of segment alignment (%lldKB)", fSegmentAlignment/1024); -+ throwf("-stack_size must be multiple of segment alignment (%ldKB)", fSegmentAlignment/1024); - switch ( fOutputKind ) { - case Options::kDynamicExecutable: - case Options::kStaticExecutable: -@@ -5416,7 +5416,7 @@ - throw "-stack_size option can only be used when linking a main executable"; - } - if ( fStackSize > fStackAddr ) -- throwf("-stack_size (0x%08llX) must be smaller than -stack_addr (0x%08llX)", fStackSize, fStackAddr); -+ throwf("-stack_size (0x%08lX) must be smaller than -stack_addr (0x%08lX)", fStackSize, fStackAddr); - } - - // check that -allow_stack_execute is only used with main executables -@@ -5582,7 +5582,7 @@ - if ( fZeroPageSize != ULLONG_MAX ) { - for (std::vector::iterator it = fCustomSegmentAddresses.begin(); it != fCustomSegmentAddresses.end(); ++it) { - if ( it->address < fZeroPageSize ) -- throwf("-segaddr %s 0x%llX conflicts with -pagezero_size", it->name, it->address); -+ throwf("-segaddr %s 0x%lX conflicts with -pagezero_size", it->name, it->address); - } - } - // verify no duplicates -diff -ur cctools-port-c1cc758/cctools/ld64/src/ld/OutputFile.cpp cctools-port-format/cctools/ld64/src/ld/OutputFile.cpp ---- cctools-port-c1cc758/cctools/ld64/src/ld/OutputFile.cpp 2017-10-01 13:47:04.000000000 -0700 -+++ cctools-port-format/cctools/ld64/src/ld/OutputFile.cpp 2017-11-10 19:13:05.918259550 -0800 -@@ -132,13 +132,13 @@ - { - fprintf(stderr, "SORTED:\n"); - for (std::vector::iterator it = state.sections.begin(); it != state.sections.end(); ++it) { -- fprintf(stderr, "final section %p %s/%s %s start addr=0x%08llX, size=0x%08llX, alignment=%02d, fileOffset=0x%08llX\n", -+ fprintf(stderr, "final section %p %s/%s %s start addr=0x%08lX, size=0x%08lX, alignment=%02d, fileOffset=0x%08lX\n", - (*it), (*it)->segmentName(), (*it)->sectionName(), (*it)->isSectionHidden() ? 
"(hidden)" : "", - (*it)->address, (*it)->size, (*it)->alignment, (*it)->fileOffset); - if ( printAtoms ) { - std::vector& atoms = (*it)->atoms; - for (std::vector::iterator ait = atoms.begin(); ait != atoms.end(); ++ait) { -- fprintf(stderr, " %p (0x%04llX) %s\n", *ait, (*ait)->size(), (*ait)->name()); -+ fprintf(stderr, " %p (0x%04lX) %s\n", *ait, (*ait)->size(), (*ait)->name()); - } - } - } -@@ -179,7 +179,7 @@ - ld::Internal::FinalSection* sect = *it; - if ( (segFirstSection == NULL ) || strcmp(segFirstSection->segmentName(), sect->segmentName()) != 0 ) { - if ( segFirstSection != NULL ) { -- //fprintf(stderr, "findSegment(0x%llX) seg changed to %s\n", addr, sect->segmentName()); -+ //fprintf(stderr, "findSegment(0x%lX) seg changed to %s\n", addr, sect->segmentName()); - if ( (addr >= segFirstSection->address) && (addr < lastSection->address+lastSection->size) ) { - *start = segFirstSection->address; - *end = lastSection->address+lastSection->size; -@@ -219,7 +219,7 @@ - break; - default: - (const_cast(atom))->setSectionStartAddress(sect->address); -- if ( log ) fprintf(stderr, " atom=%p, addr=0x%08llX, name=%s\n", atom, atom->finalAddress(), atom->name()); -+ if ( log ) fprintf(stderr, " atom=%p, addr=0x%08lX, name=%s\n", atom, atom->finalAddress(), atom->name()); - break; - } - } -@@ -435,7 +435,7 @@ - switch ( atom.symbolTableInclusion() ) { - case ld::Atom::symbolTableNotIn: - case ld::Atom::symbolTableNotInFinalLinkedImages: -- sprintf(buffer, "%s@0x%08llX", atom.name(), atom.objectAddress()); -+ sprintf(buffer, "%s@0x%08lX", atom.name(), atom.objectAddress()); - break; - case ld::Atom::symbolTableIn: - case ld::Atom::symbolTableInAndNeverStrip: -@@ -598,7 +598,7 @@ - for (std::vector::iterator it = state.sections.begin(); it != state.sections.end(); ++it) { - if ( (*it)->isSectionHidden() ) - continue; -- fprintf(stderr, " %s/%s addr=0x%08llX, size=0x%08llX, fileOffset=0x%08llX, type=%d\n", -+ fprintf(stderr, " %s/%s addr=0x%08lX, size=0x%08lX, fileOffset=0x%08lX, type=%d\n", - (*it)->segmentName(), (*it)->sectionName(), - (*it)->address, (*it)->size, (*it)->fileOffset, (*it)->type()); - } -@@ -612,7 +612,7 @@ - printSectionLayout(state); - - const ld::Atom* target; -- throwf("8-bit reference out of range (%lld max is +/-127B): from %s (0x%08llX) to %s (0x%08llX)", -+ throwf("8-bit reference out of range (%ld max is +/-127B): from %s (0x%08lX) to %s (0x%08lX)", - displacement, atom->name(), atom->finalAddress(), referenceTargetAtomName(state, fixup), - addressOf(state, fixup, &target)); - } -@@ -626,7 +626,7 @@ - printSectionLayout(state); - - const ld::Atom* target; -- throwf("16-bit reference out of range (%lld max is +/-32KB): from %s (0x%08llX) to %s (0x%08llX)", -+ throwf("16-bit reference out of range (%ld max is +/-32KB): from %s (0x%08lX) to %s (0x%08lX)", - displacement, atom->name(), atom->finalAddress(), referenceTargetAtomName(state, fixup), - addressOf(state, fixup, &target)); - } -@@ -640,7 +640,7 @@ - printSectionLayout(state); - - const ld::Atom* target; -- throwf("32-bit branch out of range (%lld max is +/-2GB): from %s (0x%08llX) to %s (0x%08llX)", -+ throwf("32-bit branch out of range (%ld max is +/-2GB): from %s (0x%08lX) to %s (0x%08lX)", - displacement, atom->name(), atom->finalAddress(), referenceTargetAtomName(state, fixup), - addressOf(state, fixup, &target)); - } -@@ -659,7 +659,7 @@ - if ( (_options.architecture() == CPU_TYPE_ARM) || (_options.architecture() == CPU_TYPE_I386) ) { - // Unlikely userland code does funky stuff like this, so warn for them, 
but not warn for -preload or -static - if ( (_options.outputKind() != Options::kPreload) && (_options.outputKind() != Options::kStaticExecutable) ) { -- warning("32-bit absolute address out of range (0x%08llX max is 4GB): from %s + 0x%08X (0x%08llX) to 0x%08llX", -+ warning("32-bit absolute address out of range (0x%08lX max is 4GB): from %s + 0x%08X (0x%08lX) to 0x%08lX", - displacement, atom->name(), fixup->offsetInAtom, atom->finalAddress(), displacement); - } - return; -@@ -669,10 +669,10 @@ - - const ld::Atom* target; - if ( fixup->binding == ld::Fixup::bindingNone ) -- throwf("32-bit absolute address out of range (0x%08llX max is 4GB): from %s + 0x%08X (0x%08llX) to 0x%08llX", -+ throwf("32-bit absolute address out of range (0x%08lX max is 4GB): from %s + 0x%08X (0x%08lX) to 0x%08lX", - displacement, atom->name(), fixup->offsetInAtom, atom->finalAddress(), displacement); - else -- throwf("32-bit absolute address out of range (0x%08llX max is 4GB): from %s + 0x%08X (0x%08llX) to %s (0x%08llX)", -+ throwf("32-bit absolute address out of range (0x%08lX max is 4GB): from %s + 0x%08X (0x%08lX) to %s (0x%08lX)", - displacement, atom->name(), fixup->offsetInAtom, atom->finalAddress(), referenceTargetAtomName(state, fixup), - addressOf(state, fixup, &target)); - } -@@ -687,7 +687,7 @@ - printSectionLayout(state); - - const ld::Atom* target; -- throwf("32-bit RIP relative reference out of range (%lld max is +/-4GB): from %s (0x%08llX) to %s (0x%08llX)", -+ throwf("32-bit RIP relative reference out of range (%ld max is +/-4GB): from %s (0x%08lX) to %s (0x%08lX)", - displacement, atom->name(), atom->finalAddress(), referenceTargetAtomName(state, fixup), - addressOf(state, fixup, &target)); - } -@@ -700,7 +700,7 @@ - printSectionLayout(state); - - const ld::Atom* target; -- throwf("ARM ldr 12-bit displacement out of range (%lld max is +/-4096B): from %s (0x%08llX) to %s (0x%08llX)", -+ throwf("ARM ldr 12-bit displacement out of range (%ld max is +/-4096B): from %s (0x%08lX) to %s (0x%08lX)", - displacement, atom->name(), atom->finalAddress(), referenceTargetAtomName(state, fixup), - addressOf(state, fixup, &target)); - } -@@ -720,7 +720,7 @@ - printSectionLayout(state); - - const ld::Atom* target; -- throwf("b/bl/blx ARM branch out of range (%lld max is +/-32MB): from %s (0x%08llX) to %s (0x%08llX)", -+ throwf("b/bl/blx ARM branch out of range (%ld max is +/-32MB): from %s (0x%08lX) to %s (0x%08lX)", - displacement, atom->name(), atom->finalAddress(), referenceTargetAtomName(state, fixup), - addressOf(state, fixup, &target)); - } -@@ -752,12 +752,12 @@ - - const ld::Atom* target; - if ( _options.preferSubArchitecture() && _options.archSupportsThumb2() ) { -- throwf("b/bl/blx thumb2 branch out of range (%lld max is +/-16MB): from %s (0x%08llX) to %s (0x%08llX)", -+ throwf("b/bl/blx thumb2 branch out of range (%ld max is +/-16MB): from %s (0x%08lX) to %s (0x%08lX)", - displacement, atom->name(), atom->finalAddress(), referenceTargetAtomName(state, fixup), - addressOf(state, fixup, &target)); - } - else { -- throwf("b/bl/blx thumb1 branch out of range (%lld max is +/-4MB): from %s (0x%08llX) to %s (0x%08llX)", -+ throwf("b/bl/blx thumb1 branch out of range (%ld max is +/-4MB): from %s (0x%08lX) to %s (0x%08lX)", - displacement, atom->name(), atom->finalAddress(), referenceTargetAtomName(state, fixup), - addressOf(state, fixup, &target)); - } -@@ -772,7 +772,7 @@ - printSectionLayout(state); - - const ld::Atom* target; -- throwf("b(l) ARM64 branch out of range (%lld max is +/-128MB): from %s 
(0x%08llX) to %s (0x%08llX)", -+ throwf("b(l) ARM64 branch out of range (%ld max is +/-128MB): from %s (0x%08lX) to %s (0x%08lX)", - displacement, atom->name(), atom->finalAddress(), referenceTargetAtomName(state, fixup), - addressOf(state, fixup, &target)); - } -@@ -786,7 +786,7 @@ - printSectionLayout(state); - - const ld::Atom* target; -- throwf("ARM64 ADRP out of range (%lld max is +/-4GB): from %s (0x%08llX) to %s (0x%08llX)", -+ throwf("ARM64 ADRP out of range (%ld max is +/-4GB): from %s (0x%08lX) to %s (0x%08lX)", - displacement, atom->name(), atom->finalAddress(), referenceTargetAtomName(state, fixup), - addressOf(state, fixup, &target)); - } -@@ -1871,7 +1871,7 @@ - uint32_t nextDisp = (j1 << 13) | (j2 << 11) | imm11; - uint32_t firstDisp = (s << 10) | imm10; - newInstruction = instruction | (nextDisp << 16) | firstDisp; -- //warning("s=%d, j1=%d, j2=%d, imm10=0x%0X, imm11=0x%0X, instruction=0x%08X, first=0x%04X, next=0x%04X, new=0x%08X, disp=0x%llX for %s to %s\n", -+ //warning("s=%d, j1=%d, j2=%d, imm10=0x%0X, imm11=0x%0X, instruction=0x%08X, first=0x%04X, next=0x%04X, new=0x%08X, disp=0x%lX for %s to %s\n", - // s, j1, j2, imm10, imm11, instruction, firstDisp, nextDisp, newInstruction, delta, atom->name(), toTarget->name()); - set32LE(fixUpLocation, newInstruction); - } -@@ -2003,7 +2003,7 @@ - // vector and byte LDR/STR have same "size" bits, need to check other bits to differenciate - implictShift = 4; - if ( (offset & 0xF) != 0 ) { -- throwf("128-bit LDR/STR not 16-byte aligned: from %s (0x%08llX) to %s (0x%08llX)", -+ throwf("128-bit LDR/STR not 16-byte aligned: from %s (0x%08lX) to %s (0x%08lX)", - atom->name(), atom->finalAddress(), referenceTargetAtomName(state, fit), - addressOf(state, fit, &toTarget)); - } -@@ -2011,21 +2011,21 @@ - break; - case 1: - if ( (offset & 0x1) != 0 ) { -- throwf("16-bit LDR/STR not 2-byte aligned: from %s (0x%08llX) to %s (0x%08llX)", -+ throwf("16-bit LDR/STR not 2-byte aligned: from %s (0x%08lX) to %s (0x%08lX)", - atom->name(), atom->finalAddress(), referenceTargetAtomName(state, fit), - addressOf(state, fit, &toTarget)); - } - break; - case 2: - if ( (offset & 0x3) != 0 ) { -- throwf("32-bit LDR/STR not 4-byte aligned: from %s (0x%08llX) to %s (0x%08llX)", -+ throwf("32-bit LDR/STR not 4-byte aligned: from %s (0x%08lX) to %s (0x%08lX)", - atom->name(), atom->finalAddress(), referenceTargetAtomName(state, fit), - addressOf(state, fit, &toTarget)); - } - break; - case 3: - if ( (offset & 0x7) != 0 ) { -- throwf("64-bit LDR/STR not 8-byte aligned: from %s (0x%08llX) to %s (0x%08llX)", -+ throwf("64-bit LDR/STR not 8-byte aligned: from %s (0x%08lX) to %s (0x%08lX)", - atom->name(), atom->finalAddress(), referenceTargetAtomName(state, fit), - addressOf(state, fit, &toTarget)); - } -@@ -2177,11 +2177,11 @@ - set32LE(infoA.instructionContent, makeNOP()); - set32LE(infoB.instructionContent, makeLDR_literal(ldrInfoB, infoA.targetAddress, infoB.instructionAddress)); - if ( _options.verboseOptimizationHints() ) -- fprintf(stderr, "adrp-ldr at 0x%08llX transformed to LDR literal, usableSegment=%d usableSegment\n", infoB.instructionAddress, usableSegment); -+ fprintf(stderr, "adrp-ldr at 0x%08lX transformed to LDR literal, usableSegment=%d usableSegment\n", infoB.instructionAddress, usableSegment); - } - else { - if ( _options.verboseOptimizationHints() ) -- fprintf(stderr, "adrp-ldr at 0x%08llX not transformed, isLDR=%d, literalableSize=%d, inRange=%d, usableSegment=%d, scaledOffset=%d\n", -+ fprintf(stderr, "adrp-ldr at 0x%08lX not transformed, 
isLDR=%d, literalableSize=%d, inRange=%d, usableSegment=%d, scaledOffset=%d\n", - infoB.instructionAddress, isLDR, literalableSize, withinOneMeg(infoB.instructionAddress, infoA.targetAddress), usableSegment, ldrInfoB.offset); - } - break; -@@ -2208,7 +2208,7 @@ - set32LE(infoB.instructionContent, makeNOP()); - set32LE(infoC.instructionContent, makeLDR_literal(ldrInfoC, infoA.targetAddress+ldrInfoC.offset, infoC.instructionAddress)); - if ( _options.verboseOptimizationHints() ) { -- fprintf(stderr, "adrp-add-ldr at 0x%08llX T1 transformed to LDR literal\n", infoC.instructionAddress); -+ fprintf(stderr, "adrp-add-ldr at 0x%08lX T1 transformed to LDR literal\n", infoC.instructionAddress); - } - } - else if ( usableSegment && withinOneMeg(infoA.instructionAddress, infoA.targetAddress+ldrInfoC.offset) ) { -@@ -2219,7 +2219,7 @@ - set32LE(infoC.instructionContent, makeLoadOrStore(ldrInfoC)); - set32LE(infoC.instructionContent, infoC.instruction & 0xFFC003FF); - if ( _options.verboseOptimizationHints() ) -- fprintf(stderr, "adrp-add-ldr at 0x%08llX T4 transformed to ADR/LDR\n", infoB.instructionAddress); -+ fprintf(stderr, "adrp-add-ldr at 0x%08lX T4 transformed to ADR/LDR\n", infoB.instructionAddress); - } - else if ( ((infoB.targetAddress % ldrInfoC.size) == 0) && (ldrInfoC.offset == 0) ) { - // can do T2 transformation by merging ADD into LD -@@ -2228,11 +2228,11 @@ - ldrInfoC.offset += addInfoB.addend; - set32LE(infoC.instructionContent, makeLoadOrStore(ldrInfoC)); - if ( _options.verboseOptimizationHints() ) -- fprintf(stderr, "adrp-add-ldr at 0x%08llX T2 transformed to ADRP/LDR \n", infoC.instructionAddress); -+ fprintf(stderr, "adrp-add-ldr at 0x%08lX T2 transformed to ADRP/LDR \n", infoC.instructionAddress); - } - else { - if ( _options.verboseOptimizationHints() ) -- fprintf(stderr, "adrp-add-ldr at 0x%08llX could not be transformed, loadSize=%d, literalableSize=%d, inRange=%d, usableSegment=%d, targetFourByteAligned=%d, imm12=%d\n", -+ fprintf(stderr, "adrp-add-ldr at 0x%08lX could not be transformed, loadSize=%d, literalableSize=%d, inRange=%d, usableSegment=%d, targetFourByteAligned=%d, imm12=%d\n", - infoC.instructionAddress, ldrInfoC.size, literalableSize, withinOneMeg(infoC.instructionAddress, infoA.targetAddress+ldrInfoC.offset), usableSegment, targetFourByteAligned, ldrInfoC.offset); - } - break; -@@ -2252,11 +2252,11 @@ - set32LE(infoA.instructionContent, makeADR(addInfoB.destReg, infoA.targetAddress, infoA.instructionAddress)); - set32LE(infoB.instructionContent, makeNOP()); - if ( _options.verboseOptimizationHints() ) -- fprintf(stderr, "adrp-add at 0x%08llX transformed to ADR\n", infoB.instructionAddress); -+ fprintf(stderr, "adrp-add at 0x%08lX transformed to ADR\n", infoB.instructionAddress); - } - else { - if ( _options.verboseOptimizationHints() ) -- fprintf(stderr, "adrp-add at 0x%08llX not transformed, isAdd=%d, inRange=%d, usableSegment=%d\n", -+ fprintf(stderr, "adrp-add at 0x%08lX not transformed, isAdd=%d, inRange=%d, usableSegment=%d\n", - infoB.instructionAddress, isADD, withinOneMeg(infoA.targetAddress, infoA.instructionAddress), usableSegment); - } - break; -@@ -2278,19 +2278,19 @@ - LOH_ASSERT(ldrInfoB.size == 8); - LOH_ASSERT(!ldrInfoB.isFloat); - LOH_ASSERT(ldrInfoC.baseReg == ldrInfoB.reg); -- //fprintf(stderr, "infoA.target=%p, %s, infoA.targetAddress=0x%08llX\n", infoA.target, infoA.target->name(), infoA.targetAddress); -+ //fprintf(stderr, "infoA.target=%p, %s, infoA.targetAddress=0x%08lX\n", infoA.target, infoA.target->name(), infoA.targetAddress); - 
targetFourByteAligned = ( ((infoA.targetAddress + ldrInfoC.offset) & 0x3) == 0 ); - if ( usableSegment && targetFourByteAligned && withinOneMeg(infoB.instructionAddress, infoA.targetAddress + ldrInfoC.offset) ) { - // can do T5 transform - set32LE(infoA.instructionContent, makeNOP()); - set32LE(infoB.instructionContent, makeLDR_literal(ldrInfoB, infoA.targetAddress, infoB.instructionAddress)); - if ( _options.verboseOptimizationHints() ) { -- fprintf(stderr, "adrp-ldr-got-ldr at 0x%08llX T5 transformed to LDR literal of GOT plus LDR\n", infoC.instructionAddress); -+ fprintf(stderr, "adrp-ldr-got-ldr at 0x%08lX T5 transformed to LDR literal of GOT plus LDR\n", infoC.instructionAddress); - } - } - else { - if ( _options.verboseOptimizationHints() ) -- fprintf(stderr, "adrp-ldr-got-ldr at 0x%08llX no optimization done\n", infoC.instructionAddress); -+ fprintf(stderr, "adrp-ldr-got-ldr at 0x%08lX no optimization done\n", infoC.instructionAddress); - } - } - else if ( isADD ) { -@@ -2305,7 +2305,7 @@ - set32LE(infoB.instructionContent, makeNOP()); - set32LE(infoC.instructionContent, makeLDR_literal(ldrInfoC, infoA.targetAddress + ldrInfoC.offset, infoC.instructionAddress)); - if ( _options.verboseOptimizationHints() ) -- fprintf(stderr, "adrp-ldr-got-ldr at 0x%08llX T1 transformed to LDR literal\n", infoC.instructionAddress); -+ fprintf(stderr, "adrp-ldr-got-ldr at 0x%08lX T1 transformed to LDR literal\n", infoC.instructionAddress); - } - else if ( usableSegment && withinOneMeg(infoA.instructionAddress, infoA.targetAddress) ) { - // can do T4 transform -@@ -2313,7 +2313,7 @@ - set32LE(infoB.instructionContent, makeNOP()); - set32LE(infoC.instructionContent, makeLoadOrStore(ldrInfoC)); - if ( _options.verboseOptimizationHints() ) { -- fprintf(stderr, "adrp-ldr-got-ldr at 0x%08llX T4 transformed to ADR/LDR\n", infoC.instructionAddress); -+ fprintf(stderr, "adrp-ldr-got-ldr at 0x%08lX T4 transformed to ADR/LDR\n", infoC.instructionAddress); - } - } - else if ( ((infoA.targetAddress % ldrInfoC.size) == 0) && ((addInfoB.addend + ldrInfoC.offset) < 4096) ) { -@@ -2323,19 +2323,19 @@ - ldrInfoC.offset += addInfoB.addend; - set32LE(infoC.instructionContent, makeLoadOrStore(ldrInfoC)); - if ( _options.verboseOptimizationHints() ) { -- fprintf(stderr, "adrp-ldr-got-ldr at 0x%08llX T2 transformed to ADRP/NOP/LDR\n", infoC.instructionAddress); -+ fprintf(stderr, "adrp-ldr-got-ldr at 0x%08lX T2 transformed to ADRP/NOP/LDR\n", infoC.instructionAddress); - } - } - else { - // T3 transform already done by ld::passes:got:doPass() - if ( _options.verboseOptimizationHints() ) { -- fprintf(stderr, "adrp-ldr-got-ldr at 0x%08llX T3 transformed to ADRP/ADD/LDR\n", infoC.instructionAddress); -+ fprintf(stderr, "adrp-ldr-got-ldr at 0x%08lX T3 transformed to ADRP/ADD/LDR\n", infoC.instructionAddress); - } - } - } - else { - if ( _options.verboseOptimizationHints() ) -- fprintf(stderr, "adrp-ldr-got-ldr at 0x%08llX not ADD or LDR\n", infoC.instructionAddress); -+ fprintf(stderr, "adrp-ldr-got-ldr at 0x%08lX not ADD or LDR\n", infoC.instructionAddress); - } - break; - case LOH_ARM64_ADRP_ADD_STR: -@@ -2361,7 +2361,7 @@ - set32LE(infoC.instructionContent, makeLoadOrStore(ldrInfoC)); - set32LE(infoC.instructionContent, infoC.instruction & 0xFFC003FF); - if ( _options.verboseOptimizationHints() ) -- fprintf(stderr, "adrp-add-str at 0x%08llX T4 transformed to ADR/STR\n", infoB.instructionAddress); -+ fprintf(stderr, "adrp-add-str at 0x%08lX T4 transformed to ADR/STR\n", infoB.instructionAddress); - } - else if ( 
((infoB.targetAddress % ldrInfoC.size) == 0) && (ldrInfoC.offset == 0) ) { - // can do T2 transformation by merging ADD into STR -@@ -2370,11 +2370,11 @@ - ldrInfoC.offset += addInfoB.addend; - set32LE(infoC.instructionContent, makeLoadOrStore(ldrInfoC)); - if ( _options.verboseOptimizationHints() ) -- fprintf(stderr, "adrp-add-str at 0x%08llX T2 transformed to ADRP/STR \n", infoC.instructionAddress); -+ fprintf(stderr, "adrp-add-str at 0x%08lX T2 transformed to ADRP/STR \n", infoC.instructionAddress); - } - else { - if ( _options.verboseOptimizationHints() ) -- fprintf(stderr, "adrp-add-str at 0x%08llX could not be transformed, loadSize=%d, inRange=%d, usableSegment=%d, imm12=%d\n", -+ fprintf(stderr, "adrp-add-str at 0x%08lX could not be transformed, loadSize=%d, inRange=%d, usableSegment=%d, imm12=%d\n", - infoC.instructionAddress, ldrInfoC.size, withinOneMeg(infoC.instructionAddress, infoA.targetAddress+ldrInfoC.offset), usableSegment, ldrInfoC.offset); - } - break; -@@ -2402,12 +2402,12 @@ - set32LE(infoA.instructionContent, makeNOP()); - set32LE(infoB.instructionContent, makeLDR_literal(ldrInfoB, infoA.targetAddress, infoB.instructionAddress)); - if ( _options.verboseOptimizationHints() ) { -- fprintf(stderr, "adrp-ldr-got-str at 0x%08llX T5 transformed to LDR literal of GOT plus STR\n", infoC.instructionAddress); -+ fprintf(stderr, "adrp-ldr-got-str at 0x%08lX T5 transformed to LDR literal of GOT plus STR\n", infoC.instructionAddress); - } - } - else { - if ( _options.verboseOptimizationHints() ) -- fprintf(stderr, "adrp-ldr-got-str at 0x%08llX no optimization done\n", infoC.instructionAddress); -+ fprintf(stderr, "adrp-ldr-got-str at 0x%08lX no optimization done\n", infoC.instructionAddress); - } - } - else if ( isADD ) { -@@ -2422,7 +2422,7 @@ - set32LE(infoB.instructionContent, makeNOP()); - set32LE(infoC.instructionContent, makeLoadOrStore(ldrInfoC)); - if ( _options.verboseOptimizationHints() ) { -- fprintf(stderr, "adrp-ldr-got-str at 0x%08llX T4 transformed to ADR/STR\n", infoC.instructionAddress); -+ fprintf(stderr, "adrp-ldr-got-str at 0x%08lX T4 transformed to ADR/STR\n", infoC.instructionAddress); - } - } - else if ( ((infoA.targetAddress % ldrInfoC.size) == 0) && (ldrInfoC.offset == 0) ) { -@@ -2432,19 +2432,19 @@ - ldrInfoC.offset += addInfoB.addend; - set32LE(infoC.instructionContent, makeLoadOrStore(ldrInfoC)); - if ( _options.verboseOptimizationHints() ) { -- fprintf(stderr, "adrp-ldr-got-str at 0x%08llX T4 transformed to ADRP/NOP/STR\n", infoC.instructionAddress); -+ fprintf(stderr, "adrp-ldr-got-str at 0x%08lX T4 transformed to ADRP/NOP/STR\n", infoC.instructionAddress); - } - } - else { - // T3 transform already done by ld::passes:got:doPass() - if ( _options.verboseOptimizationHints() ) { -- fprintf(stderr, "adrp-ldr-got-str at 0x%08llX T3 transformed to ADRP/ADD/STR\n", infoC.instructionAddress); -+ fprintf(stderr, "adrp-ldr-got-str at 0x%08lX T3 transformed to ADRP/ADD/STR\n", infoC.instructionAddress); - } - } - } - else { - if ( _options.verboseOptimizationHints() ) -- fprintf(stderr, "adrp-ldr-got-str at 0x%08llX not ADD or LDR\n", infoC.instructionAddress); -+ fprintf(stderr, "adrp-ldr-got-str at 0x%08lX not ADD or LDR\n", infoC.instructionAddress); - } - break; - case LOH_ARM64_ADRP_LDR_GOT: -@@ -2463,7 +2463,7 @@ - set32LE(infoA.instructionContent, makeNOP()); - set32LE(infoB.instructionContent, makeLDR_literal(ldrInfoB, infoA.targetAddress, infoB.instructionAddress)); - if ( _options.verboseOptimizationHints() ) { -- fprintf(stderr, "adrp-ldr-got at 
0x%08llX T5 transformed to NOP/LDR\n", infoC.instructionAddress); -+ fprintf(stderr, "adrp-ldr-got at 0x%08lX T5 transformed to NOP/LDR\n", infoC.instructionAddress); - } - } - } -@@ -2473,23 +2473,23 @@ - set32LE(infoA.instructionContent, makeADR(addInfoB.destReg, infoA.targetAddress, infoA.instructionAddress)); - set32LE(infoB.instructionContent, makeNOP()); - if ( _options.verboseOptimizationHints() ) { -- fprintf(stderr, "adrp-ldr-got at 0x%08llX T4 transformed to ADR/STR\n", infoC.instructionAddress); -+ fprintf(stderr, "adrp-ldr-got at 0x%08lX T4 transformed to ADR/STR\n", infoC.instructionAddress); - } - } - } - else { - if ( _options.verboseOptimizationHints() ) -- fprintf(stderr, "adrp-ldr-got at 0x%08llX not LDR or ADD\n", infoB.instructionAddress); -+ fprintf(stderr, "adrp-ldr-got at 0x%08lX not LDR or ADD\n", infoB.instructionAddress); - } - } - else { - if ( _options.verboseOptimizationHints() ) -- fprintf(stderr, "adrp-ldr-got at 0x%08llX not ADRP\n", infoA.instructionAddress); -+ fprintf(stderr, "adrp-ldr-got at 0x%08lX not ADRP\n", infoA.instructionAddress); - } - break; - default: - if ( _options.verboseOptimizationHints() ) -- fprintf(stderr, "unknown hint kind %d alt.info.kind at 0x%08llX\n", alt.info.kind, infoA.instructionAddress); -+ fprintf(stderr, "unknown hint kind %d alt.info.kind at 0x%08lX\n", alt.info.kind, infoA.instructionAddress); - break; - } - } -@@ -2511,13 +2511,13 @@ - LOH_ASSERT(isPageKind(infoB.fixup)); - if ( (infoA.instruction & 0x9F000000) != 0x90000000 ) { - if ( _options.verboseOptimizationHints() ) -- fprintf(stderr, "may-reused-adrp at 0x%08llX no longer an ADRP, now 0x%08X\n", infoA.instructionAddress, infoA.instruction); -+ fprintf(stderr, "may-reused-adrp at 0x%08lX no longer an ADRP, now 0x%08X\n", infoA.instructionAddress, infoA.instruction); - sAdrpNA++; - break; - } - if ( (infoB.instruction & 0x9F000000) != 0x90000000 ) { - if ( _options.verboseOptimizationHints() ) -- fprintf(stderr, "may-reused-adrp at 0x%08llX no longer an ADRP, now 0x%08X\n", infoB.instructionAddress, infoA.instruction); -+ fprintf(stderr, "may-reused-adrp at 0x%08lX no longer an ADRP, now 0x%08X\n", infoB.instructionAddress, infoA.instruction); - sAdrpNA++; - break; - } -@@ -2607,7 +2607,7 @@ - if ( takesNoDiskSpace(sect) ) - continue; - const bool sectionUsesNops = (sect->type() == ld::Section::typeCode); -- //fprintf(stderr, "file offset=0x%08llX, section %s\n", sect->fileOffset, sect->sectionName()); -+ //fprintf(stderr, "file offset=0x%08lX, section %s\n", sect->fileOffset, sect->sectionName()); - std::vector& atoms = sect->atoms; - bool lastAtomWasThumb = false; - for (std::vector::iterator ait = atoms.begin(); ait != atoms.end(); ++ait) { -@@ -2662,11 +2662,11 @@ - // the bitcode section should also be excluded in the UUID computation. 
- // Bitcode section should appears before LINKEDIT - // Exclude section cmd -- if ( log ) fprintf(stderr, "bundle cmd start=0x%08llX, bundle cmd end=0x%08llX\n", -+ if ( log ) fprintf(stderr, "bundle cmd start=0x%08lX, bundle cmd end=0x%08lX\n", - bitcodeCmdOffset, bitcodeCmdEnd); - excludeRegions.emplace_back(std::pair(bitcodeCmdOffset, bitcodeCmdEnd)); - // Exclude section content -- if ( log ) fprintf(stderr, "bundle start=0x%08llX, bundle end=0x%08llX\n", -+ if ( log ) fprintf(stderr, "bundle start=0x%08lX, bundle end=0x%08lX\n", - bitcodeSectOffset, bitcodePaddingEnd); - excludeRegions.emplace_back(std::pair(bitcodeSectOffset, bitcodePaddingEnd)); - } -@@ -2691,8 +2691,8 @@ - uint64_t lastStabNlistFileOffset = symbolTableFileOffset + stabsOffsetEnd; - uint64_t firstStabStringFileOffset = stringPoolFileOffset + stabsStringsOffsetStart; - uint64_t lastStabStringFileOffset = stringPoolFileOffset + tabsStringsOffsetEnd; -- if ( log ) fprintf(stderr, "stabNlist offset=0x%08llX, size=0x%08llX\n", firstStabNlistFileOffset, lastStabNlistFileOffset-firstStabNlistFileOffset); -- if ( log ) fprintf(stderr, "stabString offset=0x%08llX, size=0x%08llX\n", firstStabStringFileOffset, lastStabStringFileOffset-firstStabStringFileOffset); -+ if ( log ) fprintf(stderr, "stabNlist offset=0x%08lX, size=0x%08lX\n", firstStabNlistFileOffset, lastStabNlistFileOffset-firstStabNlistFileOffset); -+ if ( log ) fprintf(stderr, "stabString offset=0x%08lX, size=0x%08lX\n", firstStabStringFileOffset, lastStabStringFileOffset-firstStabStringFileOffset); - assert(firstStabNlistFileOffset <= firstStabStringFileOffset); - excludeRegions.emplace_back(std::pair(firstStabNlistFileOffset, lastStabNlistFileOffset)); - excludeRegions.emplace_back(std::pair(firstStabStringFileOffset, lastStabStringFileOffset)); -@@ -2701,12 +2701,12 @@ - uint64_t linkeditSegCmdSize; - _headersAndLoadCommandAtom->linkeditCmdInfo(linkeditSegCmdOffset, linkeditSegCmdSize); - excludeRegions.emplace_back(std::pair(linkeditSegCmdOffset, linkeditSegCmdOffset+linkeditSegCmdSize)); -- if ( log ) fprintf(stderr, "linkedit SegCmdOffset=0x%08llX, size=0x%08llX\n", linkeditSegCmdOffset, linkeditSegCmdSize); -+ if ( log ) fprintf(stderr, "linkedit SegCmdOffset=0x%08lX, size=0x%08lX\n", linkeditSegCmdOffset, linkeditSegCmdSize); - uint64_t symbolTableCmdOffset; - uint64_t symbolTableCmdSize; - _headersAndLoadCommandAtom->symbolTableCmdInfo(symbolTableCmdOffset, symbolTableCmdSize); - excludeRegions.emplace_back(std::pair(symbolTableCmdOffset, symbolTableCmdOffset+symbolTableCmdSize)); -- if ( log ) fprintf(stderr, "linkedit SegCmdOffset=0x%08llX, size=0x%08llX\n", symbolTableCmdOffset, symbolTableCmdSize); -+ if ( log ) fprintf(stderr, "linkedit SegCmdOffset=0x%08lX, size=0x%08lX\n", symbolTableCmdOffset, symbolTableCmdSize); - } - if ( !excludeRegions.empty() ) { - CC_MD5_CTX md5state; -@@ -2722,11 +2722,11 @@ - uint64_t regionStart = region.first; - uint64_t regionEnd = region.second; - assert(checksumStart <= regionStart && regionStart <= regionEnd && "Region overlapped"); -- if ( log ) fprintf(stderr, "checksum 0x%08llX -> 0x%08llX\n", checksumStart, regionStart); -+ if ( log ) fprintf(stderr, "checksum 0x%08lX -> 0x%08lX\n", checksumStart, regionStart); - CC_MD5_Update(&md5state, &wholeBuffer[checksumStart], regionStart - checksumStart); - checksumStart = regionEnd; - } -- if ( log ) fprintf(stderr, "checksum 0x%08llX -> 0x%08llX\n", checksumStart, _fileSize); -+ if ( log ) fprintf(stderr, "checksum 0x%08lX -> 0x%08lX\n", checksumStart, _fileSize); - 
CC_MD5_Update(&md5state, &wholeBuffer[checksumStart], _fileSize-checksumStart); - CC_MD5_Final(digest, &md5state); - if ( log ) fprintf(stderr, "uuid=%02X, %02X, %02X, %02X, %02X, %02X, %02X, %02X\n", digest[0], digest[1], digest[2], -@@ -2857,7 +2857,7 @@ - - wholeBuffer = (uint8_t *)mmap(NULL, _fileSize, PROT_WRITE|PROT_READ, MAP_SHARED, fd, 0); - if ( wholeBuffer == MAP_FAILED ) -- throwf("can't create buffer of %llu bytes for output", _fileSize); -+ throwf("can't create buffer of %lu bytes for output", _fileSize); - } - else { - if ( outputIsRegularFile ) -@@ -2869,7 +2869,7 @@ - // try to allocate buffer for entire output file content - wholeBuffer = (uint8_t*)calloc(_fileSize, 1); - if ( wholeBuffer == NULL ) -- throwf("can't create buffer of %llu bytes for output", _fileSize); -+ throwf("can't create buffer of %lu bytes for output", _fileSize); - } - - if ( _options.UUIDMode() == Options::kUUIDRandom ) { -@@ -4284,7 +4284,7 @@ - uint64_t sctEnd = (sct->address+sct->size); - if ( (sct->address <= targetAddress) && (targetAddress < sctEnd) ) { - if ( (targetAddress+checkAddend) > sctEnd ) { -- warning("data symbol %s from %s has pointer to %s + 0x%08llX. " -+ warning("data symbol %s from %s has pointer to %s + 0x%08lX. " - "That large of an addend may disable %s from being put in the dyld shared cache.", - atom->name(), atom->file()->path(), target->name(), addend, _options.installPath() ); - } -@@ -4728,7 +4728,7 @@ - if ( sect->isSectionHidden() ) - continue; - bool codeSection = (sect->type() == ld::Section::typeCode); -- if (log) fprintf(stderr, "sect: %s, address=0x%llX\n", sect->sectionName(), sect->address); -+ if (log) fprintf(stderr, "sect: %s, address=0x%lX\n", sect->sectionName(), sect->address); - for (std::vector::iterator ait = sect->atoms.begin(); ait != sect->atoms.end(); ++ait) { - const ld::Atom* atom = *ait; - const ld::Atom* target = NULL; -@@ -4887,7 +4887,7 @@ - if ( !hadSubtract && addend ) - toOffset += addend; - assert(toSectionIndex != 255); -- if (log) fprintf(stderr, "from (%d.%s + 0x%llX) to (%d.%s + 0x%llX), kind=%d, atomAddr=0x%llX, sectAddr=0x%llx\n", -+ if (log) fprintf(stderr, "from (%d.%s + 0x%lX) to (%d.%s + 0x%lX), kind=%d, atomAddr=0x%lX, sectAddr=0x%lx\n", - fromSectionIndex, sect->sectionName(), fromOffset, toSectionIndex, state.atomToSection[target]->sectionName(), - toOffset, kind, atom->finalAddress(), sect->address); - _splitSegV2Infos.push_back(SplitSegInfoV2Entry(fromSectionIndex, fromOffset, toSectionIndex, toOffset, kind)); -@@ -4961,7 +4961,7 @@ - ld::Internal::FinalSection* sect = *sit; - if ( sect->isSectionHidden() ) - continue; -- fprintf(mapFile, "0x%08llX\t0x%08llX\t%s\t%s\n", sect->address, sect->size, -+ fprintf(mapFile, "0x%08lX\t0x%08lX\t%s\t%s\n", sect->address, sect->size, - sect->segmentName(), sect->sectionName()); - } - // write table of symbols -@@ -5026,7 +5026,7 @@ - } - name = buffer; - } -- fprintf(mapFile, "0x%08llX\t0x%08llX\t[%3u] %s\n", atom->finalAddress(), atom->size(), -+ fprintf(mapFile, "0x%08lX\t0x%08lX\t[%3u] %s\n", atom->finalAddress(), atom->size(), - readerToFileOrdinal[atom->originalFile()], name); - } - } -@@ -5060,7 +5060,7 @@ - buffer[4095] = '\0'; - name = buffer; - } -- fprintf(mapFile, "<> \t0x%08llX\t[%3u] %s\n", atom->size(), -+ fprintf(mapFile, "<> \t0x%08lX\t[%3u] %s\n", atom->size(), - readerToFileOrdinal[atom->originalFile()], name); - } - } -diff -ur cctools-port-c1cc758/cctools/ld64/src/ld/parsers/archive_file.cpp cctools-port-format/cctools/ld64/src/ld/parsers/archive_file.cpp ---- 
cctools-port-c1cc758/cctools/ld64/src/ld/parsers/archive_file.cpp 2017-10-01 13:47:04.000000000 -0700 -+++ cctools-port-format/cctools/ld64/src/ld/parsers/archive_file.cpp 2017-11-10 19:13:05.918259550 -0800 -@@ -601,7 +601,7 @@ - const char* entryName = &_tableOfContentStrings[E::get32(entry->ran_un.ran_strx)]; - uint64_t offset = E::get32(entry->ran_off); - if ( offset > _archiveFilelength ) { -- throwf("malformed archive TOC entry for %s, offset %d is beyond end of file %lld\n", -+ throwf("malformed archive TOC entry for %s, offset %d is beyond end of file %ld\n", - entryName, entry->ran_off, _archiveFilelength); - } - -@@ -622,11 +622,11 @@ - const char* entryName = &_tableOfContentStrings[E::get64(entry->ran_un.ran_strx)]; - uint64_t offset = E::get64(entry->ran_off); - if ( offset > _archiveFilelength ) { -- throwf("malformed archive TOC entry for %s, offset %lld is beyond end of file %lld\n", -+ throwf("malformed archive TOC entry for %s, offset %ld is beyond end of file %ld\n", - entryName, entry->ran_off, _archiveFilelength); - } - -- //fprintf(stderr, "adding hash %d: %s -> 0x%0llX\n", i, entryName, offset); -+ //fprintf(stderr, "adding hash %d: %s -> 0x%0lX\n", i, entryName, offset); - _hashTable[entryName] = offset; - } - } -diff -ur cctools-port-c1cc758/cctools/ld64/src/ld/parsers/libunwind/DwarfInstructions.hpp cctools-port-format/cctools/ld64/src/ld/parsers/libunwind/DwarfInstructions.hpp ---- cctools-port-c1cc758/cctools/ld64/src/ld/parsers/libunwind/DwarfInstructions.hpp 2017-10-01 13:47:04.000000000 -0700 -+++ cctools-port-format/cctools/ld64/src/ld/parsers/libunwind/DwarfInstructions.hpp 2017-11-10 19:13:05.918259550 -0800 -@@ -245,7 +245,7 @@ - pint_t offsetOfFunctionAddress = p-currentCFI; - pint_t pcStart = addressSpace.getEncodedP(p, nextCFI, cieInfo.pointerEncoding); - pint_t pcRange = addressSpace.getEncodedP(p, nextCFI, cieInfo.pointerEncoding & 0x0F); -- //fprintf(stderr, "FDE with pcRange [0x%08llX, 0x%08llX)\n",(uint64_t)pcStart, (uint64_t)(pcStart+pcRange)); -+ //fprintf(stderr, "FDE with pcRange [0x%08lX, 0x%08lX)\n",(uint64_t)pcStart, (uint64_t)(pcStart+pcRange)); - entry->u.fdeInfo.function.targetAddress = pcStart; - entry->u.fdeInfo.function.offsetInCFI = offsetOfFunctionAddress; - entry->u.fdeInfo.function.encodingOfTargetAddress = cieInfo.pointerEncoding; -@@ -429,7 +429,7 @@ - template - int DwarfInstructions::stepWithDwarf(A& addressSpace, pint_t pc, pint_t fdeStart, R& registers) - { -- //fprintf(stderr, "stepWithDwarf(pc=0x%0llX, fdeStart=0x%0llX)\n", (uint64_t)pc, (uint64_t)fdeStart); -+ //fprintf(stderr, "stepWithDwarf(pc=0x%0lX, fdeStart=0x%0lX)\n", (uint64_t)pc, (uint64_t)fdeStart); - typename CFI_Parser::FDE_Info fdeInfo; - typename CFI_Parser::CIE_Info cieInfo; - if ( CFI_Parser::decodeFDE(addressSpace, fdeStart, &fdeInfo, &cieInfo) == NULL ) { -@@ -483,7 +483,7 @@ - pint_t expressionEnd = expression+20; // just need something until length is read - uint64_t length = addressSpace.getULEB128(p, expressionEnd); - expressionEnd = p + length; -- if (log) fprintf(stderr, "evaluateExpression(): length=%llu\n", length); -+ if (log) fprintf(stderr, "evaluateExpression(): length=%lu\n", length); - pint_t stack[100]; - pint_t* sp = stack; - *(++sp) = initialStackValue; -@@ -491,7 +491,7 @@ - while ( p < expressionEnd ) { - if (log) { - for(pint_t* t = sp; t > stack; --t) { -- fprintf(stderr, "sp[] = 0x%llX\n", (uint64_t)(*t)); -+ fprintf(stderr, "sp[] = 0x%lX\n", (uint64_t)(*t)); - } - } - uint8_t opcode = addressSpace.get8(p++); -@@ -504,14 +504,14 @@ - 
value = addressSpace.getP(p); - p += sizeof(pint_t); - *(++sp) = value; -- if (log) fprintf(stderr, "push 0x%llX\n", (uint64_t)value); -+ if (log) fprintf(stderr, "push 0x%lX\n", (uint64_t)value); - break; - - case DW_OP_deref: - // pop stack, dereference, push result - value = *sp--; - *(++sp) = addressSpace.getP(value); -- if (log) fprintf(stderr, "dereference 0x%llX\n", (uint64_t)value); -+ if (log) fprintf(stderr, "dereference 0x%lX\n", (uint64_t)value); - break; - - case DW_OP_const1u: -@@ -519,7 +519,7 @@ - value = addressSpace.get8(p); - p += 1; - *(++sp) = value; -- if (log) fprintf(stderr, "push 0x%llX\n", (uint64_t)value); -+ if (log) fprintf(stderr, "push 0x%lX\n", (uint64_t)value); - break; - - case DW_OP_const1s: -@@ -527,7 +527,7 @@ - svalue = (int8_t)addressSpace.get8(p); - p += 1; - *(++sp) = svalue; -- if (log) fprintf(stderr, "push 0x%llX\n", (uint64_t)svalue); -+ if (log) fprintf(stderr, "push 0x%lX\n", (uint64_t)svalue); - break; - - case DW_OP_const2u: -@@ -535,7 +535,7 @@ - value = addressSpace.get16(p); - p += 2; - *(++sp) = value; -- if (log) fprintf(stderr, "push 0x%llX\n", (uint64_t)value); -+ if (log) fprintf(stderr, "push 0x%lX\n", (uint64_t)value); - break; - - case DW_OP_const2s: -@@ -543,7 +543,7 @@ - svalue = (int16_t)addressSpace.get16(p); - p += 2; - *(++sp) = svalue; -- if (log) fprintf(stderr, "push 0x%llX\n", (uint64_t)svalue); -+ if (log) fprintf(stderr, "push 0x%lX\n", (uint64_t)svalue); - break; - - case DW_OP_const4u: -@@ -551,7 +551,7 @@ - value = addressSpace.get32(p); - p += 4; - *(++sp) = value; -- if (log) fprintf(stderr, "push 0x%llX\n", (uint64_t)value); -+ if (log) fprintf(stderr, "push 0x%lX\n", (uint64_t)value); - break; - - case DW_OP_const4s: -@@ -559,7 +559,7 @@ - svalue = (int32_t)addressSpace.get32(p); - p += 4; - *(++sp) = svalue; -- if (log) fprintf(stderr, "push 0x%llX\n", (uint64_t)svalue); -+ if (log) fprintf(stderr, "push 0x%lX\n", (uint64_t)svalue); - break; - - case DW_OP_const8u: -@@ -567,7 +567,7 @@ - value = addressSpace.get64(p); - p += 8; - *(++sp) = value; -- if (log) fprintf(stderr, "push 0x%llX\n", (uint64_t)value); -+ if (log) fprintf(stderr, "push 0x%lX\n", (uint64_t)value); - break; - - case DW_OP_const8s: -@@ -575,21 +575,21 @@ - value = (int32_t)addressSpace.get64(p); - p += 8; - *(++sp) = value; -- if (log) fprintf(stderr, "push 0x%llX\n", (uint64_t)value); -+ if (log) fprintf(stderr, "push 0x%lX\n", (uint64_t)value); - break; - - case DW_OP_constu: - // push immediate ULEB128 value - value = addressSpace.getULEB128(p, expressionEnd); - *(++sp) = value; -- if (log) fprintf(stderr, "push 0x%llX\n", (uint64_t)value); -+ if (log) fprintf(stderr, "push 0x%lX\n", (uint64_t)value); - break; - - case DW_OP_consts: - // push immediate SLEB128 value - svalue = addressSpace.getSLEB128(p, expressionEnd); - *(++sp) = svalue; -- if (log) fprintf(stderr, "push 0x%llX\n", (uint64_t)svalue); -+ if (log) fprintf(stderr, "push 0x%lX\n", (uint64_t)svalue); - break; - - case DW_OP_dup: -@@ -642,7 +642,7 @@ - // pop stack, dereference, push result - value = *sp--; - *sp = *((uint64_t*)value); -- if (log) fprintf(stderr, "x-dereference 0x%llX\n", (uint64_t)value); -+ if (log) fprintf(stderr, "x-dereference 0x%lX\n", (uint64_t)value); - break; - - case DW_OP_abs: -@@ -740,7 +740,7 @@ - svalue = (int16_t)addressSpace.get16(p); - p += 2; - p += svalue; -- if (log) fprintf(stderr, "skip %lld\n", (uint64_t)svalue); -+ if (log) fprintf(stderr, "skip %ld\n", (uint64_t)svalue); - break; - - case DW_OP_bra: -@@ -748,7 +748,7 @@ - p += 2; - if 
( *sp-- ) - p += svalue; -- if (log) fprintf(stderr, "bra %lld\n", (uint64_t)svalue); -+ if (log) fprintf(stderr, "bra %ld\n", (uint64_t)svalue); - break; - - case DW_OP_eq: -@@ -821,7 +821,7 @@ - case DW_OP_lit31: - value = opcode - DW_OP_lit0; - *(++sp) = value; -- if (log) fprintf(stderr, "push literal 0x%llX\n", (uint64_t)value); -+ if (log) fprintf(stderr, "push literal 0x%lX\n", (uint64_t)value); - break; - - case DW_OP_reg0: -@@ -864,7 +864,7 @@ - case DW_OP_regx: - reg = addressSpace.getULEB128(p, expressionEnd); - *(++sp) = registers.getRegister(reg); -- if (log) fprintf(stderr, "push reg %d + 0x%llX\n", reg, (uint64_t)svalue); -+ if (log) fprintf(stderr, "push reg %d + 0x%lX\n", reg, (uint64_t)svalue); - break; - - case DW_OP_breg0: -@@ -902,14 +902,14 @@ - reg = opcode - DW_OP_breg0; - svalue = addressSpace.getSLEB128(p, expressionEnd); - *(++sp) = registers.getRegister(reg) + svalue; -- if (log) fprintf(stderr, "push reg %d + 0x%llX\n", reg, (uint64_t)svalue); -+ if (log) fprintf(stderr, "push reg %d + 0x%lX\n", reg, (uint64_t)svalue); - break; - - case DW_OP_bregx: - reg = addressSpace.getULEB128(p, expressionEnd); - svalue = addressSpace.getSLEB128(p, expressionEnd); - *(++sp) = registers.getRegister(reg) + svalue; -- if (log) fprintf(stderr, "push reg %d + 0x%llX\n", reg, (uint64_t)svalue); -+ if (log) fprintf(stderr, "push reg %d + 0x%lX\n", reg, (uint64_t)svalue); - break; - - case DW_OP_fbreg: -@@ -940,7 +940,7 @@ - ABORT("DW_OP_deref_size with bad size"); - } - *(++sp) = value; -- if (log) fprintf(stderr, "sized dereference 0x%llX\n", (uint64_t)value); -+ if (log) fprintf(stderr, "sized dereference 0x%lX\n", (uint64_t)value); - break; - - case DW_OP_xderef_size: -@@ -954,7 +954,7 @@ - } - - } -- if (log) fprintf(stderr, "expression evaluates to 0x%llX\n", (uint64_t)*sp); -+ if (log) fprintf(stderr, "expression evaluates to 0x%lX\n", (uint64_t)*sp); - return *sp; - } - -diff -ur cctools-port-c1cc758/cctools/ld64/src/ld/parsers/libunwind/DwarfParser.hpp cctools-port-format/cctools/ld64/src/ld/parsers/libunwind/DwarfParser.hpp ---- cctools-port-c1cc758/cctools/ld64/src/ld/parsers/libunwind/DwarfParser.hpp 2017-10-01 13:47:04.000000000 -0700 -+++ cctools-port-format/cctools/ld64/src/ld/parsers/libunwind/DwarfParser.hpp 2017-11-10 19:13:05.918259550 -0800 -@@ -212,12 +212,12 @@ - template - bool CFI_Parser::findFDE(A& addressSpace, pint_t pc, pint_t ehSectionStart, uint32_t sectionLength, pint_t fdeHint, FDE_Info* fdeInfo, CIE_Info* cieInfo) - { -- //fprintf(stderr, "findFDE(0x%llX)\n", (long long)pc); -+ //fprintf(stderr, "findFDE(0x%lX)\n", (long long)pc); - pint_t p = (fdeHint != 0) ? 
fdeHint : ehSectionStart; - const pint_t ehSectionEnd = p + sectionLength; - while ( p < ehSectionEnd ) { - pint_t currentCFI = p; -- //fprintf(stderr, "findFDE() CFI at 0x%llX\n", (long long)p); -+ //fprintf(stderr, "findFDE() CFI at 0x%lX\n", (long long)p); - uint64_t cfiLength = addressSpace.get32(p); - p += 4; - if ( cfiLength == 0xffffffff ) { -@@ -244,7 +244,7 @@ - // parse pc begin and range - pint_t pcStart = addressSpace.getEncodedP(p, nextCFI, cieInfo->pointerEncoding); - pint_t pcRange = addressSpace.getEncodedP(p, nextCFI, cieInfo->pointerEncoding & 0x0F); -- //fprintf(stderr, "FDE with pcRange [0x%08llX, 0x%08llX)\n",(uint64_t)pcStart, (uint64_t)(pcStart+pcRange)); -+ //fprintf(stderr, "FDE with pcRange [0x%08lX, 0x%08lX)\n",(uint64_t)pcStart, (uint64_t)(pcStart+pcRange)); - // test if pc is within the function this FDE covers - if ( (pcStart < pc) && (pc <= pcStart+pcRange) ) { - // parse rest of info -@@ -269,11 +269,11 @@ - fdeInfo->fdeInstructions = p; - fdeInfo->pcStart = pcStart; - fdeInfo->pcEnd = pcStart+pcRange; -- //fprintf(stderr, "findFDE(pc=0x%llX) found with pcRange [0x%08llX, 0x%08llX)\n",(uint64_t)pc, (uint64_t)pcStart, (uint64_t)(pcStart+pcRange)); -+ //fprintf(stderr, "findFDE(pc=0x%lX) found with pcRange [0x%08lX, 0x%08lX)\n",(uint64_t)pc, (uint64_t)pcStart, (uint64_t)(pcStart+pcRange)); - return true; - } - else { -- //fprintf(stderr, "findFDE(pc=0x%llX) not found with pcRange [0x%08llX, 0x%08llX)\n",(uint64_t)pc, (uint64_t)pcStart, (uint64_t)(pcStart+pcRange)); -+ //fprintf(stderr, "findFDE(pc=0x%lX) not found with pcRange [0x%08lX, 0x%08lX)\n",(uint64_t)pc, (uint64_t)pcStart, (uint64_t)(pcStart+pcRange)); - // pc is not in begin/range, skip this FDE - } - } -@@ -284,13 +284,13 @@ - } - else { - // malformed FDE. CIE is bad -- //fprintf(stderr, "malformed FDE, cieStart=0x%llX, ehSectionStart=0x%llX, ehSectionEnd=0x%llX\n", -+ //fprintf(stderr, "malformed FDE, cieStart=0x%lX, ehSectionStart=0x%lX, ehSectionEnd=0x%lX\n", - // (uint64_t)cieStart, (uint64_t)ehSectionStart, (uint64_t)ehSectionEnd); - } - p = nextCFI; - } - } -- //fprintf(stderr, "findFDE(pc=0x%llX) not found\n",(uint64_t)pc); -+ //fprintf(stderr, "findFDE(pc=0x%lX) not found\n",(uint64_t)pc); - return false; - } - -@@ -302,7 +302,7 @@ - template - const char* CFI_Parser::parseCIE(A& addressSpace, pint_t cie, CIE_Info* cieInfo) - { -- //fprintf(stderr, "parseCIE(0x%llX)\n", (long long)cie); -+ //fprintf(stderr, "parseCIE(0x%lX)\n", (long long)cie); - cieInfo->pointerEncoding = 0; - cieInfo->lsdaEncoding = 0; - cieInfo->personalityEncoding = 0; -@@ -463,7 +463,7 @@ - pint_t offsetOfFunctionAddress = p-currentCFI; - pint_t pcStart = addressSpace.getEncodedP(p, nextCFI, cieInfo.pointerEncoding); - pint_t pcRange = addressSpace.getEncodedP(p, nextCFI, cieInfo.pointerEncoding & 0x0F); -- //fprintf(stderr, "FDE with pcRange [0x%08llX, 0x%08llX)\n",(uint64_t)pcStart, (uint64_t)(pcStart+pcRange)); -+ //fprintf(stderr, "FDE with pcRange [0x%08lX, 0x%08lX)\n",(uint64_t)pcStart, (uint64_t)(pcStart+pcRange)); - // test if pc is within the function this FDE covers - entry.function.address = pcStart; - entry.function.offsetInFDE = offsetOfFunctionAddress; -@@ -518,7 +518,7 @@ - pint_t p = instructions; - uint32_t codeOffset = 0; - PrologInfo initialState = *results; -- if ( logDwarf ) fprintf(stderr, "parseInstructions(instructions=0x%0llX)\n", (uint64_t)instructionsEnd); -+ if ( logDwarf ) fprintf(stderr, "parseInstructions(instructions=0x%0lX)\n", (uint64_t)instructionsEnd); - - // see Dwarf Spec, section 
6.4.2 for details on unwind opcodes - while ( (p < instructionsEnd) && (codeOffset < pcoffset) ) { -@@ -564,7 +564,7 @@ - results->registerSavedMoreThanOnce = true; - results->savedRegisters[reg].location = kRegisterInCFA; - results->savedRegisters[reg].value = offset; -- if ( logDwarf ) fprintf(stderr, "DW_CFA_offset_extended(reg=%lld, offset=%lld)\n", reg, offset); -+ if ( logDwarf ) fprintf(stderr, "DW_CFA_offset_extended(reg=%ld, offset=%ld)\n", reg, offset); - break; - case DW_CFA_restore_extended: - reg = addressSpace.getULEB128(p, instructionsEnd);; -@@ -573,7 +573,7 @@ - return false; - } - results->savedRegisters[reg] = initialState.savedRegisters[reg]; -- if ( logDwarf ) fprintf(stderr, "DW_CFA_restore_extended(reg=%lld)\n", reg); -+ if ( logDwarf ) fprintf(stderr, "DW_CFA_restore_extended(reg=%ld)\n", reg); - break; - case DW_CFA_undefined: - reg = addressSpace.getULEB128(p, instructionsEnd); -@@ -582,7 +582,7 @@ - return false; - } - results->savedRegisters[reg].location = kRegisterUnused; -- if ( logDwarf ) fprintf(stderr, "DW_CFA_undefined(reg=%lld)\n", reg); -+ if ( logDwarf ) fprintf(stderr, "DW_CFA_undefined(reg=%ld)\n", reg); - break; - case DW_CFA_same_value: - reg = addressSpace.getULEB128(p, instructionsEnd); -@@ -597,7 +597,7 @@ - results->savedRegisters[reg].location = kRegisterUnused; - // set flag to disable conversion to compact unwind - results->sameValueUsed = true; -- if ( logDwarf ) fprintf(stderr, "DW_CFA_same_value(reg=%lld)\n", reg); -+ if ( logDwarf ) fprintf(stderr, "DW_CFA_same_value(reg=%ld)\n", reg); - break; - case DW_CFA_register: - reg = addressSpace.getULEB128(p, instructionsEnd); -@@ -614,7 +614,7 @@ - results->savedRegisters[reg].value = reg2; - // set flag to disable conversion to compact unwind - results->registersInOtherRegisters = true; -- if ( logDwarf ) fprintf(stderr, "DW_CFA_register(reg=%lld, reg2=%lld)\n", reg, reg2); -+ if ( logDwarf ) fprintf(stderr, "DW_CFA_register(reg=%ld, reg2=%ld)\n", reg, reg2); - break; - case DW_CFA_remember_state: - // avoid operator new, because that would be an upward dependency -@@ -652,7 +652,7 @@ - results->cfaRegisterOffset = offset; - if ( offset > 0x80000000 ) - results->cfaOffsetWasNegative = true; -- if ( logDwarf ) fprintf(stderr, "DW_CFA_def_cfa(reg=%lld, offset=%lld)\n", reg, offset); -+ if ( logDwarf ) fprintf(stderr, "DW_CFA_def_cfa(reg=%ld, offset=%ld)\n", reg, offset); - break; - case DW_CFA_def_cfa_register: - reg = addressSpace.getULEB128(p, instructionsEnd); -@@ -661,7 +661,7 @@ - return false; - } - results->cfaRegister = reg; -- if ( logDwarf ) fprintf(stderr, "DW_CFA_def_cfa_register(%lld)\n", reg); -+ if ( logDwarf ) fprintf(stderr, "DW_CFA_def_cfa_register(%ld)\n", reg); - break; - case DW_CFA_def_cfa_offset: - results->cfaRegisterOffset = addressSpace.getULEB128(p, instructionsEnd); -@@ -673,7 +673,7 @@ - results->cfaExpression = p; - length = addressSpace.getULEB128(p, instructionsEnd); - p += length; -- if ( logDwarf ) fprintf(stderr, "DW_CFA_def_cfa_expression(expression=0x%llX, length=%llu)\n", -+ if ( logDwarf ) fprintf(stderr, "DW_CFA_def_cfa_expression(expression=0x%lX, length=%lu)\n", - results->cfaExpression, length); - break; - case DW_CFA_expression: -@@ -686,7 +686,7 @@ - results->savedRegisters[reg].value = p; - length = addressSpace.getULEB128(p, instructionsEnd); - p += length; -- if ( logDwarf ) fprintf(stderr, "DW_CFA_expression(reg=%lld, expression=0x%llX, length=%llu)\n", -+ if ( logDwarf ) fprintf(stderr, "DW_CFA_expression(reg=%ld, expression=0x%lX, 
length=%lu)\n", - reg, results->savedRegisters[reg].value, length); - break; - case DW_CFA_offset_extended_sf: -@@ -700,7 +700,7 @@ - results->registerSavedMoreThanOnce = true; - results->savedRegisters[reg].location = kRegisterInCFA; - results->savedRegisters[reg].value = offset; -- if ( logDwarf ) fprintf(stderr, "DW_CFA_offset_extended_sf(reg=%lld, offset=%lld)\n", reg, offset); -+ if ( logDwarf ) fprintf(stderr, "DW_CFA_offset_extended_sf(reg=%ld, offset=%ld)\n", reg, offset); - break; - case DW_CFA_def_cfa_sf: - reg = addressSpace.getULEB128(p, instructionsEnd); -@@ -711,7 +711,7 @@ - } - results->cfaRegister = reg; - results->cfaRegisterOffset = offset; -- if ( logDwarf ) fprintf(stderr, "DW_CFA_def_cfa_sf(reg=%lld, offset=%lld)\n", reg, offset); -+ if ( logDwarf ) fprintf(stderr, "DW_CFA_def_cfa_sf(reg=%ld, offset=%ld)\n", reg, offset); - break; - case DW_CFA_def_cfa_offset_sf: - results->cfaRegisterOffset = addressSpace.getSLEB128(p, instructionsEnd) * cieInfo.dataAlignFactor; -@@ -723,7 +723,7 @@ - offset = addressSpace.getULEB128(p, instructionsEnd) * cieInfo.dataAlignFactor; - results->savedRegisters[reg].location = kRegisterOffsetFromCFA; - results->savedRegisters[reg].value = offset; -- if ( logDwarf ) fprintf(stderr, "DW_CFA_val_offset(reg=%lld, offset=%lld\n", reg, offset); -+ if ( logDwarf ) fprintf(stderr, "DW_CFA_val_offset(reg=%ld, offset=%ld\n", reg, offset); - break; - case DW_CFA_val_offset_sf: - reg = addressSpace.getULEB128(p, instructionsEnd); -@@ -734,7 +734,7 @@ - offset = addressSpace.getSLEB128(p, instructionsEnd) * cieInfo.dataAlignFactor; - results->savedRegisters[reg].location = kRegisterOffsetFromCFA; - results->savedRegisters[reg].value = offset; -- if ( logDwarf ) fprintf(stderr, "DW_CFA_val_offset_sf(reg=%lld, offset=%lld\n", reg, offset); -+ if ( logDwarf ) fprintf(stderr, "DW_CFA_val_offset_sf(reg=%ld, offset=%ld\n", reg, offset); - break; - case DW_CFA_val_expression: - reg = addressSpace.getULEB128(p, instructionsEnd); -@@ -746,13 +746,13 @@ - results->savedRegisters[reg].value = p; - length = addressSpace.getULEB128(p, instructionsEnd); - p += length; -- if ( logDwarf ) fprintf(stderr, "DW_CFA_val_expression(reg=%lld, expression=0x%llX, length=%lld)\n", -+ if ( logDwarf ) fprintf(stderr, "DW_CFA_val_expression(reg=%ld, expression=0x%lX, length=%ld)\n", - reg, results->savedRegisters[reg].value, length); - break; - case DW_CFA_GNU_args_size: - offset = addressSpace.getULEB128(p, instructionsEnd); - results->spExtraArgSize = offset; -- if ( logDwarf ) fprintf(stderr, "DW_CFA_GNU_args_size(%lld)\n", offset); -+ if ( logDwarf ) fprintf(stderr, "DW_CFA_GNU_args_size(%ld)\n", offset); - break; - case DW_CFA_GNU_negative_offset_extended: - reg = addressSpace.getULEB128(p, instructionsEnd); -@@ -765,7 +765,7 @@ - results->registerSavedMoreThanOnce = true; - results->savedRegisters[reg].location = kRegisterInCFA; - results->savedRegisters[reg].value = -offset; -- if ( logDwarf ) fprintf(stderr, "DW_CFA_GNU_negative_offset_extended(%lld)\n", offset); -+ if ( logDwarf ) fprintf(stderr, "DW_CFA_GNU_negative_offset_extended(%ld)\n", offset); - break; - default: - operand = opcode & 0x3F; -@@ -784,7 +784,7 @@ - } - results->savedRegisters[reg].location = kRegisterInCFA; - results->savedRegisters[reg].value = offset; -- if ( logDwarf ) fprintf(stderr, "DW_CFA_offset(reg=%d, offset=%lld)\n", operand, offset); -+ if ( logDwarf ) fprintf(stderr, "DW_CFA_offset(reg=%d, offset=%ld)\n", operand, offset); - break; - case DW_CFA_advance_loc: - codeOffset += operand * 
cieInfo.codeAlignFactor; -@@ -796,7 +796,7 @@ - //return true; // gcc-4.5 starts the epilog with this - reg = operand; - results->savedRegisters[reg] = initialState.savedRegisters[reg]; -- if ( logDwarf ) fprintf(stderr, "DW_CFA_restore(reg=%lld)\n", reg); -+ if ( logDwarf ) fprintf(stderr, "DW_CFA_restore(reg=%ld)\n", reg); - break; - default: - if ( logDwarf ) fprintf(stderr, "unknown CFA opcode 0x%02X\n", opcode); -diff -ur cctools-port-c1cc758/cctools/ld64/src/ld/parsers/macho_relocatable_file.cpp cctools-port-format/cctools/ld64/src/ld/parsers/macho_relocatable_file.cpp ---- cctools-port-c1cc758/cctools/ld64/src/ld/parsers/macho_relocatable_file.cpp 2017-10-01 13:47:04.000000000 -0700 -+++ cctools-port-format/cctools/ld64/src/ld/parsers/macho_relocatable_file.cpp 2017-11-10 19:13:05.918259550 -0800 -@@ -902,7 +902,7 @@ - { - const macho_section
<P>
* sct = this->sect().machoSection(); - if ( this->_objAddress > sct->addr() + sct->size() ) -- throwf("malformed .o file, symbol has address 0x%0llX which is outside range of its section", (uint64_t)this->_objAddress); -+ throwf("malformed .o file, symbol has address 0x%0lX which is outside range of its section", (uint64_t)this->_objAddress); - uint32_t fileOffset = sct->offset() - sct->addr() + this->_objAddress; - return this->sect().file().fileContent()+fileOffset; - } -@@ -1561,7 +1561,7 @@ - const macho_nlist
<P>
& sym = parser.symbolFromIndex(sortedSymbolIndexes[symIndex]); - if ( ! sect.ignoreLabel(parser.nameFromSymbol(sym)) ) { - pint_t nextSymbolAddr = sym.n_value(); -- //fprintf(stderr, "sectNum=%d, nextSymbolAddr=0x%08llX, name=%s\n", sectNum, (uint64_t)nextSymbolAddr, parser.nameFromSymbol(sym)); -+ //fprintf(stderr, "sectNum=%d, nextSymbolAddr=0x%08lX, name=%s\n", sectNum, (uint64_t)nextSymbolAddr, parser.nameFromSymbol(sym)); - if ( (nextSymbolAddr > startAddr) || ((nextSymbolAddr == startAddr) && (sym.n_sect() == sectNum)) ) - break; - } -@@ -1801,7 +1801,7 @@ - for(uint32_t i=0; i < countOfCFIs; ++i) { - if ( cfiArray[i].isCIE ) - continue; -- //fprintf(stderr, "cfiArray[i].func = 0x%08llX, cfiArray[i].lsda = 0x%08llX, encoding=0x%08X\n", -+ //fprintf(stderr, "cfiArray[i].func = 0x%08lX, cfiArray[i].lsda = 0x%08lX, encoding=0x%08X\n", - // (uint64_t)cfiArray[i].u.fdeInfo.function.targetAddress, - // (uint64_t)cfiArray[i].u.fdeInfo.lsda.targetAddress, - // cfiArray[i].u.fdeInfo.compactUnwindInfo); -@@ -1865,7 +1865,7 @@ - breakIterator.beginSection(); - uint32_t count = sections[i]->computeAtomCount(*this, breakIterator, cfis); - //const macho_section
<P>
* sect = sections[i]->machoSection(); -- //fprintf(stderr, "computed count=%u for section %s size=%llu\n", count, sect->sectname(), (sect != NULL) ? sect->size() : 0); -+ //fprintf(stderr, "computed count=%u for section %s size=%lu\n", count, sect->sectname(), (sect != NULL) ? sect->size() : 0); - computedAtomCount += count; - } - //fprintf(stderr, "allocating %d atoms * sizeof(Atom)=%ld, sizeof(ld::Atom)=%ld\n", computedAtomCount, sizeof(Atom), sizeof(ld::Atom)); -@@ -2340,7 +2340,7 @@ - if ( log ) { - fprintf(stderr, "unsorted sections:\n"); - for(unsigned int i=0; i < _machOSectionsCount; ++i ) -- fprintf(stderr, "0x%08llX %s %s\n", _sectionsStart[i].addr(), _sectionsStart[i].segname(), _sectionsStart[i].sectname()); -+ fprintf(stderr, "0x%08lX %s %s\n", _sectionsStart[i].addr(), _sectionsStart[i].segname(), _sectionsStart[i].sectname()); - } - - // sort by symbol table address -@@ -2351,7 +2351,7 @@ - if ( log ) { - fprintf(stderr, "sorted sections:\n"); - for(unsigned int i=0; i < _machOSectionsCount; ++i ) -- fprintf(stderr, "0x%08llX %s %s\n", _sectionsStart[array[i]].addr(), _sectionsStart[array[i]].segname(), _sectionsStart[array[i]].sectname()); -+ fprintf(stderr, "0x%08lX %s %s\n", _sectionsStart[array[i]].addr(), _sectionsStart[array[i]].segname(), _sectionsStart[array[i]].sectname()); - } - } - -@@ -2446,7 +2446,7 @@ - _overlappingSymbols = false; - for (unsigned int i=1; i < _symbolsInSections; ++i) { - if ( symbolFromIndex(array[i-1]).n_value() == symbolFromIndex(array[i]).n_value() ) { -- //fprintf(stderr, "overlapping symbols at 0x%08llX\n", symbolFromIndex(array[i-1]).n_value()); -+ //fprintf(stderr, "overlapping symbols at 0x%08lX\n", symbolFromIndex(array[i-1]).n_value()); - _overlappingSymbols = true; - break; - } -@@ -2455,7 +2455,7 @@ - if ( log ) { - fprintf(stderr, "sorted symbols:\n"); - for(unsigned int i=0; i < _symbolsInSections; ++i ) -- fprintf(stderr, "0x%09llX symIndex=%d sectNum=%2d, %s\n", symbolFromIndex(array[i]).n_value(), array[i], symbolFromIndex(array[i]).n_sect(), nameFromSymbol(symbolFromIndex(array[i])) ); -+ fprintf(stderr, "0x%09lX symIndex=%d sectNum=%2d, %s\n", symbolFromIndex(array[i]).n_value(), array[i], symbolFromIndex(array[i]).n_sect(), nameFromSymbol(symbolFromIndex(array[i])) ); - } - } - -@@ -2553,7 +2553,7 @@ - _file->_swiftVersion = ((flags >> 8) & 0xFF); - _file->_objcHasCategoryClassPropertiesField = (flags & 64); - if ( sect->size() > 8 ) { -- warning("section %s/%s has unexpectedly large size %llu in %s", -+ warning("section %s/%s has unexpectedly large size %lu in %s", - sect->segname(), Section::makeSectionName(sect), sect->size(), _file->path()); - } - } -@@ -2805,7 +2805,7 @@ - } - } - -- throwf("sectionForAddress(0x%llX) address not in any section", (uint64_t)addr); -+ throwf("sectionForAddress(0x%lX) address not in any section", (uint64_t)addr); - } - - template -@@ -3540,7 +3540,7 @@ - di += 4; - break; - default: -- warning("unknown dwarf string encoding (form=%lld) in %s", form, this->_path); -+ warning("unknown dwarf string encoding (form=%ld) in %s", form, this->_path); - break; - } - return result; -@@ -3647,7 +3647,7 @@ - std::map dwarfIndexToFile; - if ( lines != NULL ) { - while ( line_next(lines, &result, line_stop_pc) ) { -- //fprintf(stderr, "curAtom=%p, result.pc=0x%llX, result.line=%llu, result.end_of_sequence=%d," -+ //fprintf(stderr, "curAtom=%p, result.pc=0x%lX, result.line=%lu, result.end_of_sequence=%d," - // " curAtomAddress=0x%X, curAtomSize=0x%X\n", - // curAtom, result.pc, result.line, 
result.end_of_sequence, curAtomAddress, curAtomSize); - // work around weird debug line table compiler generates if no functions in __text section -@@ -3720,7 +3720,7 @@ - entry.info.atomOffset = curAtomOffset; - entry.info.fileName = filename; - entry.info.lineNumber = result.line; -- //fprintf(stderr, "addr=0x%08llX, line=%lld, file=%s, atom=%s, atom.size=0x%X, end=%d\n", -+ //fprintf(stderr, "addr=0x%08lX, line=%ld, file=%s, atom=%s, atom.size=0x%X, end=%d\n", - // result.pc, result.line, filename, curAtom->name(), curAtomSize, result.end_of_sequence); - entries.push_back(entry); - curAtom->incrementLineInfoCount(); -@@ -3796,7 +3796,7 @@ - stab.string = symString; - } - else { -- fprintf(stderr, "can't find atom for stabs BNSYM at %08llX in %s", -+ fprintf(stderr, "can't find atom for stabs BNSYM at %08lX in %s", - (uint64_t)sym.n_value(), _path); - } - break; -@@ -3859,7 +3859,7 @@ - stab.string = symString; - } - else { -- warning("can't find atom for stabs FUN at %08llX in %s", -+ warning("can't find atom for stabs FUN at %08lX in %s", - (uint64_t)currentAtomAddress, _path); - } - } -@@ -3895,7 +3895,7 @@ - stab.string = symString; - } - else { -- warning("can't find atom for stabs 0x%X at %08llX in %s", -+ warning("can't find atom for stabs 0x%X at %08lX in %s", - type, (uint64_t)sym.n_value(), _path); - } - break; -@@ -3927,7 +3927,7 @@ - stab.string = symString; - } - else { -- warning("can't find atom for stabs FUN at %08llX in %s", -+ warning("can't find atom for stabs FUN at %08lX in %s", - (uint64_t)currentAtomAddress, _path); - } - } -@@ -4339,7 +4339,7 @@ - const uint32_t sectionAlignment = this->_machOSection->align(); - uint32_t modulus = (addr % (1 << sectionAlignment)); - if ( modulus > 0xFFFF ) -- warning("alignment for symbol at address 0x%08llX in %s exceeds 2^16", (uint64_t)addr, this->file().path()); -+ warning("alignment for symbol at address 0x%08lX in %s exceeds 2^16", (uint64_t)addr, this->file().path()); - return ld::Atom::Alignment(sectionAlignment, modulus); - } - -@@ -4842,7 +4842,7 @@ - } - } - } -- throwf("__eh_frame parsing problem. Can't find target of reference to address 0x%08llX", (uint64_t)addr); -+ throwf("__eh_frame parsing problem. 
Can't find target of reference to address 0x%08lX", (uint64_t)addr); - } - } - -@@ -5403,7 +5403,7 @@ - skip = true; - } - else { -- //fprintf(stderr, " 0x%08llX make annon, size=%lld\n", (uint64_t)foundAddr, (uint64_t)size); -+ //fprintf(stderr, " 0x%08lX make annon, size=%ld\n", (uint64_t)foundAddr, (uint64_t)size); - new (allocatedSpace) Atom(*this, this->unlabeledAtomName(parser, foundAddr), foundAddr, - this->elementSizeAtAddress(foundAddr), this->definition(), - this->combine(parser, foundAddr), this->scopeAtAddress(parser, foundAddr), -@@ -5413,7 +5413,7 @@ - } - else { - // make named atom for label -- //fprintf(stderr, " 0x%08llX make labeled\n", (uint64_t)foundAddr); -+ //fprintf(stderr, " 0x%08lX make labeled\n", (uint64_t)foundAddr); - new (allocatedSpace) Atom(*this, parser, *foundLabel, labeledAtomSize); - } - if ( !skip ) { -@@ -5427,7 +5427,7 @@ - for (pint_t addr = foundAddr; addr < (foundAddr+size); addr += elementSizeAtAddress(addr) ) { - // make anon atoms for area before label - if ( this->useElementAt(parser, it, addr) ) { -- //fprintf(stderr, " 0x%08llX make annon, size=%lld\n", (uint64_t)addr, (uint64_t)elementSizeAtAddress(addr)); -+ //fprintf(stderr, " 0x%08lX make annon, size=%ld\n", (uint64_t)addr, (uint64_t)elementSizeAtAddress(addr)); - allocatedSpace = (Atom*)p; - new (allocatedSpace) Atom(*this, this->unlabeledAtomName(parser, addr), addr, this->elementSizeAtAddress(addr), - this->definition(), this->combine(parser, addr), this->scopeAtAddress(parser, addr), -@@ -7587,17 +7587,17 @@ - const uint64_t atomEndAddr = atomStartAddr + inAtom->size(); - for(int i=0; i < count; ++i) { - if ( (addrs[i] < atomStartAddr) || (addrs[i] >= atomEndAddr) ) { -- warning("arm64 Linker Optimiztion Hint addresses are not in same atom: 0x%08llX and 0x%08llX", -+ warning("arm64 Linker Optimiztion Hint addresses are not in same atom: 0x%08lX and 0x%08lX", - lowestAddress, addrs[i]); - return; // skip this LOH - } - if ( (addrs[i] & 0x3) != 0 ) { -- warning("arm64 Linker Optimiztion Hint address is not 4-byte aligned: 0x%08llX", addrs[i]); -+ warning("arm64 Linker Optimiztion Hint address is not 4-byte aligned: 0x%08lX", addrs[i]); - return; // skip this LOH - } - if ( (addrs[i] - lowestAddress) > 0xFFFF ) { - if ( parser.verboseOptimizationHints() ) { -- warning("arm64 Linker Optimiztion Hint addresses are too far apart: 0x%08llX and 0x%08llX", -+ warning("arm64 Linker Optimiztion Hint addresses are too far apart: 0x%08lX and 0x%08lX", - lowestAddress, addrs[i]); - } - return; // skip this LOH -@@ -7785,7 +7785,7 @@ - this->addLOH(parser, kind, count, addrs); - //fprintf(stderr, "kind=%d", kind); - //for (int32_t i=0; i < count; ++i) { -- // fprintf(stderr, ", addr=0x%08llX", addrs[i]); -+ // fprintf(stderr, ", addr=0x%08lX", addrs[i]); - //} - //fprintf(stderr, "\n"); - } -diff -ur cctools-port-c1cc758/cctools/ld64/src/ld/passes/branch_island.cpp cctools-port-format/cctools/ld64/src/ld/passes/branch_island.cpp ---- cctools-port-c1cc758/cctools/ld64/src/ld/passes/branch_island.cpp 2017-10-01 13:47:04.000000000 -0700 -+++ cctools-port-format/cctools/ld64/src/ld/passes/branch_island.cpp 2017-11-10 19:13:05.918259550 -0800 -@@ -451,7 +451,7 @@ - uint64_t totalTextSize = offset + stubCount*16; - if ( (totalTextSize < textSizeWhenMightNeedBranchIslands(opts, hasThumbBranches)) && !haveCrossSectionBranches ) - return; -- if (_s_log) fprintf(stderr, "ld: section %s size=%llu, might need branch islands\n", textSection->sectionName(), totalTextSize); -+ if (_s_log) fprintf(stderr, "ld: 
section %s size=%lu, might need branch islands\n", textSection->sectionName(), totalTextSize); - - // Figure out how many regions of branch islands will be needed, and their locations. - // Construct a vector containing the atoms after which branch islands will be inserted, -@@ -493,7 +493,7 @@ - regionsMap[i] = new AtomToIsland(); - regionsIslands[i] = new std::vector(); - regionAddresses[i] = branchIslandInsertionPoints[i]->sectionOffset() + branchIslandInsertionPoints[i]->size(); -- if (_s_log) fprintf(stderr, "ld: branch islands will be inserted at 0x%08llX after %s\n", regionAddresses[i], branchIslandInsertionPoints[i]->name()); -+ if (_s_log) fprintf(stderr, "ld: branch islands will be inserted at 0x%08lX after %s\n", regionAddresses[i], branchIslandInsertionPoints[i]->name()); - } - unsigned int islandCount = 0; - -@@ -561,7 +561,7 @@ - if ( pos == region->end() ) { - island = makeBranchIsland(opts, fit->kind, 0, target, finalTargetAndOffset, atom->section(), true); - (*region)[finalTargetAndOffset] = island; -- if (_s_log) fprintf(stderr, "added absolute branching island %p %s, displacement=%lld\n", -+ if (_s_log) fprintf(stderr, "added absolute branching island %p %s, displacement=%ld\n", - island, island->name(), displacement); - ++islandCount; - regionsIslands[0]->push_back(island); -@@ -577,7 +577,7 @@ - else if ( displacement > kBranchLimit ) { - // create forward branch chain - const ld::Atom* nextTarget = target; -- if (_s_log) fprintf(stderr, "need forward branching island srcAdr=0x%08llX, dstAdr=0x%08llX, target=%s\n", -+ if (_s_log) fprintf(stderr, "need forward branching island srcAdr=0x%08lX, dstAdr=0x%08lX, target=%s\n", - srcAddr, dstAddr, target->name()); - for (int i=kIslandRegionsCount-1; i >=0 ; --i) { - AtomToIsland* region = regionsMap[i]; -@@ -609,7 +609,7 @@ - AtomToIsland* region = regionsMap[i]; - int64_t islandRegionAddr = regionAddresses[i]; - if ( (dstAddr < islandRegionAddr) && (islandRegionAddr <= srcAddr) ) { -- if (_s_log) fprintf(stderr, "need backward branching island srcAdr=0x%08llX, dstAdr=0x%08llX, target=%s\n", srcAddr, dstAddr, target->name()); -+ if (_s_log) fprintf(stderr, "need backward branching island srcAdr=0x%08lX, dstAdr=0x%08lX, target=%s\n", srcAddr, dstAddr, target->name()); - AtomToIsland::iterator pos = region->find(finalTargetAndOffset); - if ( pos == region->end() ) { - ld::Atom* island = makeBranchIsland(opts, fit->kind, i, prevTarget, finalTargetAndOffset, atom->section(), false); -@@ -670,7 +670,7 @@ - ld::Internal::FinalSection* sect = *sit; - uint16_t maxAlignment = 0; - uint64_t offset = 0; -- if ( log ) fprintf(stderr, " section=%s/%s, address=0x%08llX\n", sect->segmentName(), sect->sectionName(), sect->address); -+ if ( log ) fprintf(stderr, " section=%s/%s, address=0x%08lX\n", sect->segmentName(), sect->sectionName(), sect->address); - for (std::vector::iterator ait = sect->atoms.begin(); ait != sect->atoms.end(); ++ait) { - const ld::Atom* atom = *ait; - uint32_t atomAlignmentPowerOf2 = atom->alignment().powerOf2; -@@ -688,7 +688,7 @@ - offset += requiredModulus+alignment-currentModulus; - } - -- if ( log ) fprintf(stderr, " 0x%08llX atom=%p, name=%s\n", sect->address+offset, atom, atom->name()); -+ if ( log ) fprintf(stderr, " 0x%08lX atom=%p, name=%s\n", sect->address+offset, atom, atom->name()); - sAtomToAddress[atom] = sect->address + offset; - - offset += atom->size(); -diff -ur cctools-port-c1cc758/cctools/ld64/src/ld/passes/code_dedup.cpp cctools-port-format/cctools/ld64/src/ld/passes/code_dedup.cpp ---- 
cctools-port-c1cc758/cctools/ld64/src/ld/passes/code_dedup.cpp 2017-10-01 13:47:04.000000000 -0700 -+++ cctools-port-format/cctools/ld64/src/ld/passes/code_dedup.cpp 2017-11-10 19:13:05.918259550 -0800 -@@ -296,7 +296,7 @@ - continue; - if ( verbose ) { - dedupSavings += ((dups.size() - 1) * masterAtom->size()); -- fprintf(stderr, "deduplicate the following %lu functions (%llu bytes apiece):\n", dups.size(), masterAtom->size()); -+ fprintf(stderr, "deduplicate the following %lu functions (%lu bytes apiece):\n", dups.size(), masterAtom->size()); - } - for (const ld::Atom* dupAtom : dups) { - if ( verbose ) -@@ -313,7 +313,7 @@ - } - } - if ( verbose ) { -- fprintf(stderr, "deduplication saved %llu bytes of __text\n", dedupSavings); -+ fprintf(stderr, "deduplication saved %lu bytes of __text\n", dedupSavings); - } - - if ( log ) { -@@ -348,7 +348,7 @@ - if ( log ) { - fprintf(stderr, "atoms before pruning:\n"); - for (const ld::Atom* atom : textSection->atoms) -- fprintf(stderr, " %p (size=%llu) %sp\n", atom, atom->size(), atom->name()); -+ fprintf(stderr, " %p (size=%lu) %sp\n", atom, atom->size(), atom->name()); - } - // remove replaced atoms from section - textSection->atoms.erase(std::remove_if(textSection->atoms.begin(), textSection->atoms.end(), -@@ -363,7 +363,7 @@ - if ( log ) { - fprintf(stderr, "atoms after pruning:\n"); - for (const ld::Atom* atom : textSection->atoms) -- fprintf(stderr, " %p (size=%llu) %sp\n", atom, atom->size(), atom->name()); -+ fprintf(stderr, " %p (size=%lu) %sp\n", atom, atom->size(), atom->name()); - } - - //fprintf(stderr, "hash-count=%lu, fixup-compares=%lu, atom-count=%u\n", sHashCount, sFixupCompareCount, atomsBeingComparedCount); -diff -ur cctools-port-c1cc758/cctools/ld64/src/ld/passes/dtrace_dof.cpp cctools-port-format/cctools/ld64/src/ld/passes/dtrace_dof.cpp ---- cctools-port-c1cc758/cctools/ld64/src/ld/passes/dtrace_dof.cpp 2017-10-01 13:47:04.000000000 -0700 -+++ cctools-port-format/cctools/ld64/src/ld/passes/dtrace_dof.cpp 2017-11-10 19:13:05.918259550 -0800 -@@ -308,9 +308,9 @@ - f->reserveFixups(3*probeCount); - for (uint32_t i=0; i < probeCount; ++i) { - uint64_t offset = offsetsInDOF[i]; -- //fprintf(stderr, "%s offset[%d]=0x%08llX\n", providerName, i, offset); -+ //fprintf(stderr, "%s offset[%d]=0x%08lX\n", providerName, i, offset); - if ( offset > dofSectionSize ) -- throwf("offsetsInDOF[%d]=%0llX > dofSectionSize=%0lX\n", i, offset, dofSectionSize); -+ throwf("offsetsInDOF[%d]=%0lX > dofSectionSize=%0lX\n", i, offset, dofSectionSize); - f->addSectionFixup(ld::Fixup(offset, ld::Fixup::k1of4, ld::Fixup::kindSetTargetAddress, probes[i].atom)); - f->addSectionFixup(ld::Fixup(offset, ld::Fixup::k2of4, ld::Fixup::kindAddAddend, probes[i].offset)); - f->addSectionFixup(ld::Fixup(offset, ld::Fixup::k3of4, ld::Fixup::kindSubtractTargetAddress, &f->atom())); -diff -ur cctools-port-c1cc758/cctools/ld64/src/ld/passes/huge.cpp cctools-port-format/cctools/ld64/src/ld/passes/huge.cpp ---- cctools-port-c1cc758/cctools/ld64/src/ld/passes/huge.cpp 2017-10-01 13:47:04.000000000 -0700 -+++ cctools-port-format/cctools/ld64/src/ld/passes/huge.cpp 2017-11-10 19:13:05.921592917 -0800 -@@ -115,7 +115,7 @@ - const ld::Atom* atom = *ait; - if ( (address > 0x7FFFFFFFLL) && !sect->isSectionHidden() ) { - state.usingHugeSections = true; -- if (log) fprintf(stderr, "atom: %s is >2GB (0x%09llX), so enabling huge mode\n", atom->name(), address); -+ if (log) fprintf(stderr, "atom: %s is >2GB (0x%09lX), so enabling huge mode\n", atom->name(), address); - break; - } - 
address += atom->size(); -@@ -139,7 +139,7 @@ - if ( atom->size() > 1024*1024 ) { - hugeSection->atoms.push_back(atom); - state.atomToSection[atom] = hugeSection; -- if (log) fprintf(stderr, "moved to __huge: %s, size=%llu\n", atom->name(), atom->size()); -+ if (log) fprintf(stderr, "moved to __huge: %s, size=%lu\n", atom->name(), atom->size()); - *ait = NULL; // change atom to NULL for later bulk removal - movedSome = true; - } -diff -ur cctools-port-c1cc758/cctools/ld64/src/ld/SymbolTable.cpp cctools-port-format/cctools/ld64/src/ld/SymbolTable.cpp ---- cctools-port-c1cc758/cctools/ld64/src/ld/SymbolTable.cpp 2017-10-01 13:47:04.000000000 -0700 -+++ cctools-port-format/cctools/ld64/src/ld/SymbolTable.cpp 2017-11-10 19:13:05.921592917 -0800 -@@ -324,7 +324,7 @@ - if ( _atomB.size() > _atomA.size() ) { - const char* atomApath = (_atomA.file() != NULL) ? _atomA.file()->path() : ""; - const char* atomBpath = (_atomB.file() != NULL) ? _atomB.file()->path() : ""; -- warning("tentative definition of '%s' with size %llu from '%s' is being replaced by real definition of smaller size %llu from '%s'", -+ warning("tentative definition of '%s' with size %lu from '%s' is being replaced by real definition of smaller size %lu from '%s'", - _atomA.name(), _atomB.size(), atomBpath, _atomA.size(), atomApath); - } - pickAtomA(); -@@ -344,7 +344,7 @@ - if ( _atomA.size() > _atomB.size() ) { - const char* atomApath = (_atomA.file() != NULL) ? _atomA.file()->path() : ""; - const char* atomBpath = (_atomB.file() != NULL) ? _atomB.file()->path() : ""; -- warning("tentative definition of '%s' with size %llu from '%s' is being replaced by real definition of smaller size %llu from '%s'", -+ warning("tentative definition of '%s' with size %lu from '%s' is being replaced by real definition of smaller size %lu from '%s'", - _atomA.name(), _atomA.size(),atomApath, _atomB.size(), atomBpath); - } - pickAtomB(); -diff -ur cctools-port-c1cc758/cctools/ld64/src/other/dyldinfo.cpp cctools-port-format/cctools/ld64/src/other/dyldinfo.cpp ---- cctools-port-c1cc758/cctools/ld64/src/other/dyldinfo.cpp 2017-10-01 13:47:04.000000000 -0700 -+++ cctools-port-format/cctools/ld64/src/other/dyldinfo.cpp 2017-11-10 19:13:05.921592917 -0800 -@@ -587,7 +587,7 @@ - return i; - } - } -- throwf("address 0x%llX is not in any segment", (uint64_t)address); -+ throwf("address 0x%lX is not in any segment", (uint64_t)address); - } - - template -@@ -599,7 +599,7 @@ - return (pint_t*)((uint8_t*)fHeader + offsetInMappedFile); - } - } -- throwf("address 0x%llX is not in any segment", (uint64_t)vmaddress); -+ throwf("address 0x%lX is not in any segment", (uint64_t)vmaddress); - } - - template -@@ -686,26 +686,26 @@ - break; - case REBASE_OPCODE_DO_REBASE_IMM_TIMES: - for (int i=0; i < immediate; ++i) { -- printf("%-7s %-16s 0x%08llX %s\n", segName, sectionName(segIndex, segStartAddr+segOffset), segStartAddr+segOffset, typeName); -+ printf("%-7s %-16s 0x%08lX %s\n", segName, sectionName(segIndex, segStartAddr+segOffset), segStartAddr+segOffset, typeName); - segOffset += sizeof(pint_t); - } - break; - case REBASE_OPCODE_DO_REBASE_ULEB_TIMES: - count = read_uleb128(p, end); - for (uint32_t i=0; i < count; ++i) { -- printf("%-7s %-16s 0x%08llX %s\n", segName, sectionName(segIndex, segStartAddr+segOffset), segStartAddr+segOffset, typeName); -+ printf("%-7s %-16s 0x%08lX %s\n", segName, sectionName(segIndex, segStartAddr+segOffset), segStartAddr+segOffset, typeName); - segOffset += sizeof(pint_t); - } - break; - case REBASE_OPCODE_DO_REBASE_ADD_ADDR_ULEB: -- 
printf("%-7s %-16s 0x%08llX %s\n", segName, sectionName(segIndex, segStartAddr+segOffset), segStartAddr+segOffset, typeName); -+ printf("%-7s %-16s 0x%08lX %s\n", segName, sectionName(segIndex, segStartAddr+segOffset), segStartAddr+segOffset, typeName); - segOffset += read_uleb128(p, end) + sizeof(pint_t); - break; - case REBASE_OPCODE_DO_REBASE_ULEB_TIMES_SKIPPING_ULEB: - count = read_uleb128(p, end); - skip = read_uleb128(p, end); - for (uint32_t i=0; i < count; ++i) { -- printf("%-7s %-16s 0x%08llX %s\n", segName, sectionName(segIndex, segStartAddr+segOffset), segStartAddr+segOffset, typeName); -+ printf("%-7s %-16s 0x%08lX %s\n", segName, sectionName(segIndex, segStartAddr+segOffset), segStartAddr+segOffset, typeName); - segOffset += skip + sizeof(pint_t); - } - break; -@@ -754,15 +754,15 @@ - case REBASE_OPCODE_SET_SEGMENT_AND_OFFSET_ULEB: - segmentIndex = immediate; - address = read_uleb128(p, end); -- printf("0x%04X REBASE_OPCODE_SET_SEGMENT_AND_OFFSET_ULEB(%d, 0x%08llX)\n", opcodeOffset, segmentIndex, address); -+ printf("0x%04X REBASE_OPCODE_SET_SEGMENT_AND_OFFSET_ULEB(%d, 0x%08lX)\n", opcodeOffset, segmentIndex, address); - break; - case REBASE_OPCODE_ADD_ADDR_ULEB: - address = read_uleb128(p, end); -- printf("0x%04X REBASE_OPCODE_ADD_ADDR_ULEB(0x%0llX)\n", opcodeOffset, address); -+ printf("0x%04X REBASE_OPCODE_ADD_ADDR_ULEB(0x%0lX)\n", opcodeOffset, address); - break; - case REBASE_OPCODE_ADD_ADDR_IMM_SCALED: - address = immediate*sizeof(pint_t); -- printf("0x%04X REBASE_OPCODE_ADD_ADDR_IMM_SCALED(0x%0llX)\n", opcodeOffset, address); -+ printf("0x%04X REBASE_OPCODE_ADD_ADDR_IMM_SCALED(0x%0lX)\n", opcodeOffset, address); - break; - case REBASE_OPCODE_DO_REBASE_IMM_TIMES: - printf("0x%04X REBASE_OPCODE_DO_REBASE_IMM_TIMES(%d)\n", opcodeOffset, immediate); -@@ -871,12 +871,12 @@ - break; - case BIND_OPCODE_SET_ADDEND_SLEB: - addend = read_sleb128(p, end); -- printf("0x%04X BIND_OPCODE_SET_ADDEND_SLEB(%lld)\n", opcodeOffset, addend); -+ printf("0x%04X BIND_OPCODE_SET_ADDEND_SLEB(%ld)\n", opcodeOffset, addend); - break; - case BIND_OPCODE_SET_SEGMENT_AND_OFFSET_ULEB: - segmentIndex = immediate; - address = read_uleb128(p, end); -- printf("0x%04X BIND_OPCODE_SET_SEGMENT_AND_OFFSET_ULEB(0x%02X, 0x%08llX)\n", opcodeOffset, segmentIndex, address); -+ printf("0x%04X BIND_OPCODE_SET_SEGMENT_AND_OFFSET_ULEB(0x%02X, 0x%08lX)\n", opcodeOffset, segmentIndex, address); - break; - case BIND_OPCODE_ADD_ADDR_ULEB: - skip = read_uleb128(p, end); -@@ -987,22 +987,22 @@ - segOffset += read_uleb128(p, end); - break; - case BIND_OPCODE_DO_BIND: -- printf("%-7s %-16s 0x%08llX %10s %5lld %-16s %s%s\n", segName, sectionName(segIndex, segStartAddr+segOffset), segStartAddr+segOffset, typeName, addend, fromDylib, symbolName, weak_import ); -+ printf("%-7s %-16s 0x%08lX %10s %5ld %-16s %s%s\n", segName, sectionName(segIndex, segStartAddr+segOffset), segStartAddr+segOffset, typeName, addend, fromDylib, symbolName, weak_import ); - segOffset += sizeof(pint_t); - break; - case BIND_OPCODE_DO_BIND_ADD_ADDR_ULEB: -- printf("%-7s %-16s 0x%08llX %10s %5lld %-16s %s%s\n", segName, sectionName(segIndex, segStartAddr+segOffset), segStartAddr+segOffset, typeName, addend, fromDylib, symbolName, weak_import ); -+ printf("%-7s %-16s 0x%08lX %10s %5ld %-16s %s%s\n", segName, sectionName(segIndex, segStartAddr+segOffset), segStartAddr+segOffset, typeName, addend, fromDylib, symbolName, weak_import ); - segOffset += read_uleb128(p, end) + sizeof(pint_t); - break; - case BIND_OPCODE_DO_BIND_ADD_ADDR_IMM_SCALED: -- 
printf("%-7s %-16s 0x%08llX %10s %5lld %-16s %s%s\n", segName, sectionName(segIndex, segStartAddr+segOffset), segStartAddr+segOffset, typeName, addend, fromDylib, symbolName, weak_import ); -+ printf("%-7s %-16s 0x%08lX %10s %5ld %-16s %s%s\n", segName, sectionName(segIndex, segStartAddr+segOffset), segStartAddr+segOffset, typeName, addend, fromDylib, symbolName, weak_import ); - segOffset += immediate*sizeof(pint_t) + sizeof(pint_t); - break; - case BIND_OPCODE_DO_BIND_ULEB_TIMES_SKIPPING_ULEB: - count = read_uleb128(p, end); - skip = read_uleb128(p, end); - for (uint32_t i=0; i < count; ++i) { -- printf("%-7s %-16s 0x%08llX %10s %5lld %-16s %s%s\n", segName, sectionName(segIndex, segStartAddr+segOffset), segStartAddr+segOffset, typeName, addend, fromDylib, symbolName, weak_import ); -+ printf("%-7s %-16s 0x%08lX %10s %5ld %-16s %s%s\n", segName, sectionName(segIndex, segStartAddr+segOffset), segStartAddr+segOffset, typeName, addend, fromDylib, symbolName, weak_import ); - segOffset += skip + sizeof(pint_t); - } - break; -@@ -1070,22 +1070,22 @@ - segOffset += read_uleb128(p, end); - break; - case BIND_OPCODE_DO_BIND: -- printf("%-7s %-16s 0x%08llX %10s %5lld %s\n", segName, sectionName(segIndex, segStartAddr+segOffset), segStartAddr+segOffset, typeName, addend, symbolName ); -+ printf("%-7s %-16s 0x%08lX %10s %5ld %s\n", segName, sectionName(segIndex, segStartAddr+segOffset), segStartAddr+segOffset, typeName, addend, symbolName ); - segOffset += sizeof(pint_t); - break; - case BIND_OPCODE_DO_BIND_ADD_ADDR_ULEB: -- printf("%-7s %-16s 0x%08llX %10s %5lld %s\n", segName, sectionName(segIndex, segStartAddr+segOffset), segStartAddr+segOffset, typeName, addend, symbolName ); -+ printf("%-7s %-16s 0x%08lX %10s %5ld %s\n", segName, sectionName(segIndex, segStartAddr+segOffset), segStartAddr+segOffset, typeName, addend, symbolName ); - segOffset += read_uleb128(p, end) + sizeof(pint_t); - break; - case BIND_OPCODE_DO_BIND_ADD_ADDR_IMM_SCALED: -- printf("%-7s %-16s 0x%08llX %10s %5lld %s\n", segName, sectionName(segIndex, segStartAddr+segOffset), segStartAddr+segOffset, typeName, addend, symbolName ); -+ printf("%-7s %-16s 0x%08lX %10s %5ld %s\n", segName, sectionName(segIndex, segStartAddr+segOffset), segStartAddr+segOffset, typeName, addend, symbolName ); - segOffset += immediate*sizeof(pint_t) + sizeof(pint_t); - break; - case BIND_OPCODE_DO_BIND_ULEB_TIMES_SKIPPING_ULEB: - count = read_uleb128(p, end); - skip = read_uleb128(p, end); - for (uint32_t i=0; i < count; ++i) { -- printf("%-7s %-16s 0x%08llX %10s %5lld %s\n", segName, sectionName(segIndex, segStartAddr+segOffset), segStartAddr+segOffset, typeName, addend, symbolName ); -+ printf("%-7s %-16s 0x%08lX %10s %5ld %s\n", segName, sectionName(segIndex, segStartAddr+segOffset), segStartAddr+segOffset, typeName, addend, symbolName ); - segOffset += skip + sizeof(pint_t); - } - break; -@@ -1178,7 +1178,7 @@ - segOffset += read_uleb128(p, end); - break; - case BIND_OPCODE_DO_BIND: -- printf("%-7s %-16s 0x%08llX 0x%04X %-16s %s%s\n", segName, sectionName(segIndex, segStartAddr+segOffset), segStartAddr+segOffset, lazy_offset, fromDylib, symbolName, weak_import); -+ printf("%-7s %-16s 0x%08lX 0x%04X %-16s %s%s\n", segName, sectionName(segIndex, segStartAddr+segOffset), segStartAddr+segOffset, lazy_offset, fromDylib, symbolName, weak_import); - segOffset += sizeof(pint_t); - break; - default: -@@ -1252,12 +1252,12 @@ - break; - case BIND_OPCODE_SET_ADDEND_SLEB: - addend = read_sleb128(p, end); -- printf("0x%04X BIND_OPCODE_SET_ADDEND_SLEB(%lld)\n", 
opcodeOffset, addend); -+ printf("0x%04X BIND_OPCODE_SET_ADDEND_SLEB(%ld)\n", opcodeOffset, addend); - break; - case BIND_OPCODE_SET_SEGMENT_AND_OFFSET_ULEB: - segmentIndex = immediate; - address = read_uleb128(p, end); -- printf("0x%04X BIND_OPCODE_SET_SEGMENT_AND_OFFSET_ULEB(0x%02X, 0x%08llX)\n", opcodeOffset, segmentIndex, address); -+ printf("0x%04X BIND_OPCODE_SET_SEGMENT_AND_OFFSET_ULEB(0x%02X, 0x%08lX)\n", opcodeOffset, segmentIndex, address); - break; - case BIND_OPCODE_ADD_ADDR_ULEB: - skip = read_uleb128(p, end); -@@ -1317,7 +1317,7 @@ - if ( reExport ) - printf("[re-export] "); - else -- printf("0x%08llX ", fBaseAddress+it->address); -+ printf("0x%08lX ", fBaseAddress+it->address); - printf("%s", it->name); - if ( weakDef || threadLocal || resolver || abs ) { - bool needComma = false; -@@ -1341,7 +1341,7 @@ - if ( resolver ) { - if ( needComma ) - printf(", "); -- printf("resolver=0x%08llX", it->other); -+ printf("resolver=0x%08lX", it->other); - needComma = true; - } - printf("]"); -@@ -1375,13 +1375,13 @@ - ++p; - ++p; - if ( *importName == '\0' ) -- printf("\tnode%03ld [ label=%s,re-export from dylib=%llu ];\n", (long)(me-start), cummulativeString, ordinal); -+ printf("\tnode%03ld [ label=%s,re-export from dylib=%lu ];\n", (long)(me-start), cummulativeString, ordinal); - else -- printf("\tnode%03ld [ label=%s,re-export %s from dylib=%llu ];\n", (long)(me-start), cummulativeString, importName, ordinal); -+ printf("\tnode%03ld [ label=%s,re-export %s from dylib=%lu ];\n", (long)(me-start), cummulativeString, importName, ordinal); - } - else { - uint64_t address = read_uleb128(p, end); -- printf("\tnode%03ld [ label=%s,addr0x%08llX ];\n", (long)(me-start), cummulativeString, address); -+ printf("\tnode%03ld [ label=%s,addr0x%08lX ];\n", (long)(me-start), cummulativeString, address); - if ( flags & EXPORT_SYMBOL_FLAGS_STUB_AND_RESOLVER ) - read_uleb128(p, end); - } -@@ -1469,25 +1469,25 @@ - ++p; - ++p; - if ( strlen(importName) == 0 ) -- printf("[flags=REEXPORT ordinal=%llu] ", ordinal); -+ printf("[flags=REEXPORT ordinal=%lu] ", ordinal); - else -- printf("[flags=REEXPORT ordinal=%llu import=%s] ", ordinal, importName); -+ printf("[flags=REEXPORT ordinal=%lu import=%s] ", ordinal, importName); - } - else if ( flags & EXPORT_SYMBOL_FLAGS_STUB_AND_RESOLVER ) { - uint64_t stub = read_uleb128(p, end); - uint64_t resolver = read_uleb128(p, end); -- printf("[flags=STUB_AND_RESOLVER stub=0x%06llX resolver=0x%06llX] ", stub, resolver); -+ printf("[flags=STUB_AND_RESOLVER stub=0x%06lX resolver=0x%06lX] ", stub, resolver); - } - else { - uint64_t address = read_uleb128(p, end); - if ( (flags & EXPORT_SYMBOL_FLAGS_KIND_MASK) == EXPORT_SYMBOL_FLAGS_KIND_REGULAR ) -- printf("[addr=0x%06llX] ", address); -+ printf("[addr=0x%06lX] ", address); - else if ( (flags & EXPORT_SYMBOL_FLAGS_KIND_MASK) == EXPORT_SYMBOL_FLAGS_KIND_THREAD_LOCAL) -- printf("[flags=THREAD_LOCAL addr=0x%06llX] ", address); -+ printf("[flags=THREAD_LOCAL addr=0x%06lX] ", address); - else if ( (flags & EXPORT_SYMBOL_FLAGS_KIND_MASK) == EXPORT_SYMBOL_FLAGS_KIND_ABSOLUTE) -- printf("[flags=ABSOLUTE addr=0x%06llX] ", address); -+ printf("[flags=ABSOLUTE addr=0x%06lX] ", address); - else -- printf("[flags=0x%llX addr=0x%06llX] ", flags, address); -+ printf("[flags=0x%lX addr=0x%06lX] ", flags, address); - } - } - // print child edges -@@ -1627,7 +1627,7 @@ - do { - delta = read_uleb128(p, end); - address += delta; -- printf("0x%0llX %s\n", address+fBaseAddress, kindStr); -+ printf("0x%0lX %s\n", address+fBaseAddress, kindStr); 
- } while (delta); - - return p; -@@ -1659,7 +1659,7 @@ - char fromSectionName[20]; - strncpy(fromSectionName, fromSection->sectname(), 16); - fromSectionName[16] = '\0'; -- printf("from sect=%s/%s, to sect=%s/%s, count=%lld:\n", fromSection->segname(), fromSectionName, toSection->segname(), toSection->sectname(), toOffsetCount); -+ printf("from sect=%s/%s, to sect=%s/%s, count=%ld:\n", fromSection->segname(), fromSectionName, toSection->segname(), toSection->sectname(), toOffsetCount); - uint64_t toSectionOffset = 0; - const char* lastFromSymbol = NULL; - for (uint64_t j=0; j < toOffsetCount; ++j) { -@@ -1678,12 +1678,12 @@ - if ( (s != lastFromSymbol) && (s != NULL) ) - printf(" %s:\n", s); - const char* toSymbol = closestSymbolNameForAddress(toSection->addr()+toSectionOffset, &symbolOffset, toSectionIndex); -- printf(" from addr=0x%0llX %s to addr=0x%0llX", fromSection->addr()+fromSectionOffset, sharedRegionKindName(kind), toSection->addr()+toSectionOffset); -+ printf(" from addr=0x%0lX %s to addr=0x%0lX", fromSection->addr()+fromSectionOffset, sharedRegionKindName(kind), toSection->addr()+toSectionOffset); - if ( toSymbol != NULL ) { - if ( symbolOffset == 0 ) - printf(" (%s)", toSymbol); - else -- printf(" (%s + %lld)", toSymbol, symbolOffset); -+ printf(" (%s + %ld)", toSymbol, symbolOffset); - } - printf("\n"); - lastFromSymbol = s; -@@ -1740,16 +1740,16 @@ - void DyldInfoPrinter::printFunctionStartLine(uint64_t addr) - { - if ( addr & 1 ) -- printf("0x%0llX [thumb] %s\n", (addr & -2), symbolNameForAddress(addr & -2)); -+ printf("0x%0lX [thumb] %s\n", (addr & -2), symbolNameForAddress(addr & -2)); - else -- printf("0x%0llX %s\n", addr, symbolNameForAddress(addr)); -+ printf("0x%0lX %s\n", addr, symbolNameForAddress(addr)); - } - #endif - - template - void DyldInfoPrinter::printFunctionStartLine(uint64_t addr) - { -- printf("0x%0llX %s\n", addr, symbolNameForAddress(addr)); -+ printf("0x%0lX %s\n", addr, symbolNameForAddress(addr)); - } - - -@@ -2023,7 +2023,7 @@ - const char* typeName = relocTypeName(reloc->r_type()); - const char* segName = segmentName(segIndex); - const char* sectName = sectionName(segIndex, addr); -- printf("%-8s %-16s 0x%08llX %s\n", segName, sectName, (uint64_t)addr, typeName); -+ printf("%-8s %-16s 0x%08lX %s\n", segName, sectName, (uint64_t)addr, typeName); - } - else { - const macho_scattered_relocation_info

<P>* sreloc = (macho_scattered_relocation_info<P>
*)reloc; -@@ -2032,7 +2032,7 @@ - const char* typeName = relocTypeName(sreloc->r_type()); - const char* segName = segmentName(segIndex); - const char* sectName = sectionName(segIndex, addr); -- printf("%-8s %-16s 0x%08llX %s\n", segName, sectName, (uint64_t)addr, typeName); -+ printf("%-8s %-16s 0x%08lX %s\n", segName, sectName, (uint64_t)addr, typeName); - } - } - // look for local non-lazy-pointers -@@ -2054,7 +2054,7 @@ - const char* typeName = "pointer"; - const char* segName = segmentName(segIndex); - const char* sectName = sectionName(segIndex, addr); -- printf("%-8s %-16s 0x%08llX %s\n", segName, sectName, (uint64_t)addr, typeName); -+ printf("%-8s %-16s 0x%08lX %s\n", segName, sectName, (uint64_t)addr, typeName); - } - } - } -@@ -2080,7 +2080,7 @@ - pint_t thumb = 0; - if ( sym->n_desc() & N_ARM_THUMB_DEF ) - thumb = 1; -- printf("0x%08llX %s%s\n", sym->n_value()+thumb, flags, &fStrings[sym->n_strx()]); -+ printf("0x%08lX %s%s\n", sym->n_value()+thumb, flags, &fStrings[sym->n_strx()]); - } - } - } -@@ -2176,7 +2176,7 @@ - // To get the addend requires subtracting out the base address it was prebound to. - addend -= sym->n_value(); - } -- printf("%-8s %-16s 0x%08llX %10s %4s %5lld %-16s %s\n", segName, sectName, (uint64_t)addr, -+ printf("%-8s %-16s 0x%08lX %10s %4s %5ld %-16s %s\n", segName, sectName, (uint64_t)addr, - typeName, weak_import, addend, fromDylib, symbolName); - } - // look for non-lazy pointers -@@ -2203,7 +2203,7 @@ - const char* segName = segmentName(segIndex); - const char* sectName = sectionName(segIndex, addr); - int64_t addend = 0; -- printf("%-8s %-16s 0x%08llX %10s %4s %5lld %-16s %s\n", segName, sectName, (uint64_t)addr, -+ printf("%-8s %-16s 0x%08lX %10s %4s %5ld %-16s %s\n", segName, sectName, (uint64_t)addr, - typeName, weak_import, addend, fromDylib, symbolName); - } - } -@@ -2242,7 +2242,7 @@ - uint8_t segIndex = segmentIndexForAddress(addr); - const char* segName = segmentName(segIndex); - const char* sectName = sectionName(segIndex, addr); -- printf("%-7s %-16s 0x%08llX 0x%04X %-16s %s\n", segName, sectName, (uint64_t)addr, symbolIndex, fromDylib, symbolName); -+ printf("%-7s %-16s 0x%08lX 0x%04X %-16s %s\n", segName, sectName, (uint64_t)addr, symbolIndex, fromDylib, symbolName); - } - } - else if ( (type == S_SYMBOL_STUBS) && (((sect->flags() & S_ATTR_SELF_MODIFYING_CODE) != 0)) && (sect->reserved2() == 5) ) { -@@ -2259,7 +2259,7 @@ - uint8_t segIndex = segmentIndexForAddress(addr); - const char* segName = segmentName(segIndex); - const char* sectName = sectionName(segIndex, addr); -- printf("%-7s %-16s 0x%08llX 0x%04X %-16s %s\n", segName, sectName, (uint64_t)addr, symbolIndex, fromDylib, symbolName); -+ printf("%-7s %-16s 0x%08lX 0x%04X %-16s %s\n", segName, sectName, (uint64_t)addr, symbolIndex, fromDylib, symbolName); - } - } - } -diff -ur cctools-port-c1cc758/cctools/ld64/src/other/machochecker.cpp cctools-port-format/cctools/ld64/src/other/machochecker.cpp ---- cctools-port-c1cc758/cctools/ld64/src/other/machochecker.cpp 2017-10-01 13:47:04.000000000 -0700 -+++ cctools-port-format/cctools/ld64/src/other/machochecker.cpp 2017-11-10 19:13:05.921592917 -0800 -@@ -639,12 +639,12 @@ - if ( threadInfo != NULL ) { - pint_t initialPC = getEntryPoint(threadInfo); - if ( (initialPC < fTEXTSegment->vmaddr()) || (initialPC >= (fTEXTSegment->vmaddr()+fTEXTSegment->vmsize())) ) -- throwf("entry point 0x%0llX is outside __TEXT segment", (long long)initialPC); -+ throwf("entry point 0x%0lX is outside __TEXT segment", (long long)initialPC); - } - else if ( 
entryPoint != NULL ) { - pint_t initialOffset = entryPoint->entryoff(); - if ( (initialOffset < fTEXTSegment->fileoff()) || (initialOffset >= (fTEXTSegment->fileoff()+fTEXTSegment->filesize())) ) -- throwf("entry point 0x%0llX is outside __TEXT segment", (long long)initialOffset); -+ throwf("entry point 0x%0lX is outside __TEXT segment", (long long)initialOffset); - } - - // checks for executables -@@ -1086,7 +1086,7 @@ - for (pint_t* p=arrayStart; p < arrayEnd; ++p) { - pint_t pointer = P::getP(*p); - if ( (pointer < fTEXTSegment->vmaddr()) || (pointer >= (fTEXTSegment->vmaddr()+fTEXTSegment->vmsize())) ) -- throwf("%s 0x%08llX points outside __TEXT segment", kind, (long long)pointer); -+ throwf("%s 0x%08lX points outside __TEXT segment", kind, (long long)pointer); - } - // check each pointer in array will be rebased and not bound - if ( fSlidableImage ) { -@@ -1094,9 +1094,9 @@ - pint_t sectionEndddr = sect->addr() + sect->size(); - for(pint_t addr = sectionBeginAddr; addr < sectionEndddr; addr += sizeof(pint_t)) { - if ( addressIsBindingSite(addr) ) -- throwf("%s at 0x%0llX has binding to external symbol", kind, (long long)addr); -+ throwf("%s at 0x%0lX has binding to external symbol", kind, (long long)addr); - if ( ! addressIsRebaseSite(addr) ) -- throwf("%s at 0x%0llX is not rebased", kind, (long long)addr); -+ throwf("%s at 0x%0lX is not rebased", kind, (long long)addr); - } - } - break; -@@ -1392,7 +1392,7 @@ - addr = segStartAddr+segOffset; - if ( (rangeStart <= addr) && (addr < rangeEnd) ) - return true; -- //printf("%-7s %-16s 0x%08llX %s\n", segName, sectionName(segIndex, segStartAddr+segOffset), segStartAddr+segOffset, typeName); -+ //printf("%-7s %-16s 0x%08lX %s\n", segName, sectionName(segIndex, segStartAddr+segOffset), segStartAddr+segOffset, typeName); - segOffset += sizeof(pint_t); - } - break; -@@ -1402,7 +1402,7 @@ - addr = segStartAddr+segOffset; - if ( (rangeStart <= addr) && (addr < rangeEnd) ) - return true; -- //printf("%-7s %-16s 0x%08llX %s\n", segName, sectionName(segIndex, segStartAddr+segOffset), segStartAddr+segOffset, typeName); -+ //printf("%-7s %-16s 0x%08lX %s\n", segName, sectionName(segIndex, segStartAddr+segOffset), segStartAddr+segOffset, typeName); - segOffset += sizeof(pint_t); - } - break; -@@ -1410,7 +1410,7 @@ - addr = segStartAddr+segOffset; - if ( (rangeStart <= addr) && (addr < rangeEnd) ) - return true; -- //printf("%-7s %-16s 0x%08llX %s\n", segName, sectionName(segIndex, segStartAddr+segOffset), segStartAddr+segOffset, typeName); -+ //printf("%-7s %-16s 0x%08lX %s\n", segName, sectionName(segIndex, segStartAddr+segOffset), segStartAddr+segOffset, typeName); - segOffset += read_uleb128(p, end) + sizeof(pint_t); - break; - case REBASE_OPCODE_DO_REBASE_ULEB_TIMES_SKIPPING_ULEB: -@@ -1420,7 +1420,7 @@ - addr = segStartAddr+segOffset; - if ( (rangeStart <= addr) && (addr < rangeEnd) ) - return true; -- //printf("%-7s %-16s 0x%08llX %s\n", segName, sectionName(segIndex, segStartAddr+segOffset), segStartAddr+segOffset, typeName); -+ //printf("%-7s %-16s 0x%08lX %s\n", segName, sectionName(segIndex, segStartAddr+segOffset), segStartAddr+segOffset, typeName); - segOffset += skip + sizeof(pint_t); - } - break; -@@ -1482,7 +1482,7 @@ - addr = segStartAddr+segOffset; - if ( addr == targetAddr ) - return true; -- //printf("%-7s %-16s 0x%08llX %s\n", segName, sectionName(segIndex, segStartAddr+segOffset), segStartAddr+segOffset, typeName); -+ //printf("%-7s %-16s 0x%08lX %s\n", segName, sectionName(segIndex, segStartAddr+segOffset), 
segStartAddr+segOffset, typeName); - segOffset += sizeof(pint_t); - } - break; -@@ -1492,7 +1492,7 @@ - addr = segStartAddr+segOffset; - if ( addr == targetAddr ) - return true; -- //printf("%-7s %-16s 0x%08llX %s\n", segName, sectionName(segIndex, segStartAddr+segOffset), segStartAddr+segOffset, typeName); -+ //printf("%-7s %-16s 0x%08lX %s\n", segName, sectionName(segIndex, segStartAddr+segOffset), segStartAddr+segOffset, typeName); - segOffset += sizeof(pint_t); - } - break; -@@ -1500,7 +1500,7 @@ - addr = segStartAddr+segOffset; - if ( addr == targetAddr ) - return true; -- //printf("%-7s %-16s 0x%08llX %s\n", segName, sectionName(segIndex, segStartAddr+segOffset), segStartAddr+segOffset, typeName); -+ //printf("%-7s %-16s 0x%08lX %s\n", segName, sectionName(segIndex, segStartAddr+segOffset), segStartAddr+segOffset, typeName); - segOffset += read_uleb128(p, end) + sizeof(pint_t); - break; - case REBASE_OPCODE_DO_REBASE_ULEB_TIMES_SKIPPING_ULEB: -@@ -1510,7 +1510,7 @@ - addr = segStartAddr+segOffset; - if ( addr == targetAddr ) - return true; -- //printf("%-7s %-16s 0x%08llX %s\n", segName, sectionName(segIndex, segStartAddr+segOffset), segStartAddr+segOffset, typeName); -+ //printf("%-7s %-16s 0x%08lX %s\n", segName, sectionName(segIndex, segStartAddr+segOffset), segStartAddr+segOffset, typeName); - segOffset += skip + sizeof(pint_t); - } - break; -diff -ur cctools-port-c1cc758/cctools/ld64/src/other/ObjectDump.cpp cctools-port-format/cctools/ld64/src/other/ObjectDump.cpp ---- cctools-port-c1cc758/cctools/ld64/src/other/ObjectDump.cpp 2017-10-01 13:47:04.000000000 -0700 -+++ cctools-port-format/cctools/ld64/src/other/ObjectDump.cpp 2017-11-10 19:13:05.921592917 -0800 -@@ -630,7 +630,7 @@ - } - else { - uint64_t sectAddr = addressOfFirstAtomInSection(atom.section()); -- sprintf(buffer, "%s@%s+0x%08llX", atom.name(), atom.section().sectionName(), atom.objectAddress()-sectAddr); -+ sprintf(buffer, "%s@%s+0x%08lX", atom.name(), atom.section().sectionName(), atom.objectAddress()-sectAddr); - } - break; - case ld::Atom::symbolTableNotInFinalLinkedImages: -@@ -704,10 +704,10 @@ - printf(" - %s", referenceTargetAtomName(ref)); - break; - case ld::Fixup::kindAddAddend: -- printf(" + 0x%llX", ref->u.addend); -+ printf(" + 0x%lX", ref->u.addend); - break; - case ld::Fixup::kindSubtractAddend: -- printf(" - 0x%llX", ref->u.addend); -+ printf(" - 0x%lX", ref->u.addend); - break; - case ld::Fixup::kindSetTargetImageOffset: - printf("imageOffset(%s)", referenceTargetAtomName(ref)); -@@ -1063,7 +1063,7 @@ - void dumper::dumpAtom(const ld::Atom& atom) - { - printf("name: %s\n", makeName(atom)); -- printf("size: 0x%0llX\n", atom.size()); -+ printf("size: 0x%0lX\n", atom.size()); - printf("align: %u mod %u\n", atom.alignment().modulus, (1 << atom.alignment().powerOf2) ); - printf("scope: %s\n", scopeString(atom)); - if ( sShowDefinitionKind ) -diff -ur cctools-port-c1cc758/cctools/ld64/src/other/rebase.cpp cctools-port-format/cctools/ld64/src/other/rebase.cpp ---- cctools-port-c1cc758/cctools/ld64/src/other/rebase.cpp 2017-10-01 13:47:04.000000000 -0700 -+++ cctools-port-format/cctools/ld64/src/other/rebase.cpp 2017-11-10 19:13:05.921592917 -0800 -@@ -454,7 +454,7 @@ - template - void Rebaser::rebaseAt(int segIndex, uint64_t offset, uint8_t type) - { -- //fprintf(stderr, "rebaseAt(seg=%d, offset=0x%08llX, type=%d\n", segIndex, offset, type); -+ //fprintf(stderr, "rebaseAt(seg=%d, offset=0x%08lX, type=%d\n", segIndex, offset, type); - static int lastSegIndex = -1; - static uint8_t* lastSegMappedStart = 
NULL; - if ( segIndex != lastSegIndex ) { -@@ -857,7 +857,7 @@ - ai.vmSize = rebaser->getVMSize(); - ai.orgBase = rebaser->getBaseAddress(); - ai.newBase = 0; -- //fprintf(stderr, "base=0x%llX, size=0x%llX\n", ai.orgBase, ai.vmSize); -+ //fprintf(stderr, "base=0x%lX, size=0x%lX\n", ai.orgBase, ai.vmSize); - info.archs.push_back(ai); - } - } -@@ -903,7 +903,7 @@ - if ( (*rit)->getArchitecture() == fait->arch ) { - (*rit)->setBaseAddress(fait->newBase); - if ( verbose ) -- printf("%8s 0x%0llX -> 0x%0llX %s\n", nameForArch(fait->arch), fait->orgBase, fait->newBase, info.path); -+ printf("%8s 0x%0lX -> 0x%0lX %s\n", nameForArch(fait->arch), fait->orgBase, fait->newBase, info.path); - } - } - } -diff -ur cctools-port-c1cc758/cctools/ld64/src/other/unwinddump.cpp cctools-port-format/cctools/ld64/src/other/unwinddump.cpp ---- cctools-port-c1cc758/cctools/ld64/src/other/unwinddump.cpp 2017-10-01 13:47:04.000000000 -0700 -+++ cctools-port-format/cctools/ld64/src/other/unwinddump.cpp 2017-11-10 19:13:05.924926282 -0800 -@@ -250,7 +250,7 @@ - value |= 1; - if ( value == addr ) { - const char* r = &fStrings[fSymbols[i].n_strx()]; -- //fprintf(stderr, "addr=0x%08llX, i=%u, n_type=0x%0X, r=%s\n", (long long)(fSymbols[i].n_value()), i, fSymbols[i].n_type(), r); -+ //fprintf(stderr, "addr=0x%08lX, i=%u, n_type=0x%0X, r=%s\n", (long long)(fSymbols[i].n_value()), i, fSymbols[i].n_type(), r); - return r; - } - else if ( offset != NULL ) { -@@ -904,14 +904,14 @@ - template - void UnwindPrinter::printObjectUnwindSection(bool showFunctionNames) - { -- printf("Arch: %s, Section: __LD,__compact_unwind (size=0x%08llX, => %lld entries)\n", -+ printf("Arch: %s, Section: __LD,__compact_unwind (size=0x%08lX, => %ld entries)\n", - archName(), fUnwindSection->size(), fUnwindSection->size() / sizeof(macho_compact_unwind_entry

<P>)); - - const macho_compact_unwind_entry<P>
* const entriesStart = (macho_compact_unwind_entry<P>
*)((uint8_t*)fHeader + fUnwindSection->offset()); - const macho_compact_unwind_entry<P>
* const entriesEnd = (macho_compact_unwind_entry<P>
*)((uint8_t*)fHeader + fUnwindSection->offset() + fUnwindSection->size()); - for (const macho_compact_unwind_entry<P>
* entry=entriesStart; entry < entriesEnd; ++entry) { - uint64_t entryAddress = ((char*)entry - (char*)entriesStart) + fUnwindSection->addr(); -- printf("0x%08llX:\n", entryAddress); -+ printf("0x%08lX:\n", entryAddress); - const char* functionNameStr; - pint_t funcAddress; - uint32_t offsetInFunction; -@@ -923,11 +923,11 @@ - funcAddress = entry->codeStart(); - } - if ( offsetInFunction == 0 ) -- printf(" start: 0x%08llX %s\n", (uint64_t)funcAddress, functionNameStr); -+ printf(" start: 0x%08lX %s\n", (uint64_t)funcAddress, functionNameStr); - else -- printf(" start: 0x%08llX %s+0x%X\n", (uint64_t)funcAddress+offsetInFunction, functionNameStr, offsetInFunction); -+ printf(" start: 0x%08lX %s+0x%X\n", (uint64_t)funcAddress+offsetInFunction, functionNameStr, offsetInFunction); - -- printf(" end: 0x%08llX (len=0x%08X)\n", (uint64_t)(funcAddress+offsetInFunction+entry->codeLen()), entry->codeLen()); -+ printf(" end: 0x%08lX (len=0x%08X)\n", (uint64_t)(funcAddress+offsetInFunction+entry->codeLen()), entry->codeLen()); - - char encodingString[200]; - this->decode(entry->compactUnwindInfo(), ((const uint8_t*)fHeader), encodingString); -@@ -947,9 +947,9 @@ - uint32_t lsdaOffset; - const char* lsdaName = this->functionName(entry->lsda(), &lsdaOffset); - if ( lsdaOffset == 0 ) -- printf(" lsda: 0x%08llX %s\n", (uint64_t)entry->lsda(), lsdaName); -+ printf(" lsda: 0x%08lX %s\n", (uint64_t)entry->lsda(), lsdaName); - else -- printf(" lsda: 0x%08llX %s+0x%X\n", (uint64_t)entry->lsda(), lsdaName, lsdaOffset); -+ printf(" lsda: 0x%08lX %s+0x%X\n", (uint64_t)entry->lsda(), lsdaName, lsdaOffset); - } - } - } -@@ -962,7 +962,7 @@ - const uint8_t* sectionContent = (uint8_t*)fHeader + fUnwindSection->offset(); - macho_unwind_info_section_header

<P>* sectionHeader = (macho_unwind_info_section_header<P>
*)(sectionContent); - -- printf("Arch: %s, Section: __TEXT,__unwind_info (addr=0x%08llX, size=0x%08llX, fileOffset=0x%08X)\n", -+ printf("Arch: %s, Section: __TEXT,__unwind_info (addr=0x%08lX, size=0x%08lX, fileOffset=0x%08X)\n", - archName(), fUnwindSection->addr(), fUnwindSection->size(), fUnwindSection->offset()); - printf("\tversion=0x%08X\n", sectionHeader->version()); - printf("\tcommonEncodingsArraySectionOffset=0x%08X\n", sectionHeader->commonEncodingsArraySectionOffset()); -diff -ur cctools-port-c1cc758/cctools/libstuff/ofile.c cctools-port-format/cctools/libstuff/ofile.c ---- cctools-port-c1cc758/cctools/libstuff/ofile.c 2017-10-01 13:47:04.000000000 -0700 -+++ cctools-port-format/cctools/libstuff/ofile.c 2017-11-10 19:38:34.511211634 -0800 -@@ -3108,7 +3108,7 @@ - } - if(offset % - (1 << align) != 0){ -- error("fat file: %s offset: %llu for cputype (%d) cpusubtype " -+ error("fat file: %s offset: %lu for cputype (%d) cpusubtype " - "(%d)) not aligned on it's alignment (2^%u)", - ofile->file_name, offset, cputype, - cpusubtype & ~CPU_SUBTYPE_MASK, align); -@@ -3210,7 +3210,7 @@ - return(CHECK_BAD); - } - if(offset % (1 << align) != 0){ -- archive_member_error(ofile, "fat file's offset: %llu for " -+ archive_member_error(ofile, "fat file's offset: %lu for " - "cputype (%d) cpusubtype (%d) not aligned on it's " - "alignment (2^%u)", offset, cputype, - cpusubtype & ~CPU_SUBTYPE_MASK, align); -diff -ur cctools-port-c1cc758/cctools/libstuff/writeout.c cctools-port-format/cctools/libstuff/writeout.c ---- cctools-port-c1cc758/cctools/libstuff/writeout.c 2017-10-01 13:47:04.000000000 -0700 -+++ cctools-port-format/cctools/libstuff/writeout.c 2017-11-10 19:40:56.792695079 -0800 -@@ -421,7 +421,7 @@ - if((r = vm_allocate(mach_task_self(), (vm_address_t *)&file, - file_size, TRUE)) != KERN_SUCCESS) - mach_fatal(r, "can't vm_allocate() buffer for output file: %s of " -- "size %llu", filename, file_size); -+ "size %lu", filename, file_size); - - /* - * If there is more than one architecture then fill in the fat file -@@ -460,7 +460,7 @@ - if(offset > UINT32_MAX && archs[i].fat_arch64 == NULL){ - error("file too large to create as a fat file because " - "offset field in struct fat_arch is only 32-bits and " -- "offset (%llu) to architecture %s exceeds that", -+ "offset (%lu) to architecture %s exceeds that", - offset, archs[i].fat_arch_name); - return; - } -diff -ur cctools-port-c1cc758/cctools/misc/libtool.c cctools-port-format/cctools/misc/libtool.c ---- cctools-port-c1cc758/cctools/misc/libtool.c 2017-11-10 19:22:01.790476705 -0800 -+++ cctools-port-format/cctools/misc/libtool.c 2017-11-10 19:24:48.435607249 -0800 -@@ -2545,7 +2545,7 @@ - if((r = vm_allocate(mach_task_self(), (vm_address_t *)&library, - library_size, TRUE)) != KERN_SUCCESS) - mach_fatal(r, "can't vm_allocate() buffer for output file: %s " -- "of size %llu", output, library_size); -+ "of size %lu", output, library_size); - - - /* put in the archive magic string in the buffer */ -@@ -2581,7 +2581,7 @@ - if((r = vm_allocate(mach_task_self(), (vm_address_t *)&library, - library_size, TRUE)) != KERN_SUCCESS) - mach_fatal(r, "can't vm_allocate() buffer for output file: %s of " -- "size %llu", output, library_size); -+ "size %lu", output, library_size); - - /* - * Create the output file. 
The unlink() is done to handle the problem -@@ -2635,7 +2635,7 @@ - if(cmd_flags.fat64 == FALSE && offset > UINT32_MAX) - error("file too large to create as a fat file because " - "offset field in struct fat_arch is only 32-bits and " -- "offset (%llu) to architecture %s exceeds that", -+ "offset (%lu) to architecture %s exceeds that", - offset, archs[i].arch_flag.name); - if(archs[i].arch_flag.cputype & CPU_ARCH_ABI64){ - if(cmd_flags.fat64 == TRUE) -@@ -2660,7 +2660,7 @@ - if(cmd_flags.fat64 == FALSE && archs[i].size > UINT32_MAX) - error("file too large to create as a fat file because " - "size field in struct fat_arch is only 32-bits and " -- "size (%llu) of architecture %s exceeds that", -+ "size (%lu) of architecture %s exceeds that", - archs[i].size, archs[i].arch_flag.name); - if(cmd_flags.fat64 == TRUE) - fat_arch64[i].size = archs[i].size; -@@ -3043,15 +3043,15 @@ - return; - - if(offset + size > library_size) -- fatal("internal error: output_flush(offset = %llu, size = %llu) " -- "out of range for library_size = %llu", offset, size, -+ fatal("internal error: output_flush(offset = %lu, size = %lu) " -+ "out of range for library_size = %lu", offset, size, - library_size); - - #ifdef DEBUG - if(cmd_flags.debug & (1 << 2)) - print_block_list(); - if(cmd_flags.debug & (1 << 1)) -- printf("output_flush(offset = %llu, size %llu)", offset, size); -+ printf("output_flush(offset = %lu, size %lu)", offset, size); - #endif /* DEBUG */ - - if(size == 0){ -@@ -3087,9 +3087,9 @@ - */ - if(before != NULL){ - if(before->offset + before->size > offset){ -- warning("internal error: output_flush(offset = %llu, size = " -- "%llu) overlaps with flushed block(offset = %llu, " -- "size = %llu)", offset, size, before->offset, -+ warning("internal error: output_flush(offset = %lu, size = " -+ "%lu) overlaps with flushed block(offset = %lu, " -+ "size = %lu)", offset, size, before->offset, - before->size); - printf("calling abort()\n"); - abort(); -@@ -3097,9 +3097,9 @@ - } - if(after != NULL){ - if(offset + size > after->offset){ -- warning("internal error: output_flush(offset = %llu, size = " -- "%llu) overlaps with flushed block(offset = %llu, " -- "size = %llu)", offset, size, after->offset, -+ warning("internal error: output_flush(offset = %lu, size = " -+ "%lu) overlaps with flushed block(offset = %lu, " -+ "size = %lu)", offset, size, after->offset, - after->size); - printf("calling abort()\n"); - abort(); diff --git a/nix/nixcrpkgs/macos/cctools-ld64-registers.patch b/nix/nixcrpkgs/macos/cctools-ld64-registers.patch deleted file mode 100644 index 70963e253..000000000 --- a/nix/nixcrpkgs/macos/cctools-ld64-registers.patch +++ /dev/null @@ -1,299 +0,0 @@ -diff -ur cctools-port-c1cc758/cctools/ld64/src/ld/parsers/libunwind/Registers.hpp cctools-port-patched/cctools/ld64/src/ld/parsers/libunwind/Registers.hpp ---- cctools-port-c1cc758/cctools/ld64/src/ld/parsers/libunwind/Registers.hpp 2017-10-01 13:47:04.000000000 -0700 -+++ cctools-port-patched/cctools/ld64/src/ld/parsers/libunwind/Registers.hpp 2017-10-29 10:12:23.150301208 -0700 -@@ -72,22 +72,22 @@ - const char* getRegisterName(int num); - void jumpto(); - -- uint32_t getSP() const { return fRegisters.__esp; } -- void setSP(uint32_t value) { fRegisters.__esp = value; } -- uint32_t getIP() const { return fRegisters.__eip; } -- void setIP(uint32_t value) { fRegisters.__eip = value; } -- uint32_t getEBP() const { return fRegisters.__ebp; } -- void setEBP(uint32_t value) { fRegisters.__ebp = value; } -- uint32_t getEBX() const { return 
fRegisters.__ebx; } -- void setEBX(uint32_t value) { fRegisters.__ebx = value; } -- uint32_t getECX() const { return fRegisters.__ecx; } -- void setECX(uint32_t value) { fRegisters.__ecx = value; } -- uint32_t getEDX() const { return fRegisters.__edx; } -- void setEDX(uint32_t value) { fRegisters.__edx = value; } -- uint32_t getESI() const { return fRegisters.__esi; } -- void setESI(uint32_t value) { fRegisters.__esi = value; } -- uint32_t getEDI() const { return fRegisters.__edi; } -- void setEDI(uint32_t value) { fRegisters.__edi = value; } -+ uint32_t getSP() const { return fRegisters.esp; } -+ void setSP(uint32_t value) { fRegisters.esp = value; } -+ uint32_t getIP() const { return fRegisters.eip; } -+ void setIP(uint32_t value) { fRegisters.eip = value; } -+ uint32_t getEBP() const { return fRegisters.ebp; } -+ void setEBP(uint32_t value) { fRegisters.ebp = value; } -+ uint32_t getEBX() const { return fRegisters.ebx; } -+ void setEBX(uint32_t value) { fRegisters.ebx = value; } -+ uint32_t getECX() const { return fRegisters.ecx; } -+ void setECX(uint32_t value) { fRegisters.ecx = value; } -+ uint32_t getEDX() const { return fRegisters.edx; } -+ void setEDX(uint32_t value) { fRegisters.edx = value; } -+ uint32_t getESI() const { return fRegisters.esi; } -+ void setESI(uint32_t value) { fRegisters.esi = value; } -+ uint32_t getEDI() const { return fRegisters.edi; } -+ void setEDI(uint32_t value) { fRegisters.edi = value; } - - private: - i386_thread_state_t fRegisters; -@@ -122,25 +122,25 @@ - { - switch ( regNum ) { - case UNW_REG_IP: -- return fRegisters.__eip; -+ return fRegisters.eip; - case UNW_REG_SP: -- return fRegisters.__esp; -+ return fRegisters.esp; - case UNW_X86_EAX: -- return fRegisters.__eax; -+ return fRegisters.eax; - case UNW_X86_ECX: -- return fRegisters.__ecx; -+ return fRegisters.ecx; - case UNW_X86_EDX: -- return fRegisters.__edx; -+ return fRegisters.edx; - case UNW_X86_EBX: -- return fRegisters.__ebx; -+ return fRegisters.ebx; - case UNW_X86_EBP: -- return fRegisters.__ebp; -+ return fRegisters.ebp; - case UNW_X86_ESP: -- return fRegisters.__esp; -+ return fRegisters.esp; - case UNW_X86_ESI: -- return fRegisters.__esi; -+ return fRegisters.esi; - case UNW_X86_EDI: -- return fRegisters.__edi; -+ return fRegisters.edi; - } - ABORT("unsupported x86 register"); - } -@@ -149,34 +149,34 @@ - { - switch ( regNum ) { - case UNW_REG_IP: -- fRegisters.__eip = value; -+ fRegisters.eip = value; - return; - case UNW_REG_SP: -- fRegisters.__esp = value; -+ fRegisters.esp = value; - return; - case UNW_X86_EAX: -- fRegisters.__eax = value; -+ fRegisters.eax = value; - return; - case UNW_X86_ECX: -- fRegisters.__ecx = value; -+ fRegisters.ecx = value; - return; - case UNW_X86_EDX: -- fRegisters.__edx = value; -+ fRegisters.edx = value; - return; - case UNW_X86_EBX: -- fRegisters.__ebx = value; -+ fRegisters.ebx = value; - return; - case UNW_X86_EBP: -- fRegisters.__ebp = value; -+ fRegisters.ebp = value; - return; - case UNW_X86_ESP: -- fRegisters.__esp = value; -+ fRegisters.esp = value; - return; - case UNW_X86_ESI: -- fRegisters.__esi = value; -+ fRegisters.esi = value; - return; - case UNW_X86_EDI: -- fRegisters.__edi = value; -+ fRegisters.edi = value; - return; - } - ABORT("unsupported x86 register"); -@@ -253,22 +253,22 @@ - void setVectorRegister(int num, v128 value); - const char* getRegisterName(int num); - void jumpto(); -- uint64_t getSP() const { return fRegisters.__rsp; } -- void setSP(uint64_t value) { fRegisters.__rsp = value; } -- uint64_t getIP() const { return 
fRegisters.__rip; } -- void setIP(uint64_t value) { fRegisters.__rip = value; } -- uint64_t getRBP() const { return fRegisters.__rbp; } -- void setRBP(uint64_t value) { fRegisters.__rbp = value; } -- uint64_t getRBX() const { return fRegisters.__rbx; } -- void setRBX(uint64_t value) { fRegisters.__rbx = value; } -- uint64_t getR12() const { return fRegisters.__r12; } -- void setR12(uint64_t value) { fRegisters.__r12 = value; } -- uint64_t getR13() const { return fRegisters.__r13; } -- void setR13(uint64_t value) { fRegisters.__r13 = value; } -- uint64_t getR14() const { return fRegisters.__r14; } -- void setR14(uint64_t value) { fRegisters.__r14 = value; } -- uint64_t getR15() const { return fRegisters.__r15; } -- void setR15(uint64_t value) { fRegisters.__r15 = value; } -+ uint64_t getSP() const { return fRegisters.rsp; } -+ void setSP(uint64_t value) { fRegisters.rsp = value; } -+ uint64_t getIP() const { return fRegisters.rip; } -+ void setIP(uint64_t value) { fRegisters.rip = value; } -+ uint64_t getRBP() const { return fRegisters.rbp; } -+ void setRBP(uint64_t value) { fRegisters.rbp = value; } -+ uint64_t getRBX() const { return fRegisters.rbx; } -+ void setRBX(uint64_t value) { fRegisters.rbx = value; } -+ uint64_t getR12() const { return fRegisters.r12; } -+ void setR12(uint64_t value) { fRegisters.r12 = value; } -+ uint64_t getR13() const { return fRegisters.r13; } -+ void setR13(uint64_t value) { fRegisters.r13 = value; } -+ uint64_t getR14() const { return fRegisters.r14; } -+ void setR14(uint64_t value) { fRegisters.r14 = value; } -+ uint64_t getR15() const { return fRegisters.r15; } -+ void setR15(uint64_t value) { fRegisters.r15 = value; } - private: - x86_thread_state64_t fRegisters; - }; -@@ -302,41 +302,41 @@ - { - switch ( regNum ) { - case UNW_REG_IP: -- return fRegisters.__rip; -+ return fRegisters.rip; - case UNW_REG_SP: -- return fRegisters.__rsp; -+ return fRegisters.rsp; - case UNW_X86_64_RAX: -- return fRegisters.__rax; -+ return fRegisters.rax; - case UNW_X86_64_RDX: -- return fRegisters.__rdx; -+ return fRegisters.rdx; - case UNW_X86_64_RCX: -- return fRegisters.__rcx; -+ return fRegisters.rcx; - case UNW_X86_64_RBX: -- return fRegisters.__rbx; -+ return fRegisters.rbx; - case UNW_X86_64_RSI: -- return fRegisters.__rsi; -+ return fRegisters.rsi; - case UNW_X86_64_RDI: -- return fRegisters.__rdi; -+ return fRegisters.rdi; - case UNW_X86_64_RBP: -- return fRegisters.__rbp; -+ return fRegisters.rbp; - case UNW_X86_64_RSP: -- return fRegisters.__rsp; -+ return fRegisters.rsp; - case UNW_X86_64_R8: -- return fRegisters.__r8; -+ return fRegisters.r8; - case UNW_X86_64_R9: -- return fRegisters.__r9; -+ return fRegisters.r9; - case UNW_X86_64_R10: -- return fRegisters.__r10; -+ return fRegisters.r10; - case UNW_X86_64_R11: -- return fRegisters.__r11; -+ return fRegisters.r11; - case UNW_X86_64_R12: -- return fRegisters.__r12; -+ return fRegisters.r12; - case UNW_X86_64_R13: -- return fRegisters.__r13; -+ return fRegisters.r13; - case UNW_X86_64_R14: -- return fRegisters.__r14; -+ return fRegisters.r14; - case UNW_X86_64_R15: -- return fRegisters.__r15; -+ return fRegisters.r15; - } - ABORT("unsupported x86_64 register"); - } -@@ -345,58 +345,58 @@ - { - switch ( regNum ) { - case UNW_REG_IP: -- fRegisters.__rip = value; -+ fRegisters.rip = value; - return; - case UNW_REG_SP: -- fRegisters.__rsp = value; -+ fRegisters.rsp = value; - return; - case UNW_X86_64_RAX: -- fRegisters.__rax = value; -+ fRegisters.rax = value; - return; - case UNW_X86_64_RDX: -- fRegisters.__rdx = 
value; -+ fRegisters.rdx = value; - return; - case UNW_X86_64_RCX: -- fRegisters.__rcx = value; -+ fRegisters.rcx = value; - return; - case UNW_X86_64_RBX: -- fRegisters.__rbx = value; -+ fRegisters.rbx = value; - return; - case UNW_X86_64_RSI: -- fRegisters.__rsi = value; -+ fRegisters.rsi = value; - return; - case UNW_X86_64_RDI: -- fRegisters.__rdi = value; -+ fRegisters.rdi = value; - return; - case UNW_X86_64_RBP: -- fRegisters.__rbp = value; -+ fRegisters.rbp = value; - return; - case UNW_X86_64_RSP: -- fRegisters.__rsp = value; -+ fRegisters.rsp = value; - return; - case UNW_X86_64_R8: -- fRegisters.__r8 = value; -+ fRegisters.r8 = value; - return; - case UNW_X86_64_R9: -- fRegisters.__r9 = value; -+ fRegisters.r9 = value; - return; - case UNW_X86_64_R10: -- fRegisters.__r10 = value; -+ fRegisters.r10 = value; - return; - case UNW_X86_64_R11: -- fRegisters.__r11 = value; -+ fRegisters.r11 = value; - return; - case UNW_X86_64_R12: -- fRegisters.__r12 = value; -+ fRegisters.r12 = value; - return; - case UNW_X86_64_R13: -- fRegisters.__r13 = value; -+ fRegisters.r13 = value; - return; - case UNW_X86_64_R14: -- fRegisters.__r14 = value; -+ fRegisters.r14 = value; - return; - case UNW_X86_64_R15: -- fRegisters.__r15 = value; -+ fRegisters.r15 = value; - return; - } - ABORT("unsupported x86_64 register"); diff --git a/nix/nixcrpkgs/macos/cctools-libstuff-no-error.patch b/nix/nixcrpkgs/macos/cctools-libstuff-no-error.patch deleted file mode 100644 index 6be415388..000000000 --- a/nix/nixcrpkgs/macos/cctools-libstuff-no-error.patch +++ /dev/null @@ -1,93 +0,0 @@ -diff -ur cctools-port-c1cc758/cctools/include/stuff/errors.h cctools-port-libstuff-no-error/cctools/include/stuff/errors.h ---- cctools-port-c1cc758/cctools/include/stuff/errors.h 2017-10-01 13:47:04.000000000 -0700 -+++ cctools-port-libstuff-no-error/cctools/include/stuff/errors.h 2017-11-10 21:52:54.172522281 -0800 -@@ -40,7 +40,7 @@ - __attribute__ ((format (printf, 1, 2))) - #endif - __attribute__((visibility("hidden"))); --extern void error( -+extern void errorf( - const char *format, ...) - #ifdef __GNUC__ - __attribute__ ((format (printf, 1, 2))) -diff -ur cctools-port-c1cc758/cctools/libstuff/errors.c cctools-port-libstuff-no-error/cctools/libstuff/errors.c ---- cctools-port-c1cc758/cctools/libstuff/errors.c 2017-10-01 13:47:04.000000000 -0700 -+++ cctools-port-libstuff-no-error/cctools/libstuff/errors.c 2017-11-10 21:52:42.795730237 -0800 -@@ -57,7 +57,7 @@ - */ - __private_extern__ - void --error( -+errorf( - const char *format, - ...) 
- { -diff -ur cctools-port-c1cc758/cctools/libstuff/ofile.c cctools-port-libstuff-no-error/cctools/libstuff/ofile.c ---- cctools-port-c1cc758/cctools/libstuff/ofile.c 2017-10-01 13:47:04.000000000 -0700 -+++ cctools-port-libstuff-no-error/cctools/libstuff/ofile.c 2017-11-10 21:54:20.156803208 -0800 -@@ -115,6 +115,8 @@ - }; - #endif /* !defined(OTOOL) */ - -+#define error errorf -+ - static enum bool ofile_specific_arch( - struct ofile *ofile, - uint32_t narch); -diff -ur cctools-port-c1cc758/cctools/libstuff/swap_headers.c cctools-port-libstuff-no-error/cctools/libstuff/swap_headers.c ---- cctools-port-c1cc758/cctools/libstuff/swap_headers.c 2017-10-01 13:47:04.000000000 -0700 -+++ cctools-port-libstuff-no-error/cctools/libstuff/swap_headers.c 2017-11-10 21:54:49.873797374 -0800 -@@ -50,6 +50,8 @@ - #include "stuff/bytesex.h" - #include "stuff/errors.h" - -+#define error errorf -+ - /* - * swap_object_headers() swaps the object file headers from the host byte sex - * into the non-host byte sex. It returns TRUE if it can and did swap the -diff -ur cctools-port-c1cc758/cctools/libstuff/SymLoc.c cctools-port-libstuff-no-error/cctools/libstuff/SymLoc.c ---- cctools-port-c1cc758/cctools/libstuff/SymLoc.c 2017-10-01 13:47:04.000000000 -0700 -+++ cctools-port-libstuff-no-error/cctools/libstuff/SymLoc.c 2017-11-10 21:53:06.199321490 -0800 -@@ -118,7 +118,7 @@ - if(fclose(file) != 0) - system_error("fclose() failed"); - if (!*viewPath) { -- error("symLocForDylib(): Can't locate view path for release %s", -+ errorf("symLocForDylib(): Can't locate view path for release %s", - releaseName); - return NULL; - } -@@ -252,7 +252,7 @@ - // process return value - if (!c) { - if(no_error_if_missing == FALSE) -- error("Can't find project that builds %s", installName); -+ errorf("Can't find project that builds %s", installName); - return NULL; - } else { - *found_project = TRUE; -diff -ur cctools-port-c1cc758/cctools/libstuff/version_number.c cctools-port-libstuff-no-error/cctools/libstuff/version_number.c ---- cctools-port-c1cc758/cctools/libstuff/version_number.c 2017-10-01 13:47:04.000000000 -0700 -+++ cctools-port-libstuff-no-error/cctools/libstuff/version_number.c 2017-11-10 21:55:18.674114769 -0800 -@@ -27,6 +27,8 @@ - #include "stuff/allocate.h" - #include "stuff/errors.h" - -+#define error errorf -+ - /* - * get_version_number() converts an ascii version number string of the form: - * X[.Y[.Z]] -diff -ur cctools-port-c1cc758/cctools/libstuff/writeout.c cctools-port-libstuff-no-error/cctools/libstuff/writeout.c ---- cctools-port-c1cc758/cctools/libstuff/writeout.c 2017-10-01 13:47:04.000000000 -0700 -+++ cctools-port-libstuff-no-error/cctools/libstuff/writeout.c 2017-11-10 21:55:43.537722114 -0800 -@@ -37,6 +37,8 @@ - #include "stuff/lto.h" - #endif /* LTO_SUPPORT */ - -+#define error errorf -+ - static void copy_new_symbol_info( - char *p, - uint32_t *size, diff --git a/nix/nixcrpkgs/macos/cctools-private-extern.patch b/nix/nixcrpkgs/macos/cctools-private-extern.patch deleted file mode 100644 index ce0f099fd..000000000 --- a/nix/nixcrpkgs/macos/cctools-private-extern.patch +++ /dev/null @@ -1,271 +0,0 @@ -diff -ur cctools-port-c1cc758/cctools/include/foreign/extern.h cctools-port-private-extern/cctools/include/foreign/extern.h ---- cctools-port-c1cc758/cctools/include/foreign/extern.h 2017-10-01 13:47:04.000000000 -0700 -+++ cctools-port-private-extern/cctools/include/foreign/extern.h 2017-11-10 18:32:37.035890924 -0800 -@@ -1 +1,2 @@ -+#pragma once - #define __private_extern__ 
__attribute__((visibility("hidden"))) -diff -ur cctools-port-c1cc758/cctools/include/mach-o/dyld.h cctools-port-private-extern/cctools/include/mach-o/dyld.h ---- cctools-port-c1cc758/cctools/include/mach-o/dyld.h 2017-10-01 13:47:04.000000000 -0700 -+++ cctools-port-private-extern/cctools/include/mach-o/dyld.h 2017-11-10 18:32:37.035890924 -0800 -@@ -27,9 +27,7 @@ - extern "C" { - #endif /* __cplusplus */ - --#if defined(__MWERKS__) && !defined(__private_extern__) --#define __private_extern__ __declspec(private_extern) --#endif -+#include - - #include - #include -diff -ur cctools-port-c1cc758/cctools/include/stuff/allocate.h cctools-port-private-extern/cctools/include/stuff/allocate.h ---- cctools-port-c1cc758/cctools/include/stuff/allocate.h 2017-10-01 13:47:04.000000000 -0700 -+++ cctools-port-private-extern/cctools/include/stuff/allocate.h 2017-11-10 18:33:52.006780029 -0800 -@@ -20,9 +20,8 @@ - * - * @APPLE_LICENSE_HEADER_END@ - */ --#if defined(__MWERKS__) && !defined(__private_extern__) --#define __private_extern__ __declspec(private_extern) --#endif -+ -+#include - - /* defined in allocate.c */ - -diff -ur cctools-port-c1cc758/cctools/include/stuff/arch.h cctools-port-private-extern/cctools/include/stuff/arch.h ---- cctools-port-c1cc758/cctools/include/stuff/arch.h 2017-10-01 13:47:04.000000000 -0700 -+++ cctools-port-private-extern/cctools/include/stuff/arch.h 2017-11-10 18:34:36.487305108 -0800 -@@ -23,9 +23,8 @@ - #ifndef _STUFF_ARCH_H_ - #define _STUFF_ARCH_H_ - --#if defined(__MWERKS__) && !defined(__private_extern__) --#define __private_extern__ __declspec(private_extern) --#endif -+#include -+ - /* - * This file contains the current known set of flags and constants for the - * known architectures. -diff -ur cctools-port-c1cc758/cctools/include/stuff/best_arch.h cctools-port-private-extern/cctools/include/stuff/best_arch.h ---- cctools-port-c1cc758/cctools/include/stuff/best_arch.h 2017-10-01 13:47:04.000000000 -0700 -+++ cctools-port-private-extern/cctools/include/stuff/best_arch.h 2017-11-10 18:34:48.764116432 -0800 -@@ -20,9 +20,8 @@ - * - * @APPLE_LICENSE_HEADER_END@ - */ --#if defined(__MWERKS__) && !defined(__private_extern__) --#define __private_extern__ __declspec(private_extern) --#endif -+ -+#include - - #include - #include -diff -ur cctools-port-c1cc758/cctools/include/stuff/breakout.h cctools-port-private-extern/cctools/include/stuff/breakout.h ---- cctools-port-c1cc758/cctools/include/stuff/breakout.h 2017-10-01 13:47:04.000000000 -0700 -+++ cctools-port-private-extern/cctools/include/stuff/breakout.h 2017-11-10 18:35:04.334299743 -0800 -@@ -20,9 +20,8 @@ - * - * @APPLE_LICENSE_HEADER_END@ - */ --#if defined(__MWERKS__) && !defined(__private_extern__) --#define __private_extern__ __declspec(private_extern) --#endif -+ -+#include - - #import "stuff/ofile.h" - -diff -ur cctools-port-c1cc758/cctools/include/stuff/bytesex.h cctools-port-private-extern/cctools/include/stuff/bytesex.h ---- cctools-port-c1cc758/cctools/include/stuff/bytesex.h 2017-10-01 13:47:04.000000000 -0700 -+++ cctools-port-private-extern/cctools/include/stuff/bytesex.h 2017-11-10 18:35:12.637730768 -0800 -@@ -29,9 +29,7 @@ - #ifndef _STUFF_BYTESEX_H_ - #define _STUFF_BYTESEX_H_ - --#if defined(__MWERKS__) && !defined(__private_extern__) --#define __private_extern__ __declspec(private_extern) --#endif -+#include - - #include - #include -diff -ur cctools-port-c1cc758/cctools/include/stuff/execute.h cctools-port-private-extern/cctools/include/stuff/execute.h ---- 
cctools-port-c1cc758/cctools/include/stuff/execute.h 2017-10-01 13:47:04.000000000 -0700 -+++ cctools-port-private-extern/cctools/include/stuff/execute.h 2017-11-10 18:35:34.417986815 -0800 -@@ -20,9 +20,8 @@ - * - * @APPLE_LICENSE_HEADER_END@ - */ --#if defined(__MWERKS__) && !defined(__private_extern__) --#define __private_extern__ __declspec(private_extern) --#endif -+ -+#include - - /* - * execute() does an execvp using the argv passed to it. If the parameter -diff -ur cctools-port-c1cc758/cctools/include/stuff/guess_short_name.h cctools-port-private-extern/cctools/include/stuff/guess_short_name.h ---- cctools-port-c1cc758/cctools/include/stuff/guess_short_name.h 2017-10-01 13:47:04.000000000 -0700 -+++ cctools-port-private-extern/cctools/include/stuff/guess_short_name.h 2017-11-10 18:40:11.801171715 -0800 -@@ -22,6 +22,8 @@ - */ - #include "stuff/bool.h" - -+#include -+ - __private_extern__ char * guess_short_name( - char *name, - enum bool *is_framework, -diff -ur cctools-port-c1cc758/cctools/include/stuff/hash_string.h cctools-port-private-extern/cctools/include/stuff/hash_string.h ---- cctools-port-c1cc758/cctools/include/stuff/hash_string.h 2017-10-01 13:47:04.000000000 -0700 -+++ cctools-port-private-extern/cctools/include/stuff/hash_string.h 2017-11-10 18:35:43.698095826 -0800 -@@ -20,9 +20,8 @@ - * - * @APPLE_LICENSE_HEADER_END@ - */ --#if defined(__MWERKS__) && !defined(__private_extern__) --#define __private_extern__ __declspec(private_extern) --#endif -+ -+#include - - __private_extern__ int32_t hash_string( - char *key); -diff -ur cctools-port-c1cc758/cctools/include/stuff/hppa.h cctools-port-private-extern/cctools/include/stuff/hppa.h ---- cctools-port-c1cc758/cctools/include/stuff/hppa.h 2017-10-01 13:47:04.000000000 -0700 -+++ cctools-port-private-extern/cctools/include/stuff/hppa.h 2017-11-10 18:36:01.414970472 -0800 -@@ -20,9 +20,8 @@ - * - * @APPLE_LICENSE_HEADER_END@ - */ --#if defined(__MWERKS__) && !defined(__private_extern__) --#define __private_extern__ __declspec(private_extern) --#endif -+ -+#include - - __private_extern__ void calc_hppa_HILO( - uint32_t base, -diff -ur cctools-port-c1cc758/cctools/include/stuff/lto.h cctools-port-private-extern/cctools/include/stuff/lto.h ---- cctools-port-c1cc758/cctools/include/stuff/lto.h 2017-10-01 13:47:04.000000000 -0700 -+++ cctools-port-private-extern/cctools/include/stuff/lto.h 2017-11-10 18:40:27.811342692 -0800 -@@ -3,6 +3,8 @@ - - #include "stuff/arch.h" - -+#include -+ - #ifdef LTO_SUPPORT - - __private_extern__ int is_llvm_bitcode_from_memory( -diff -ur cctools-port-c1cc758/cctools/include/stuff/macosx_deployment_target.h cctools-port-private-extern/cctools/include/stuff/macosx_deployment_target.h ---- cctools-port-c1cc758/cctools/include/stuff/macosx_deployment_target.h 2017-10-01 13:47:04.000000000 -0700 -+++ cctools-port-private-extern/cctools/include/stuff/macosx_deployment_target.h 2017-11-10 18:39:47.814249693 -0800 -@@ -22,6 +22,8 @@ - */ - #include - -+#include -+ - struct macosx_deployment_target { - uint32_t major; /* major version */ - uint32_t minor; /* minor version (if any or zero) */ -diff -ur cctools-port-c1cc758/cctools/include/stuff/ofile.h cctools-port-private-extern/cctools/include/stuff/ofile.h ---- cctools-port-c1cc758/cctools/include/stuff/ofile.h 2017-10-01 13:47:04.000000000 -0700 -+++ cctools-port-private-extern/cctools/include/stuff/ofile.h 2017-11-10 18:36:14.268454589 -0800 -@@ -24,9 +24,7 @@ - #ifndef _STUFF_OFILE_H_ - #define _STUFF_OFILE_H_ - --#if defined(__MWERKS__) && 
!defined(__private_extern__) --#define __private_extern__ __declspec(private_extern) --#endif -+#include - - #import - #ifndef AR_EFMT1 -diff -ur cctools-port-c1cc758/cctools/include/stuff/print.h cctools-port-private-extern/cctools/include/stuff/print.h ---- cctools-port-c1cc758/cctools/include/stuff/print.h 2017-10-01 13:47:04.000000000 -0700 -+++ cctools-port-private-extern/cctools/include/stuff/print.h 2017-11-10 18:36:24.805244801 -0800 -@@ -20,9 +20,8 @@ - * - * @APPLE_LICENSE_HEADER_END@ - */ --#if defined(__MWERKS__) && !defined(__private_extern__) --#define __private_extern__ __declspec(private_extern) --#endif -+ -+#include - - #import - -diff -ur cctools-port-c1cc758/cctools/include/stuff/reloc.h cctools-port-private-extern/cctools/include/stuff/reloc.h ---- cctools-port-c1cc758/cctools/include/stuff/reloc.h 2017-10-01 13:47:04.000000000 -0700 -+++ cctools-port-private-extern/cctools/include/stuff/reloc.h 2017-11-10 18:36:31.878661041 -0800 -@@ -20,9 +20,8 @@ - * - * @APPLE_LICENSE_HEADER_END@ - */ --#if defined(__MWERKS__) && !defined(__private_extern__) --#define __private_extern__ __declspec(private_extern) --#endif -+ -+#include - - #import - #import "stuff/bool.h" -diff -ur cctools-port-c1cc758/cctools/include/stuff/rnd.h cctools-port-private-extern/cctools/include/stuff/rnd.h ---- cctools-port-c1cc758/cctools/include/stuff/rnd.h 2017-10-01 13:47:04.000000000 -0700 -+++ cctools-port-private-extern/cctools/include/stuff/rnd.h 2017-11-10 18:36:39.068745293 -0800 -@@ -27,9 +27,7 @@ - */ - #include - --#if defined(__MWERKS__) && !defined(__private_extern__) --#define __private_extern__ __declspec(private_extern) --#endif -+#include - - /* - * rnd() rounds v to a multiple of r. -diff -ur cctools-port-c1cc758/cctools/include/stuff/symbol_list.h cctools-port-private-extern/cctools/include/stuff/symbol_list.h ---- cctools-port-c1cc758/cctools/include/stuff/symbol_list.h 2017-10-01 13:47:04.000000000 -0700 -+++ cctools-port-private-extern/cctools/include/stuff/symbol_list.h 2017-11-10 18:37:11.605792928 -0800 -@@ -23,6 +23,8 @@ - #include - #include - -+#include -+ - /* - * Data structures to perform selective stripping of symbol table entries. 
- */ -diff -ur cctools-port-c1cc758/cctools/include/stuff/unix_standard_mode.h cctools-port-private-extern/cctools/include/stuff/unix_standard_mode.h ---- cctools-port-c1cc758/cctools/include/stuff/unix_standard_mode.h 2017-10-01 13:47:04.000000000 -0700 -+++ cctools-port-private-extern/cctools/include/stuff/unix_standard_mode.h 2017-11-10 18:37:42.596155389 -0800 -@@ -22,5 +22,7 @@ - */ - #include "stuff/bool.h" - -+#include -+ - __private_extern__ enum bool get_unix_standard_mode( - void); -diff -ur cctools-port-c1cc758/cctools/include/stuff/vm_flush_cache.h cctools-port-private-extern/cctools/include/stuff/vm_flush_cache.h ---- cctools-port-c1cc758/cctools/include/stuff/vm_flush_cache.h 2017-10-01 13:47:04.000000000 -0700 -+++ cctools-port-private-extern/cctools/include/stuff/vm_flush_cache.h 2017-11-10 18:37:59.973025145 -0800 -@@ -20,9 +20,8 @@ - * - * @APPLE_LICENSE_HEADER_END@ - */ --#if defined(__MWERKS__) && !defined(__private_extern__) --#define __private_extern__ __declspec(private_extern) --#endif -+ -+#include - - #import - __private_extern__ kern_return_t vm_flush_cache( diff --git a/nix/nixcrpkgs/macos/clang_builder.sh b/nix/nixcrpkgs/macos/clang_builder.sh deleted file mode 100644 index 2afa96bb9..000000000 --- a/nix/nixcrpkgs/macos/clang_builder.sh +++ /dev/null @@ -1,30 +0,0 @@ -source $setup - -tar -xf $llvm_src -mv llvm-* llvm - -tar -xf $lld_src -mv lld-* lld -mv lld llvm/tools/ - -tar -xf $src -mv cfe-* clang -cd clang -for patch in $patches; do - echo applying patch $patch - patch -p1 -i $patch -done -cd .. -mv clang llvm/projects/ - -mkdir build -cd build - -cmake ../llvm -GNinja -DDEFAULT_SYSROOT=$out -DCMAKE_INSTALL_PREFIX=$out $cmake_flags - -ninja - -ninja install - -# clang-tblgen is supposed to be an internal tool, but tapi needs it -cp bin/clang-tblgen $out/bin diff --git a/nix/nixcrpkgs/macos/clang_megapatch.patch b/nix/nixcrpkgs/macos/clang_megapatch.patch deleted file mode 100644 index b5941e2c3..000000000 --- a/nix/nixcrpkgs/macos/clang_megapatch.patch +++ /dev/null @@ -1,37 +0,0 @@ -diff -ur cfe-5.0.0.src.orig/lib/Driver/ToolChains/Gnu.cpp cfe-5.0.0.src/lib/Driver/ToolChains/Gnu.cpp ---- cfe-5.0.0.src.orig/lib/Driver/ToolChains/Gnu.cpp 2017-09-13 07:15:52.419093088 -0700 -+++ cfe-5.0.0.src/lib/Driver/ToolChains/Gnu.cpp 2017-09-13 07:21:58.892639000 -0700 -@@ -493,10 +493,6 @@ - CmdArgs.push_back("-export-dynamic"); - - if (!Args.hasArg(options::OPT_shared)) { -- const std::string Loader = -- D.DyldPrefix + ToolChain.getDynamicLinker(Args); -- CmdArgs.push_back("-dynamic-linker"); -- CmdArgs.push_back(Args.MakeArgString(Loader)); - } - } - -diff -ur cfe-5.0.0.src.orig/lib/Driver/ToolChains/Linux.cpp cfe-5.0.0.src/lib/Driver/ToolChains/Linux.cpp ---- cfe-5.0.0.src.orig/lib/Driver/ToolChains/Linux.cpp 2017-09-13 07:15:52.419093088 -0700 -+++ cfe-5.0.0.src/lib/Driver/ToolChains/Linux.cpp 2017-09-13 07:17:58.530311694 -0700 -@@ -195,18 +195,7 @@ - llvm::Triple::ArchType Arch = Triple.getArch(); - std::string SysRoot = computeSysRoot(); - -- // Cross-compiling binutils and GCC installations (vanilla and openSUSE at -- // least) put various tools in a triple-prefixed directory off of the parent -- // of the GCC installation. We use the GCC triple here to ensure that we end -- // up with tools that support the same amount of cross compiling as the -- // detected GCC installation. For example, if we find a GCC installation -- // targeting x86_64, but it is a bi-arch GCC installation, it can also be -- // used to target i386. 
-- // FIXME: This seems unlikely to be Linux-specific. -- ToolChain::path_list &PPaths = getProgramPaths(); -- PPaths.push_back(Twine(GCCInstallation.getParentLibPath() + "/../" + -- GCCInstallation.getTriple().str() + "/bin") -- .str()); -+ // Removed some code here that found programs like ld in "/..//bin" - - Distro Distro(D.getVFS()); - diff --git a/nix/nixcrpkgs/macos/default.nix b/nix/nixcrpkgs/macos/default.nix deleted file mode 100644 index c82f42d5b..000000000 --- a/nix/nixcrpkgs/macos/default.nix +++ /dev/null @@ -1,192 +0,0 @@ -# Note: To reduce clutter here, it might be nice to move clang to -# `native`, and also make `native` provide a function for building -# binutils. So clang and binutils recipes could be shared by the -# different platforms we targets. - -{ osx_sdk, native }: -let - nixpkgs = native.nixpkgs; - - arch = "x86_64"; - - # was darwin15, changed to darwin so that lld guesses flavor=Darwin correctly - darwin_name = "darwin15"; - - macos_version_min = "10.11"; - - host = "${arch}-apple-${darwin_name}"; - - os = "macos"; - - compiler = "clang"; - - exe_suffix = ""; - - clang = native.make_derivation rec { - name = "clang"; - - version = "5.0.0"; - - src = nixpkgs.fetchurl { - url = "https://llvm.org/releases/${version}/cfe-${version}.src.tar.xz"; - sha256 = "0w09s8fn3lkn6i04nj0cisgp821r815fk5b5fjn97xrd371277q1"; - }; - - llvm_src = nixpkgs.fetchurl { - url = "https://llvm.org/releases/${version}/llvm-${version}.src.tar.xz"; - sha256 = "1nin64vz21hyng6jr19knxipvggaqlkl2l9jpd5czbc4c2pcnpg3"; - }; - - # Note: We aren't actually using lld for anything yet. - lld_src = nixpkgs.fetchurl { - url = "http://releases.llvm.org/${version}/lld-${version}.src.tar.xz"; - sha256 = "15rqsmfw0jlsri7hszbs8l0j7v1030cy9xvvdb245397llh7k6ir"; - }; - - patches = [ ./clang_megapatch.patch ]; - - builder = ./clang_builder.sh; - - native_inputs = [ nixpkgs.python2 ]; - - cmake_flags = - "-DCMAKE_BUILD_TYPE=Release " + - # "-DCMAKE_BUILD_TYPE=Debug " + - "-DLLVM_TARGETS_TO_BUILD=X86\;ARM " + - "-DLLVM_ENABLE_RTTI=ON " + # ld64 uses dynamic_cast, requiring rtti - "-DLLVM_ENABLE_ASSERTIONS=OFF"; - }; - - # Note: There is an alternative version we could use, but it - # has a copy of LLVM in it: https://github.com/tpoechtrager/apple-libtapi - tapi = native.make_derivation rec { - name = "tapi"; - version = "${version0}.${version1}.${version2}"; - version0 = "2"; - version1 = "0"; - version2 = "0"; - src = nixpkgs.fetchurl { - url = "https://github.com/DavidEGrayson/tapi/archive/f98d0c3.tar.gz"; - sha256 = "0jibz0fsyh47q8y3w6f0qspjh6fhs164rkhjg7x6k7qhlawcdy6g"; - }; - builder = ./tapi_builder.sh; - native_inputs = [ clang ]; - inherit clang; - }; - - cctools_commit = "c1cc758"; - cctools_apple_version = "274.2"; # from README.md - cctools_port_src = nixpkgs.fetchurl { - url = "https://github.com/tpoechtrager/cctools-port/archive/${cctools_commit}.tar.gz"; - sha256= "11bfcndzbdmjp2piabyqs34da617fh5fhirqvb9w87anfan15ffa"; - }; - - ld = native.make_derivation rec { - name = "cctools-ld64"; - apple_version = cctools_apple_version; - src = cctools_port_src; - patches = [ - ./cctools-format.patch - ./cctools-ld64-registers.patch - ]; - builder = ./ld_builder.sh; - native_inputs = [ tapi ]; - inherit host; - }; - - ranlib = native.make_derivation rec { - name = "cctools-ranlib"; - apple_version = cctools_apple_version; - src = ld.src; - builder = ./ranlib_builder.sh; - patches = [ - ./cctools-format.patch - ./cctools-bytesex.patch - ]; - inherit host; - }; - - ar = native.make_derivation rec { - name = 
"cctools-ar"; - apple_version = cctools_apple_version; - src = cctools_port_src; - builder = ./ar_builder.sh; - patches = [ - ./cctools-format.patch - ./cctools-libstuff-no-error.patch - ]; - inherit host ranlib; - }; - - strip = native.make_derivation rec { - name = "cctools-strip"; - apple_version = cctools_apple_version; - src = cctools_port_src; - builder = ./strip_builder.sh; - patches = [ - ./cctools-format.patch - ]; - inherit host; - }; - - # TODO: add instructions for building the SDK tarball, probably want a copy of - # the script from osxcross. - sdk = native.make_derivation rec { - name = "macos-sdk"; - builder = ./sdk_builder.sh; - src = osx_sdk; - }; - - toolchain = native.make_derivation rec { - name = "macos-toolchain"; - builder = ./toolchain_builder.sh; - src_file = ./wrapper.cpp; - inherit host clang ld ranlib ar strip sdk; - - CXXFLAGS = - "-std=c++11 " + - "-Wall " + - "-I. " + - "-O2 -g " + - "-DWRAPPER_OS_VERSION_MIN=\\\"${macos_version_min}\\\" " + - "-DWRAPPER_HOST=\\\"${host}\\\" " + - "-DWRAPPER_ARCH=\\\"${arch}\\\" " + - "-DWRAPPER_SDK_PATH=\\\"${sdk}\\\" " + - "-DWRAPPER_LINKER_VERSION=\\\"${ld.apple_version}\\\""; - }; - - cmake_toolchain = import ../cmake_toolchain { - cmake_system_name = "Darwin"; - inherit nixpkgs host; - }; - - crossenv = { - is_cross = true; - - # Build tools available on the PATH for every derivation. - default_native_inputs = native.default_native_inputs - ++ [ clang toolchain native.wrappers ]; - - # Target info environment variables. - inherit host arch os compiler exe_suffix macos_version_min; - - # CMake toolchain file. - inherit cmake_toolchain; - - # A wide variety of programs and build tools. - inherit nixpkgs; - - # Some native build tools made by nixcrpkgs. - inherit native; - - # License information that should be shipped with any software - # compiled by this environment. - global_license_set = { }; - - # Make it easy to build or refer to the build tools. - inherit clang tapi ld ranlib ar sdk toolchain strip; - - make_derivation = import ../make_derivation.nix crossenv; - }; -in - crossenv diff --git a/nix/nixcrpkgs/macos/gen_sdk_package.sh b/nix/nixcrpkgs/macos/gen_sdk_package.sh deleted file mode 100755 index 843171ba6..000000000 --- a/nix/nixcrpkgs/macos/gen_sdk_package.sh +++ /dev/null @@ -1,164 +0,0 @@ -#!/usr/bin/env bash -# -# Package the OS X SDKs into a tar file to be used by `build.sh`. -# - -# This file comes from the osxcross project and is licensed under the GNU GPLv2. -# For more information, see the `COPYING` file from: -# https://github.com/tpoechtrager/osxcross/tree/1a1733a773fe26e7b6c93b16fbf9341f22fac831 - -export LC_ALL=C - -function set_xcode_dir() -{ - local tmp=$(ls $1 2>/dev/null | grep "^Xcode.*.app" | grep -v "beta" | head -n1) - - if [ -z "$tmp" ]; then - tmp=$(ls $1 2>/dev/null | grep "^Xcode.*.app" | head -n1) - fi - - if [ -n "$tmp" ]; then - XCODEDIR="$1/$tmp" - fi -} - -if [ $(uname -s) != "Darwin" ]; then - if [ -z "$XCODEDIR" ]; then - echo "This script must be run on OS X" 1>&2 - echo "... Or with XCODEDIR=... on Linux" 1>&2 - exit 1 - else - case $XCODEDIR in - /*) ;; - *) XCODEDIR="$PWD/$XCODEDIR" ;; - esac - set_xcode_dir $XCODEDIR - fi -else - set_xcode_dir $(echo /Volumes/Xcode* | tr ' ' '\n' | grep -v "beta" | head -n1) - - if [ -z "$XCODEDIR" ]; then - set_xcode_dir /Applications - - if [ -z "$XCODEDIR" ]; then - set_xcode_dir $(echo /Volumes/Xcode* | tr ' ' '\n' | head -n1) - - if [ -z "$XCODEDIR" ]; then - echo "please mount Xcode.dmg" 1>&2 - exit 1 - fi - fi - fi -fi - -if [ ! 
-d $XCODEDIR ]; then - echo "cannot find Xcode (XCODEDIR=$XCODEDIR)" 1>&2 - exit 1 -fi - -echo -e "found Xcode: $XCODEDIR" - -WDIR=$(pwd) - -which gnutar &>/dev/null - -if [ $? -eq 0 ]; then - TAR=gnutar -else - TAR=tar -fi - -which xz &>/dev/null - -if [ $? -eq 0 ]; then - COMPRESSOR=xz - PKGEXT="tar.xz" -else - COMPRESSOR=bzip2 - PKGEXT="tar.bz2" -fi - -set -e - -pushd $XCODEDIR &>/dev/null - -if [ -d "Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs" ]; then - pushd "Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs" &>/dev/null -else - if [ -d "../Packages" ]; then - pushd "../Packages" &>/dev/null - elif [ -d "Packages" ]; then - pushd "Packages" &>/dev/null - else - if [ $? -ne 0 ]; then - echo "Xcode (or this script) is out of date" 1>&2 - echo "trying some magic to find the SDKs anyway ..." 1>&2 - - SDKDIR=$(find . -name SDKs -type d | grep MacOSX | head -n1) - - if [ -z "$SDKDIR" ]; then - echo "cannot find SDKs!" 1>&2 - exit 1 - fi - - pushd $SDKDIR &>/dev/null - fi - fi -fi - -SDKS=$(ls | grep "^MacOSX10.*" | grep -v "Patch") - -if [ -z "$SDKS" ]; then - echo "No SDK found" 1>&2 - exit 1 -fi - -# Xcode 5 -LIBCXXDIR1="Contents/Developer/Toolchains/XcodeDefault.xctoolchain/usr/lib/c++/v1" - -# Xcode 6 -LIBCXXDIR2="Contents/Developer/Toolchains/XcodeDefault.xctoolchain/usr/include/c++/v1" - -# Manual directory -MANDIR="Contents/Developer/Toolchains/XcodeDefault.xctoolchain/usr/share/man" - -for SDK in $SDKS; do - echo -n "packaging $(echo "$SDK" | sed -E "s/(.sdk|.pkg)//g") SDK " - echo "(this may take several minutes) ..." - - if [[ $SDK == *.pkg ]]; then - cp $SDK $WDIR - continue - fi - - TMP=$(mktemp -d /tmp/XXXXXXXXXXX) - cp -r $SDK $TMP &>/dev/null || true - - pushd $XCODEDIR &>/dev/null - - # libc++ headers for C++11/C++14 - if [ -d $LIBCXXDIR1 ]; then - cp -rf $LIBCXXDIR1 "$TMP/$SDK/usr/include/c++" - elif [ -d $LIBCXXDIR2 ]; then - cp -rf $LIBCXXDIR2 "$TMP/$SDK/usr/include/c++" - fi - - if [ -d $MANDIR ]; then - mkdir -p $TMP/$SDK/usr/share/man - cp -rf $MANDIR/* $TMP/$SDK/usr/share/man - fi - - popd &>/dev/null - - pushd $TMP &>/dev/null - $TAR -cf - * | $COMPRESSOR -9 -c - > "$WDIR/$SDK.$PKGEXT" - popd &>/dev/null - - rm -rf $TMP -done - -popd &>/dev/null -popd &>/dev/null - -echo "" -ls -lh | grep MacOSX diff --git a/nix/nixcrpkgs/macos/ld_builder.sh b/nix/nixcrpkgs/macos/ld_builder.sh deleted file mode 100644 index 29453d31c..000000000 --- a/nix/nixcrpkgs/macos/ld_builder.sh +++ /dev/null @@ -1,45 +0,0 @@ -source $setup - -tar -xf $src -mv cctools-port-* cctools-port - -cd cctools-port - -for patch in $patches; do - echo applying patch $patch - patch -p1 -i $patch -done - -# Similar to but not the same as the other _structs.h. -rm cctools/include/foreign/mach/i386/_structs.h - -cd .. 
- -mv cctools-port/cctools/ld64 ld64 -mv cctools-port/cctools/include include -rm -r cctools-port -rm -r ld64/src/other - -mkdir build -cd build - -CFLAGS="-Wno-deprecated -Wno-deprecated-declarations -Wno-unused-result -Werror -Wfatal-errors -O2 -g -I../ld64/src -I../ld64/src/ld -I../ld64/src/ld/parsers -I../ld64/src/abstraction -I../ld64/src/3rd -I../ld64/src/3rd/include -I../ld64/src/3rd/BlocksRuntime -I../include -I../include/foreign -DTAPI_SUPPORT -DPROGRAM_PREFIX=\\\"$host-\\\" -D__LITTLE_ENDIAN__ -D__private_extern__= $(pkg-config --cflags libtapi)" - -CXXFLAGS="-std=gnu++11 $CFLAGS" - -LDFLAGS="$(pkg-config --libs libtapi) -ldl -lpthread" - -for f in ../ld64/src/ld/*.c ../ld64/src/3rd/*.c; do - echo "compiling $f" - eval "gcc -c $CFLAGS $f -o $(basename $f).o" -done - -for f in $(find ../ld64/src -name \*.cpp); do - echo "compiling $f" - eval "g++ -c $CXXFLAGS $f -o $(basename $f).o" -done - -g++ *.o $LDFLAGS -o $host-ld - -mkdir -p $out/bin -cp $host-ld $out/bin diff --git a/nix/nixcrpkgs/macos/ranlib_builder.sh b/nix/nixcrpkgs/macos/ranlib_builder.sh deleted file mode 100644 index 15c0bd206..000000000 --- a/nix/nixcrpkgs/macos/ranlib_builder.sh +++ /dev/null @@ -1,45 +0,0 @@ -source $setup - -tar -xf $src -mv cctools-port-* cctools-port - -cd cctools-port - -for patch in $patches; do - echo applying patch $patch - patch -p1 -i $patch -done - -# Similar to but not the same as the other _structs.h. -rm cctools/include/foreign/mach/i386/_structs.h - -# Causes a troublesome undefined reference. -rm cctools/libstuff/vm_flush_cache.c - -cd .. - -mv cctools-port/cctools/misc . -mv cctools-port/cctools/include . -mv cctools-port/cctools/libstuff . -rm -r cctools-port - -mkdir build -cd build - -CFLAGS="-Wno-deprecated -Wno-deprecated-declarations -Wno-unused-result -Wno-format-overflow -Werror -Wfatal-errors -O2 -g -I../include -I../include/foreign -DPROGRAM_PREFIX=\\\"$host-\\\" -D__LITTLE_ENDIAN__ -D__private_extern__= -D__DARWIN_UNIX03 -DPACKAGE_NAME=\\\"cctools\\\" -DPACKAGE_VERSION=\\\"$apple_version\\\" -DEMULATED_HOST_CPU_TYPE=16777223 -DEMULATED_HOST_CPU_SUBTYPE=3" - -CXXFLAGS="-std=gnu++11 $CFLAGS" - -LDFLAGS="-ldl" - -for f in ../libstuff/*.c ; do - echo "compiling $f" - eval "gcc -c $CFLAGS $f -o $(basename $f).o" -done - -eval "gcc $CFLAGS ../misc/libtool.c *.o $LDFLAGS -o $host-libtool" -eval "gcc $CFLAGS -DRANLIB ../misc/libtool.c *.o $LDFLAGS -o $host-ranlib" - -mkdir -p $out/bin -cp $host-libtool $host-ranlib $out/bin/ - diff --git a/nix/nixcrpkgs/macos/sdk_builder.sh b/nix/nixcrpkgs/macos/sdk_builder.sh deleted file mode 100644 index 8a0f872e0..000000000 --- a/nix/nixcrpkgs/macos/sdk_builder.sh +++ /dev/null @@ -1,4 +0,0 @@ -source $setup - -tar -xf $src -mv MacOSX*.sdk $out diff --git a/nix/nixcrpkgs/macos/strip_builder.sh b/nix/nixcrpkgs/macos/strip_builder.sh deleted file mode 100644 index e69a12949..000000000 --- a/nix/nixcrpkgs/macos/strip_builder.sh +++ /dev/null @@ -1,43 +0,0 @@ -source $setup - -tar -xf $src -mv cctools-port-* cctools-port - -cd cctools-port - -for patch in $patches; do - echo applying patch $patch - patch -p1 -i $patch -done - -# Similar to but not the same as the other _structs.h. -rm cctools/include/foreign/mach/i386/_structs.h - -# Causes a troublesome undefined reference. -rm cctools/libstuff/vm_flush_cache.c - -cd .. - -mv cctools-port/cctools/misc . -mv cctools-port/cctools/include . -mv cctools-port/cctools/libstuff . 
-rm -r cctools-port - -mkdir build -cd build - -CFLAGS="-Wno-deprecated -Wno-deprecated-declarations -Wno-unused-result -Werror -Wfatal-errors -O2 -g -I../include -I../include/foreign -DPROGRAM_PREFIX=\\\"$host-\\\" -D__LITTLE_ENDIAN__ -D__private_extern__= -D__DARWIN_UNIX03 -DPACKAGE_NAME=\\\"cctools\\\" -DPACKAGE_VERSION=\\\"$apple_version\\\" -DEMULATED_HOST_CPU_TYPE=16777223 -DEMULATED_HOST_CPU_SUBTYPE=3" - -CXXFLAGS="-std=gnu++11 $CFLAGS" - -LDFLAGS="-ldl -lpthread" - -for f in ../misc/strip.c ../libstuff/*.c; do - echo "compiling $f" - eval "gcc -c $CFLAGS $f -o $(basename $f).o" -done - -gcc *.o $LDFLAGS -o $host-strip - -mkdir -p $out/bin -cp $host-strip $out/bin/ diff --git a/nix/nixcrpkgs/macos/tapi_builder.sh b/nix/nixcrpkgs/macos/tapi_builder.sh deleted file mode 100644 index d8c15a6a1..000000000 --- a/nix/nixcrpkgs/macos/tapi_builder.sh +++ /dev/null @@ -1,80 +0,0 @@ -source $setup - -tar -xf $src -mv tapi-* tapi - -mkdir build -cd build - -mkdir -p include/tapi/{Core,Driver} -cat > include/tapi/Core/ArchitectureConfig.h < include/tapi/Version.inc < $out/lib/pkgconfig/libtapi.pc < -#include -#include -#include -#include -#include - -int do_exec(const std::string & compiler_name, - const std::vector & args) -{ - char ** exec_args = new char *[args.size() + 1]; - size_t i = 0; - for (const std::string & arg : args) - { - exec_args[i++] = (char *)arg.c_str(); - } - exec_args[i] = nullptr; - - execvp(compiler_name.c_str(), exec_args); - - int result = errno; - std::cerr << "execvp failed: " << compiler_name << ": " - << strerror(result) << std::endl; - return 1; -} - -int compiler_main(int argc, char ** argv, - const std::string & compiler_name) -{ - std::vector args; - - args.push_back(compiler_name); - - args.push_back("-target"); - args.push_back(WRAPPER_HOST); - - args.push_back("-mmacosx-version-min=" WRAPPER_OS_VERSION_MIN); - - // The ld64 linker will just assume sdk_version is the same as - // macosx-version-min if we don't supply it. That probably will not - // do any harm. - // args.push_back("-Wl,-sdk_version," WRAPPER_SDK_VERSION); - - // Suppress warnings about the -Wl arguments not being used when we're just - // compiling and not linking. - args.push_back("-Wno-unused-command-line-argument"); - - args.push_back("--sysroot"); - args.push_back(WRAPPER_SDK_PATH); - - // Causes clang to pass -demangle, -lto_library, -no_deduplicate, and other - // options that could be useful. 
Version 274.2 is the version number used here: - // https://github.com/tpoechtrager/osxcross/blob/474f359/build.sh#L140 - if (WRAPPER_LINKER_VERSION[0]) - { - args.push_back("-mlinker-version=" WRAPPER_LINKER_VERSION); - } - - if (compiler_name == "clang++") - { - args.push_back("-stdlib=libc++"); - args.push_back("-cxx-isystem"); - args.push_back(WRAPPER_SDK_PATH "/usr/include/c++/v1"); - } - - for (int i = 1; i < argc; ++i) - { - args.push_back(argv[i]); - } - - return do_exec(compiler_name, args); -} - -int c_compiler_main(int argc, char ** argv) -{ - return compiler_main(argc, argv, "clang"); -} - -int cxx_compiler_main(int argc, char ** argv) -{ - return compiler_main(argc, argv, "clang++"); -} - -int wrapper_main(int argc, char ** argv) -{ - std::cout << - "host: " WRAPPER_HOST "\n" - "path: " WRAPPER_PATH "\n" - "sdk_path: " WRAPPER_SDK_PATH "\n"; - return 0; -} - -struct { - const char * name; - int (*main_func)(int argc, char ** argv); -} prgms[] = { - { WRAPPER_HOST "-gcc", c_compiler_main }, - { WRAPPER_HOST "-cc", c_compiler_main }, - { WRAPPER_HOST "-clang", c_compiler_main }, - { WRAPPER_HOST "-g++", cxx_compiler_main }, - { WRAPPER_HOST "-c++", cxx_compiler_main }, - { WRAPPER_HOST "-clang++", cxx_compiler_main }, - { WRAPPER_HOST "-wrapper", wrapper_main }, - { nullptr, nullptr }, -}; - -const char * get_program_name(const char * path) -{ - const char * p = strrchr(path, '/'); - if (p) { path = p + 1; } - return path; -} - -int main(int argc, char ** argv) -{ - // We only want this wrapper and the compiler it invokes to access a certain - // set of tools that are determined at build time. Ignore whatever is on the - // user's path and use the path specified by our Nix expression instead. - int result = setenv("PATH", WRAPPER_PATH, 1); - if (result) - { - std::cerr << "wrapper failed to set PATH" << std::endl; - return 1; - } - - std::string program_name = get_program_name(argv[0]); - - for (auto * p = prgms; p->name; p++) - { - if (program_name == p->name) - { - return p->main_func(argc, argv); - } - } - - std::cerr << "compiler wrapper invoked with unknown program name: " - << argv[0] << std::endl; - return 1; -} diff --git a/nix/nixcrpkgs/make_derivation.nix b/nix/nixcrpkgs/make_derivation.nix deleted file mode 100644 index af74ee91f..000000000 --- a/nix/nixcrpkgs/make_derivation.nix +++ /dev/null @@ -1,91 +0,0 @@ -env: attrs: - -let - nixpkgs = env.nixpkgs; - - native_inputs = - (attrs.native_inputs or []) - ++ env.default_native_inputs; - - cross_inputs = (attrs.cross_inputs or []); - - path_join = builtins.concatStringsSep ":"; - - path_map = dir: inputs: (map (i: "${i}" + dir) inputs); - - # We can't just set PATH in our derivation because nix-shell will make the - # derivation's PATH override the system PATH, meaning we can't use utilities - # like "git" or "which" form the host system. So we set _PATH instead, and we - # use a setup script ($setup) that copies _PATH to PATH. And we provide - # $stdenv/setup so that nix-shell can find our setup script. - # - # nixcrpkgs does not expose its users to this mess. The user can specify a - # PATH if they want, and it will be automatically moved to _PATH in the - # derivation. - filtered_attrs = nixpkgs.lib.filterAttrs (n: v: n != "PATH") attrs; - - path_attrs = { - _PATH = path_join ( - (if attrs ? 
PATH then [attrs.PATH] else []) ++ - (path_map "/bin" native_inputs) - ); - }; - - default_attrs = { - system = builtins.currentSystem; - - SHELL = "${nixpkgs.bashInteractive}/bin/bash"; - - setup = ./pretend_stdenv/setup; - - # This allows nix-shell to find our setup script. - stdenv = ./pretend_stdenv; - - PKG_CONFIG_PATH = path_join ( - (if attrs ? PKG_CONFIG_PATH then [attrs.PKG_CONFIG_PATH] else []) ++ - (path_map "/lib/pkgconfig" native_inputs) - ); - }; - - cross_attrs = if !env.is_cross then {} else { - NIXCRPKGS = true; - - inherit (env) host arch os exe_suffix; - inherit (env) cmake_toolchain; - - PKG_CONFIG_CROSS_PATH = path_join ( - (if attrs ? PKG_CONFIG_CROSS_PATH then [attrs.PKG_CONFIG_CROSS_PATH] else []) ++ - (path_map "/lib/pkgconfig" cross_inputs) - ); - - CMAKE_CROSS_PREFIX_PATH = path_join ( - (if attrs ? CMAKE_CROSS_PREFIX_PATH then [attrs.CMAKE_CROSS_PREFIX_PATH] else []) ++ - cross_inputs - ); - }; - - name_attrs = { - name = (attrs.name or "package") - + (if env.is_cross then "-${env.host}" else ""); - }; - - builder_attrs = - if builtins.isAttrs attrs.builder then - if attrs.builder ? ruby then - { - builder = "${nixpkgs.ruby}/bin/ruby"; - args = [attrs.builder.ruby]; - } - else - attrs.builder - else - rec { - builder = "${nixpkgs.bashInteractive}/bin/bash"; - args = ["-ue" attrs.builder]; - }; - - drv_attrs = default_attrs // cross_attrs - // filtered_attrs // name_attrs // builder_attrs // path_attrs; - -in - derivation drv_attrs diff --git a/nix/nixcrpkgs/mingw-w64/binutils/builder.sh b/nix/nixcrpkgs/mingw-w64/binutils/builder.sh deleted file mode 100644 index 8795ea0ce..000000000 --- a/nix/nixcrpkgs/mingw-w64/binutils/builder.sh +++ /dev/null @@ -1,26 +0,0 @@ -source $stdenv/setup - -unset CC CXX CFLAGS LDFLAGS LD AR AS RANLIB SIZE STRINGS NM STRIP OBJCOPY - -tar -xf $src - -cd binutils-$version -for patch in $patches; do - echo applying patch $patch - patch -p1 -i $patch -done - -# Clear the default library search path (noSysDirs) -echo 'NATIVE_LIB_DIRS=' >> ld/configure.tgt - -cd .. - -mkdir build -cd build - -../binutils-$version/configure --prefix=$out $configure_flags - -make - -make install - diff --git a/nix/nixcrpkgs/mingw-w64/binutils/default.nix b/nix/nixcrpkgs/mingw-w64/binutils/default.nix deleted file mode 100644 index 1a52fd226..000000000 --- a/nix/nixcrpkgs/mingw-w64/binutils/default.nix +++ /dev/null @@ -1,26 +0,0 @@ -{ native, host }: - -native.make_derivation rec { - name = "binutils-${version}-${host}"; - - version = "2.27"; - - src = native.nixpkgs.fetchurl { - url = "mirror://gnu/binutils/binutils-${version}.tar.bz2"; - sha256 = "125clslv17xh1sab74343fg6v31msavpmaa1c1394zsqa773g5rn"; - }; - - patches = [ - ./deterministic.patch - ]; - - build_inputs = [ native.nixpkgs.bison native.nixpkgs.zlib ]; - - configure_flags = - "--target=${host} " + - "--enable-shared " + - "--enable-deterministic-archives " + - "--disable-werror "; - - builder = ./builder.sh; -} diff --git a/nix/nixcrpkgs/mingw-w64/binutils/deterministic.patch b/nix/nixcrpkgs/mingw-w64/binutils/deterministic.patch deleted file mode 100644 index 0a264b35c..000000000 --- a/nix/nixcrpkgs/mingw-w64/binutils/deterministic.patch +++ /dev/null @@ -1,12 +0,0 @@ -Make binutils output deterministic by default. 
---- orig/ld/ldlang.c -+++ new/ld/ldlang.c -@@ -3095,6 +3095,8 @@ - ldfile_output_machine)) - einfo (_("%P%F:%s: can not set architecture: %E\n"), name); - -+ link_info.output_bfd->flags |= BFD_DETERMINISTIC_OUTPUT; -+ - link_info.hash = bfd_link_hash_table_create (link_info.output_bfd); - if (link_info.hash == NULL) - einfo (_("%P%F: can not create hash table: %E\n")); diff --git a/nix/nixcrpkgs/mingw-w64/builder.sh b/nix/nixcrpkgs/mingw-w64/builder.sh deleted file mode 100644 index 65c9e68e3..000000000 --- a/nix/nixcrpkgs/mingw-w64/builder.sh +++ /dev/null @@ -1,35 +0,0 @@ -source $setup - -cp -r $src mingw-w64 -chmod -R u+w mingw-w64 - -cd mingw-w64 -for patch in $patches; do - echo applying patch $patch - patch -p1 -i $patch -done -cd .. - -if [ -n "$just_headers" ]; then - mkdir build_headers - cd build_headers - ../mingw-w64/mingw-w64-headers/configure --prefix=$out $configure_flags - make - make install - cd .. -else - mkdir build_crt_and_headers - cd build_crt_and_headers - ../mingw-w64/configure --prefix=$out $configure_flags - make - make install - cd .. - - mkdir build_winpthreads - cd build_winpthreads - LDFLAGS="-L${out}/lib" ../mingw-w64/mingw-w64-libraries/winpthreads/configure \ - --host=$host --prefix=$out --disable-shared --enable-static - make - make install - cd .. -fi diff --git a/nix/nixcrpkgs/mingw-w64/default.nix b/nix/nixcrpkgs/mingw-w64/default.nix deleted file mode 100644 index 691c27154..000000000 --- a/nix/nixcrpkgs/mingw-w64/default.nix +++ /dev/null @@ -1,106 +0,0 @@ -{ native, arch }: - -let - nixpkgs = native.nixpkgs; - - host = "${arch}-w64-mingw32"; - - binutils = import ./binutils { inherit native host; }; - - mingw-w64_info = rec { - name = "mingw-w64-${version}"; - version = "2017-08-03"; - src = nixpkgs.fetchgit { - url = "git://git.code.sf.net/p/mingw-w64/mingw-w64"; - rev = "6de0055f99ed447ec63c1a650a3830f266a808bd"; - sha256 = "1830rcd0vsbvpr5m1lrabcqh12qrw1flq333b8xrs5b3n542xy2i"; - }; - patches = [ - ./usb.patch - ./guid-selectany.patch - ]; - configure_flags = "--enable-secure-api --enable-idl"; - }; - - mingw-w64_headers = native.make_derivation { - name = "${mingw-w64_info.name}-headers"; - inherit host; - inherit (mingw-w64_info) src patches configure_flags; - builder = ./builder.sh; - just_headers = true; - }; - - gcc_stage_1 = import ./gcc { - stage = 1; - libc = mingw-w64_headers; - inherit native arch binutils; - }; - - mingw-w64_full = native.make_derivation { - name = "${mingw-w64_info.name}-${host}"; - inherit host; - inherit (mingw-w64_info) version src patches; - configure_flags = - "--host=${host} " + - "--disable-shared --enable-static " + - mingw-w64_info.configure_flags; - native_inputs = [ binutils gcc_stage_1 ]; - builder = ./builder.sh; - just_headers = false; - }; - - gcc = import ./gcc { - libc = mingw-w64_full; - inherit native arch binutils; - }; - - license = native.make_derivation { - name = "${mingw-w64_info.name}-license"; - inherit (mingw-w64_info) version src; - gcc_src = gcc.src; - builder = ./license_builder.sh; - }; - - global_license_set = { _global = license; }; - - cmake_toolchain = import ../cmake_toolchain { - cmake_system_name = "Windows"; - inherit nixpkgs host; - }; - - os = "windows"; - - compiler = "gcc"; - - exe_suffix = ".exe"; - - crossenv = { - is_cross = true; - - default_native_inputs = native.default_native_inputs - ++ [ gcc binutils native.pkgconf native.wrappers ]; - - # Target info variables. - inherit host arch os compiler exe_suffix; - - # CMake toolchain file. 
- inherit cmake_toolchain; - - # A wide variety of programs and build tools. - inherit nixpkgs; - - # Some native build tools made by nixcrpkgs. - inherit native; - - # License information that should be shipped with any software compiled by - # this environment. - inherit global_license_set; - - # Make it easy to build or refer to the build tools. - inherit gcc binutils mingw-w64_full mingw-w64_info mingw-w64_headers gcc_stage_1; - mingw-w64 = mingw-w64_full; - - make_derivation = import ../make_derivation.nix crossenv; - }; -in - crossenv diff --git a/nix/nixcrpkgs/mingw-w64/gcc/builder.sh b/nix/nixcrpkgs/mingw-w64/gcc/builder.sh deleted file mode 100644 index ae8462f8d..000000000 --- a/nix/nixcrpkgs/mingw-w64/gcc/builder.sh +++ /dev/null @@ -1,63 +0,0 @@ -source $setup - -tar -xf $src - -cd gcc-$version -for patch in $patches; do - echo applying patch $patch - patch -p1 -i $patch -done - -# Prevents a name collision with mingw-w64 headers. -# See: https://gcc.gnu.org/ml/gcc-help/2017-05/msg00121.html -cd libstdc++-v3 -sed -i 's/\b__in\b/___in/g' \ - include/ext/random.tcc \ - include/ext/vstring.tcc \ - include/std/utility \ - include/std/tuple \ - include/std/istream \ - include/tr2/bool_set.tcc \ - include/tr2/bool_set \ - include/bits/basic_string.h \ - include/bits/basic_string.tcc \ - include/bits/locale_facets.h \ - include/bits/istream.tcc \ - include/tr1/utility \ - include/tr1/tuple -sed -i 's/\b__out\b/___out/g' \ - include/ext/random.tcc \ - include/ext/algorithm \ - include/ext/pb_ds/detail/debug_map_base.hpp \ - include/std/ostream \ - include/std/thread \ - include/tr2/bool_set \ - include/bits/ostream.tcc \ - include/bits/regex.tcc \ - include/bits/stl_algo.h \ - include/bits/locale_conv.h \ - include/bits/regex.h \ - include/bits/ostream_insert.h \ - include/tr1/regex \ - include/parallel/algo.h \ - include/parallel/set_operations.h \ - include/parallel/multiway_merge.h \ - include/parallel/unique_copy.h \ - include/experimental/algorithm \ - config/locale/dragonfly/c_locale.h \ - config/locale/generic/c_locale.h \ - config/locale/gnu/c_locale.h - -cd ../.. - -mkdir build -cd build - -../gcc-$version/configure --prefix=$out $configure_flags - -make $make_flags - -make $install_targets - -# Remove "install-tools" so we don't have a reference to bash. -rm -r "$out/libexec/gcc/$target/$version/install-tools/" diff --git a/nix/nixcrpkgs/mingw-w64/gcc/cppdefault.patch b/nix/nixcrpkgs/mingw-w64/gcc/cppdefault.patch deleted file mode 100644 index adc979e68..000000000 --- a/nix/nixcrpkgs/mingw-w64/gcc/cppdefault.patch +++ /dev/null @@ -1,35 +0,0 @@ -cppdefault.c If CROSS_DIRECTORY_STRUCTURE is defined, don't use the native -system header dir; use CROSS_INCLUDE_DIR instead if it is defined. - -This just makes GCC's behavior match the documentation for the -"--with-sysroot" configure option, which corresponds to -TARGET_SYSTEM_ROOT. The documentation says that if you specify -directories with --with-sysroot and --with-native-system-header-dir, -then the compilter will concatenate the the two together (with the -sysroot coming first) and search that directory instead of the default -/usr/include. - -The concatenation is done with this line in configure.ac: - - CROSS_SYSTEM_HEADER_DIR='$(TARGET_SYSTEM_ROOT)$${sysroot_headers_suffix}$(NATIVE_SYSTEM_HEADER_DIR)' - -Then Makefile.in sets the preprocessor macro CROSS_INCLUDE_DIR equal to -CROSS_SYSTEM_HEADER_DIR. - -This patch reverts one of the changes from Daniel Jacobowitz on 2013-02-13. 
-https://github.com/gcc-mirror/gcc/commit/17acc97af91fbd116659301b0b7d4965ecc1631d - ---- gcc-5.4.0/gcc/cppdefault.c -+++ gcc-5.4.0/gcc/cppdefault.c -@@ -28,9 +28,9 @@ - #define NATIVE_SYSTEM_HEADER_COMPONENT 0 - #endif - --#if defined (CROSS_DIRECTORY_STRUCTURE) && !defined (TARGET_SYSTEM_ROOT) -+#if defined (CROSS_DIRECTORY_STRUCTURE) - # undef LOCAL_INCLUDE_DIR - # undef NATIVE_SYSTEM_HEADER_DIR - #else - # undef CROSS_INCLUDE_DIR - #endif - diff --git a/nix/nixcrpkgs/mingw-w64/gcc/default.nix b/nix/nixcrpkgs/mingw-w64/gcc/default.nix deleted file mode 100644 index 8a5a46b7b..000000000 --- a/nix/nixcrpkgs/mingw-w64/gcc/default.nix +++ /dev/null @@ -1,103 +0,0 @@ -{ native, arch, stage ? 2, binutils, libc }: - -let - nixpkgs = native.nixpkgs; - isl = nixpkgs.isl_0_14; - inherit (nixpkgs) stdenv lib fetchurl; - inherit (nixpkgs) gettext gmp libmpc libelf mpfr texinfo which zlib; - - stageName = if stage == 1 then "-stage1" - else assert stage == 2; ""; -in - -native.make_derivation rec { - name = "gcc-${version}-${target}${stageName}"; - - target = "${arch}-w64-mingw32"; - - version = "6.3.0"; - - src = fetchurl { - url = "mirror://gnu/gcc/gcc-${version}/gcc-${version}.tar.bz2"; - sha256 = "17xjz30jb65hcf714vn9gcxvrrji8j20xm7n33qg1ywhyzryfsph"; - }; - - builder = ./builder.sh; - - patches = [ - # TODO: combine three of these patches into one called search-dirs.patch - ./mingw-search-paths.patch - ./use-source-date-epoch.patch - ./libstdc++-target.patch - ./no-sys-dirs.patch - ./cppdefault.patch - - # Fix a compiler error in GCC's ubsan.c: ISO C++ forbids comparison - # between pointer and integer. - ./ubsan.patch - ]; - - # TODO: can probably remove libelf here, and might as well remove - # the libraries that are given to GCC as configure flags - # TODO: just let GCC use its own gettext (intl) - native_inputs = [ - binutils gettext libelf texinfo which zlib - ]; - - configure_flags = - "--target=${arch}-w64-mingw32 " + - "--with-sysroot=${libc} " + - "--with-native-system-header-dir=/include " + - "--with-gnu-as " + - "--with-gnu-ld " + - "--with-as=${binutils}/bin/${arch}-w64-mingw32-as " + - "--with-ld=${binutils}/bin/${arch}-w64-mingw32-ld " + - "--with-isl=${isl} " + - "--with-gmp-include=${gmp.dev}/include " + - "--with-gmp-lib=${gmp.out}/lib " + - "--with-mpfr-include=${mpfr.dev}/include " + - "--with-mpfr-lib=${mpfr.out}/lib " + - "--with-mpc=${libmpc} " + - "--with-zlib-include=${zlib.dev}/include " + - "--with-zlib-lib=${zlib.out}/lib " + - "--enable-lto " + - "--enable-plugin " + - "--enable-static " + - "--enable-sjlj-exceptions " + - "--enable-__cxa_atexit " + - "--enable-long-long " + - "--with-dwarf2 " + - "--enable-fully-dynamic-string " + - (if stage == 1 then - "--enable-languages=c " + - "--enable-threads=win32 " - else - "--enable-languages=c,c++ " + - "--enable-threads=posix " - ) + - "--without-included-gettext " + - "--disable-libstdcxx-pch " + - "--disable-nls " + - "--disable-shared " + - "--disable-multilib " + - "--disable-libssp " + - "--disable-win32-registry " + - "--disable-bootstrap"; # TODO: not needed, --disable-bootstrap - # only applies to native builds - - make_flags = - if stage == 1 then - ["all-gcc" "all-target-libgcc"] - else - []; - - install_targets = - if stage == 1 then - ["install-gcc install-target-libgcc"] - else - ["install-strip"]; - - hardeningDisable = [ "format" ]; -} - -# TODO: why is GCC providing a fixed limits.h? 
diff --git a/nix/nixcrpkgs/mingw-w64/gcc/libstdc++-target.patch b/nix/nixcrpkgs/mingw-w64/gcc/libstdc++-target.patch deleted file mode 100644 index fb622b395..000000000 --- a/nix/nixcrpkgs/mingw-w64/gcc/libstdc++-target.patch +++ /dev/null @@ -1,32 +0,0 @@ -Patch to make the target libraries 'configure' scripts find the proper CPP. -I noticed that building the mingw32 cross compiler. -Looking at the build script for mingw in archlinux, I think that only nixos -needs this patch. I don't know why. -diff --git a/Makefile.in b/Makefile.in -index 93f66b6..d691917 100644 ---- a/Makefile.in -+++ b/Makefile.in -@@ -266,6 +266,7 @@ BASE_TARGET_EXPORTS = \ - AR="$(AR_FOR_TARGET)"; export AR; \ - AS="$(COMPILER_AS_FOR_TARGET)"; export AS; \ - CC="$(CC_FOR_TARGET) $(XGCC_FLAGS_FOR_TARGET) $$TFLAGS"; export CC; \ -+ CPP="$(CC_FOR_TARGET) $(XGCC_FLAGS_FOR_TARGET) $$TFLAGS -E"; export CC; \ - CFLAGS="$(CFLAGS_FOR_TARGET)"; export CFLAGS; \ - CONFIG_SHELL="$(SHELL)"; export CONFIG_SHELL; \ - CPPFLAGS="$(CPPFLAGS_FOR_TARGET)"; export CPPFLAGS; \ -@@ -291,11 +292,13 @@ BASE_TARGET_EXPORTS = \ - RAW_CXX_TARGET_EXPORTS = \ - $(BASE_TARGET_EXPORTS) \ - CXX_FOR_TARGET="$(RAW_CXX_FOR_TARGET)"; export CXX_FOR_TARGET; \ -- CXX="$(RAW_CXX_FOR_TARGET) $(XGCC_FLAGS_FOR_TARGET) $$TFLAGS"; export CXX; -+ CXX="$(RAW_CXX_FOR_TARGET) $(XGCC_FLAGS_FOR_TARGET) $$TFLAGS"; export CXX; \ -+ CXXCPP="$(RAW_CXX_FOR_TARGET) $(XGCC_FLAGS_FOR_TARGET) $$TFLAGS -E"; export CXX; - - NORMAL_TARGET_EXPORTS = \ - $(BASE_TARGET_EXPORTS) \ -- CXX="$(CXX_FOR_TARGET) $(XGCC_FLAGS_FOR_TARGET) $$TFLAGS"; export CXX; -+ CXX="$(CXX_FOR_TARGET) $(XGCC_FLAGS_FOR_TARGET) $$TFLAGS"; export CXX; \ -+ CXXCPP="$(CXX_FOR_TARGET) $(XGCC_FLAGS_FOR_TARGET) $$TFLAGS -E"; export CXX; - - # Where to find GMP - HOST_GMPLIBS = @gmplibs@ diff --git a/nix/nixcrpkgs/mingw-w64/gcc/mingw-search-paths.patch b/nix/nixcrpkgs/mingw-w64/gcc/mingw-search-paths.patch deleted file mode 100644 index c79730e10..000000000 --- a/nix/nixcrpkgs/mingw-w64/gcc/mingw-search-paths.patch +++ /dev/null @@ -1,14 +0,0 @@ -Make it so GCC does not force us to have a "mingw" symlink. - ---- gcc-6.3.0-orig/gcc/config/i386/mingw32.h -+++ gcc-6.3.0/gcc/config/i386/mingw32.h -@@ -163,3 +163,3 @@ - #ifndef STANDARD_STARTFILE_PREFIX_1 --#define STANDARD_STARTFILE_PREFIX_1 "/mingw/lib/" -+#define STANDARD_STARTFILE_PREFIX_1 "/lib/" - #endif -@@ -172,3 +172,3 @@ - #undef NATIVE_SYSTEM_HEADER_DIR --#define NATIVE_SYSTEM_HEADER_DIR "/mingw/include" -+#define NATIVE_SYSTEM_HEADER_DIR "/include" - diff --git a/nix/nixcrpkgs/mingw-w64/gcc/no-sys-dirs.patch b/nix/nixcrpkgs/mingw-w64/gcc/no-sys-dirs.patch deleted file mode 100644 index 36df51904..000000000 --- a/nix/nixcrpkgs/mingw-w64/gcc/no-sys-dirs.patch +++ /dev/null @@ -1,28 +0,0 @@ -diff -ru -x '*~' gcc-4.8.3-orig/gcc/cppdefault.c gcc-4.8.3/gcc/cppdefault.c ---- gcc-4.8.3-orig/gcc/cppdefault.c 2013-01-10 21:38:27.000000000 +0100 -+++ gcc-4.8.3/gcc/cppdefault.c 2014-08-18 16:20:32.893944536 +0200 -@@ -35,6 +35,8 @@ - # undef CROSS_INCLUDE_DIR - #endif - -+#undef LOCAL_INCLUDE_DIR -+ - const struct default_include cpp_include_defaults[] - #ifdef INCLUDE_DEFAULTS - = INCLUDE_DEFAULTS; -diff -ru -x '*~' gcc-4.8.3-orig/gcc/gcc.c gcc-4.8.3/gcc/gcc.c ---- gcc-4.8.3-orig/gcc/gcc.c 2014-03-23 12:30:57.000000000 +0100 -+++ gcc-4.8.3/gcc/gcc.c 2014-08-18 13:19:32.689201690 +0200 -@@ -1162,10 +1162,10 @@ - /* Default prefixes to attach to command names. 
*/ - - #ifndef STANDARD_STARTFILE_PREFIX_1 --#define STANDARD_STARTFILE_PREFIX_1 "/lib/" -+#define STANDARD_STARTFILE_PREFIX_1 "" - #endif - #ifndef STANDARD_STARTFILE_PREFIX_2 --#define STANDARD_STARTFILE_PREFIX_2 "/usr/lib/" -+#define STANDARD_STARTFILE_PREFIX_2 "" - #endif - - #ifdef CROSS_DIRECTORY_STRUCTURE /* Don't use these prefixes for a cross compiler. */ diff --git a/nix/nixcrpkgs/mingw-w64/gcc/ubsan.patch b/nix/nixcrpkgs/mingw-w64/gcc/ubsan.patch deleted file mode 100644 index 0ad3b7991..000000000 --- a/nix/nixcrpkgs/mingw-w64/gcc/ubsan.patch +++ /dev/null @@ -1,10 +0,0 @@ ---- gcc-6.3.0-orig/gcc/ubsan.c -+++ gcc-6.3.0/gcc/ubsan.c -@@ -1471,7 +1471,7 @@ - - expanded_location xloc = expand_location (loc); - if (xloc.file == NULL || strncmp (xloc.file, "\1", 2) == 0 -- || xloc.file == '\0' || xloc.file[0] == '\xff' -+ || xloc.file == NULL || xloc.file[0] == '\xff' - || xloc.file[1] == '\xff') - return false; diff --git a/nix/nixcrpkgs/mingw-w64/gcc/use-source-date-epoch.patch b/nix/nixcrpkgs/mingw-w64/gcc/use-source-date-epoch.patch deleted file mode 100644 index 65a5ab028..000000000 --- a/nix/nixcrpkgs/mingw-w64/gcc/use-source-date-epoch.patch +++ /dev/null @@ -1,52 +0,0 @@ -https://gcc.gnu.org/ml/gcc-patches/2015-06/msg02210.html - -diff --git a/libcpp/macro.c b/libcpp/macro.c -index 1e0a0b5..a52e3cb 100644 ---- a/libcpp/macro.c -+++ b/libcpp/macro.c -@@ -349,14 +349,38 @@ _cpp_builtin_macro_text (cpp_reader *pfile, cpp_hashnode *node) - slow on some systems. */ - time_t tt; - struct tm *tb = NULL; -+ char *source_date_epoch; - -- /* (time_t) -1 is a legitimate value for "number of seconds -- since the Epoch", so we have to do a little dance to -- distinguish that from a genuine error. */ -- errno = 0; -- tt = time(NULL); -- if (tt != (time_t)-1 || errno == 0) -- tb = localtime (&tt); -+ /* Allow the date and time to be set externally by an exported -+ environment variable to enable reproducible builds. */ -+ source_date_epoch = getenv ("SOURCE_DATE_EPOCH"); -+ if (source_date_epoch) -+ { -+ errno = 0; -+ tt = (time_t) strtol (source_date_epoch, NULL, 10); -+ if (errno == 0) -+ { -+ tb = gmtime (&tt); -+ if (tb == NULL) -+ cpp_error (pfile, CPP_DL_ERROR, -+ "SOURCE_DATE_EPOCH=\"%s\" is not a valid date", -+ source_date_epoch); -+ } -+ else -+ cpp_error (pfile, CPP_DL_ERROR, -+ "SOURCE_DATE_EPOCH=\"%s\" is not a valid number", -+ source_date_epoch); -+ } -+ else -+ { -+ /* (time_t) -1 is a legitimate value for "number of seconds -+ since the Epoch", so we have to do a little dance to -+ distinguish that from a genuine error. */ -+ errno = 0; -+ tt = time(NULL); -+ if (tt != (time_t)-1 || errno == 0) -+ tb = localtime (&tt); -+ } - - if (tb) - { diff --git a/nix/nixcrpkgs/mingw-w64/guid-selectany.patch b/nix/nixcrpkgs/mingw-w64/guid-selectany.patch deleted file mode 100644 index b495a67b5..000000000 --- a/nix/nixcrpkgs/mingw-w64/guid-selectany.patch +++ /dev/null @@ -1,38 +0,0 @@ -From 339371eafd2fb2bcbf8b0a08e5328fc7c16b892f Mon Sep 17 00:00:00 2001 -From: David Grayson -Date: Thu, 4 May 2017 06:41:28 -0700 -Subject: [PATCH] guiddef.h: Use __declspec(selectany) on GUID declarations. - -If __declspec(selectany) is not used on the prototype but later used -on a definition, GCC 6+ seems to ignore it, and you can get -multiple-definition errors at link time. - -That situation can arise in code like Microsoft's usbview utility that -has multiple translation units including the following headers in this -order: windows.h, initguid.h, winioctl.h. 
- -However, this patch cannot be upstreamed to mingw-w64 because it -breaks older versions of GCC, and MSDN says that putting selectany on -a declaration is "incorrect". Once GCC is fixed, we can remove this -patch. ---- - mingw-w64-headers/include/guiddef.h | 3 +-- - 1 file changed, 1 insertion(+), 2 deletions(-) - -diff --git a/mingw-w64-headers/include/guiddef.h b/mingw-w64-headers/include/guiddef.h -index 9ecea3e2..6c9444cf 100644 ---- a/mingw-w64-headers/include/guiddef.h -+++ b/mingw-w64-headers/include/guiddef.h -@@ -58,8 +58,7 @@ __extension__ template const GUID &__mingw_uuidof(); - #define DEFINE_GUID(name,l,w1,w2,b1,b2,b3,b4,b5,b6,b7,b8) const GUID DECLSPEC_SELECTANY name = { l, w1, w2, { b1, b2, b3, b4, b5, b6, b7, b8 } } - #endif - #else --/* __declspec(selectany) must be applied to initialized objects on GCC 5 hence must not be used here. */ --#define DEFINE_GUID(name,l,w1,w2,b1,b2,b3,b4,b5,b6,b7,b8) EXTERN_C const GUID name -+#define DEFINE_GUID(name,l,w1,w2,b1,b2,b3,b4,b5,b6,b7,b8) EXTERN_C const GUID DECLSPEC_SELECTANY name - #endif - - #define DEFINE_OLEGUID(name, l, w1, w2) DEFINE_GUID (name, l, w1, w2, 0xc0, 0, 0, 0, 0, 0, 0, 0x46) --- -2.12.1 - diff --git a/nix/nixcrpkgs/mingw-w64/license_builder.sh b/nix/nixcrpkgs/mingw-w64/license_builder.sh deleted file mode 100644 index beb53ee27..000000000 --- a/nix/nixcrpkgs/mingw-w64/license_builder.sh +++ /dev/null @@ -1,42 +0,0 @@ -source $setup - -tar -xf $gcc_src -mv gcc-* gcc - -license_gcc=$(cat gcc/COPYING3.LIB) -cd $src -license_runtime=$(cat COPYING.MinGW-w64-runtime/COPYING.MinGW-w64-runtime.txt) -license_winpthread=$(cat mingw-w64-libraries/winpthreads/COPYING) - -cat > $out < - The third-party software included with this software may - have been patched or otherwise modified. -

- -<h2>GCC run-time libraries</h2> - -<p> - The GCC run-time libraries libgcc and libstdc++ are licensed under the GNU - General Public License Version 3 (GPLv3) as shown below. -</p> - -<pre> -$license_gcc -</pre> - -<h2>MinGW-w64 runtime components</h2> - -<pre> -$license_runtime -</pre> - -<p> - libwinpthread also comes from the mingw-w64 project and its license is below. -</p> - -<pre> -$license_winpthread -</pre>
- -EOF diff --git a/nix/nixcrpkgs/mingw-w64/usb.patch b/nix/nixcrpkgs/mingw-w64/usb.patch deleted file mode 100644 index 768c4a979..000000000 --- a/nix/nixcrpkgs/mingw-w64/usb.patch +++ /dev/null @@ -1,73 +0,0 @@ -diff --git a/mingw-w64-headers/include/usbspec.h b/mingw-w64-headers/include/usbspec.h -index 86557d8d..97ab5f3b 100644 ---- a/mingw-w64-headers/include/usbspec.h -+++ b/mingw-w64-headers/include/usbspec.h -@@ -213,6 +213,13 @@ typedef struct _USB_BOS_DESCRIPTOR { - #define USB_DEVICE_CAPABILITY_USB20_EXTENSION 0x02 - #define USB_DEVICE_CAPABILITY_SUPERSPEED_USB 0x03 - #define USB_DEVICE_CAPABILITY_CONTAINER_ID 0x04 -+#define USB_DEVICE_CAPABILITY_PLATFORM 0x05 -+#define USB_DEVICE_CAPABILITY_POWER_DELIVERY 0x06 -+#define USB_DEVICE_CAPABILITY_BATTERY_INFO 0x07 -+#define USB_DEVICE_CAPABILITY_PD_CONSUMER_PORT 0x08 -+#define USB_DEVICE_CAPABILITY_PD_PROVIDER_PORT 0x09 -+#define USB_DEVICE_CAPABILITY_SUPERSPEEDPLUS_USB 0x0A -+#define USB_DEVICE_CAPABILITY_PRECISION_TIME_MEASUREMENT 0x0B - #define USB_DEVICE_CAPABILITY_BILLBOARD 0x0D - - typedef struct _USB_DEVICE_CAPABILITY_USB20_EXTENSION_DESCRIPTOR { -@@ -666,6 +673,54 @@ typedef struct _USB_SUPERSPEEDPLUS_ISOCH_ENDPOINT_COMPANION_DESCRIPTOR { - ULONG dwBytesPerInterval; - } USB_SUPERSPEEDPLUS_ISOCH_ENDPOINT_COMPANION_DESCRIPTOR,*PUSB_SUPERSPEEDPLUS_ISOCH_ENDPOINT_COMPANION_DESCRIPTOR; - -+typedef union _USB_DEVICE_CAPABILITY_SUPERSPEEDPLUS_SPEED { -+ ULONG AsUlong32; -+ struct { -+ ULONG SublinkSpeedAttrID:4; -+ ULONG LaneSpeedExponent:2; -+ ULONG SublinkTypeMode:1; -+ ULONG SublinkTypeDir:1; -+ ULONG Reserved:6; -+ ULONG LinkProtocol:2; -+ ULONG LaneSpeedMantissa:16; -+ }; -+} USB_DEVICE_CAPABILITY_SUPERSPEEDPLUS_SPEED, *PUSB_DEVICE_CAPABILITY_SUPERSPEEDPLUS_SPEED; -+ -+typedef struct _USB_DEVICE_CAPABILITY_SUPERSPEEDPLUS_USB_DESCRIPTOR { -+ UCHAR bLength; -+ UCHAR bDescriptorType; -+ UCHAR bDevCapabilityType; -+ UCHAR bReserved; -+ union { -+ ULONG AsUlong; -+ struct { -+ ULONG SublinkSpeedAttrCount:5; -+ ULONG SublinkSpeedIDCount:4; -+ ULONG Reserved:23; -+ }; -+ } bmAttributes; -+ union { -+ USHORT AsUshort; -+ struct { -+ USHORT SublinkSpeedAttrID:4; -+ USHORT Reserved:4; -+ USHORT MinRxLaneCount:4; -+ USHORT MinTxLaneCount:4; -+ }; -+ } wFunctionalitySupport; -+ USHORT wReserved; -+ USB_DEVICE_CAPABILITY_SUPERSPEEDPLUS_SPEED bmSublinkSpeedAttr[1]; -+} USB_DEVICE_CAPABILITY_SUPERSPEEDPLUS_USB_DESCRIPTOR,*PUSB_DEVICE_CAPABILITY_SUPERSPEEDPLUS_USB_DESCRIPTOR; -+ -+typedef struct _USB_DEVICE_CAPABILITY_PLATFORM_DESCRIPTOR { -+ UCHAR bLength; -+ UCHAR bDescriptorType; -+ UCHAR bDevCapabilityType; -+ UCHAR bReserved; -+ GUID PlatformCapabilityUuid; -+ UCHAR CapabililityData[1]; -+} USB_DEVICE_CAPABILITY_PLATFORM_DESCRIPTOR,*PUSB_DEVICE_CAPABILITY_PLATFORM_DESCRIPTOR; -+ - #include - - #endif diff --git a/nix/nixcrpkgs/native/default.nix b/nix/nixcrpkgs/native/default.nix deleted file mode 100644 index e56c5ce5c..000000000 --- a/nix/nixcrpkgs/native/default.nix +++ /dev/null @@ -1,54 +0,0 @@ -{ nixpkgs }: - -let - native_base = { - inherit nixpkgs; - - is_cross = false; - - default_native_inputs = [ - nixpkgs.bashInteractive - nixpkgs.binutils - (nixpkgs.binutils-unwrapped or nixpkgs.binutils) - nixpkgs.bzip2 - nixpkgs.cmake - nixpkgs.coreutils - nixpkgs.diffutils - nixpkgs.findutils - nixpkgs.gcc - nixpkgs.gawk - nixpkgs.gnumake - nixpkgs.gnugrep - nixpkgs.gnused - nixpkgs.gnutar - nixpkgs.gzip - nixpkgs.ninja - nixpkgs.patch - nixpkgs.which - nixpkgs.xz - ]; - - make_derivation = import ../make_derivation.nix native_base; - }; 
- - pkgconf = import ./pkgconf { env = native_base; }; - - wrappers = import ./wrappers { env = native_base; }; - - gnu_config = nixpkgs.fetchgit { - url = "https://git.savannah.gnu.org/git/config.git"; - rev = "81497f5aaf50a12a9fe0cba30ef18bda46b62959"; - sha256 = "1fq0nki2118zwbc8rdkqx5i04lbfw7gqbsyf5bscg5im6sfphq1d"; - }; - - native = native_base // { - default_native_inputs = native_base.default_native_inputs ++ [ - pkgconf - ]; - - inherit pkgconf wrappers gnu_config; - - make_derivation = import ../make_derivation.nix native; - }; - -in native diff --git a/nix/nixcrpkgs/native/pkgconf/builder.sh b/nix/nixcrpkgs/native/pkgconf/builder.sh deleted file mode 100644 index e58ccf98a..000000000 --- a/nix/nixcrpkgs/native/pkgconf/builder.sh +++ /dev/null @@ -1,21 +0,0 @@ -source $setup - -tar -xf $src - -mkdir build - -cd build - -../pkgconf-$version/configure \ - --prefix=$out \ - --with-system-libdir=/no-system-libdir/ \ - --with-system-includedir=/no-system-includedir/ - -make - -make install - -ln -s $out/bin/pkgconf $out/bin/pkg-config - -mkdir $out/license -cp ../pkgconf-$version/COPYING $out/license/LICENSE diff --git a/nix/nixcrpkgs/native/pkgconf/default.nix b/nix/nixcrpkgs/native/pkgconf/default.nix deleted file mode 100644 index cd7eea7bd..000000000 --- a/nix/nixcrpkgs/native/pkgconf/default.nix +++ /dev/null @@ -1,14 +0,0 @@ -{ env }: - -env.make_derivation rec { - name = "pkgconf-${version}"; - - version = "1.0.1"; - - src = env.nixpkgs.fetchurl { - url = "https://github.com/pkgconf/pkgconf/releases/download/pkgconf-${version}/pkgconf-${version}.tar.gz"; - sha256 = "1w9wb2z7zz6s4mifbllvhx0401bwsynhp02v312i6i9jn1m2zkj5"; - }; - - builder = ./builder.sh; -} diff --git a/nix/nixcrpkgs/native/wrappers/builder.sh b/nix/nixcrpkgs/native/wrappers/builder.sh deleted file mode 100644 index 64d149cef..000000000 --- a/nix/nixcrpkgs/native/wrappers/builder.sh +++ /dev/null @@ -1,17 +0,0 @@ -source $setup - -mkdir -p $out/bin - -cat > $out/bin/pkg-config-cross < $out/bin/cmake-cross < $out/gdbcmd.txt -set substitute-path ../samples src/angle/samples -set substitute-path ../util src/angle/util -EOF diff --git a/nix/nixcrpkgs/pkgs/angle/default.nix b/nix/nixcrpkgs/pkgs/angle/default.nix deleted file mode 100644 index da5a76c37..000000000 --- a/nix/nixcrpkgs/pkgs/angle/default.nix +++ /dev/null @@ -1,23 +0,0 @@ -{ crossenv, gdb, debug ? false }: - -if crossenv.os != "windows" then "" else - -let - angle = import ./lib.nix { - inherit crossenv debug; - }; - - util = import ./util.nix { - inherit crossenv angle; - }; - - examples = import ./examples.nix { - inherit crossenv angle; - angle_util = util; - }; - - debug_bundle = import ./debug_bundle.nix { - inherit crossenv gdb angle examples; - }; - -in angle // { inherit util examples debug_bundle; } diff --git a/nix/nixcrpkgs/pkgs/angle/examples.nix b/nix/nixcrpkgs/pkgs/angle/examples.nix deleted file mode 100644 index 6ccb47f05..000000000 --- a/nix/nixcrpkgs/pkgs/angle/examples.nix +++ /dev/null @@ -1,11 +0,0 @@ -{ crossenv, angle, angle_util }: - -crossenv.make_derivation rec { - name = "angle-samples-${angle.version}"; - - src = angle.src; - - inherit angle angle_util; - - builder = ./samples_builder.sh; -} diff --git a/nix/nixcrpkgs/pkgs/angle/lib.nix b/nix/nixcrpkgs/pkgs/angle/lib.nix deleted file mode 100644 index 7357d905f..000000000 --- a/nix/nixcrpkgs/pkgs/angle/lib.nix +++ /dev/null @@ -1,43 +0,0 @@ -{ crossenv, debug ? 
false }: - -if crossenv.os != "windows" then "windows only" else - -crossenv.make_derivation rec { - name = "angle-${version}"; - - version = "2017-03-09"; - - src = crossenv.nixpkgs.fetchgit { - url = "https://chromium.googlesource.com/angle/angle"; - rev = "fe9306a8e5bb6a8d52368e8e7b8e92f3bc7e77d4"; - sha256 = "0m2pbkl9x9kybcxzhai0s3bk9k0r8nb531gzlxcvb3gb2za388bn"; - }; - - patches = [ ./megapatch.patch ]; - - builder = ./builder.sh; - - native_inputs = [ - crossenv.nixpkgs.pythonPackages.gyp - ]; - - GYP_GENERATORS = "ninja"; - - gyp_flags = - "-D OS=win " + - "-D TARGET=win32 " + - "-D use_ozone=0 " + - "-D angle_enable_vulkan=0 " + # Vulkan support is in progress - "-D angle_gl_library_type=static_library " + - "-I ../src/gyp/common.gypi " + - "--depth ."; - - CC_target = "${crossenv.host}-gcc"; - CXX_target = "${crossenv.host}-g++"; - AR = "${crossenv.host}-ar"; - RANLIB = "${crossenv.host}-ranlib"; - - CXXFLAGS = "-msse2 -Wno-conversion-null"; - - inherit debug; -} diff --git a/nix/nixcrpkgs/pkgs/angle/megapatch.patch b/nix/nixcrpkgs/pkgs/angle/megapatch.patch deleted file mode 100644 index 319b192a7..000000000 --- a/nix/nixcrpkgs/pkgs/angle/megapatch.patch +++ /dev/null @@ -1,24 +0,0 @@ -diff -ur angle-src-orig/src/angle.gyp angle-src/src/angle.gyp ---- angle-src-orig/src/angle.gyp 2017-02-07 07:42:35.090343332 -0800 -+++ angle-src/src/angle.gyp 2017-02-09 08:44:52.752006998 -0800 -@@ -286,7 +286,7 @@ - } - ] - }], -- ['OS=="win"', -+ ['OS=="win" and 0', - { - 'targets': - [ -diff -ur angle-src-orig/src/libGLESv2.gypi angle-src/src/libGLESv2.gypi ---- angle-src-orig/src/libGLESv2.gypi 2017-02-07 07:42:35.113676666 -0800 -+++ angle-src/src/libGLESv2.gypi 2017-02-09 09:19:37.985340331 -0800 -@@ -1078,7 +1078,7 @@ - '<@(libangle_null_sources)', - ], - }], -- ['angle_build_winrt==0 and OS=="win"', -+ ['angle_build_winrt==0 and OS=="win" and 0', - { - 'dependencies': - [ diff --git a/nix/nixcrpkgs/pkgs/angle/samples_builder.sh b/nix/nixcrpkgs/pkgs/angle/samples_builder.sh deleted file mode 100644 index 3b89c9c0b..000000000 --- a/nix/nixcrpkgs/pkgs/angle/samples_builder.sh +++ /dev/null @@ -1,54 +0,0 @@ -source $setup - -cp -r $src/samples . -cp -r $src/util . 
- -mkdir include -cp -r $src/samples/sample_util/* include/ - -mkdir build -cd build -mkdir bin - -CFLAGS="-mwindows -g -O2 -I../include -I$angle_util/include -I$angle/include" -CFLAGS="$CFLAGS -DGL_APICALL= -DANGLE_EXPORT= -DGL_GLEXT_PROTOTYPES" -LDFLAGS="-L$angle_util/lib -L$angle/lib" -LIBS="-langle_util -lEGL_static -lGLESv2_static -lANGLE -ltranslator --lpreprocessor -langle_image_util -langle_common -ld3d9 -lgdi32" - -echo "compiling texture_wrap" -$host-g++ $CFLAGS $LDFLAGS \ - ../samples/texture_wrap/TextureWrap.cpp \ - ../samples/sample_util/texture_utils.cpp \ - ../samples/sample_util/SampleApplication.cpp \ - $LIBS -o bin/texture_wrap${exe_suffix} - -echo "compiling simple_texture_2d" -$host-g++ $CFLAGS $LDFLAGS \ - ../samples/simple_texture_2d/SimpleTexture2D.cpp \ - ../samples/sample_util/texture_utils.cpp \ - ../samples/sample_util/SampleApplication.cpp \ - $LIBS -o bin/simple_texture_2d${exe_suffix} - -echo "compiling particle_system" -$host-g++ $CFLAGS $LDFLAGS \ - ../samples/particle_system/ParticleSystem.cpp \ - ../samples/sample_util/tga_utils.cpp \ - ../samples/sample_util/SampleApplication.cpp \ - $LIBS -o bin/particle_system${exe_suffix} -cp ../samples/particle_system/smoke.tga bin/ - -echo "compiling hello_triangle" -$host-g++ $CFLAGS $LDFLAGS \ - ../samples/hello_triangle/HelloTriangle.cpp \ - ../samples/sample_util/SampleApplication.cpp \ - $LIBS -o bin/hello_triangle${exe_suffix} - -echo "compiling window_test" -$host-g++ $CFLAGS $LDFLAGS \ - ../samples/WindowTest/WindowTest.cpp \ - -langle_util -lgdi32 -o bin/window_test${exe_suffix} - -mkdir -p $out/license -cp $src/LICENSE $out/license/ -cp -r bin $out/ diff --git a/nix/nixcrpkgs/pkgs/angle/util.nix b/nix/nixcrpkgs/pkgs/angle/util.nix deleted file mode 100644 index 83d703882..000000000 --- a/nix/nixcrpkgs/pkgs/angle/util.nix +++ /dev/null @@ -1,15 +0,0 @@ -# libangle_util is a helper library for programs like tests -# and samples that surround ANGLE but that are not the ANGLE libraries -# themselves - -{ crossenv, angle }: - -crossenv.make_derivation rec { - name = "angle_util-${angle.version}"; - - src = angle.src; - - inherit angle; - - builder = ./util_builder.sh; -} diff --git a/nix/nixcrpkgs/pkgs/angle/util_builder.sh b/nix/nixcrpkgs/pkgs/angle/util_builder.sh deleted file mode 100644 index 7929083b6..000000000 --- a/nix/nixcrpkgs/pkgs/angle/util_builder.sh +++ /dev/null @@ -1,38 +0,0 @@ -source $setup - -cp -r $src/util . 
- -mkdir include -cp -r $src/util/*.h include/ -cp -r $src/include/export.h include/ -mkdir include/common -cp -r $src/src/common/*.h include/common/ -mkdir -p include/windows/win32 -cp -r $src/util/windows/*.h include/windows/ -cp -r $src/util/windows/win32/*.h include/windows/win32/ - -mkdir -p build/{obj,lib} -cd build - -source_files=../util/*.cpp - -if [ "$os" == "windows" ]; then - source_files="$source_files ../util/windows/*.cpp ../util/windows/win32/*.cpp" -fi - -for c in $source_files; do - echo "compiling $(basename $c)" - $host-g++ -c -g -O2 -fpermissive \ - -I../include -I"$angle/include" -L"$angle/lib" \ - -DGL_APICALL= -DANGLE_EXPORT= -DEGLAPI= \ - -DGL_GLEXT_PROTOTYPES -DEGL_EGLEXT_PROTOTYPES -DLIBANGLE_UTIL_IMPLEMENTATION \ - $c -lGLESv2 -lEGL \ - -o obj/$(basename $c).o -done - -$host-ar r lib/libangle_util.a obj/*.o - -mkdir -p $out/{license,lib} -cp $src/LICENSE $out/license/ -cp lib/libangle_util.a $out/lib/ -cp -r ../include $out/ diff --git a/nix/nixcrpkgs/pkgs/at-spi2-headers/builder.sh b/nix/nixcrpkgs/pkgs/at-spi2-headers/builder.sh deleted file mode 100644 index 04ac14fda..000000000 --- a/nix/nixcrpkgs/pkgs/at-spi2-headers/builder.sh +++ /dev/null @@ -1,12 +0,0 @@ -source $setup - -mkdir -p $out/include/atspi $out/lib/pkgconfig - -cp $src/atspi/*.h $out/include/atspi/ - -cat > $out/lib/pkgconfig/atspi-2.pc < $out <at-spi2-headers -
-$license
-
-EOF diff --git a/nix/nixcrpkgs/pkgs/avrdude/builder.sh b/nix/nixcrpkgs/pkgs/avrdude/builder.sh deleted file mode 100644 index 8c7f2c078..000000000 --- a/nix/nixcrpkgs/pkgs/avrdude/builder.sh +++ /dev/null @@ -1,23 +0,0 @@ -source $setup - -tar -xf $src -mv avrdude-* avrdude - -ls -lad avrdude -cd avrdude -chmod -R u+w . -cp $config_dot_sub config.sub -cat $extra_conf >> avrdude.conf.in -cd .. - -mkdir build -cd build - -../avrdude/configure --host=$host --prefix=$out \ - --enable-static \ - --disable-shared \ - --disable-dependency-tracking - -make - -make install diff --git a/nix/nixcrpkgs/pkgs/avrdude/config.sub b/nix/nixcrpkgs/pkgs/avrdude/config.sub deleted file mode 100644 index 1d8e98bce..000000000 --- a/nix/nixcrpkgs/pkgs/avrdude/config.sub +++ /dev/null @@ -1,1801 +0,0 @@ -#! /bin/sh -# Configuration validation subroutine script. -# Copyright 1992-2018 Free Software Foundation, Inc. - -timestamp='2018-02-22' - -# This file is free software; you can redistribute it and/or modify it -# under the terms of the GNU General Public License as published by -# the Free Software Foundation; either version 3 of the License, or -# (at your option) any later version. -# -# This program is distributed in the hope that it will be useful, but -# WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with this program; if not, see . -# -# As a special exception to the GNU General Public License, if you -# distribute this file as part of a program that contains a -# configuration script generated by Autoconf, you may include it under -# the same distribution terms that you use for the rest of that -# program. This Exception is an additional permission under section 7 -# of the GNU General Public License, version 3 ("GPLv3"). - - -# Please send patches to . -# -# Configuration subroutine to validate and canonicalize a configuration type. -# Supply the specified configuration type as an argument. -# If it is invalid, we print an error message on stderr and exit with code 1. -# Otherwise, we print the canonical config type on stdout and succeed. - -# You can get the latest version of this script from: -# https://git.savannah.gnu.org/gitweb/?p=config.git;a=blob_plain;f=config.sub - -# This file is supposed to be the same for all GNU packages -# and recognize all the CPU types, system types and aliases -# that are meaningful with *any* GNU software. -# Each package is responsible for reporting which valid configurations -# it does not support. The user should be able to distinguish -# a failure to support a valid configuration from a meaningless -# configuration. - -# The goal of this file is to map all the various variations of a given -# machine specification into a single specification in the form: -# CPU_TYPE-MANUFACTURER-OPERATING_SYSTEM -# or in some cases, the newer four-part form: -# CPU_TYPE-MANUFACTURER-KERNEL-OPERATING_SYSTEM -# It is wrong to echo any other type of specification. - -me=`echo "$0" | sed -e 's,.*/,,'` - -usage="\ -Usage: $0 [OPTION] CPU-MFR-OPSYS or ALIAS - -Canonicalize a configuration name. - -Options: - -h, --help print this help, then exit - -t, --time-stamp print date of last modification, then exit - -v, --version print version number, then exit - -Report bugs and patches to ." - -version="\ -GNU config.sub ($timestamp) - -Copyright 1992-2018 Free Software Foundation, Inc. 
- -This is free software; see the source for copying conditions. There is NO -warranty; not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE." - -help=" -Try \`$me --help' for more information." - -# Parse command line -while test $# -gt 0 ; do - case $1 in - --time-stamp | --time* | -t ) - echo "$timestamp" ; exit ;; - --version | -v ) - echo "$version" ; exit ;; - --help | --h* | -h ) - echo "$usage"; exit ;; - -- ) # Stop option processing - shift; break ;; - - ) # Use stdin as input. - break ;; - -* ) - echo "$me: invalid option $1$help" - exit 1 ;; - - *local*) - # First pass through any local machine types. - echo "$1" - exit ;; - - * ) - break ;; - esac -done - -case $# in - 0) echo "$me: missing argument$help" >&2 - exit 1;; - 1) ;; - *) echo "$me: too many arguments$help" >&2 - exit 1;; -esac - -# Separate what the user gave into CPU-COMPANY and OS or KERNEL-OS (if any). -# Here we must recognize all the valid KERNEL-OS combinations. -maybe_os=`echo "$1" | sed 's/^\(.*\)-\([^-]*-[^-]*\)$/\2/'` -case $maybe_os in - nto-qnx* | linux-gnu* | linux-android* | linux-dietlibc | linux-newlib* | \ - linux-musl* | linux-uclibc* | uclinux-uclibc* | uclinux-gnu* | kfreebsd*-gnu* | \ - knetbsd*-gnu* | netbsd*-gnu* | netbsd*-eabi* | \ - kopensolaris*-gnu* | cloudabi*-eabi* | \ - storm-chaos* | os2-emx* | rtmk-nova*) - os=-$maybe_os - basic_machine=`echo "$1" | sed 's/^\(.*\)-\([^-]*-[^-]*\)$/\1/'` - ;; - android-linux) - os=-linux-android - basic_machine=`echo "$1" | sed 's/^\(.*\)-\([^-]*-[^-]*\)$/\1/'`-unknown - ;; - *) - basic_machine=`echo "$1" | sed 's/-[^-]*$//'` - if [ "$basic_machine" != "$1" ] - then os=`echo "$1" | sed 's/.*-/-/'` - else os=; fi - ;; -esac - -### Let's recognize common machines as not being operating systems so -### that things like config.sub decstation-3100 work. We also -### recognize some manufacturers as not being operating systems, so we -### can provide default operating systems below. -case $os in - -sun*os*) - # Prevent following clause from handling this invalid input. - ;; - -dec* | -mips* | -sequent* | -encore* | -pc532* | -sgi* | -sony* | \ - -att* | -7300* | -3300* | -delta* | -motorola* | -sun[234]* | \ - -unicom* | -ibm* | -next | -hp | -isi* | -apollo | -altos* | \ - -convergent* | -ncr* | -news | -32* | -3600* | -3100* | -hitachi* |\ - -c[123]* | -convex* | -sun | -crds | -omron* | -dg | -ultra | -tti* | \ - -harris | -dolphin | -highlevel | -gould | -cbm | -ns | -masscomp | \ - -apple | -axis | -knuth | -cray | -microblaze*) - os= - basic_machine=$1 - ;; - -bluegene*) - os=-cnk - ;; - -sim | -cisco | -oki | -wec | -winbond) - os= - basic_machine=$1 - ;; - -scout) - ;; - -wrs) - os=-vxworks - basic_machine=$1 - ;; - -chorusos*) - os=-chorusos - basic_machine=$1 - ;; - -chorusrdb) - os=-chorusrdb - basic_machine=$1 - ;; - -hiux*) - os=-hiuxwe2 - ;; - -sco6) - os=-sco5v6 - basic_machine=`echo "$1" | sed -e 's/86-.*/86-pc/'` - ;; - -sco5) - os=-sco3.2v5 - basic_machine=`echo "$1" | sed -e 's/86-.*/86-pc/'` - ;; - -sco4) - os=-sco3.2v4 - basic_machine=`echo "$1" | sed -e 's/86-.*/86-pc/'` - ;; - -sco3.2.[4-9]*) - os=`echo $os | sed -e 's/sco3.2./sco3.2v/'` - basic_machine=`echo "$1" | sed -e 's/86-.*/86-pc/'` - ;; - -sco3.2v[4-9]*) - # Don't forget version if it is 3.2v4 or newer. - basic_machine=`echo "$1" | sed -e 's/86-.*/86-pc/'` - ;; - -sco5v6*) - # Don't forget version if it is 3.2v4 or newer. 
- basic_machine=`echo "$1" | sed -e 's/86-.*/86-pc/'` - ;; - -sco*) - os=-sco3.2v2 - basic_machine=`echo "$1" | sed -e 's/86-.*/86-pc/'` - ;; - -udk*) - basic_machine=`echo "$1" | sed -e 's/86-.*/86-pc/'` - ;; - -isc) - os=-isc2.2 - basic_machine=`echo "$1" | sed -e 's/86-.*/86-pc/'` - ;; - -clix*) - basic_machine=clipper-intergraph - ;; - -isc*) - basic_machine=`echo "$1" | sed -e 's/86-.*/86-pc/'` - ;; - -lynx*178) - os=-lynxos178 - ;; - -lynx*5) - os=-lynxos5 - ;; - -lynx*) - os=-lynxos - ;; - -ptx*) - basic_machine=`echo "$1" | sed -e 's/86-.*/86-sequent/'` - ;; - -psos*) - os=-psos - ;; - -mint | -mint[0-9]*) - basic_machine=m68k-atari - os=-mint - ;; -esac - -# Decode aliases for certain CPU-COMPANY combinations. -case $basic_machine in - # Recognize the basic CPU types without company name. - # Some are omitted here because they have special meanings below. - 1750a | 580 \ - | a29k \ - | aarch64 | aarch64_be \ - | alpha | alphaev[4-8] | alphaev56 | alphaev6[78] | alphapca5[67] \ - | alpha64 | alpha64ev[4-8] | alpha64ev56 | alpha64ev6[78] | alpha64pca5[67] \ - | am33_2.0 \ - | arc | arceb \ - | arm | arm[bl]e | arme[lb] | armv[2-8] | armv[3-8][lb] | armv7[arm] \ - | avr | avr32 \ - | ba \ - | be32 | be64 \ - | bfin \ - | c4x | c8051 | clipper \ - | d10v | d30v | dlx | dsp16xx \ - | e2k | epiphany \ - | fido | fr30 | frv | ft32 \ - | h8300 | h8500 | hppa | hppa1.[01] | hppa2.0 | hppa2.0[nw] | hppa64 \ - | hexagon \ - | i370 | i860 | i960 | ia16 | ia64 \ - | ip2k | iq2000 \ - | k1om \ - | le32 | le64 \ - | lm32 \ - | m32c | m32r | m32rle | m68000 | m68k | m88k \ - | maxq | mb | microblaze | microblazeel | mcore | mep | metag \ - | mips | mipsbe | mipseb | mipsel | mipsle \ - | mips16 \ - | mips64 | mips64el \ - | mips64octeon | mips64octeonel \ - | mips64orion | mips64orionel \ - | mips64r5900 | mips64r5900el \ - | mips64vr | mips64vrel \ - | mips64vr4100 | mips64vr4100el \ - | mips64vr4300 | mips64vr4300el \ - | mips64vr5000 | mips64vr5000el \ - | mips64vr5900 | mips64vr5900el \ - | mipsisa32 | mipsisa32el \ - | mipsisa32r2 | mipsisa32r2el \ - | mipsisa32r6 | mipsisa32r6el \ - | mipsisa64 | mipsisa64el \ - | mipsisa64r2 | mipsisa64r2el \ - | mipsisa64r6 | mipsisa64r6el \ - | mipsisa64sb1 | mipsisa64sb1el \ - | mipsisa64sr71k | mipsisa64sr71kel \ - | mipsr5900 | mipsr5900el \ - | mipstx39 | mipstx39el \ - | mn10200 | mn10300 \ - | moxie \ - | mt \ - | msp430 \ - | nds32 | nds32le | nds32be \ - | nios | nios2 | nios2eb | nios2el \ - | ns16k | ns32k \ - | open8 | or1k | or1knd | or32 \ - | pdp10 | pj | pjl \ - | powerpc | powerpc64 | powerpc64le | powerpcle \ - | pru \ - | pyramid \ - | riscv32 | riscv64 \ - | rl78 | rx \ - | score \ - | sh | sh[1234] | sh[24]a | sh[24]aeb | sh[23]e | sh[234]eb | sheb | shbe | shle | sh[1234]le | sh3ele \ - | sh64 | sh64le \ - | sparc | sparc64 | sparc64b | sparc64v | sparc86x | sparclet | sparclite \ - | sparcv8 | sparcv9 | sparcv9b | sparcv9v \ - | spu \ - | tahoe | tic4x | tic54x | tic55x | tic6x | tic80 | tron \ - | ubicom32 \ - | v850 | v850e | v850e1 | v850e2 | v850es | v850e2v3 \ - | visium \ - | wasm32 \ - | x86 | xc16x | xstormy16 | xtensa \ - | z8k | z80) - basic_machine=$basic_machine-unknown - ;; - c54x) - basic_machine=tic54x-unknown - ;; - c55x) - basic_machine=tic55x-unknown - ;; - c6x) - basic_machine=tic6x-unknown - ;; - leon|leon[3-9]) - basic_machine=sparc-$basic_machine - ;; - m6811 | m68hc11 | m6812 | m68hc12 | m68hcs12x | nvptx | picochip) - basic_machine=$basic_machine-unknown - os=-none - ;; - m88110 | m680[12346]0 | m683?2 | 
m68360 | m5200 | v70 | w65) - ;; - ms1) - basic_machine=mt-unknown - ;; - - strongarm | thumb | xscale) - basic_machine=arm-unknown - ;; - xgate) - basic_machine=$basic_machine-unknown - os=-none - ;; - xscaleeb) - basic_machine=armeb-unknown - ;; - - xscaleel) - basic_machine=armel-unknown - ;; - - # We use `pc' rather than `unknown' - # because (1) that's what they normally are, and - # (2) the word "unknown" tends to confuse beginning users. - i*86 | x86_64) - basic_machine=$basic_machine-pc - ;; - # Object if more than one company name word. - *-*-*) - echo Invalid configuration \`"$1"\': machine \`"$basic_machine"\' not recognized 1>&2 - exit 1 - ;; - # Recognize the basic CPU types with company name. - 580-* \ - | a29k-* \ - | aarch64-* | aarch64_be-* \ - | alpha-* | alphaev[4-8]-* | alphaev56-* | alphaev6[78]-* \ - | alpha64-* | alpha64ev[4-8]-* | alpha64ev56-* | alpha64ev6[78]-* \ - | alphapca5[67]-* | alpha64pca5[67]-* | arc-* | arceb-* \ - | arm-* | armbe-* | armle-* | armeb-* | armv*-* \ - | avr-* | avr32-* \ - | ba-* \ - | be32-* | be64-* \ - | bfin-* | bs2000-* \ - | c[123]* | c30-* | [cjt]90-* | c4x-* \ - | c8051-* | clipper-* | craynv-* | cydra-* \ - | d10v-* | d30v-* | dlx-* \ - | e2k-* | elxsi-* \ - | f30[01]-* | f700-* | fido-* | fr30-* | frv-* | fx80-* \ - | h8300-* | h8500-* \ - | hppa-* | hppa1.[01]-* | hppa2.0-* | hppa2.0[nw]-* | hppa64-* \ - | hexagon-* \ - | i*86-* | i860-* | i960-* | ia16-* | ia64-* \ - | ip2k-* | iq2000-* \ - | k1om-* \ - | le32-* | le64-* \ - | lm32-* \ - | m32c-* | m32r-* | m32rle-* \ - | m68000-* | m680[012346]0-* | m68360-* | m683?2-* | m68k-* \ - | m88110-* | m88k-* | maxq-* | mcore-* | metag-* \ - | microblaze-* | microblazeel-* \ - | mips-* | mipsbe-* | mipseb-* | mipsel-* | mipsle-* \ - | mips16-* \ - | mips64-* | mips64el-* \ - | mips64octeon-* | mips64octeonel-* \ - | mips64orion-* | mips64orionel-* \ - | mips64r5900-* | mips64r5900el-* \ - | mips64vr-* | mips64vrel-* \ - | mips64vr4100-* | mips64vr4100el-* \ - | mips64vr4300-* | mips64vr4300el-* \ - | mips64vr5000-* | mips64vr5000el-* \ - | mips64vr5900-* | mips64vr5900el-* \ - | mipsisa32-* | mipsisa32el-* \ - | mipsisa32r2-* | mipsisa32r2el-* \ - | mipsisa32r6-* | mipsisa32r6el-* \ - | mipsisa64-* | mipsisa64el-* \ - | mipsisa64r2-* | mipsisa64r2el-* \ - | mipsisa64r6-* | mipsisa64r6el-* \ - | mipsisa64sb1-* | mipsisa64sb1el-* \ - | mipsisa64sr71k-* | mipsisa64sr71kel-* \ - | mipsr5900-* | mipsr5900el-* \ - | mipstx39-* | mipstx39el-* \ - | mmix-* \ - | mt-* \ - | msp430-* \ - | nds32-* | nds32le-* | nds32be-* \ - | nios-* | nios2-* | nios2eb-* | nios2el-* \ - | none-* | np1-* | ns16k-* | ns32k-* \ - | open8-* \ - | or1k*-* \ - | orion-* \ - | pdp10-* | pdp11-* | pj-* | pjl-* | pn-* | power-* \ - | powerpc-* | powerpc64-* | powerpc64le-* | powerpcle-* \ - | pru-* \ - | pyramid-* \ - | riscv32-* | riscv64-* \ - | rl78-* | romp-* | rs6000-* | rx-* \ - | sh-* | sh[1234]-* | sh[24]a-* | sh[24]aeb-* | sh[23]e-* | sh[34]eb-* | sheb-* | shbe-* \ - | shle-* | sh[1234]le-* | sh3ele-* | sh64-* | sh64le-* \ - | sparc-* | sparc64-* | sparc64b-* | sparc64v-* | sparc86x-* | sparclet-* \ - | sparclite-* \ - | sparcv8-* | sparcv9-* | sparcv9b-* | sparcv9v-* | sv1-* | sx*-* \ - | tahoe-* \ - | tic30-* | tic4x-* | tic54x-* | tic55x-* | tic6x-* | tic80-* \ - | tile*-* \ - | tron-* \ - | ubicom32-* \ - | v850-* | v850e-* | v850e1-* | v850es-* | v850e2-* | v850e2v3-* \ - | vax-* \ - | visium-* \ - | wasm32-* \ - | we32k-* \ - | x86-* | x86_64-* | xc16x-* | xps100-* \ - | xstormy16-* | xtensa*-* \ - | 
ymp-* \ - | z8k-* | z80-*) - ;; - # Recognize the basic CPU types without company name, with glob match. - xtensa*) - basic_machine=$basic_machine-unknown - ;; - # Recognize the various machine names and aliases which stand - # for a CPU type and a company and sometimes even an OS. - 386bsd) - basic_machine=i386-pc - os=-bsd - ;; - 3b1 | 7300 | 7300-att | att-7300 | pc7300 | safari | unixpc) - basic_machine=m68000-att - ;; - 3b*) - basic_machine=we32k-att - ;; - a29khif) - basic_machine=a29k-amd - os=-udi - ;; - abacus) - basic_machine=abacus-unknown - ;; - adobe68k) - basic_machine=m68010-adobe - os=-scout - ;; - alliant | fx80) - basic_machine=fx80-alliant - ;; - altos | altos3068) - basic_machine=m68k-altos - ;; - am29k) - basic_machine=a29k-none - os=-bsd - ;; - amd64) - basic_machine=x86_64-pc - ;; - amd64-*) - basic_machine=x86_64-`echo "$basic_machine" | sed 's/^[^-]*-//'` - ;; - amdahl) - basic_machine=580-amdahl - os=-sysv - ;; - amiga | amiga-*) - basic_machine=m68k-unknown - ;; - amigaos | amigados) - basic_machine=m68k-unknown - os=-amigaos - ;; - amigaunix | amix) - basic_machine=m68k-unknown - os=-sysv4 - ;; - apollo68) - basic_machine=m68k-apollo - os=-sysv - ;; - apollo68bsd) - basic_machine=m68k-apollo - os=-bsd - ;; - aros) - basic_machine=i386-pc - os=-aros - ;; - asmjs) - basic_machine=asmjs-unknown - ;; - aux) - basic_machine=m68k-apple - os=-aux - ;; - balance) - basic_machine=ns32k-sequent - os=-dynix - ;; - blackfin) - basic_machine=bfin-unknown - os=-linux - ;; - blackfin-*) - basic_machine=bfin-`echo "$basic_machine" | sed 's/^[^-]*-//'` - os=-linux - ;; - bluegene*) - basic_machine=powerpc-ibm - os=-cnk - ;; - c54x-*) - basic_machine=tic54x-`echo "$basic_machine" | sed 's/^[^-]*-//'` - ;; - c55x-*) - basic_machine=tic55x-`echo "$basic_machine" | sed 's/^[^-]*-//'` - ;; - c6x-*) - basic_machine=tic6x-`echo "$basic_machine" | sed 's/^[^-]*-//'` - ;; - c90) - basic_machine=c90-cray - os=-unicos - ;; - cegcc) - basic_machine=arm-unknown - os=-cegcc - ;; - convex-c1) - basic_machine=c1-convex - os=-bsd - ;; - convex-c2) - basic_machine=c2-convex - os=-bsd - ;; - convex-c32) - basic_machine=c32-convex - os=-bsd - ;; - convex-c34) - basic_machine=c34-convex - os=-bsd - ;; - convex-c38) - basic_machine=c38-convex - os=-bsd - ;; - cray | j90) - basic_machine=j90-cray - os=-unicos - ;; - craynv) - basic_machine=craynv-cray - os=-unicosmp - ;; - cr16 | cr16-*) - basic_machine=cr16-unknown - os=-elf - ;; - crds | unos) - basic_machine=m68k-crds - ;; - crisv32 | crisv32-* | etraxfs*) - basic_machine=crisv32-axis - ;; - cris | cris-* | etrax*) - basic_machine=cris-axis - ;; - crx) - basic_machine=crx-unknown - os=-elf - ;; - da30 | da30-*) - basic_machine=m68k-da30 - ;; - decstation | decstation-3100 | pmax | pmax-* | pmin | dec3100 | decstatn) - basic_machine=mips-dec - ;; - decsystem10* | dec10*) - basic_machine=pdp10-dec - os=-tops10 - ;; - decsystem20* | dec20*) - basic_machine=pdp10-dec - os=-tops20 - ;; - delta | 3300 | motorola-3300 | motorola-delta \ - | 3300-motorola | delta-motorola) - basic_machine=m68k-motorola - ;; - delta88) - basic_machine=m88k-motorola - os=-sysv3 - ;; - dicos) - basic_machine=i686-pc - os=-dicos - ;; - djgpp) - basic_machine=i586-pc - os=-msdosdjgpp - ;; - dpx20 | dpx20-*) - basic_machine=rs6000-bull - os=-bosx - ;; - dpx2*) - basic_machine=m68k-bull - os=-sysv3 - ;; - e500v[12]) - basic_machine=powerpc-unknown - os=$os"spe" - ;; - e500v[12]-*) - basic_machine=powerpc-`echo "$basic_machine" | sed 's/^[^-]*-//'` - os=$os"spe" - ;; - ebmon29k) - 
basic_machine=a29k-amd - os=-ebmon - ;; - elxsi) - basic_machine=elxsi-elxsi - os=-bsd - ;; - encore | umax | mmax) - basic_machine=ns32k-encore - ;; - es1800 | OSE68k | ose68k | ose | OSE) - basic_machine=m68k-ericsson - os=-ose - ;; - fx2800) - basic_machine=i860-alliant - ;; - genix) - basic_machine=ns32k-ns - ;; - gmicro) - basic_machine=tron-gmicro - os=-sysv - ;; - go32) - basic_machine=i386-pc - os=-go32 - ;; - h3050r* | hiux*) - basic_machine=hppa1.1-hitachi - os=-hiuxwe2 - ;; - h8300hms) - basic_machine=h8300-hitachi - os=-hms - ;; - h8300xray) - basic_machine=h8300-hitachi - os=-xray - ;; - h8500hms) - basic_machine=h8500-hitachi - os=-hms - ;; - harris) - basic_machine=m88k-harris - os=-sysv3 - ;; - hp300-*) - basic_machine=m68k-hp - ;; - hp300bsd) - basic_machine=m68k-hp - os=-bsd - ;; - hp300hpux) - basic_machine=m68k-hp - os=-hpux - ;; - hp3k9[0-9][0-9] | hp9[0-9][0-9]) - basic_machine=hppa1.0-hp - ;; - hp9k2[0-9][0-9] | hp9k31[0-9]) - basic_machine=m68000-hp - ;; - hp9k3[2-9][0-9]) - basic_machine=m68k-hp - ;; - hp9k6[0-9][0-9] | hp6[0-9][0-9]) - basic_machine=hppa1.0-hp - ;; - hp9k7[0-79][0-9] | hp7[0-79][0-9]) - basic_machine=hppa1.1-hp - ;; - hp9k78[0-9] | hp78[0-9]) - # FIXME: really hppa2.0-hp - basic_machine=hppa1.1-hp - ;; - hp9k8[67]1 | hp8[67]1 | hp9k80[24] | hp80[24] | hp9k8[78]9 | hp8[78]9 | hp9k893 | hp893) - # FIXME: really hppa2.0-hp - basic_machine=hppa1.1-hp - ;; - hp9k8[0-9][13679] | hp8[0-9][13679]) - basic_machine=hppa1.1-hp - ;; - hp9k8[0-9][0-9] | hp8[0-9][0-9]) - basic_machine=hppa1.0-hp - ;; - hppaosf) - basic_machine=hppa1.1-hp - os=-osf - ;; - hppro) - basic_machine=hppa1.1-hp - os=-proelf - ;; - i370-ibm* | ibm*) - basic_machine=i370-ibm - ;; - i*86v32) - basic_machine=`echo "$1" | sed -e 's/86.*/86-pc/'` - os=-sysv32 - ;; - i*86v4*) - basic_machine=`echo "$1" | sed -e 's/86.*/86-pc/'` - os=-sysv4 - ;; - i*86v) - basic_machine=`echo "$1" | sed -e 's/86.*/86-pc/'` - os=-sysv - ;; - i*86sol2) - basic_machine=`echo "$1" | sed -e 's/86.*/86-pc/'` - os=-solaris2 - ;; - i386mach) - basic_machine=i386-mach - os=-mach - ;; - vsta) - basic_machine=i386-unknown - os=-vsta - ;; - iris | iris4d) - basic_machine=mips-sgi - case $os in - -irix*) - ;; - *) - os=-irix4 - ;; - esac - ;; - isi68 | isi) - basic_machine=m68k-isi - os=-sysv - ;; - leon-*|leon[3-9]-*) - basic_machine=sparc-`echo "$basic_machine" | sed 's/-.*//'` - ;; - m68knommu) - basic_machine=m68k-unknown - os=-linux - ;; - m68knommu-*) - basic_machine=m68k-`echo "$basic_machine" | sed 's/^[^-]*-//'` - os=-linux - ;; - magnum | m3230) - basic_machine=mips-mips - os=-sysv - ;; - merlin) - basic_machine=ns32k-utek - os=-sysv - ;; - microblaze*) - basic_machine=microblaze-xilinx - ;; - mingw64) - basic_machine=x86_64-pc - os=-mingw64 - ;; - mingw32) - basic_machine=i686-pc - os=-mingw32 - ;; - mingw32ce) - basic_machine=arm-unknown - os=-mingw32ce - ;; - miniframe) - basic_machine=m68000-convergent - ;; - *mint | -mint[0-9]* | *MiNT | *MiNT[0-9]*) - basic_machine=m68k-atari - os=-mint - ;; - mips3*-*) - basic_machine=`echo "$basic_machine" | sed -e 's/mips3/mips64/'` - ;; - mips3*) - basic_machine=`echo "$basic_machine" | sed -e 's/mips3/mips64/'`-unknown - ;; - monitor) - basic_machine=m68k-rom68k - os=-coff - ;; - morphos) - basic_machine=powerpc-unknown - os=-morphos - ;; - moxiebox) - basic_machine=moxie-unknown - os=-moxiebox - ;; - msdos) - basic_machine=i386-pc - os=-msdos - ;; - ms1-*) - basic_machine=`echo "$basic_machine" | sed -e 's/ms1-/mt-/'` - ;; - msys) - basic_machine=i686-pc - os=-msys - 
;; - mvs) - basic_machine=i370-ibm - os=-mvs - ;; - nacl) - basic_machine=le32-unknown - os=-nacl - ;; - ncr3000) - basic_machine=i486-ncr - os=-sysv4 - ;; - netbsd386) - basic_machine=i386-unknown - os=-netbsd - ;; - netwinder) - basic_machine=armv4l-rebel - os=-linux - ;; - news | news700 | news800 | news900) - basic_machine=m68k-sony - os=-newsos - ;; - news1000) - basic_machine=m68030-sony - os=-newsos - ;; - news-3600 | risc-news) - basic_machine=mips-sony - os=-newsos - ;; - necv70) - basic_machine=v70-nec - os=-sysv - ;; - next | m*-next) - basic_machine=m68k-next - case $os in - -nextstep* ) - ;; - -ns2*) - os=-nextstep2 - ;; - *) - os=-nextstep3 - ;; - esac - ;; - nh3000) - basic_machine=m68k-harris - os=-cxux - ;; - nh[45]000) - basic_machine=m88k-harris - os=-cxux - ;; - nindy960) - basic_machine=i960-intel - os=-nindy - ;; - mon960) - basic_machine=i960-intel - os=-mon960 - ;; - nonstopux) - basic_machine=mips-compaq - os=-nonstopux - ;; - np1) - basic_machine=np1-gould - ;; - neo-tandem) - basic_machine=neo-tandem - ;; - nse-tandem) - basic_machine=nse-tandem - ;; - nsr-tandem) - basic_machine=nsr-tandem - ;; - nsv-tandem) - basic_machine=nsv-tandem - ;; - nsx-tandem) - basic_machine=nsx-tandem - ;; - op50n-* | op60c-*) - basic_machine=hppa1.1-oki - os=-proelf - ;; - openrisc | openrisc-*) - basic_machine=or32-unknown - ;; - os400) - basic_machine=powerpc-ibm - os=-os400 - ;; - OSE68000 | ose68000) - basic_machine=m68000-ericsson - os=-ose - ;; - os68k) - basic_machine=m68k-none - os=-os68k - ;; - pa-hitachi) - basic_machine=hppa1.1-hitachi - os=-hiuxwe2 - ;; - paragon) - basic_machine=i860-intel - os=-osf - ;; - parisc) - basic_machine=hppa-unknown - os=-linux - ;; - parisc-*) - basic_machine=hppa-`echo "$basic_machine" | sed 's/^[^-]*-//'` - os=-linux - ;; - pbd) - basic_machine=sparc-tti - ;; - pbb) - basic_machine=m68k-tti - ;; - pc532 | pc532-*) - basic_machine=ns32k-pc532 - ;; - pc98) - basic_machine=i386-pc - ;; - pc98-*) - basic_machine=i386-`echo "$basic_machine" | sed 's/^[^-]*-//'` - ;; - pentium | p5 | k5 | k6 | nexgen | viac3) - basic_machine=i586-pc - ;; - pentiumpro | p6 | 6x86 | athlon | athlon_*) - basic_machine=i686-pc - ;; - pentiumii | pentium2 | pentiumiii | pentium3) - basic_machine=i686-pc - ;; - pentium4) - basic_machine=i786-pc - ;; - pentium-* | p5-* | k5-* | k6-* | nexgen-* | viac3-*) - basic_machine=i586-`echo "$basic_machine" | sed 's/^[^-]*-//'` - ;; - pentiumpro-* | p6-* | 6x86-* | athlon-*) - basic_machine=i686-`echo "$basic_machine" | sed 's/^[^-]*-//'` - ;; - pentiumii-* | pentium2-* | pentiumiii-* | pentium3-*) - basic_machine=i686-`echo "$basic_machine" | sed 's/^[^-]*-//'` - ;; - pentium4-*) - basic_machine=i786-`echo "$basic_machine" | sed 's/^[^-]*-//'` - ;; - pn) - basic_machine=pn-gould - ;; - power) basic_machine=power-ibm - ;; - ppc | ppcbe) basic_machine=powerpc-unknown - ;; - ppc-* | ppcbe-*) - basic_machine=powerpc-`echo "$basic_machine" | sed 's/^[^-]*-//'` - ;; - ppcle | powerpclittle) - basic_machine=powerpcle-unknown - ;; - ppcle-* | powerpclittle-*) - basic_machine=powerpcle-`echo "$basic_machine" | sed 's/^[^-]*-//'` - ;; - ppc64) basic_machine=powerpc64-unknown - ;; - ppc64-*) basic_machine=powerpc64-`echo "$basic_machine" | sed 's/^[^-]*-//'` - ;; - ppc64le | powerpc64little) - basic_machine=powerpc64le-unknown - ;; - ppc64le-* | powerpc64little-*) - basic_machine=powerpc64le-`echo "$basic_machine" | sed 's/^[^-]*-//'` - ;; - ps2) - basic_machine=i386-ibm - ;; - pw32) - basic_machine=i586-unknown - os=-pw32 - ;; - rdos | 
rdos64) - basic_machine=x86_64-pc - os=-rdos - ;; - rdos32) - basic_machine=i386-pc - os=-rdos - ;; - rom68k) - basic_machine=m68k-rom68k - os=-coff - ;; - rm[46]00) - basic_machine=mips-siemens - ;; - rtpc | rtpc-*) - basic_machine=romp-ibm - ;; - s390 | s390-*) - basic_machine=s390-ibm - ;; - s390x | s390x-*) - basic_machine=s390x-ibm - ;; - sa29200) - basic_machine=a29k-amd - os=-udi - ;; - sb1) - basic_machine=mipsisa64sb1-unknown - ;; - sb1el) - basic_machine=mipsisa64sb1el-unknown - ;; - sde) - basic_machine=mipsisa32-sde - os=-elf - ;; - sei) - basic_machine=mips-sei - os=-seiux - ;; - sequent) - basic_machine=i386-sequent - ;; - sh5el) - basic_machine=sh5le-unknown - ;; - simso-wrs) - basic_machine=sparclite-wrs - os=-vxworks - ;; - sps7) - basic_machine=m68k-bull - os=-sysv2 - ;; - spur) - basic_machine=spur-unknown - ;; - st2000) - basic_machine=m68k-tandem - ;; - stratus) - basic_machine=i860-stratus - os=-sysv4 - ;; - strongarm-* | thumb-*) - basic_machine=arm-`echo "$basic_machine" | sed 's/^[^-]*-//'` - ;; - sun2) - basic_machine=m68000-sun - ;; - sun2os3) - basic_machine=m68000-sun - os=-sunos3 - ;; - sun2os4) - basic_machine=m68000-sun - os=-sunos4 - ;; - sun3os3) - basic_machine=m68k-sun - os=-sunos3 - ;; - sun3os4) - basic_machine=m68k-sun - os=-sunos4 - ;; - sun4os3) - basic_machine=sparc-sun - os=-sunos3 - ;; - sun4os4) - basic_machine=sparc-sun - os=-sunos4 - ;; - sun4sol2) - basic_machine=sparc-sun - os=-solaris2 - ;; - sun3 | sun3-*) - basic_machine=m68k-sun - ;; - sun4) - basic_machine=sparc-sun - ;; - sun386 | sun386i | roadrunner) - basic_machine=i386-sun - ;; - sv1) - basic_machine=sv1-cray - os=-unicos - ;; - symmetry) - basic_machine=i386-sequent - os=-dynix - ;; - t3e) - basic_machine=alphaev5-cray - os=-unicos - ;; - t90) - basic_machine=t90-cray - os=-unicos - ;; - tile*) - basic_machine=$basic_machine-unknown - os=-linux-gnu - ;; - tx39) - basic_machine=mipstx39-unknown - ;; - tx39el) - basic_machine=mipstx39el-unknown - ;; - toad1) - basic_machine=pdp10-xkl - os=-tops20 - ;; - tower | tower-32) - basic_machine=m68k-ncr - ;; - tpf) - basic_machine=s390x-ibm - os=-tpf - ;; - udi29k) - basic_machine=a29k-amd - os=-udi - ;; - ultra3) - basic_machine=a29k-nyu - os=-sym1 - ;; - v810 | necv810) - basic_machine=v810-nec - os=-none - ;; - vaxv) - basic_machine=vax-dec - os=-sysv - ;; - vms) - basic_machine=vax-dec - os=-vms - ;; - vpp*|vx|vx-*) - basic_machine=f301-fujitsu - ;; - vxworks960) - basic_machine=i960-wrs - os=-vxworks - ;; - vxworks68) - basic_machine=m68k-wrs - os=-vxworks - ;; - vxworks29k) - basic_machine=a29k-wrs - os=-vxworks - ;; - w65*) - basic_machine=w65-wdc - os=-none - ;; - w89k-*) - basic_machine=hppa1.1-winbond - os=-proelf - ;; - x64) - basic_machine=x86_64-pc - ;; - xbox) - basic_machine=i686-pc - os=-mingw32 - ;; - xps | xps100) - basic_machine=xps100-honeywell - ;; - xscale-* | xscalee[bl]-*) - basic_machine=`echo "$basic_machine" | sed 's/^xscale/arm/'` - ;; - ymp) - basic_machine=ymp-cray - os=-unicos - ;; - none) - basic_machine=none-none - os=-none - ;; - -# Here we handle the default manufacturer of certain CPU types. It is in -# some cases the only manufacturer, in others, it is the most popular. 
- w89k) - basic_machine=hppa1.1-winbond - ;; - op50n) - basic_machine=hppa1.1-oki - ;; - op60c) - basic_machine=hppa1.1-oki - ;; - romp) - basic_machine=romp-ibm - ;; - mmix) - basic_machine=mmix-knuth - ;; - rs6000) - basic_machine=rs6000-ibm - ;; - vax) - basic_machine=vax-dec - ;; - pdp11) - basic_machine=pdp11-dec - ;; - we32k) - basic_machine=we32k-att - ;; - sh[1234] | sh[24]a | sh[24]aeb | sh[34]eb | sh[1234]le | sh[23]ele) - basic_machine=sh-unknown - ;; - cydra) - basic_machine=cydra-cydrome - ;; - orion) - basic_machine=orion-highlevel - ;; - orion105) - basic_machine=clipper-highlevel - ;; - mac | mpw | mac-mpw) - basic_machine=m68k-apple - ;; - pmac | pmac-mpw) - basic_machine=powerpc-apple - ;; - *-unknown) - # Make sure to match an already-canonicalized machine name. - ;; - *) - echo Invalid configuration \`"$1"\': machine \`"$basic_machine"\' not recognized 1>&2 - exit 1 - ;; -esac - -# Here we canonicalize certain aliases for manufacturers. -case $basic_machine in - *-digital*) - basic_machine=`echo "$basic_machine" | sed 's/digital.*/dec/'` - ;; - *-commodore*) - basic_machine=`echo "$basic_machine" | sed 's/commodore.*/cbm/'` - ;; - *) - ;; -esac - -# Decode manufacturer-specific aliases for certain operating systems. - -if [ x"$os" != x"" ] -then -case $os in - # First match some system type aliases that might get confused - # with valid system types. - # -solaris* is a basic system type, with this one exception. - -auroraux) - os=-auroraux - ;; - -solaris1 | -solaris1.*) - os=`echo $os | sed -e 's|solaris1|sunos4|'` - ;; - -solaris) - os=-solaris2 - ;; - -unixware*) - os=-sysv4.2uw - ;; - -gnu/linux*) - os=`echo $os | sed -e 's|gnu/linux|linux-gnu|'` - ;; - # es1800 is here to avoid being matched by es* (a different OS) - -es1800*) - os=-ose - ;; - # Now accept the basic system types. - # The portable systems comes first. - # Each alternative MUST end in a * to match a version number. - # -sysv* is not here because it comes later, after sysvr4. 
- -gnu* | -bsd* | -mach* | -minix* | -genix* | -ultrix* | -irix* \ - | -*vms* | -sco* | -esix* | -isc* | -aix* | -cnk* | -sunos | -sunos[34]*\ - | -hpux* | -unos* | -osf* | -luna* | -dgux* | -auroraux* | -solaris* \ - | -sym* | -kopensolaris* | -plan9* \ - | -amigaos* | -amigados* | -msdos* | -newsos* | -unicos* | -aof* \ - | -aos* | -aros* | -cloudabi* | -sortix* \ - | -nindy* | -vxsim* | -vxworks* | -ebmon* | -hms* | -mvs* \ - | -clix* | -riscos* | -uniplus* | -iris* | -rtu* | -xenix* \ - | -hiux* | -knetbsd* | -mirbsd* | -netbsd* \ - | -bitrig* | -openbsd* | -solidbsd* | -libertybsd* \ - | -ekkobsd* | -kfreebsd* | -freebsd* | -riscix* | -lynxos* \ - | -bosx* | -nextstep* | -cxux* | -aout* | -elf* | -oabi* \ - | -ptx* | -coff* | -ecoff* | -winnt* | -domain* | -vsta* \ - | -udi* | -eabi* | -lites* | -ieee* | -go32* | -aux* \ - | -chorusos* | -chorusrdb* | -cegcc* | -glidix* \ - | -cygwin* | -msys* | -pe* | -psos* | -moss* | -proelf* | -rtems* \ - | -midipix* | -mingw32* | -mingw64* | -linux-gnu* | -linux-android* \ - | -linux-newlib* | -linux-musl* | -linux-uclibc* \ - | -uxpv* | -beos* | -mpeix* | -udk* | -moxiebox* \ - | -interix* | -uwin* | -mks* | -rhapsody* | -darwin* \ - | -openstep* | -oskit* | -conix* | -pw32* | -nonstopux* \ - | -storm-chaos* | -tops10* | -tenex* | -tops20* | -its* \ - | -os2* | -vos* | -palmos* | -uclinux* | -nucleus* \ - | -morphos* | -superux* | -rtmk* | -windiss* \ - | -powermax* | -dnix* | -nx6 | -nx7 | -sei* | -dragonfly* \ - | -skyos* | -haiku* | -rdos* | -toppers* | -drops* | -es* \ - | -onefs* | -tirtos* | -phoenix* | -fuchsia* | -redox* | -bme* \ - | -midnightbsd*) - # Remember, each alternative MUST END IN *, to match a version number. - ;; - -qnx*) - case $basic_machine in - x86-* | i*86-*) - ;; - *) - os=-nto$os - ;; - esac - ;; - -nto-qnx*) - ;; - -nto*) - os=`echo $os | sed -e 's|nto|nto-qnx|'` - ;; - -sim | -xray | -os68k* | -v88r* \ - | -windows* | -osx | -abug | -netware* | -os9* \ - | -macos* | -mpw* | -magic* | -mmixware* | -mon960* | -lnews*) - ;; - -mac*) - os=`echo "$os" | sed -e 's|mac|macos|'` - ;; - -linux-dietlibc) - os=-linux-dietlibc - ;; - -linux*) - os=`echo $os | sed -e 's|linux|linux-gnu|'` - ;; - -sunos5*) - os=`echo "$os" | sed -e 's|sunos5|solaris2|'` - ;; - -sunos6*) - os=`echo "$os" | sed -e 's|sunos6|solaris3|'` - ;; - -opened*) - os=-openedition - ;; - -os400*) - os=-os400 - ;; - -wince*) - os=-wince - ;; - -utek*) - os=-bsd - ;; - -dynix*) - os=-bsd - ;; - -acis*) - os=-aos - ;; - -atheos*) - os=-atheos - ;; - -syllable*) - os=-syllable - ;; - -386bsd) - os=-bsd - ;; - -ctix* | -uts*) - os=-sysv - ;; - -nova*) - os=-rtmk-nova - ;; - -ns2) - os=-nextstep2 - ;; - -nsk*) - os=-nsk - ;; - # Preserve the version number of sinix5. - -sinix5.*) - os=`echo $os | sed -e 's|sinix|sysv|'` - ;; - -sinix*) - os=-sysv4 - ;; - -tpf*) - os=-tpf - ;; - -triton*) - os=-sysv3 - ;; - -oss*) - os=-sysv3 - ;; - -svr4*) - os=-sysv4 - ;; - -svr3) - os=-sysv3 - ;; - -sysvr4) - os=-sysv4 - ;; - # This must come after -sysvr4. - -sysv*) - ;; - -ose*) - os=-ose - ;; - -*mint | -mint[0-9]* | -*MiNT | -MiNT[0-9]*) - os=-mint - ;; - -zvmoe) - os=-zvmoe - ;; - -dicos*) - os=-dicos - ;; - -pikeos*) - # Until real need of OS specific support for - # particular features comes up, bare metal - # configurations are quite functional. - case $basic_machine in - arm*) - os=-eabi - ;; - *) - os=-elf - ;; - esac - ;; - -nacl*) - ;; - -ios) - ;; - -none) - ;; - *) - # Get rid of the `-' at the beginning of $os. 
- os=`echo $os | sed 's/[^-]*-//'` - echo Invalid configuration \`"$1"\': system \`"$os"\' not recognized 1>&2 - exit 1 - ;; -esac -else - -# Here we handle the default operating systems that come with various machines. -# The value should be what the vendor currently ships out the door with their -# machine or put another way, the most popular os provided with the machine. - -# Note that if you're going to try to match "-MANUFACTURER" here (say, -# "-sun"), then you have to tell the case statement up towards the top -# that MANUFACTURER isn't an operating system. Otherwise, code above -# will signal an error saying that MANUFACTURER isn't an operating -# system, and we'll never get to this point. - -case $basic_machine in - score-*) - os=-elf - ;; - spu-*) - os=-elf - ;; - *-acorn) - os=-riscix1.2 - ;; - arm*-rebel) - os=-linux - ;; - arm*-semi) - os=-aout - ;; - c4x-* | tic4x-*) - os=-coff - ;; - c8051-*) - os=-elf - ;; - hexagon-*) - os=-elf - ;; - tic54x-*) - os=-coff - ;; - tic55x-*) - os=-coff - ;; - tic6x-*) - os=-coff - ;; - # This must come before the *-dec entry. - pdp10-*) - os=-tops20 - ;; - pdp11-*) - os=-none - ;; - *-dec | vax-*) - os=-ultrix4.2 - ;; - m68*-apollo) - os=-domain - ;; - i386-sun) - os=-sunos4.0.2 - ;; - m68000-sun) - os=-sunos3 - ;; - m68*-cisco) - os=-aout - ;; - mep-*) - os=-elf - ;; - mips*-cisco) - os=-elf - ;; - mips*-*) - os=-elf - ;; - or32-*) - os=-coff - ;; - *-tti) # must be before sparc entry or we get the wrong os. - os=-sysv3 - ;; - sparc-* | *-sun) - os=-sunos4.1.1 - ;; - pru-*) - os=-elf - ;; - *-be) - os=-beos - ;; - *-ibm) - os=-aix - ;; - *-knuth) - os=-mmixware - ;; - *-wec) - os=-proelf - ;; - *-winbond) - os=-proelf - ;; - *-oki) - os=-proelf - ;; - *-hp) - os=-hpux - ;; - *-hitachi) - os=-hiux - ;; - i860-* | *-att | *-ncr | *-altos | *-motorola | *-convergent) - os=-sysv - ;; - *-cbm) - os=-amigaos - ;; - *-dg) - os=-dgux - ;; - *-dolphin) - os=-sysv3 - ;; - m68k-ccur) - os=-rtu - ;; - m88k-omron*) - os=-luna - ;; - *-next) - os=-nextstep - ;; - *-sequent) - os=-ptx - ;; - *-crds) - os=-unos - ;; - *-ns) - os=-genix - ;; - i370-*) - os=-mvs - ;; - *-gould) - os=-sysv - ;; - *-highlevel) - os=-bsd - ;; - *-encore) - os=-bsd - ;; - *-sgi) - os=-irix - ;; - *-siemens) - os=-sysv4 - ;; - *-masscomp) - os=-rtu - ;; - f30[01]-fujitsu | f700-fujitsu) - os=-uxpv - ;; - *-rom68k) - os=-coff - ;; - *-*bug) - os=-coff - ;; - *-apple) - os=-macos - ;; - *-atari*) - os=-mint - ;; - *) - os=-none - ;; -esac -fi - -# Here we handle the case where we know the os, and the CPU type, but not the -# manufacturer. We pick the logical manufacturer. 
-vendor=unknown -case $basic_machine in - *-unknown) - case $os in - -riscix*) - vendor=acorn - ;; - -sunos*) - vendor=sun - ;; - -cnk*|-aix*) - vendor=ibm - ;; - -beos*) - vendor=be - ;; - -hpux*) - vendor=hp - ;; - -mpeix*) - vendor=hp - ;; - -hiux*) - vendor=hitachi - ;; - -unos*) - vendor=crds - ;; - -dgux*) - vendor=dg - ;; - -luna*) - vendor=omron - ;; - -genix*) - vendor=ns - ;; - -mvs* | -opened*) - vendor=ibm - ;; - -os400*) - vendor=ibm - ;; - -ptx*) - vendor=sequent - ;; - -tpf*) - vendor=ibm - ;; - -vxsim* | -vxworks* | -windiss*) - vendor=wrs - ;; - -aux*) - vendor=apple - ;; - -hms*) - vendor=hitachi - ;; - -mpw* | -macos*) - vendor=apple - ;; - -*mint | -mint[0-9]* | -*MiNT | -MiNT[0-9]*) - vendor=atari - ;; - -vos*) - vendor=stratus - ;; - esac - basic_machine=`echo "$basic_machine" | sed "s/unknown/$vendor/"` - ;; -esac - -echo "$basic_machine$os" -exit - -# Local variables: -# eval: (add-hook 'write-file-functions 'time-stamp) -# time-stamp-start: "timestamp='" -# time-stamp-format: "%:y-%02m-%02d" -# time-stamp-end: "'" -# End: diff --git a/nix/nixcrpkgs/pkgs/avrdude/default.nix b/nix/nixcrpkgs/pkgs/avrdude/default.nix deleted file mode 100644 index d5442cc90..000000000 --- a/nix/nixcrpkgs/pkgs/avrdude/default.nix +++ /dev/null @@ -1,35 +0,0 @@ -# TODO: remove giveio.sys and any other sketchy drivers or binaries from the source - -# Note: There are no patches to help AVRDUDE find its configuration -# file, so it will expect that file to be at -# /nix/store/...-avrdude/etc/avrdude.conf - -{ crossenv }: - -crossenv.make_derivation rec { - name = "avrdude-${version}"; - - version = "6.3"; # February 2016 - - src = crossenv.nixpkgs.fetchurl { - url = "http://download.savannah.gnu.org/releases/avrdude/avrdude-${version}.tar.gz"; - sha256 = "15m1w1qad3dj7r8n5ng1qqcaiyx1gyd6hnc3p2apgjllccdp77qg"; - }; - - native_inputs = [ - crossenv.nixpkgs.yacc - crossenv.nixpkgs.flex - ]; - - cross_inputs = [ - # TODO: libusb - # TODO: libftdi - # TODO: libelf - # TODO: libhid - ]; - - config_dot_sub = ./config.sub; - extra_conf = ./extra.conf; - - builder = ./builder.sh; -} diff --git a/nix/nixcrpkgs/pkgs/avrdude/extra.conf b/nix/nixcrpkgs/pkgs/avrdude/extra.conf deleted file mode 100644 index 539cd65f4..000000000 --- a/nix/nixcrpkgs/pkgs/avrdude/extra.conf +++ /dev/null @@ -1,6 +0,0 @@ -part parent "m328p" - id = "m328pb"; - desc = "ATmega328PB"; - signature = 0x1e 0x95 0x16; - ocdrev = 1; -; \ No newline at end of file diff --git a/nix/nixcrpkgs/pkgs/curl/builder.sh b/nix/nixcrpkgs/pkgs/curl/builder.sh deleted file mode 100644 index 303f6278e..000000000 --- a/nix/nixcrpkgs/pkgs/curl/builder.sh +++ /dev/null @@ -1,15 +0,0 @@ -source $setup - -tar -xf $src -cd * - -export CFLAGS=-fPIC - -case $host in - *darwin*) CFLAGS="$CFLAGS -mmacosx-version-min=10.11";; -esac - -./configure --prefix=$out --host=$host $configureFlags - -make -make install diff --git a/nix/nixcrpkgs/pkgs/curl/default.nix b/nix/nixcrpkgs/pkgs/curl/default.nix deleted file mode 100644 index 8268e288e..000000000 --- a/nix/nixcrpkgs/pkgs/curl/default.nix +++ /dev/null @@ -1,22 +0,0 @@ -{ crossenv, openssl, zlib }: - -crossenv.make_derivation rec { - name = "curl-${version}"; - version = "7.62.0"; - - cross_inputs = [ crossenv.nixpkgs.perl ]; - native_inputs = [ zlib openssl ]; - builder = ./builder.sh; - - configureFlags = [ - "--disable-shared" - "--disable-manual" - "--disable-ldap" - "--with-ssl=${openssl}" - ]; - - src = crossenv.nixpkgs.fetchurl { - url = "https://curl.haxx.se/download/${name}.tar.bz2"; - sha256 = 
"084niy7cin13ba65p8x38w2xcyc54n3fgzbin40fa2shfr0ca0kq"; - }; -} diff --git a/nix/nixcrpkgs/pkgs/dejavu-fonts/builder.sh b/nix/nixcrpkgs/pkgs/dejavu-fonts/builder.sh deleted file mode 100644 index 1161ce85a..000000000 --- a/nix/nixcrpkgs/pkgs/dejavu-fonts/builder.sh +++ /dev/null @@ -1,6 +0,0 @@ -source $setup - -tar -xf $src -cd dejavu-* -mkdir $out -cp -r * $out/ diff --git a/nix/nixcrpkgs/pkgs/dejavu-fonts/default.nix b/nix/nixcrpkgs/pkgs/dejavu-fonts/default.nix deleted file mode 100644 index bd8a9d743..000000000 --- a/nix/nixcrpkgs/pkgs/dejavu-fonts/default.nix +++ /dev/null @@ -1,29 +0,0 @@ -{ crossenv }: - -let - version = "2.37"; - - name = "dejavu-fonts-${version}"; - - src = crossenv.nixpkgs.fetchurl { - # Sourceforge went down. The original URL was: - # http://sourceforge.net/projects/dejavu/files/dejavu/${version}/dejavu-fonts-ttf-${version}.tar.bz2"; - url = "https://files.tmphax.com/repo1/dejavu-fonts-ttf-${version}.tar.bz2"; - sha256 = "1mqpds24wfs5cmfhj57fsfs07mji2z8812i5c4pi5pbi738s977s"; - }; - - fonts = crossenv.native.make_derivation { - inherit version name src; - builder = ./builder.sh; - }; - - license = crossenv.native.make_derivation { - name = "${name}-license"; - inherit src; - builder = ./license_builder.sh; - }; - - license_set = { "${name}" = license; }; - -in - fonts // { inherit license_set; } diff --git a/nix/nixcrpkgs/pkgs/dejavu-fonts/license_builder.sh b/nix/nixcrpkgs/pkgs/dejavu-fonts/license_builder.sh deleted file mode 100644 index 4ba33030b..000000000 --- a/nix/nixcrpkgs/pkgs/dejavu-fonts/license_builder.sh +++ /dev/null @@ -1,14 +0,0 @@ -source $setup - -tar -xf $src -mv dejavu-* dejavu - -license=$(cat dejavu/LICENSE) - -cat > $out <DejaVu Fonts - -
-$license
-
-EOF diff --git a/nix/nixcrpkgs/pkgs/devcon/builder.sh b/nix/nixcrpkgs/pkgs/devcon/builder.sh deleted file mode 100644 index 166abe2ad..000000000 --- a/nix/nixcrpkgs/pkgs/devcon/builder.sh +++ /dev/null @@ -1,26 +0,0 @@ -source $setup - -cp --no-preserve=mode -r $src/setup/devcon . - -cd devcon -for patch in $patches; do - echo applying patch $patch - patch -p1 -i $patch -done -$host-windmc msg.mc -cd .. - -mkdir build -cd build - -$host-windres ../devcon/devcon.rc rc.o - -$host-g++ -municode -O2 \ - -DUNICODE -D_UNICODE \ - ../devcon/*.cpp rc.o \ - -lsetupapi -lole32 \ - -o devcon.exe - -mkdir -p $out/bin $out/license -cp devcon.exe $out/bin -cp $src/LICENSE $out/license diff --git a/nix/nixcrpkgs/pkgs/devcon/default.nix b/nix/nixcrpkgs/pkgs/devcon/default.nix deleted file mode 100644 index e9410aee8..000000000 --- a/nix/nixcrpkgs/pkgs/devcon/default.nix +++ /dev/null @@ -1,20 +0,0 @@ -{ crossenv }: - -if crossenv.os != "windows" then "windows only" else - -crossenv.make_derivation rec { - name = "devcon-${version}"; - - version = "2017-05-01"; - - src = crossenv.nixpkgs.fetchFromGitHub { - owner = "Microsoft"; - repo = "Windows-driver-samples"; - rev = "4c5c5e0297c7a61e151f92af702cdac650a14489"; - sha256 = "1drq26bnad98xqn805qx0b6g4y65lmrdj7v40b3jhhzdsp8993pf"; - }; - - patches = []; - - builder = ./builder.sh; -} diff --git a/nix/nixcrpkgs/pkgs/devcon/my_xmlhelper.c b/nix/nixcrpkgs/pkgs/devcon/my_xmlhelper.c deleted file mode 100644 index 0cdf29140..000000000 --- a/nix/nixcrpkgs/pkgs/devcon/my_xmlhelper.c +++ /dev/null @@ -1,47 +0,0 @@ -#include "xmlhelper.h" - -EXTERN_C HRESULT InitXmlHelper() -{ - return 0; -} - -EXTERN_C HRESULT ReleaseXmlWriter() -{ - return 0; -} - -EXTERN_C HRESULT SaveXml(LPTSTR szfileName, DWORD dwCreationDisposition) -{ - MessageBox(NULL, - "Sorry, XML saving is not supported in this build.", - "XML not supported", - MB_OK | MB_ICONEXCLAMATION); - return 0; -} - -EXTERN_C HRESULT XmlAddHostController( - PSTR hcName, - PUSBHOSTCONTROLLERINFO hcInfo - ) -{ - return 0; -} - -EXTERN_C HRESULT XmlAddRootHub(PSTR rhName, PUSBROOTHUBINFO rhInfo) -{ - return 0; -} - -EXTERN_C HRESULT XmlAddExternalHub(PSTR ehName, PUSBEXTERNALHUBINFO ehInfo) -{ - return 0; -} - -EXTERN_C HRESULT XmlAddUsbDevice(PSTR devName, PUSBDEVICEINFO deviceInfo) -{ - return 0; -} - -EXTERN_C VOID XmlNotifyEndOfNodeList(PVOID pContext) -{ -} diff --git a/nix/nixcrpkgs/pkgs/expat/builder.sh b/nix/nixcrpkgs/pkgs/expat/builder.sh deleted file mode 100644 index 3a8bfb731..000000000 --- a/nix/nixcrpkgs/pkgs/expat/builder.sh +++ /dev/null @@ -1,26 +0,0 @@ -source $setup - -tar -xf $src - -cd expat-$version -for patch in $patches; do - echo applying patch $patch - patch -p1 -i $patch -done -cd .. 
- -mkdir build -cd build - -../expat-$version/configure \ - --prefix=$out --host=$host \ - --enable-static --disable-shared - -make - -make install - -mv $out/bin/xmlwf $out/bin/xmlwf.exe - -mkdir $out/license -cp ../expat-$version/COPYING $out/license/LICENSE diff --git a/nix/nixcrpkgs/pkgs/expat/cve-2016-0718.patch b/nix/nixcrpkgs/pkgs/expat/cve-2016-0718.patch deleted file mode 100644 index 6d66fec0c..000000000 --- a/nix/nixcrpkgs/pkgs/expat/cve-2016-0718.patch +++ /dev/null @@ -1,26 +0,0 @@ -From 3e6190e433479e56f8c1e5adc1198b3c86b15577 Mon Sep 17 00:00:00 2001 -From: Sebastian Pipping -Date: Sun, 17 Jul 2016 20:22:29 +0200 -Subject: [PATCH] Fix regression introduced by patch to CVE-2016-0718 (bug - #539) - -Tag names were cut off in some cases; reported by Andy Wang ---- - expat/lib/xmlparse.c | 2 +- - 1 file changed, 1 insertion(+), 1 deletion(-) - -diff --git a/expat/lib/xmlparse.c b/expat/lib/xmlparse.c -index 13e080d..2630310 100644 ---- expat/lib/xmlparse.c -+++ expat-fixed/lib/xmlparse.c -@@ -2430,7 +2430,7 @@ doContent(XML_Parser parser, - &fromPtr, rawNameEnd, - (ICHAR **)&toPtr, (ICHAR *)tag->bufEnd - 1); - convLen = (int)(toPtr - (XML_Char *)tag->buf); -- if ((convert_res == XML_CONVERT_COMPLETED) || (convert_res == XML_CONVERT_INPUT_INCOMPLETE)) { -+ if ((fromPtr >= rawNameEnd) || (convert_res == XML_CONVERT_INPUT_INCOMPLETE)) { - tag->name.strLen = convLen; - break; - } --- -2.9.2 diff --git a/nix/nixcrpkgs/pkgs/expat/default.nix b/nix/nixcrpkgs/pkgs/expat/default.nix deleted file mode 100644 index 860f54004..000000000 --- a/nix/nixcrpkgs/pkgs/expat/default.nix +++ /dev/null @@ -1,20 +0,0 @@ -{ crossenv }: - -crossenv.make_derivation rec { - name = "expat-${version}"; - - version = "2.2.0"; - - src = crossenv.nixpkgs.fetchurl { - # Sourceforge went down. The original URL we used was: - # mirror://sourceforge/expat/expat-${version}.tar.bz2 - url = "https://files.tmphax.com/repo1/expat-${version}.tar.bz2"; - sha256 = "1zq4lnwjlw8s9mmachwfvfjf2x3lk24jm41746ykhdcvs7r0zrfr"; - }; - - patches = [ - ./cve-2016-0718.patch - ]; - - builder = ./builder.sh; -} diff --git a/nix/nixcrpkgs/pkgs/fixesproto/builder.sh b/nix/nixcrpkgs/pkgs/fixesproto/builder.sh deleted file mode 100644 index 4d91d6e9f..000000000 --- a/nix/nixcrpkgs/pkgs/fixesproto/builder.sh +++ /dev/null @@ -1,16 +0,0 @@ -source $setup - -cp -r $src src -chmod -R u+w src -cd src -autoreconf -v --install -cd .. 
- -mkdir build -cd build - -../src/configure --prefix=$out -make -make install - -ln -sf $xextproto/lib/pkgconfig/*.pc $out/lib/pkgconfig diff --git a/nix/nixcrpkgs/pkgs/fixesproto/default.nix b/nix/nixcrpkgs/pkgs/fixesproto/default.nix deleted file mode 100644 index 49936287e..000000000 --- a/nix/nixcrpkgs/pkgs/fixesproto/default.nix +++ /dev/null @@ -1,41 +0,0 @@ -{ crossenv, xorg-macros, xextproto }: - -let - version = "2017-01-26"; - - name = "fixesproto-${version}"; - - src = crossenv.nixpkgs.fetchgit { - url = "https://anongit.freedesktop.org/git/xorg/proto/fixesproto"; - rev = "4292ec1c63180c5f4e7c0e606fa68c51913f172b"; - sha256 = "0mmx4cmkbrsmbq1j58g8gcx4h3qz9y4xbjpz7jcl7crki7zrz3kx"; - }; - - lib = crossenv.native.make_derivation rec { - inherit version name src; - - builder = ./builder.sh; - - native_inputs = [ - crossenv.nixpkgs.autoconf - crossenv.nixpkgs.automake - ]; - - ACLOCAL_PATH = "${xorg-macros}/lib/aclocal"; - - inherit xextproto; - }; - - license = crossenv.native.make_derivation { - name = "${name}-license"; - inherit src; - builder = ./license_builder.sh; - }; - - license_set = - xorg-macros.license_set // - xextproto.license_set // - { "${name}" = license; }; - -in - lib // { inherit license_set; } diff --git a/nix/nixcrpkgs/pkgs/fixesproto/license_builder.sh b/nix/nixcrpkgs/pkgs/fixesproto/license_builder.sh deleted file mode 100644 index 8e347c361..000000000 --- a/nix/nixcrpkgs/pkgs/fixesproto/license_builder.sh +++ /dev/null @@ -1,11 +0,0 @@ -source $setup - -license=$(cat $src/COPYING) - -cat > $out <fixesproto - -
-$license
-
-EOF diff --git a/nix/nixcrpkgs/pkgs/gdb/builder.sh b/nix/nixcrpkgs/pkgs/gdb/builder.sh deleted file mode 100644 index 8133f5199..000000000 --- a/nix/nixcrpkgs/pkgs/gdb/builder.sh +++ /dev/null @@ -1,31 +0,0 @@ -source $setup - -tar -xf $src - -cd gdb-$version -for patch in $patches -do - echo applying patch $patch - patch -p1 -i $patch -done -cd .. - -mkdir build -cd build - -export LDFLAGS="-L$curses/lib" -export CFLAGS="-I$curses/include" -export CXXFLAGS="-I$curses/include" - -../gdb-$version/configure \ - --prefix=$out \ - --host=$host \ - --target=$host \ - --with-expat=yes --with-libexpat-prefix=$expat \ - --enable-tui \ - --disable-win32-registry \ - --disable-rpath - -make - -make install diff --git a/nix/nixcrpkgs/pkgs/gdb/default.nix b/nix/nixcrpkgs/pkgs/gdb/default.nix deleted file mode 100644 index 43378cb68..000000000 --- a/nix/nixcrpkgs/pkgs/gdb/default.nix +++ /dev/null @@ -1,38 +0,0 @@ -# Note: This package has only been tested on Windows, and the pdcurses library -# it uses does not support Linux in console mode or mac OS X. - -# Note: GDB has a bundled copy of readline that it uses. -# There is a --with-system-readline option we could try to use. - -# Note: consider providing a mingw-w64 isl to gdb because its configure script looks for it - -{ crossenv, expat, curses }: - -crossenv.make_derivation rec { - name = "gdb-${version}"; - - version = "7.12.1"; - - src = crossenv.nixpkgs.fetchurl { - url = "https://ftp.gnu.org/gnu/gdb/gdb-${version}.tar.xz"; - sha256 = "11ii260h1sd7v0bs3cz6d5l8gqxxgldry0md60ncjgixjw5nh1s6"; - }; - - patches = [ - # Make GCC better at finding source files. - # https://sourceware.org/ml/gdb-patches/2017-02/msg00693.html - ./substitute-path-all-filenames.patch - ]; - - native_inputs = [ - crossenv.nixpkgs.texinfo - crossenv.nixpkgs.bison - crossenv.nixpkgs.yacc - crossenv.nixpkgs.m4 - crossenv.nixpkgs.flex - ]; - - inherit expat curses; - - builder = ./builder.sh; -} diff --git a/nix/nixcrpkgs/pkgs/gdb/substitute-path-all-filenames.patch b/nix/nixcrpkgs/pkgs/gdb/substitute-path-all-filenames.patch deleted file mode 100644 index f1821a772..000000000 --- a/nix/nixcrpkgs/pkgs/gdb/substitute-path-all-filenames.patch +++ /dev/null @@ -1,14 +0,0 @@ -diff -ur gdb-7.12.1-orig/gdb/source.c gdb-7.12.1/gdb/source.c ---- gdb-7.12.1-orig/gdb/source.c 2017-02-24 19:33:13.340349333 -0800 -+++ gdb-7.12.1/gdb/source.c 2017-02-24 19:34:40.660349333 -0800 -@@ -1103,10 +1103,7 @@ - } - } - -- if (IS_ABSOLUTE_PATH (filename)) - { -- /* If filename is absolute path, try the source path -- substitution on it. 
*/ - char *rewritten_filename = rewrite_source_path (filename); - - if (rewritten_filename != NULL) diff --git a/nix/nixcrpkgs/pkgs/hello/builder.sh b/nix/nixcrpkgs/pkgs/hello/builder.sh deleted file mode 100644 index 36c57adc7..000000000 --- a/nix/nixcrpkgs/pkgs/hello/builder.sh +++ /dev/null @@ -1,7 +0,0 @@ -source $setup - -$host-gcc $src_file -o hello$exe_suffix - -mkdir -p $out/bin - -cp hello$exe_suffix $out/bin/ diff --git a/nix/nixcrpkgs/pkgs/hello/default.nix b/nix/nixcrpkgs/pkgs/hello/default.nix deleted file mode 100644 index c8e4d5208..000000000 --- a/nix/nixcrpkgs/pkgs/hello/default.nix +++ /dev/null @@ -1,7 +0,0 @@ -{ crossenv }: - -crossenv.make_derivation rec { - name = "hello"; - src_file = ./hello.c; - builder = ./builder.sh; -} diff --git a/nix/nixcrpkgs/pkgs/hello/hello.c b/nix/nixcrpkgs/pkgs/hello/hello.c deleted file mode 100644 index 98e2277bc..000000000 --- a/nix/nixcrpkgs/pkgs/hello/hello.c +++ /dev/null @@ -1,15 +0,0 @@ -#include - -#ifdef _WIN32 -#include -#endif - -int main(int argc, char ** argv) -{ - printf("Hello, World!\n"); - -#ifdef _WIN32 - MessageBoxA(NULL, "Hello, World!", "Hello", MB_OK); -#endif - return 0; -} diff --git a/nix/nixcrpkgs/pkgs/hello_cpp/builder.sh b/nix/nixcrpkgs/pkgs/hello_cpp/builder.sh deleted file mode 100644 index dae159775..000000000 --- a/nix/nixcrpkgs/pkgs/hello_cpp/builder.sh +++ /dev/null @@ -1,8 +0,0 @@ -. $setup - -$host-g++ $src_file -o hello$exe_suffix - -mkdir -p $out/bin/ - -cp hello$exe_suffix $out/bin/ - diff --git a/nix/nixcrpkgs/pkgs/hello_cpp/default.nix b/nix/nixcrpkgs/pkgs/hello_cpp/default.nix deleted file mode 100644 index 8f35d0b6f..000000000 --- a/nix/nixcrpkgs/pkgs/hello_cpp/default.nix +++ /dev/null @@ -1,7 +0,0 @@ -{ crossenv }: - -crossenv.make_derivation rec { - name = "hello_cpp"; - src_file = ./hello.cpp; - builder = ./builder.sh; -} diff --git a/nix/nixcrpkgs/pkgs/hello_cpp/hello.cpp b/nix/nixcrpkgs/pkgs/hello_cpp/hello.cpp deleted file mode 100644 index d081ace70..000000000 --- a/nix/nixcrpkgs/pkgs/hello_cpp/hello.cpp +++ /dev/null @@ -1,15 +0,0 @@ -#include - -#ifdef _WIN32 -#include -#endif - -int main(int argc, char ** argv) -{ - std::cout << "hello world" << std::endl; - -#ifdef _WIN32 - MessageBoxA(NULL, "Hello world", "Hello Box", MB_OK); -#endif - return 0; -} diff --git a/nix/nixcrpkgs/pkgs/inputproto/builder.sh b/nix/nixcrpkgs/pkgs/inputproto/builder.sh deleted file mode 100644 index ff349bbcd..000000000 --- a/nix/nixcrpkgs/pkgs/inputproto/builder.sh +++ /dev/null @@ -1,13 +0,0 @@ -source $setup - -tar -xf $src -mv inputproto-* proto - -mkdir build -cd build - -../proto/configure --prefix=$out - -make - -make install diff --git a/nix/nixcrpkgs/pkgs/inputproto/default.nix b/nix/nixcrpkgs/pkgs/inputproto/default.nix deleted file mode 100644 index 7c384049d..000000000 --- a/nix/nixcrpkgs/pkgs/inputproto/default.nix +++ /dev/null @@ -1,27 +0,0 @@ -{ crossenv }: - -let - version = "2.3.2"; - - name = "inputproto-${version}"; - - src = crossenv.nixpkgs.fetchurl { - url = "https://xorg.freedesktop.org/releases/individual/proto/inputproto-${version}.tar.bz2"; - sha256 = "07gk7v006zqn3dcfh16l06gnccy7xnqywf3vl9c209ikazsnlfl9"; - }; - - lib = crossenv.native.make_derivation rec { - inherit version name src; - builder = ./builder.sh; - }; - - license = crossenv.native.make_derivation { - name = "${name}-license"; - inherit src; - builder = ./license_builder.sh; - }; - - license_set = { "${name}" = license; }; - -in - lib // { inherit license_set; } diff --git 
a/nix/nixcrpkgs/pkgs/inputproto/license_builder.sh b/nix/nixcrpkgs/pkgs/inputproto/license_builder.sh deleted file mode 100644 index aad143efb..000000000 --- a/nix/nixcrpkgs/pkgs/inputproto/license_builder.sh +++ /dev/null @@ -1,14 +0,0 @@ -source $setup - -tar -xf $src -mv inputproto-* inputproto - -license=$(cat inputproto/COPYING) - -cat > $out <inputproto - -
-$license
-</pre>
-EOF diff --git a/nix/nixcrpkgs/pkgs/ion/builder.sh b/nix/nixcrpkgs/pkgs/ion/builder.sh deleted file mode 100644 index 6ac7950a2..000000000 --- a/nix/nixcrpkgs/pkgs/ion/builder.sh +++ /dev/null @@ -1,20 +0,0 @@ -source $setup - -tar -xf $src -mv bitwise-* bitwise - -mkdir build -cd build - -$host-gcc -O2 ../bitwise/ion/main.c -o ion$exe_suffix \ - -DIONHOME=\"$out/ionhome\" - -# TODO: make -DIONHOME actually work - -mkdir $out - -mkdir $out/bin -mv ion$exe_suffix $out/bin/ - -mkdir $out/ionhome -mv ../bitwise/ion/system_packages $out/ionhome/ diff --git a/nix/nixcrpkgs/pkgs/ion/default.nix b/nix/nixcrpkgs/pkgs/ion/default.nix deleted file mode 100644 index 222f6c885..000000000 --- a/nix/nixcrpkgs/pkgs/ion/default.nix +++ /dev/null @@ -1,21 +0,0 @@ -{ crossenv }: - -# TODO: SDL integration would be nice, so we can use noir.ion - -let - version = "7524dc7"; # 2018-04-30 - - name = "ion-${version}"; - - src = crossenv.nixpkgs.fetchurl { - url = "https://github.com/DavidEGrayson/bitwise/archive/${version}.tar.gz"; - sha256 = "169j7yhphvcyfbqgi5p1i4lhd9n5a31n99fv2kxyrh7djmr8g2s9"; - }; - - ion = crossenv.make_derivation { - inherit version name src; - builder = ./builder.sh; - }; - -in - ion diff --git a/nix/nixcrpkgs/pkgs/kbproto/builder.sh b/nix/nixcrpkgs/pkgs/kbproto/builder.sh deleted file mode 100644 index 3f21643e8..000000000 --- a/nix/nixcrpkgs/pkgs/kbproto/builder.sh +++ /dev/null @@ -1,13 +0,0 @@ -source $setup - -tar -xf $src -mv kbproto-* proto - -mkdir build -cd build - -../proto/configure --prefix=$out - -make - -make install diff --git a/nix/nixcrpkgs/pkgs/kbproto/default.nix b/nix/nixcrpkgs/pkgs/kbproto/default.nix deleted file mode 100644 index 6af2fcf90..000000000 --- a/nix/nixcrpkgs/pkgs/kbproto/default.nix +++ /dev/null @@ -1,27 +0,0 @@ -{ crossenv }: - -let - version = "1.0.7"; - - name = "kbproto-${version}"; - - src = crossenv.nixpkgs.fetchurl { - url = "https://xorg.freedesktop.org/releases/individual/proto/kbproto-${version}.tar.bz2"; - sha256 = "0mxqj1pzhjpz9495vrjnpi10kv2n1s4vs7di0sh3yvipfq5j30pq"; - }; - - lib = crossenv.native.make_derivation rec { - inherit version name src; - builder = ./builder.sh; - }; - - license = crossenv.native.make_derivation { - name = "${name}-license"; - inherit src; - builder = ./license_builder.sh; - }; - - license_set = { "${name}" = license; }; - -in - lib // { inherit license_set; } diff --git a/nix/nixcrpkgs/pkgs/kbproto/license_builder.sh b/nix/nixcrpkgs/pkgs/kbproto/license_builder.sh deleted file mode 100644 index 65776595a..000000000 --- a/nix/nixcrpkgs/pkgs/kbproto/license_builder.sh +++ /dev/null @@ -1,14 +0,0 @@ -source $setup - -tar -xf $src -mv kbproto-* kbproto - -license=$(cat kbproto/COPYING) - -cat > $out <kbproto - -
-$license
-</pre>
-EOF diff --git a/nix/nixcrpkgs/pkgs/libgmp/builder.sh b/nix/nixcrpkgs/pkgs/libgmp/builder.sh deleted file mode 100644 index c284c9a5f..000000000 --- a/nix/nixcrpkgs/pkgs/libgmp/builder.sh +++ /dev/null @@ -1,9 +0,0 @@ -source $setup - -tar -xf $src - -mkdir build -cd build -../gmp-$version/configure --host=$host --prefix=$out --disable-shared -make -make install diff --git a/nix/nixcrpkgs/pkgs/libgmp/default.nix b/nix/nixcrpkgs/pkgs/libgmp/default.nix deleted file mode 100644 index 52af7aefb..000000000 --- a/nix/nixcrpkgs/pkgs/libgmp/default.nix +++ /dev/null @@ -1,16 +0,0 @@ -{ crossenv }: - -crossenv.make_derivation rec { - name = "gmp-${version}"; - version = "6.1.2"; - builder = ./builder.sh; - native_inputs = [ crossenv.nixpkgs.m4 ]; - - src = crossenv.nixpkgs.fetchurl { - urls = [ "mirror://gnu/gmp/${name}.tar.bz2" - "ftp://ftp.gmplib.org/pub/${name}/${name}.tar.bz2" - ]; - sha256 = "1clg7pbpk6qwxj5b2mw0pghzawp2qlm3jf9gdd8i6fl6yh2bnxaj"; - }; - -} diff --git a/nix/nixcrpkgs/pkgs/libsigsegv/builder.sh b/nix/nixcrpkgs/pkgs/libsigsegv/builder.sh deleted file mode 100644 index 39d7ddc01..000000000 --- a/nix/nixcrpkgs/pkgs/libsigsegv/builder.sh +++ /dev/null @@ -1,30 +0,0 @@ -source $setup - -tar -xf $src - -cd libsigsegv-$version -patch -p1 << 'HEREDOC' ---- a/src/fault-linux-i386.h 2020-06-25 23:46:02.099235491 +0000 -+++ b/src/fault-linux-i386.h 2020-06-25 23:45:48.679156892 +0000 -@@ -18,6 +18,7 @@ - - #include "fault-posix-ucontext.h" - -+#define HAVE_STACKVMA 0 - #if defined __x86_64__ - /* 64 bit registers */ - -HEREDOC -cd .. - -mkdir build -cd build - -../libsigsegv-$version/configure \ - --host=$host \ - --prefix=$out \ - --enable-static=yes \ - --enable-shared=no - -make -make install diff --git a/nix/nixcrpkgs/pkgs/libsigsegv/default.nix b/nix/nixcrpkgs/pkgs/libsigsegv/default.nix deleted file mode 100644 index 60e8c7b6a..000000000 --- a/nix/nixcrpkgs/pkgs/libsigsegv/default.nix +++ /dev/null @@ -1,13 +0,0 @@ -{ crossenv }: - -crossenv.make_derivation rec { - name = "libsigsegv-${version}"; - version = "2.12"; - - src = crossenv.nixpkgs.fetchurl { - url = "mirror://gnu/libsigsegv/${name}.tar.gz"; - sha256 = "1dlhqf4igzpqayms25lkhycjq1ccavisx8cnb3y4zapbkqsszq9s"; - }; - - builder = ./builder.sh; -} diff --git a/nix/nixcrpkgs/pkgs/libudev/builder.sh b/nix/nixcrpkgs/pkgs/libudev/builder.sh deleted file mode 100644 index aad209a22..000000000 --- a/nix/nixcrpkgs/pkgs/libudev/builder.sh +++ /dev/null @@ -1,71 +0,0 @@ -source $setup - -tar -xf $src -mv systemd-* systemd - -cd systemd -for patch in $patches; do - echo applying patch $patch - patch -p1 -i $patch -done -cd .. - -$host-g++ -x c++ -c $size_flags - -o test.o < -#include -#include -static_assert(sizeof(pid_t) == SIZEOF_PID_T); -static_assert(sizeof(uid_t) == SIZEOF_UID_T); -static_assert(sizeof(gid_t) == SIZEOF_GID_T); -static_assert(sizeof(time_t) == SIZEOF_TIME_T); -static_assert(sizeof(rlim_t) == SIZEOF_RLIM_T); -static_assert(sizeof(dev_t) == SIZEOF_DEV_T); -static_assert(sizeof(ino_t) == SIZEOF_INO_T); -EOF - -rm test.o - -mkdir build -cd build - -# -DHAVE_SECURE_GETENV: We don't have secure_getenv but we want to avoid a header error, -# and hopefully secure_getenv isn't actually needed by libudev. 
- -$host-gcc -c -Werror -I$fill $fill/*.c -$host-gcc -c $CFLAGS \ - -I../systemd/src/libudev \ - -I../systemd/src/basic \ - -I../systemd/src/libsystemd/sd-device \ - -I../systemd/src/libsystemd/sd-hwdb \ - -I../systemd/src/systemd \ - ../systemd/src/libudev/*.c -$host-gcc -c $CFLAGS \ - -I../systemd/src/libsystemd/sd-device \ - -I../systemd/src/basic \ - -I../systemd/src/systemd \ - ../systemd/src/libsystemd/sd-device/{device-enumerator,device-private,sd-device}.c -$host-gcc -c $CFLAGS \ - -DPACKAGE_STRING="\"libudev $version\"" \ - -DFALLBACK_HOSTNAME="\"localhost\"" \ - -DDEFAULT_HIERARCHY_NAME="\"hybrid\"" \ - -DDEFAULT_HIERARCHY=CGROUP_UNIFIED_SYSTEMD \ - -I../systemd/src/basic \ - -I../systemd/src/systemd \ - -I$fill \ - ../systemd/src/basic/{alloc-util,architecture,bus-label,cgroup-util,device-nodes,dirent-util,env-util,escape,extract-word,fd-util,fileio,fs-util,gunicode,glob-util,hashmap,hash-funcs,hexdecoct,hostname-util,io-util,log,login-util,mempool,mkdir,path-util,proc-cmdline,parse-util,prioq,process-util,random-util,signal-util,siphash24,socket-util,stat-util,string-table,string-util,strv,strxcpyx,syslog-util,terminal-util,time-util,unit-name,user-util,utf8,util,virt,MurmurHash2}.c -$host-ar cr libudev.a *.o - -mkdir -p $out/lib/pkgconfig $out/include -cp libudev.a $out/lib/ -cp ../systemd/src/libudev/libudev.h $out/include/ - -cat > $out/lib/pkgconfig/libudev.pc < - - With parts from the musl C library - Copyright 2005-2014 Rich Felker, et al. - - systemd is free software; you can redistribute it and/or modify it - under the terms of the GNU Lesser General Public License as published by - the Free Software Foundation; either version 2.1 of the License, or - (at your option) any later version. - - systemd is distributed in the hope that it will be useful, but - WITHOUT ANY WARRANTY; without even the implied warranty of - MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU - Lesser General Public License for more details. - - You should have received a copy of the GNU Lesser General Public License - along with systemd; If not, see . 
-***/ - -#include -#include -#include - -static const char *consume_nonarg(const char *fmt) -{ - do { - if (*fmt == '\0') - return fmt; - } while (*fmt++ != '%'); - return fmt; -} - -static const char *consume_num(const char *fmt) -{ - for (;*fmt >= '0' && *fmt <= '9'; fmt++) - /* do nothing */; - return fmt; -} - -static const char *consume_argn(const char *fmt, size_t *arg) -{ - const char *p = fmt; - size_t val = 0; - - if (*p < '1' || *p > '9') - return fmt; - do { - val = 10*val + (*p++ - '0'); - } while (*p >= '0' && *p <= '9'); - - if (*p != '$') - return fmt; - *arg = val; - return p+1; -} - -static const char *consume_flags(const char *fmt) -{ - while (1) { - switch (*fmt) { - case '#': - case '0': - case '-': - case ' ': - case '+': - case '\'': - case 'I': - fmt++; - continue; - } - return fmt; - } -} - -enum state { - BARE, - LPRE, - LLPRE, - HPRE, - HHPRE, - BIGLPRE, - ZTPRE, - JPRE, - STOP -}; - -enum type { - NONE, - PTR, - INT, - UINT, - ULLONG, - LONG, - ULONG, - SHORT, - USHORT, - CHAR, - UCHAR, - LLONG, - SIZET, - IMAX, - UMAX, - PDIFF, - UIPTR, - DBL, - LDBL, - MAXTYPE -}; - -static const short pa_types[MAXTYPE] = { - [NONE] = PA_INT, - [PTR] = PA_POINTER, - [INT] = PA_INT, - [UINT] = PA_INT, - [ULLONG] = PA_INT | PA_FLAG_LONG_LONG, - [LONG] = PA_INT | PA_FLAG_LONG, - [ULONG] = PA_INT | PA_FLAG_LONG, - [SHORT] = PA_INT | PA_FLAG_SHORT, - [USHORT] = PA_INT | PA_FLAG_SHORT, - [CHAR] = PA_CHAR, - [UCHAR] = PA_CHAR, - [LLONG] = PA_INT | PA_FLAG_LONG_LONG, - [SIZET] = PA_INT | PA_FLAG_LONG, - [IMAX] = PA_INT | PA_FLAG_LONG_LONG, - [UMAX] = PA_INT | PA_FLAG_LONG_LONG, - [PDIFF] = PA_INT | PA_FLAG_LONG_LONG, - [UIPTR] = PA_INT | PA_FLAG_LONG, - [DBL] = PA_DOUBLE, - [LDBL] = PA_DOUBLE | PA_FLAG_LONG_DOUBLE -}; - -#define S(x) [(x)-'A'] -#define E(x) (STOP + (x)) - -static const unsigned char states[]['z'-'A'+1] = { - { /* 0: bare types */ - S('d') = E(INT), S('i') = E(INT), - S('o') = E(UINT),S('u') = E(UINT),S('x') = E(UINT), S('X') = E(UINT), - S('e') = E(DBL), S('f') = E(DBL), S('g') = E(DBL), S('a') = E(DBL), - S('E') = E(DBL), S('F') = E(DBL), S('G') = E(DBL), S('A') = E(DBL), - S('c') = E(CHAR),S('C') = E(INT), - S('s') = E(PTR), S('S') = E(PTR), S('p') = E(UIPTR),S('n') = E(PTR), - S('m') = E(NONE), - S('l') = LPRE, S('h') = HPRE, S('L') = BIGLPRE, - S('z') = ZTPRE, S('j') = JPRE, S('t') = ZTPRE - }, { /* 1: l-prefixed */ - S('d') = E(LONG), S('i') = E(LONG), - S('o') = E(ULONG),S('u') = E(ULONG),S('x') = E(ULONG),S('X') = E(ULONG), - S('e') = E(DBL), S('f') = E(DBL), S('g') = E(DBL), S('a') = E(DBL), - S('E') = E(DBL), S('F') = E(DBL), S('G') = E(DBL), S('A') = E(DBL), - S('c') = E(INT), S('s') = E(PTR), S('n') = E(PTR), - S('l') = LLPRE - }, { /* 2: ll-prefixed */ - S('d') = E(LLONG), S('i') = E(LLONG), - S('o') = E(ULLONG),S('u') = E(ULLONG), - S('x') = E(ULLONG),S('X') = E(ULLONG), - S('n') = E(PTR) - }, { /* 3: h-prefixed */ - S('d') = E(SHORT), S('i') = E(SHORT), - S('o') = E(USHORT),S('u') = E(USHORT), - S('x') = E(USHORT),S('X') = E(USHORT), - S('n') = E(PTR), - S('h') = HHPRE - }, { /* 4: hh-prefixed */ - S('d') = E(CHAR), S('i') = E(CHAR), - S('o') = E(UCHAR),S('u') = E(UCHAR), - S('x') = E(UCHAR),S('X') = E(UCHAR), - S('n') = E(PTR) - }, { /* 5: L-prefixed */ - S('e') = E(LDBL),S('f') = E(LDBL),S('g') = E(LDBL), S('a') = E(LDBL), - S('E') = E(LDBL),S('F') = E(LDBL),S('G') = E(LDBL), S('A') = E(LDBL), - S('n') = E(PTR) - }, { /* 6: z- or t-prefixed (assumed to be same size) */ - S('d') = E(PDIFF),S('i') = E(PDIFF), - S('o') = E(SIZET),S('u') = E(SIZET), - 
S('x') = E(SIZET),S('X') = E(SIZET), - S('n') = E(PTR) - }, { /* 7: j-prefixed */ - S('d') = E(IMAX), S('i') = E(IMAX), - S('o') = E(UMAX), S('u') = E(UMAX), - S('x') = E(UMAX), S('X') = E(UMAX), - S('n') = E(PTR) - } -}; - -size_t parse_printf_format(const char *fmt, size_t n, int *types) -{ - size_t i = 0; - size_t last = 0; - - memset(types, 0, n); - - while (1) { - size_t arg; - unsigned int state; - - fmt = consume_nonarg(fmt); - if (*fmt == '\0') - break; - if (*fmt == '%') { - fmt++; - continue; - } - arg = 0; - fmt = consume_argn(fmt, &arg); - /* flags */ - fmt = consume_flags(fmt); - /* width */ - if (*fmt == '*') { - size_t warg = 0; - fmt = consume_argn(fmt+1, &warg); - if (warg == 0) - warg = ++i; - if (warg > last) - last = warg; - if (warg <= n && types[warg-1] == NONE) - types[warg-1] = INT; - } else - fmt = consume_num(fmt); - /* precision */ - if (*fmt == '.') { - fmt++; - if (*fmt == '*') { - size_t parg = 0; - fmt = consume_argn(fmt+1, &parg); - if (parg == 0) - parg = ++i; - if (parg > last) - last = parg; - if (parg <= n && types[parg-1] == NONE) - types[parg-1] = INT; - } else { - if (*fmt == '-') - fmt++; - fmt = consume_num(fmt); - } - } - /* length modifier and conversion specifier */ - state = BARE; - do { - unsigned char c = *fmt++; - - if (c < 'A' || c > 'z') - continue; - state = states[state]S(c); - if (state == 0) - continue; - } while (state < STOP); - - if (state == E(NONE)) - continue; - - if (arg == 0) - arg = ++i; - if (arg > last) - last = arg; - if (arg <= n) - types[arg-1] = state - STOP; - } - - if (last > n) - last = n; - for (i = 0; i < last; i++) - types[i] = pa_types[types[i]]; - - return last; -} diff --git a/nix/nixcrpkgs/pkgs/libudev/fill/printf.h b/nix/nixcrpkgs/pkgs/libudev/fill/printf.h deleted file mode 100644 index ee64bdca4..000000000 --- a/nix/nixcrpkgs/pkgs/libudev/fill/printf.h +++ /dev/null @@ -1,50 +0,0 @@ -/*-*- Mode: C; c-basic-offset: 8; indent-tabs-mode: nil -*-*/ - -/*** - This file is part of systemd. - - Copyright 2014 Emil Renner Berthing - - With parts from the GNU C Library - Copyright 1991-2014 Free Software Foundation, Inc. - - systemd is free software; you can redistribute it and/or modify it - under the terms of the GNU Lesser General Public License as published by - the Free Software Foundation; either version 2.1 of the License, or - (at your option) any later version. - - systemd is distributed in the hope that it will be useful, but - WITHOUT ANY WARRANTY; without even the implied warranty of - MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU - Lesser General Public License for more details. - - You should have received a copy of the GNU Lesser General Public License - along with systemd; If not, see . -***/ - -#pragma once - -#include - -enum { /* C type: */ - PA_INT, /* int */ - PA_CHAR, /* int, cast to char */ - PA_WCHAR, /* wide char */ - PA_STRING, /* const char *, a '\0'-terminated string */ - PA_WSTRING, /* const wchar_t *, wide character string */ - PA_POINTER, /* void * */ - PA_FLOAT, /* float */ - PA_DOUBLE, /* double */ - PA_LAST -}; - -/* Flag bits that can be set in a type returned by `parse_printf_format'. 
*/ -#define PA_FLAG_MASK 0xff00 -#define PA_FLAG_LONG_LONG (1 << 8) -#define PA_FLAG_LONG_DOUBLE PA_FLAG_LONG_LONG -#define PA_FLAG_LONG (1 << 9) -#define PA_FLAG_SHORT (1 << 10) -#define PA_FLAG_PTR (1 << 11) - -size_t parse_printf_format(const char *fmt, size_t n, int *types); - diff --git a/nix/nixcrpkgs/pkgs/libudev/license_builder.sh b/nix/nixcrpkgs/pkgs/libudev/license_builder.sh deleted file mode 100644 index 9b612b976..000000000 --- a/nix/nixcrpkgs/pkgs/libudev/license_builder.sh +++ /dev/null @@ -1,14 +0,0 @@ -source $setup - -tar -xf $src -mv systemd-* systemd - -license=$(cat systemd/LICENSE.LGPL2.1) - -cat > $out <libudev (part of systemd) - -
-$license
-</pre>
-EOF diff --git a/nix/nixcrpkgs/pkgs/libudev/megapatch.patch b/nix/nixcrpkgs/pkgs/libudev/megapatch.patch deleted file mode 100644 index a22af551f..000000000 --- a/nix/nixcrpkgs/pkgs/libudev/megapatch.patch +++ /dev/null @@ -1,102 +0,0 @@ -diff -ur systemd-234-orig/src/basic/glob-util.c systemd-234/src/basic/glob-util.c ---- systemd-234-orig/src/basic/glob-util.c 2017-07-17 19:46:03.031674662 -0700 -+++ systemd-234/src/basic/glob-util.c 2017-07-22 20:11:56.931514364 -0700 -@@ -31,22 +31,8 @@ - int safe_glob(const char *path, int flags, glob_t *pglob) { - int k; - -- /* We want to set GLOB_ALTDIRFUNC ourselves, don't allow it to be set. */ -- assert(!(flags & GLOB_ALTDIRFUNC)); -- -- if (!pglob->gl_closedir) -- pglob->gl_closedir = (void (*)(void *)) closedir; -- if (!pglob->gl_readdir) -- pglob->gl_readdir = (struct dirent *(*)(void *)) readdir_no_dot; -- if (!pglob->gl_opendir) -- pglob->gl_opendir = (void *(*)(const char *)) opendir; -- if (!pglob->gl_lstat) -- pglob->gl_lstat = lstat; -- if (!pglob->gl_stat) -- pglob->gl_stat = stat; -- - errno = 0; -- k = glob(path, flags | GLOB_ALTDIRFUNC, NULL, pglob); -+ k = glob(path, flags, NULL, pglob); - - if (k == GLOB_NOMATCH) - return -ENOENT; -@@ -66,7 +52,7 @@ - - assert(path); - -- k = safe_glob(path, GLOB_NOSORT|GLOB_BRACE, &g); -+ k = safe_glob(path, GLOB_NOSORT, &g); - if (k == -ENOENT) - return false; - if (k < 0) -@@ -78,7 +64,7 @@ - _cleanup_globfree_ glob_t g = {}; - int k; - -- k = safe_glob(path, GLOB_NOSORT|GLOB_BRACE, &g); -+ k = safe_glob(path, GLOB_NOSORT, &g); - if (k < 0) - return k; - -diff -ur systemd-234-orig/src/basic/missing.h systemd-234/src/basic/missing.h ---- systemd-234-orig/src/basic/missing.h 2017-07-17 19:46:03.031674662 -0700 -+++ systemd-234/src/basic/missing.h 2017-07-21 08:02:12.349505168 -0700 -@@ -40,6 +40,22 @@ - #include - #include - -+static __inline__ char * canonicalize_file_name(const char * path) -+{ -+ return realpath(path, NULL); -+} -+ -+static __inline__ char * strndupa(const char * s, size_t n) -+{ -+ size_t length = strnlen(s, n); -+ char * new_string = (char *)__builtin_alloca(length + 1); -+ new_string[length] = 0; -+ memcpy(new_string, s, length); -+ return new_string; -+} -+ -+typedef int comparison_fn_t(const void *, const void *); -+ - #ifdef HAVE_AUDIT - #include - #endif -@@ -550,7 +566,7 @@ - # ifdef HAVE___SECURE_GETENV - # define secure_getenv __secure_getenv - # else --# error "neither secure_getenv nor __secure_getenv are available" -+# define secure_getenv getenv - # endif - #endif - -diff -ur systemd-234-orig/src/basic/mkdir.c systemd-234/src/basic/mkdir.c ---- systemd-234-orig/src/basic/mkdir.c 2017-07-17 19:46:03.031674662 -0700 -+++ systemd-234/src/basic/mkdir.c 2017-07-22 21:09:51.065274838 -0700 -@@ -28,6 +28,7 @@ - #include "path-util.h" - #include "stat-util.h" - #include "user-util.h" -+#include "missing.h" - - int mkdir_safe_internal(const char *path, mode_t mode, uid_t uid, gid_t gid, mkdir_func_t _mkdir) { - struct stat st; -diff -ur systemd-234-orig/src/basic/parse-util.c systemd-234/src/basic/parse-util.c ---- systemd-234-orig/src/basic/parse-util.c 2017-07-17 19:46:03.031674662 -0700 -+++ systemd-234/src/basic/parse-util.c 2017-07-21 07:59:05.337491775 -0700 -@@ -30,6 +30,7 @@ - #include "parse-util.h" - #include "process-util.h" - #include "string-util.h" -+#include "missing.h" - - int parse_boolean(const char *v) { - assert(v); diff --git a/nix/nixcrpkgs/pkgs/libusb/builder.sh b/nix/nixcrpkgs/pkgs/libusb/builder.sh deleted file mode 100644 index 
211ee465a..000000000 --- a/nix/nixcrpkgs/pkgs/libusb/builder.sh +++ /dev/null @@ -1,27 +0,0 @@ -source $setup - -tar -xf $src -mv libusb-* libusb - -mkdir build -cd build - -if [ -n "$libudev" ]; then - export CFLAGS="${CFLAGS:=} -isystem $libudev/include" - export LDFLAGS="${LDFLAGS:=} -L$libudev/lib" -fi - -../libusb/configure \ - --prefix=$out \ - --host=$host \ - --enable-static \ - --disable-shared - -make - -make install - -if [ -n "$libudev" ]; then - ln -s $libudev/lib/pkgconfig/*.pc $out/lib/pkgconfig/ - echo "Requires: libudev" >> $out/lib/pkgconfig/libusb-1.0.pc -fi diff --git a/nix/nixcrpkgs/pkgs/libusb/default.nix b/nix/nixcrpkgs/pkgs/libusb/default.nix deleted file mode 100644 index 3edc45f0b..000000000 --- a/nix/nixcrpkgs/pkgs/libusb/default.nix +++ /dev/null @@ -1,20 +0,0 @@ -{ crossenv, libudev }: - -let - version = "1.0.22"; - - name = "libusbp-${version}"; - - src = crossenv.nixpkgs.fetchurl { - url = "https://github.com/libusb/libusb/releases/download/v1.0.22/libusb-1.0.22.tar.bz2"; - sha256 = "0mw1a5ss4alg37m6bd4k44v35xwrcwp5qm4s686q1nsgkbavkbkm"; - }; - - lib = crossenv.make_derivation { - inherit version name src; - builder = ./builder.sh; - libudev = if crossenv.os == "linux" then libudev else null; - }; - -in - lib diff --git a/nix/nixcrpkgs/pkgs/libusbp/builder.sh b/nix/nixcrpkgs/pkgs/libusbp/builder.sh deleted file mode 100644 index f117fa578..000000000 --- a/nix/nixcrpkgs/pkgs/libusbp/builder.sh +++ /dev/null @@ -1,23 +0,0 @@ -source $setup - -tar -xf $src -mv libusbp-* libusbp - -mkdir build -cd build - -cmake-cross ../libusbp \ - -DCMAKE_INSTALL_PREFIX=$out \ - -DBUILD_SHARED_LIBS=false - -make - -make install - -if [ -d $out/bin ]; then - find $out/bin -type f -exec $host-strip {} + -fi - -if [ -n "$libudev" ]; then - ln -s $libudev/lib/pkgconfig/*.pc $out/lib/pkgconfig/ -fi diff --git a/nix/nixcrpkgs/pkgs/libusbp/default.nix b/nix/nixcrpkgs/pkgs/libusbp/default.nix deleted file mode 100644 index 75c58a241..000000000 --- a/nix/nixcrpkgs/pkgs/libusbp/default.nix +++ /dev/null @@ -1,43 +0,0 @@ -{ crossenv, libudev }: - -let - version = "1.1.0"; - - name = "libusbp-${version}"; - - src = crossenv.nixpkgs.fetchurl { - url = "https://github.com/pololu/libusbp/archive/${version}.tar.gz"; - sha256 = "18l34580ci1pq8p3133dnp8nzlz17qw2796xsz1gn0aca6978izc"; - }; - - lib = crossenv.make_derivation { - inherit version name src; - builder = ./builder.sh; - - cross_inputs = - if crossenv.os == "linux" then - [ libudev ] - else - []; - - libudev = if crossenv.os == "linux" then libudev else null; - }; - - examples = crossenv.make_derivation { - name = "${name}-examples"; - inherit src version; - builder = ./examples_builder.sh; - cross_inputs = [ lib ]; - }; - - license = crossenv.native.make_derivation { - name = "${name}-license"; - inherit src; - builder = ./license_builder.sh; - }; - - license_set = - (if crossenv.os == "linux" then libudev.license_set else {}) // - { "${name}" = license; }; -in - lib // { inherit examples license_set; } diff --git a/nix/nixcrpkgs/pkgs/libusbp/examples_builder.sh b/nix/nixcrpkgs/pkgs/libusbp/examples_builder.sh deleted file mode 100644 index 563608be0..000000000 --- a/nix/nixcrpkgs/pkgs/libusbp/examples_builder.sh +++ /dev/null @@ -1,15 +0,0 @@ -source $setup - -tar -xf $src -mv libusbp-* libusbp - -mkdir build -cd build - -FLAGS="-std=gnu++11 $(pkg-config-cross --cflags --libs libusbp-1)" - -$host-g++ ../libusbp/examples/lsusb/*.cpp -o lsusb$exe_suffix $FLAGS -$host-g++ ../libusbp/examples/lsport/*.cpp -o lsport$exe_suffix $FLAGS - 
-mkdir -p $out/bin -cp * $out/bin/ diff --git a/nix/nixcrpkgs/pkgs/libusbp/license_builder.sh b/nix/nixcrpkgs/pkgs/libusbp/license_builder.sh deleted file mode 100644 index 93111231a..000000000 --- a/nix/nixcrpkgs/pkgs/libusbp/license_builder.sh +++ /dev/null @@ -1,20 +0,0 @@ -source $setup - -tar -xf $src -mv libusbp-* libusbp - -license=$(cat libusbp/LICENSE.txt) - -{ - cat > $out <Pololu USB library (libusbp) - -

- The Pololu USB Library (libusbp) is licensed under the following license:
-</p>

-
-<pre>
-$license
-</pre>
-EOF -} > $out diff --git a/nix/nixcrpkgs/pkgs/libx11/builder.sh b/nix/nixcrpkgs/pkgs/libx11/builder.sh deleted file mode 100644 index 9f46ef8ac..000000000 --- a/nix/nixcrpkgs/pkgs/libx11/builder.sh +++ /dev/null @@ -1,24 +0,0 @@ -source $setup - -tar -xf $src -mv libX11-* libx11 - -mkdir build -cd build - -PKG_CONFIG=pkg-config-cross \ -../libx11/configure --prefix $out $configure_flags - -make - -make install - -# Make static linking work. -sed -i 's/Requires.private/Requires/' $out/lib/pkgconfig/*.pc - -ln -s x11-xcb.pc $out/lib/pkgconfig/X11-xcb.pc -ln -s x11.pc $out/lib/pkgconfig/X11.pc - -ln -sf $xproto/lib/pkgconfig/*.pc $out/lib/pkgconfig/ -ln -sf $kbproto/lib/pkgconfig/*.pc $out/lib/pkgconfig/ -ln -sf $libxcb/lib/pkgconfig/*.pc $out/lib/pkgconfig/ diff --git a/nix/nixcrpkgs/pkgs/libx11/default.nix b/nix/nixcrpkgs/pkgs/libx11/default.nix deleted file mode 100644 index a20b63f75..000000000 --- a/nix/nixcrpkgs/pkgs/libx11/default.nix +++ /dev/null @@ -1,55 +0,0 @@ -{ crossenv, xorg-macros, xproto, libxcb, xtrans, - xextproto, inputproto, kbproto }: - -let - version = "1.6.5"; - - name = "libx11-${version}"; - - src = crossenv.nixpkgs.fetchurl { - url = "https://xorg.freedesktop.org/releases/individual/libX11-${version}.tar.bz2"; - sha256 = "0pa3cfp6h9rl2vxmkph65250gfqyki0ccqyaan6bl9d25gdr0f2d"; - }; - - lib = crossenv.make_derivation rec { - inherit version name src; - - builder = ./builder.sh; - - configure_flags = - "--host=${crossenv.host} " + - "--disable-malloc0returnsnull " + - "--enable-static " + - "--disable-shared"; - - cross_inputs = [ - xorg-macros - xproto - libxcb - xtrans - xextproto - inputproto - kbproto - ]; - - inherit kbproto xproto libxcb; - }; - - license = crossenv.native.make_derivation { - name = "${name}-license"; - inherit src; - builder = ./license_builder.sh; - }; - - license_set = - xorg-macros.license_set // - xproto.license_set // - libxcb.license_set // - xtrans.license_set // - xextproto.license_set // - inputproto.license_set // - kbproto.license_set // - { "${name}" = license; }; - -in - lib // { inherit license_set; } diff --git a/nix/nixcrpkgs/pkgs/libx11/license_builder.sh b/nix/nixcrpkgs/pkgs/libx11/license_builder.sh deleted file mode 100644 index 030bafd88..000000000 --- a/nix/nixcrpkgs/pkgs/libx11/license_builder.sh +++ /dev/null @@ -1,14 +0,0 @@ -source $setup - -tar -xf $src -mv libX11-* libx11 - -license=$(cat libx11/COPYING) - -cat > $out <libx11 - -
-$license
-</pre>
-EOF diff --git a/nix/nixcrpkgs/pkgs/libxall/builder.rb b/nix/nixcrpkgs/pkgs/libxall/builder.rb deleted file mode 100644 index f7937666a..000000000 --- a/nix/nixcrpkgs/pkgs/libxall/builder.rb +++ /dev/null @@ -1,40 +0,0 @@ -require 'pathname' -require 'fileutils' -include FileUtils - -OutDir = Pathname(ENV.fetch('out')) -LibDirs = ENV.fetch('libs').split(' ').map { |s| Pathname(s) } - -def symlink_file(target, dest) - real_target = target.realpath - - if dest.exist? - if !dest.symlink? - raise "Want to link #{dest} (to #{target}) " \ - "but it already exists and is not a symlink." - end - - current_target = dest.readlink - if current_target != real_target - raise "Conflict: #{dest} links to #{current_target} " \ - "but we want to link it to #{real_target}." - end - else - dest.make_symlink(real_target) - end -end - -def recursive_symlink(target, dest) - if target.directory? - dest.mkdir if !dest.directory? - target.children(false).each do |c| - recursive_symlink(target + c, dest + c) - end - else - symlink_file(target, dest) - end -end - -LibDirs.each do |libdir| - recursive_symlink(libdir, OutDir) -end diff --git a/nix/nixcrpkgs/pkgs/libxall/default.nix b/nix/nixcrpkgs/pkgs/libxall/default.nix deleted file mode 100644 index f570802ea..000000000 --- a/nix/nixcrpkgs/pkgs/libxall/default.nix +++ /dev/null @@ -1,15 +0,0 @@ -# Amalgamates all of our X libraries into one derivation to make it easier to -# build projects like Qt that expect them all to be installed in one place. - -{ crossenv, libs }: - -let - lib = crossenv.make_derivation { - name = "libxall"; - builder.ruby = ./builder.rb; - inherit libs; - }; - - license_set = builtins.foldl' (x: y: x // y) {} (map (x: x.license_set) libs); -in - lib // { inherit license_set; } diff --git a/nix/nixcrpkgs/pkgs/libxau/builder.sh b/nix/nixcrpkgs/pkgs/libxau/builder.sh deleted file mode 100644 index b1dd74899..000000000 --- a/nix/nixcrpkgs/pkgs/libxau/builder.sh +++ /dev/null @@ -1,16 +0,0 @@ -source $setup - -tar -xf $src -mv libXau-* libxau - -mkdir build -cd build - -PKG_CONFIG=pkg-config-cross \ -../libxau/configure --prefix=$out $configure_flags - -make - -make install - -ln -s $xproto/lib/pkgconfig/xproto.pc $out/lib/pkgconfig/ diff --git a/nix/nixcrpkgs/pkgs/libxau/default.nix b/nix/nixcrpkgs/pkgs/libxau/default.nix deleted file mode 100644 index fcfabbb0c..000000000 --- a/nix/nixcrpkgs/pkgs/libxau/default.nix +++ /dev/null @@ -1,40 +0,0 @@ -{ crossenv, xorg-macros, xproto }: - -let - version = "1.0.8"; - - name = "libxau-${version}"; - - src = crossenv.nixpkgs.fetchurl { - url = "https://www.x.org/archive/individual/lib/libXau-${version}.tar.bz2"; - sha256 = "1wm4pv12f36cwzhldpp7vy3lhm3xdcnp4f184xkxsp7b18r7gm7x"; - }; - - lib = crossenv.make_derivation rec { - inherit version name src; - - builder = ./builder.sh; - - configure_flags = - "--host=${crossenv.host} " + - "--enable-static " + - "--disable-shared"; - - cross_inputs = [ xorg-macros xproto ]; - - inherit xproto; - }; - - license = crossenv.native.make_derivation { - name = "${name}-license"; - inherit src; - builder = ./license_builder.sh; - }; - - license_set = - xorg-macros.license_set // - xproto.license_set // - { "${name}" = license; }; - -in - lib // { inherit license_set; } diff --git a/nix/nixcrpkgs/pkgs/libxau/license_builder.sh b/nix/nixcrpkgs/pkgs/libxau/license_builder.sh deleted file mode 100644 index fb3825f7a..000000000 --- a/nix/nixcrpkgs/pkgs/libxau/license_builder.sh +++ /dev/null @@ -1,14 +0,0 @@ -source $setup - -tar -xf $src -mv libXau-* libxau - 
-license=$(cat libxau/COPYING) - -cat > $out <libxau - -
-$license
-</pre>
-EOF diff --git a/nix/nixcrpkgs/pkgs/libxcb/builder.sh b/nix/nixcrpkgs/pkgs/libxcb/builder.sh deleted file mode 100644 index 19162ca98..000000000 --- a/nix/nixcrpkgs/pkgs/libxcb/builder.sh +++ /dev/null @@ -1,28 +0,0 @@ -source $setup - -tar -xf $src -mv libxcb-* libxcb - -cd libxcb -for patch in $patches; do - echo applying patch $patch - patch -p1 -i $patch -done -cd .. - -mkdir build -cd build - -PKG_CONFIG=pkg-config-cross \ -../libxcb/configure --prefix=$out $configure_flags - -make - -make install - -# Make static linking work. -sed -i 's/Requires.private/Requires/' $out/lib/pkgconfig/*.pc -sed -i 's/Libs.private/Libs/' $out/lib/pkgconfig/*.pc - -ln -sf $libxau/lib/pkgconfig/*.pc $out/lib/pkgconfig/ - diff --git a/nix/nixcrpkgs/pkgs/libxcb/default.nix b/nix/nixcrpkgs/pkgs/libxcb/default.nix deleted file mode 100644 index d927f1555..000000000 --- a/nix/nixcrpkgs/pkgs/libxcb/default.nix +++ /dev/null @@ -1,56 +0,0 @@ -{ crossenv, xcb-proto, libxau }: - -let - version = "1.12"; - - name = "libxcb-${version}"; - - src = crossenv.nixpkgs.fetchurl { - url = "https://xcb.freedesktop.org/dist/libxcb-${version}.tar.bz2"; - sha256 = "0nvv0la91cf8p5qqlb3r5xnmg1jn2wphn4fb5jfbr6byqsvv3psa"; - }; - - lib = crossenv.make_derivation rec { - inherit version name src; - - patches = [ ./no-pthread-stubs.patch ]; - - builder = ./builder.sh; - - configure_flags = - "--host=${crossenv.host} " + - "--enable-static " + - "--disable-shared " + - "--enable-xinput " + - "--enable-xkb"; - - cross_inputs = [ xcb-proto libxau ]; - - inherit libxau; - - native_inputs = [ crossenv.nixpkgs.python2 ]; - }; - - examples = crossenv.make_derivation rec { - name = "libxcb-examples"; - - builder = ./examples_builder.sh; - - cross_inputs = [ lib ]; - - example1 = ./example1.c; - }; - - license = crossenv.native.make_derivation { - name = "${name}-license"; - inherit src; - builder = ./license_builder.sh; - }; - - license_set = - xcb-proto.license_set // - libxau.license_set // - { "${name}" = license; }; - -in - lib // { inherit examples license_set; } diff --git a/nix/nixcrpkgs/pkgs/libxcb/example1.c b/nix/nixcrpkgs/pkgs/libxcb/example1.c deleted file mode 100644 index 48c284636..000000000 --- a/nix/nixcrpkgs/pkgs/libxcb/example1.c +++ /dev/null @@ -1,68 +0,0 @@ -// Source: https://en.wikipedia.org/wiki/XCB#Example - -#include -#include -#include - -int main(void) -{ - xcb_connection_t *c; - xcb_screen_t *s; - xcb_window_t w; - xcb_gcontext_t g; - xcb_generic_event_t *e; - uint32_t mask; - uint32_t values[2]; - int done = 0; - xcb_rectangle_t r = { 20, 20, 60, 60 }; - - /* open connection with the server */ - c = xcb_connect(NULL,NULL); - if (xcb_connection_has_error(c)) { - printf("Cannot open display\n"); - exit(1); - } - /* get the first screen */ - s = xcb_setup_roots_iterator( xcb_get_setup(c) ).data; - - /* create black graphics context */ - g = xcb_generate_id(c); - w = s->root; - mask = XCB_GC_FOREGROUND | XCB_GC_GRAPHICS_EXPOSURES; - values[0] = s->black_pixel; - values[1] = 0; - xcb_create_gc(c, g, w, mask, values); - - /* create window */ - w = xcb_generate_id(c); - mask = XCB_CW_BACK_PIXEL | XCB_CW_EVENT_MASK; - values[0] = s->white_pixel; - values[1] = XCB_EVENT_MASK_EXPOSURE | XCB_EVENT_MASK_KEY_PRESS; - xcb_create_window(c, s->root_depth, w, s->root, - 10, 10, 100, 100, 1, - XCB_WINDOW_CLASS_INPUT_OUTPUT, s->root_visual, - mask, values); - - /* map (show) the window */ - xcb_map_window(c, w); - - xcb_flush(c); - - /* event loop */ - while (!done && (e = xcb_wait_for_event(c))) { - switch (e->response_type & 
~0x80) { - case XCB_EXPOSE: /* draw or redraw the window */ - xcb_poly_fill_rectangle(c, w, g, 1, &r); - xcb_flush(c); - break; - case XCB_KEY_PRESS: /* exit on key press */ - done = 1; - break; - } - free(e); - } - /* close connection to server */ - xcb_disconnect(c); - - return 0; -} diff --git a/nix/nixcrpkgs/pkgs/libxcb/examples_builder.sh b/nix/nixcrpkgs/pkgs/libxcb/examples_builder.sh deleted file mode 100644 index 1a936f734..000000000 --- a/nix/nixcrpkgs/pkgs/libxcb/examples_builder.sh +++ /dev/null @@ -1,10 +0,0 @@ -source $setup - -pkg-config-cross xcb --cflags --libs - -$host-gcc -Wall $example1 \ - $(pkg-config-cross xcb --cflags --libs) \ - -o example1$exe_suffix - -mkdir -p $out/bin -cp example1$exe_suffix $out/bin/ diff --git a/nix/nixcrpkgs/pkgs/libxcb/license_builder.sh b/nix/nixcrpkgs/pkgs/libxcb/license_builder.sh deleted file mode 100644 index 31dd9f821..000000000 --- a/nix/nixcrpkgs/pkgs/libxcb/license_builder.sh +++ /dev/null @@ -1,14 +0,0 @@ -source $setup - -tar -xf $src -mv libxcb-* libxcb - -license=$(cat libxcb/COPYING) - -cat > $out <libxcb - -
-$license
-</pre>
-EOF diff --git a/nix/nixcrpkgs/pkgs/libxcb/no-pthread-stubs.patch b/nix/nixcrpkgs/pkgs/libxcb/no-pthread-stubs.patch deleted file mode 100644 index 53c66b74e..000000000 --- a/nix/nixcrpkgs/pkgs/libxcb/no-pthread-stubs.patch +++ /dev/null @@ -1,12 +0,0 @@ -diff -ur libxcb-1.12-orig/configure libxcb-1.12/configure ---- libxcb-1.12-orig/configure 2017-07-29 22:28:37.986987240 -0700 -+++ libxcb-1.12/configure 2017-07-29 22:51:26.410210675 -0700 -@@ -19666,7 +19666,7 @@ - $as_echo "yes" >&6; } - - fi --NEEDED="pthread-stubs xau >= 0.99.2" -+NEEDED="xau >= 0.99.2" - - pkg_failed=no - { $as_echo "$as_me:${as_lineno-$LINENO}: checking for NEEDED" >&5 diff --git a/nix/nixcrpkgs/pkgs/libxext/builder.sh b/nix/nixcrpkgs/pkgs/libxext/builder.sh deleted file mode 100644 index 53bec1552..000000000 --- a/nix/nixcrpkgs/pkgs/libxext/builder.sh +++ /dev/null @@ -1,19 +0,0 @@ -source $setup - -tar -xf $src -mv libXext-* xext - -mkdir build -cd build - -PKG_CONFIG=pkg-config-cross \ -../xext/configure --prefix=$out $configure_flags - -make - -make install - -sed -i 's/Requires.private/Requires/' $out/lib/pkgconfig/*.pc - -ln -sf $xextproto/lib/pkgconfig/*.pc $out/lib/pkgconfig/ -ln -sf $libx11/lib/pkgconfig/*.pc $out/lib/pkgconfig/ diff --git a/nix/nixcrpkgs/pkgs/libxext/default.nix b/nix/nixcrpkgs/pkgs/libxext/default.nix deleted file mode 100644 index 7cb0295ba..000000000 --- a/nix/nixcrpkgs/pkgs/libxext/default.nix +++ /dev/null @@ -1,43 +0,0 @@ -{ crossenv, xproto, libx11, xextproto }: - -let - version = "1.3.3"; - - name = "libxext-${version}"; - - src = crossenv.nixpkgs.fetchurl { - url = "https://www.x.org/releases/individual/lib/libXext-${version}.tar.bz2"; - sha256 = "0dbfn5bznnrhqzvkrcmw4c44yvvpwdcsrvzxf4rk27r36b9x865m"; - }; - - lib = crossenv.make_derivation rec { - inherit version name src; - - builder = ./builder.sh; - - configure_flags = - "--host=${crossenv.host} " + - "--disable-malloc0returnsnull " + - "--enable-static " + - "--disable-shared"; - - cross_inputs = [ xproto libx11 xextproto ]; - - inherit xextproto libx11; - }; - - license = crossenv.native.make_derivation { - name = "${name}-license"; - inherit src; - builder = ./license_builder.sh; - }; - - license_set = - xproto.license_set // - libx11.license_set // - xextproto.license_set // - { "${name}" = license; }; - -in - lib // { inherit license_set; } - diff --git a/nix/nixcrpkgs/pkgs/libxext/license_builder.sh b/nix/nixcrpkgs/pkgs/libxext/license_builder.sh deleted file mode 100644 index 991196310..000000000 --- a/nix/nixcrpkgs/pkgs/libxext/license_builder.sh +++ /dev/null @@ -1,14 +0,0 @@ -source $setup - -tar -xf $src -mv libXext-* libxext - -license=$(cat libxext/COPYING) - -cat > $out <libxext - -
-$license
-</pre>
-EOF diff --git a/nix/nixcrpkgs/pkgs/libxfixes/builder.sh b/nix/nixcrpkgs/pkgs/libxfixes/builder.sh deleted file mode 100644 index 0217ce743..000000000 --- a/nix/nixcrpkgs/pkgs/libxfixes/builder.sh +++ /dev/null @@ -1,20 +0,0 @@ -source $setup - -tar -xf $src -mv libXfixes-* xfixes - -mkdir build -cd build - -PKG_CONFIG=pkg-config-cross \ -../xfixes/configure --prefix=$out $configure_flags - -make - -make install - -sed -i 's/Requires.private/Requires/' $out/lib/pkgconfig/*.pc - -ln -sf $xproto/lib/pkgconfig/*.pc $out/lib/pkgconfig/ -ln -sf $fixesproto/lib/pkgconfig/*.pc $out/lib/pkgconfig/ -ln -sf $libx11/lib/pkgconfig/*.pc $out/lib/pkgconfig/ diff --git a/nix/nixcrpkgs/pkgs/libxfixes/default.nix b/nix/nixcrpkgs/pkgs/libxfixes/default.nix deleted file mode 100644 index fb98228ec..000000000 --- a/nix/nixcrpkgs/pkgs/libxfixes/default.nix +++ /dev/null @@ -1,42 +0,0 @@ -{ crossenv, xproto, xextproto, libx11, fixesproto }: - -let - version = "5.0.3"; - - name = "libxfixes-${version}"; - - src = crossenv.nixpkgs.fetchurl { - url = "https://www.x.org/releases/individual/lib/libXfixes-${version}.tar.bz2"; - sha256 = "1miana3y4hwdqdparsccmygqr3ic3hs5jrqfzp70hvi2zwxd676y"; - }; - - lib = crossenv.make_derivation rec { - inherit version name src; - - builder = ./builder.sh; - - configure_flags = - "--host=${crossenv.host} " + - "--enable-static " + - "--disable-shared"; - - cross_inputs = [ xproto xextproto libx11 fixesproto ]; - - inherit xproto libx11 fixesproto; - }; - - license = crossenv.native.make_derivation { - name = "${name}-license"; - inherit src; - builder = ./license_builder.sh; - }; - - license_set = - xproto.license_set // - xextproto.license_set // - libx11.license_set // - fixesproto.license_set // - { "${name}" = license; }; - -in - lib // { inherit license_set; } diff --git a/nix/nixcrpkgs/pkgs/libxfixes/license_builder.sh b/nix/nixcrpkgs/pkgs/libxfixes/license_builder.sh deleted file mode 100644 index 72bd5a222..000000000 --- a/nix/nixcrpkgs/pkgs/libxfixes/license_builder.sh +++ /dev/null @@ -1,14 +0,0 @@ -source $setup - -tar -xf $src -mv libXfixes-* libxfixes - -license=$(cat libxfixes/COPYING) - -cat > $out <libxfixes - -
-$license
-</pre>
-EOF diff --git a/nix/nixcrpkgs/pkgs/libxi/builder.sh b/nix/nixcrpkgs/pkgs/libxi/builder.sh deleted file mode 100644 index 8c07f9da2..000000000 --- a/nix/nixcrpkgs/pkgs/libxi/builder.sh +++ /dev/null @@ -1,21 +0,0 @@ -source $setup - -tar -xf $src -mv libXi-* libxi - -mkdir build -cd build - -PKG_CONFIG=pkg-config-cross \ -../libxi/configure --prefix=$out $configure_flags - -make - -make install - -sed -i 's/Requires.private/Requires/' $out/lib/pkgconfig/*.pc - -ln -sf $inputproto/lib/pkgconfig/*.pc $out/lib/pkgconfig/ -ln -sf $libx11/lib/pkgconfig/*.pc $out/lib/pkgconfig/ -ln -sf $libxext/lib/pkgconfig/*.pc $out/lib/pkgconfig/ -ln -sf $libxfixes/lib/pkgconfig/*.pc $out/lib/pkgconfig/ diff --git a/nix/nixcrpkgs/pkgs/libxi/default.nix b/nix/nixcrpkgs/pkgs/libxi/default.nix deleted file mode 100644 index ab41d41d8..000000000 --- a/nix/nixcrpkgs/pkgs/libxi/default.nix +++ /dev/null @@ -1,45 +0,0 @@ -{ crossenv, xproto, xextproto, inputproto, libx11, libxext, libxfixes }: - -let - version = "1.7.9"; - - name = "libxi-${version}"; - - src = crossenv.nixpkgs.fetchurl { - url = "https://www.x.org/releases/individual/lib/libXi-${version}.tar.bz2"; - sha256 = "0idg1wc01hndvaa820fvfs7phvd1ymf0lldmq6386i7rhkzvirn2"; - }; - - lib = crossenv.make_derivation rec { - inherit version name src; - - builder = ./builder.sh; - - configure_flags = - "--host=${crossenv.host} " + - "--disable-malloc0returnsnull " + - "--enable-static " + - "--disable-shared"; - - cross_inputs = [ xproto xextproto inputproto libx11 libxext libxfixes ]; - - inherit inputproto libx11 libxext libxfixes; - }; - - license = crossenv.native.make_derivation { - name = "${name}-license"; - inherit src; - builder = ./license_builder.sh; - }; - - license_set = - xproto.license_set // - xextproto.license_set // - inputproto.license_set // - libx11.license_set // - libxext.license_set // - libxfixes.license_set // - { "${name}" = license; }; - -in - lib // { inherit license_set; } diff --git a/nix/nixcrpkgs/pkgs/libxi/license_builder.sh b/nix/nixcrpkgs/pkgs/libxi/license_builder.sh deleted file mode 100644 index 966f93096..000000000 --- a/nix/nixcrpkgs/pkgs/libxi/license_builder.sh +++ /dev/null @@ -1,14 +0,0 @@ -source $setup - -tar -xf $src -mv libXi-* libxi - -license=$(cat libxi/COPYING) - -cat > $out <libxi - -
-$license
-</pre>
-EOF diff --git a/nix/nixcrpkgs/pkgs/lmdb/builder.sh b/nix/nixcrpkgs/pkgs/lmdb/builder.sh deleted file mode 100644 index a8d2909d8..000000000 --- a/nix/nixcrpkgs/pkgs/lmdb/builder.sh +++ /dev/null @@ -1,16 +0,0 @@ -source $setup - -cp -r $src ./src -chmod -R u+w ./src -cd src/libraries/liblmdb - -sed -i 's/liblmdb.a liblmdb..SOEXT.$/liblmdb.a/' Makefile -sed -i "s/gcc/$host-gcc/" Makefile -sed -i "s/ar/$host-ar/" Makefile -sed -i 's/^CC.*/CC = '"$host-gcc/" Makefile - -cat Makefile - -make CFLAGS+="-fPIC" - -make DESTDIR="$out" prefix=/ install diff --git a/nix/nixcrpkgs/pkgs/lmdb/default.nix b/nix/nixcrpkgs/pkgs/lmdb/default.nix deleted file mode 100644 index 140fc1609..000000000 --- a/nix/nixcrpkgs/pkgs/lmdb/default.nix +++ /dev/null @@ -1,14 +0,0 @@ -{ crossenv }: - -crossenv.make_derivation rec { - name = "lmdb-${version}"; - version = "0.9.23"; - builder = ./builder.sh; - - src = crossenv.nixpkgs.fetchFromGitHub { - owner = "LMDB"; - repo = "lmdb"; - rev = "LMDB_${version}"; - sha256 = "0ag7l5180ajvm73y59m7sn3p52xm8m972d08cshxhpwgwa4v35k6"; - }; -} diff --git a/nix/nixcrpkgs/pkgs/ncurses/builder.sh b/nix/nixcrpkgs/pkgs/ncurses/builder.sh deleted file mode 100644 index d7740f353..000000000 --- a/nix/nixcrpkgs/pkgs/ncurses/builder.sh +++ /dev/null @@ -1,14 +0,0 @@ -source $setup - -tar -xf $src -cd * - -./configure --host=$host --prefix=$out $configureFlags - -make - -make install.{libs,includes,data} - -# TODO Why do I need to do this? -mkdir -p $out/lib/pkgconfig -cp misc/*.pc $out/lib/pkgconfig diff --git a/nix/nixcrpkgs/pkgs/ncurses/default.nix b/nix/nixcrpkgs/pkgs/ncurses/default.nix deleted file mode 100644 index e602b2ec6..000000000 --- a/nix/nixcrpkgs/pkgs/ncurses/default.nix +++ /dev/null @@ -1,27 +0,0 @@ -{ crossenv }: - -crossenv.make_derivation rec { - name = "ncurses-${version}"; - version = "6.1-20181027"; - builder = ./builder.sh; - - # Needs to be the same version. - native_inputs = [ crossenv.nixpkgs.ncurses ]; - - configureFlags = [ - "--without-debug" - "--enable-pc-files" - "--enable-symlinks" - # "--with-manpage-format=normal" - "--without-cxx" - # "--enable-widec" - ]; - - src = crossenv.nixpkgs.fetchurl { - urls = [ - "https://invisible-mirror.net/archives/ncurses/current/ncurses-${version}.tgz" - "ftp://ftp.invisible-island.net/ncurses/current/ncurses-${version}.tgz" - ]; - sha256 = "1xn6wpi22jc61158w4ifq6s1fvilhmsy1in2srn3plk8pm0d4902"; - }; -} diff --git a/nix/nixcrpkgs/pkgs/openocd/builder.sh b/nix/nixcrpkgs/pkgs/openocd/builder.sh deleted file mode 100644 index c1c388d88..000000000 --- a/nix/nixcrpkgs/pkgs/openocd/builder.sh +++ /dev/null @@ -1,24 +0,0 @@ -source $setup - -cp -r $src openocd -chmod -R u+w openocd - -cd openocd -SKIP_SUBMODULE=1 ./bootstrap -cd .. 
- -mkdir build -cd build - -PKG_CONFIG=pkg-config-cross ../openocd/configure \ - --prefix=$out \ - --host=$host \ - --disable-dependency-tracking \ - --enable-static \ - --disable-shared - -make - -make install - -$host-strip $out/bin/openocd diff --git a/nix/nixcrpkgs/pkgs/openocd/default.nix b/nix/nixcrpkgs/pkgs/openocd/default.nix deleted file mode 100644 index 756ce9989..000000000 --- a/nix/nixcrpkgs/pkgs/openocd/default.nix +++ /dev/null @@ -1,39 +0,0 @@ -{ crossenv, libusb }: - -let - version = "2018-08-16"; - - name = "openocd-${version}"; - - nixpkgs = crossenv.nixpkgs; - - src = nixpkgs.fetchgit { - url = "git://repo.or.cz/openocd"; # official mirror - rev = "b2d259f67cc3ee4b689e704228d97943bae94064"; - sha256 = "0c5zpjplwp0ivl4mpiix628j0iad9gkmg9f7lidgqjr5a80cr6hg"; - deepClone = true; - }; - - drv = crossenv.make_derivation { - inherit version name src; - builder = ./builder.sh; - - native_inputs = [ - nixpkgs.autoconf - nixpkgs.automake - nixpkgs.libtool - nixpkgs.m4 - ]; - - ACLOCAL_PATH = - "${nixpkgs.libtool}/share/aclocal:" + - "${crossenv.native.pkgconf}/share/aclocal"; - - # Avoid a name conflict: get_home_dir is also defined in libudev. - CFLAGS = "-Dget_home_dir=openocd_get_home_dir"; - - cross_inputs = [ libusb ]; - }; - -in - drv diff --git a/nix/nixcrpkgs/pkgs/openssl/builder.sh b/nix/nixcrpkgs/pkgs/openssl/builder.sh deleted file mode 100644 index e47e43ab1..000000000 --- a/nix/nixcrpkgs/pkgs/openssl/builder.sh +++ /dev/null @@ -1,46 +0,0 @@ -source $setup - -tar -xf $src - -mkdir build -cd build - -err () { echo ERR "$@" >&2; } - -case $host in - i686-linux-musleabi) - confighost=linux-x86;; - x86_64-linux-musleabi) - confighost=linux-x86_64;; - x86_64-apple-darwin*) - confighost=darwin64-x86_64-cc;; - *) - err openssl builder.sh needs to excplicitly translate - err "'host=$host'" to something openssl understands. - confighost=$host;; -esac - -# TODO The `no-async` option seems weird, but -# https://github.com/openssl/openssl/issues/1607 - -# TODO I stole the no-dso option from the here[1], but is it -# needed? I seems to be related to shared libraries, which we aren't using -# anyways, but I don't like not understanding. -# -# [1]: https://github.com/rust-embedded/cross/blob/master/docker/openssl.sh - -# TODO Why `-fPIC`? 
I stole it from [2] -# -# [2]: https://github.com/rust-embedded/cross/pull/218/files - -../openssl-$version/Configure \ - --prefix=$out \ - --cross-compile-prefix=$host- \ - no-shared \ - no-dso \ - no-async \ - $confighost \ - -fPIC - -make -make install diff --git a/nix/nixcrpkgs/pkgs/openssl/default.nix b/nix/nixcrpkgs/pkgs/openssl/default.nix deleted file mode 100644 index de9b876f9..000000000 --- a/nix/nixcrpkgs/pkgs/openssl/default.nix +++ /dev/null @@ -1,83 +0,0 @@ -{ crossenv }: - -crossenv.make_derivation rec { - name = "openssl-${version}"; - version = "1.1.1"; - - native_inputs = [ crossenv.nixpkgs.perl ]; - - src = crossenv.nixpkgs.fetchurl { - url = "https://www.openssl.org/source/${name}.tar.gz"; - sha256 = "0gbab2fjgms1kx5xjvqx8bxhr98k4r8l2fa8vw7kvh491xd8fdi8"; - }; - - builder = ./builder.sh; -} - -# let -# -# coreutils = crossenv.nixpkgs.coreutils; -# -# # with stdenv.lib; -# -# in -# -# { -# -# patches = [ ./nix-ssl-cert-file.patch ]; -# -# native_inputs = [ crossenv.nixpkgs.perl ]; -# -# postPatch = '' -# patchShebangs Configure -# '' + optionalString (versionAtLeast version "1.1.1") '' -# substituteInPlace config --replace '/usr/bin/env' '${coreutils}/bin/env' -# '' + optionalString (versionAtLeast version "1.1.0" && stdenv.hostPlatform.isMusl) '' -# substituteInPlace crypto/async/arch/async_posix.h \ -# --replace '!defined(__ANDROID__) && !defined(__OpenBSD__)' \ -# '!defined(__ANDROID__) && !defined(__OpenBSD__) && 0' -# ''; -# -# configureScript = { -# "x86_64-darwin" = "./Configure darwin64-x86_64-cc"; -# "x86_64-solaris" = "./Configure solaris64-x86_64-gcc"; -# "armv6l-linux" = "./Configure linux-armv4 -march=armv6"; -# "armv7l-linux" = "./Configure linux-armv4 -march=armv7-a"; -# }.${stdenv.hostPlatform.system} or ( -# if stdenv.hostPlatform == stdenv.buildPlatform -# then "./config" -# else if stdenv.hostPlatform.isMinGW -# then "./Configure mingw${optionalString -# (stdenv.hostPlatform.parsed.cpu.bits != 32) -# (toString stdenv.hostPlatform.parsed.cpu.bits)}" -# else if stdenv.hostPlatform.isLinux -# then "./Configure linux-generic${toString stdenv.hostPlatform.parsed.cpu.bits}" -# else if stdenv.hostPlatform.isiOS -# then "./Configure ios${toString stdenv.hostPlatform.parsed.cpu.bits}-cross" -# else -# throw "Not sure what configuration to use for ${stdenv.hostPlatform.config}" -# ); -# -# configureFlags = [ -# "shared" # "shared" builds both shared and static libraries -# "--libdir=lib" -# "--openssldir=etc/ssl" -# ] ++ stdenv.lib.optionals withCryptodev [ -# "-DHAVE_CRYPTODEV" -# "-DUSE_CRYPTODEV_DIGESTS" -# ] -# ++ stdenv.lib.optional (versionAtLeast version "1.1.0" && stdenv.hostPlatform.isAarch64) "no-afalgeng"; -# -# postInstall = '' -# mkdir -p $bin -# mv $out/bin $bin/ -# -# mkdir $dev -# mv $out/include $dev/ -# -# # remove dependency on Perl at runtime -# rm -r $out/etc/ssl/misc -# -# rmdir $out/etc/ssl/{certs,private} -# ''; -# }; diff --git a/nix/nixcrpkgs/pkgs/p-load/builder.sh b/nix/nixcrpkgs/pkgs/p-load/builder.sh deleted file mode 100644 index 7ebc2e6ac..000000000 --- a/nix/nixcrpkgs/pkgs/p-load/builder.sh +++ /dev/null @@ -1,14 +0,0 @@ -source $setup - -tar -xf $src -mv p-load-* p-load - -mkdir build -cd build - -cmake-cross ../p-load \ - -DCMAKE_INSTALL_PREFIX=$out - -make - -make install diff --git a/nix/nixcrpkgs/pkgs/p-load/default.nix b/nix/nixcrpkgs/pkgs/p-load/default.nix deleted file mode 100644 index 6ce88528b..000000000 --- a/nix/nixcrpkgs/pkgs/p-load/default.nix +++ /dev/null @@ -1,16 +0,0 @@ -{ crossenv, libusbp }: - 
-crossenv.make_derivation rec { - name = "p-load-${version}"; - - version = "2041b02"; # 2.1.0ish - - src = crossenv.nixpkgs.fetchurl { - url = "https://github.com/pololu/p-load/archive/${version}.tar.gz"; - sha256 = "07xn0k96pkvirsh45zn9976lwliiqkfx76vy1yrbx6kp55ssp2zp"; - }; - - builder = ./builder.sh; - - cross_inputs = [ libusbp ]; -} diff --git a/nix/nixcrpkgs/pkgs/pavr2/builder.sh b/nix/nixcrpkgs/pkgs/pavr2/builder.sh deleted file mode 100644 index b516b6926..000000000 --- a/nix/nixcrpkgs/pkgs/pavr2/builder.sh +++ /dev/null @@ -1,14 +0,0 @@ -source $setup - -tar -xf $src -mv pololu-usb-avr-programmer-v2-* pavr2 - -mkdir build -cd build - -cmake-cross ../pavr2 \ - -DCMAKE_INSTALL_PREFIX=$out - -make - -make install diff --git a/nix/nixcrpkgs/pkgs/pavr2/default.nix b/nix/nixcrpkgs/pkgs/pavr2/default.nix deleted file mode 100644 index 379e61b51..000000000 --- a/nix/nixcrpkgs/pkgs/pavr2/default.nix +++ /dev/null @@ -1,16 +0,0 @@ -{ crossenv, qt, libusbp }: - -crossenv.make_derivation rec { - name = "pavr2-${version}"; - - version = "a113a3b"; # 1.0.2ish - - src = crossenv.nixpkgs.fetchurl { - url = "https://github.com/pololu/pololu-usb-avr-programmer-v2/archive/${version}.tar.gz"; - sha256 = "1mg467jx7mpcn01vh8rq80w7p8mbj7l69dmpyni0nik44ggsj7ij"; - }; - - builder = ./builder.sh; - - cross_inputs = [ libusbp qt ]; -} diff --git a/nix/nixcrpkgs/pkgs/pdcurses/builder.sh b/nix/nixcrpkgs/pkgs/pdcurses/builder.sh deleted file mode 100644 index e9dc46205..000000000 --- a/nix/nixcrpkgs/pkgs/pdcurses/builder.sh +++ /dev/null @@ -1,37 +0,0 @@ -source $setup - -tar -xf $src -mv PDCurses-* pdcurses - -mkdir build -cd build - -source_files=../pdcurses/pdcurses/*.c - -if [ "$os" == "windows" ]; then - os_files=../pdcurses/win32/*.c -fi - -if [ "$os" == "linux" ]; then - os_files= -fi - -source_files="$source_files $os_files" - -for s in $source_files; do - echo "compiling $s" - $host-gcc -g -O2 -I../pdcurses \ - -DPDC_WIDE -DPDC_FORCE_UTF8 -c "$s" -o "$(basename $s).o" -done - -$host-ar r libpdcurses.a *.o -$host-ranlib libpdcurses.a - -mkdir -p $out/{lib,include} -cp libpdcurses.a $out/lib/libpdcurses.a - -# Make libcurses.a so programs like GDB can find pdcurses. -ln -s $out/lib/libpdcurses.a $out/lib/libcurses.a - -cd ../pdcurses -cp curses.h panel.h term.h $out/include/ diff --git a/nix/nixcrpkgs/pkgs/pdcurses/default.nix b/nix/nixcrpkgs/pkgs/pdcurses/default.nix deleted file mode 100644 index 0a47f6564..000000000 --- a/nix/nixcrpkgs/pkgs/pdcurses/default.nix +++ /dev/null @@ -1,16 +0,0 @@ -# Note: This only seems to work on Windows. - -{ crossenv }: - -let - - pdcurses = import ./lib.nix { - inherit crossenv; - }; - - examples = import ./examples.nix { - inherit crossenv pdcurses; - }; - -in - pdcurses // { inherit examples; } diff --git a/nix/nixcrpkgs/pkgs/pdcurses/demos_builder.sh b/nix/nixcrpkgs/pkgs/pdcurses/demos_builder.sh deleted file mode 100644 index eea6a6737..000000000 --- a/nix/nixcrpkgs/pkgs/pdcurses/demos_builder.sh +++ /dev/null @@ -1,25 +0,0 @@ -source $setup - -tar -xf $src -mv PDCurses-$version/demos . 
-rm -r PDCurses-$version - -mkdir build -cd build - -CFLAGS="-g -O2 -I$pdcurses/include -DPDC_WIDE" - -$host-gcc $CFLAGS -c ../demos/tui.c -o tui.o -$host-ar r tui.a tui.o - -demos="firework newdemo ptest rain testcurs worm xmas tuidemo" - -for name in $demos; do - src=../demos/$name.c - echo "compiling $name" - $host-gcc $CFLAGS -L"$pdcurses/lib" \ - "$src" tui.a -lpdcurses -o "$name.exe" -done - -mkdir -p $out/bin -mv *.exe $out/bin/ diff --git a/nix/nixcrpkgs/pkgs/pdcurses/examples.nix b/nix/nixcrpkgs/pkgs/pdcurses/examples.nix deleted file mode 100644 index 8b3dbee38..000000000 --- a/nix/nixcrpkgs/pkgs/pdcurses/examples.nix +++ /dev/null @@ -1,11 +0,0 @@ -{ crossenv, pdcurses }: - -crossenv.make_derivation rec { - name = "pdcurses_demos-${version}"; - - inherit pdcurses; - inherit (pdcurses) src version; - - builder = ./demos_builder.sh; -} - diff --git a/nix/nixcrpkgs/pkgs/pdcurses/lib.nix b/nix/nixcrpkgs/pkgs/pdcurses/lib.nix deleted file mode 100644 index ef4293502..000000000 --- a/nix/nixcrpkgs/pkgs/pdcurses/lib.nix +++ /dev/null @@ -1,16 +0,0 @@ -{ crossenv }: - -crossenv.make_derivation rec { - name = "pdcurses-${version}"; - - version = "3.4"; - - src = crossenv.nixpkgs.fetchurl { - # Sourceforge went down. The original URL was: - # url = "mirror://sourceforge/pdcurses/PDCurses-${version}.tar.gz"; - url = "https://files.tmphax.com/repo1/pdcurses-${version}.tar.gz"; - sha256 = "0jz6l8552fnf1j542yhzifgknrdzrisxg158ks0l87g777a8zba6"; - }; - - builder = ./builder.sh; -} diff --git a/nix/nixcrpkgs/pkgs/qt/absolute-paths.patch b/nix/nixcrpkgs/pkgs/qt/absolute-paths.patch deleted file mode 100644 index 93ab1e1fc..000000000 --- a/nix/nixcrpkgs/pkgs/qt/absolute-paths.patch +++ /dev/null @@ -1,63 +0,0 @@ -diff -ur qtbase-opensource-src-5.9.2-orig/configure qtbase-opensource-src-5.9.2/configure ---- qtbase-opensource-src-5.9.2-orig/configure 2017-10-26 08:10:12.932646805 -0700 -+++ qtbase-opensource-src-5.9.2/configure 2017-11-01 08:48:44.973917507 -0700 -@@ -36,9 +36,9 @@ - relconf=`basename $0` - # the directory of this script is the "source tree" - relpath=`dirname $0` --relpath=`(cd "$relpath"; /bin/pwd)` -+relpath=`(cd "$relpath"; pwd)` - # the current directory is the "build tree" or "object tree" --outpath=`/bin/pwd` -+outpath=`pwd` - - WHICH="which" - -@@ -232,7 +232,7 @@ - - sdk=$(getSingleQMakeVariable "QMAKE_MAC_SDK" "$1") - if [ -z "$sdk" ]; then echo "QMAKE_MAC_SDK must be set when building on Mac" >&2; exit 1; fi -- sysroot=$(/usr/bin/xcrun --sdk $sdk --show-sdk-path 2>/dev/null) -+ sysroot=$(xcrun --sdk $sdk --show-sdk-path 2>/dev/null) - if [ -z "$sysroot" ]; then echo "Failed to resolve SDK path for '$sdk'" >&2; exit 1; fi - - case "$sdk" in -@@ -267,7 +267,7 @@ - # Prefix tool with toolchain path - var=$(echo "$line" | cut -d '=' -f 1) - val=$(echo "$line" | cut -d '=' -f 2-) -- sdk_val=$(/usr/bin/xcrun -sdk $sdk -find $(echo $val | cut -d ' ' -f 1)) -+ sdk_val=$(xcrun -sdk $sdk -find $(echo $val | cut -d ' ' -f 1)) - val=$(echo $sdk_val $(echo $val | cut -s -d ' ' -f 2-)) - echo "$var=$val" - ;; -@@ -305,9 +305,6 @@ - UNAME_SYSTEM=`(uname -s) 2>/dev/null` || UNAME_SYSTEM=unknown - - BUILD_ON_MAC=no --if [ -d /System/Library/Frameworks/Carbon.framework ]; then -- BUILD_ON_MAC=yes --fi - if [ "$OSTYPE" = "msys" ]; then - relpath=`(cd "$relpath"; pwd -W)` - outpath=`pwd -W` -@@ -318,7 +315,7 @@ - #------------------------------------------------------------------------------- - - if [ "$BUILD_ON_MAC" = "yes" ]; then -- if ! 
/usr/bin/xcode-select --print-path >/dev/null 2>&1; then -+ if ! xcode-select --print-path >/dev/null 2>&1; then - echo >&2 - echo " No Xcode selected. Please install Xcode via the App Store, " >&2 - echo " or the command line developer tools via xcode-select --install, " >&2 -@@ -329,8 +326,8 @@ - fi - - # In the else case we are probably using a Command Line Tools installation -- if /usr/bin/xcrun -find xcodebuild >/dev/null 2>&1; then -- if ! /usr/bin/xcrun xcodebuild -license check 2>/dev/null; then -+ if xcrun -find xcodebuild >/dev/null 2>&1; then -+ if ! xcrun xcodebuild -license check 2>/dev/null; then - echo >&2 - echo " Xcode setup not complete. You need to confirm the license" >&2 - echo " agreement by running 'sudo xcrun xcodebuild -license accept'." >&2 diff --git a/nix/nixcrpkgs/pkgs/qt/builder.sh b/nix/nixcrpkgs/pkgs/qt/builder.sh deleted file mode 100644 index 1668ce280..000000000 --- a/nix/nixcrpkgs/pkgs/qt/builder.sh +++ /dev/null @@ -1,23 +0,0 @@ -source $setup - -mkdir -p $out -pushd $out -tar -xf $src -mv qtbase-opensource-src-* src -cd src -for patch in $patches; do - echo applying patch $patch - patch -p1 -i $patch -done -popd - -mkdir build -cd build - -PKG_CONFIG=pkg-config-cross \ -$out/src/configure -prefix $out $configure_flags - -make - -make install - diff --git a/nix/nixcrpkgs/pkgs/qt/core_macros.cmake b/nix/nixcrpkgs/pkgs/qt/core_macros.cmake deleted file mode 100644 index f3ef672fd..000000000 --- a/nix/nixcrpkgs/pkgs/qt/core_macros.cmake +++ /dev/null @@ -1,106 +0,0 @@ -# These macros come from src/corelib/Qt5CoreMacros.cmake originally. - -#============================================================================= -# Copyright 2005-2011 Kitware, Inc. -# All rights reserved. -# -# Redistribution and use in source and binary forms, with or without -# modification, are permitted provided that the following conditions -# are met: -# -# * Redistributions of source code must retain the above copyright -# notice, this list of conditions and the following disclaimer. -# -# * Redistributions in binary form must reproduce the above copyright -# notice, this list of conditions and the following disclaimer in the -# documentation and/or other materials provided with the distribution. -# -# * Neither the name of Kitware, Inc. nor the names of its -# contributors may be used to endorse or promote products derived -# from this software without specific prior written permission. -# -# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -# HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
-#============================================================================= - -macro(QT5_MAKE_OUTPUT_FILE infile prefix ext outfile ) - string(LENGTH ${CMAKE_CURRENT_BINARY_DIR} _binlength) - string(LENGTH ${infile} _infileLength) - set(_checkinfile ${CMAKE_CURRENT_SOURCE_DIR}) - if(_infileLength GREATER _binlength) - string(SUBSTRING "${infile}" 0 ${_binlength} _checkinfile) - if(_checkinfile STREQUAL "${CMAKE_CURRENT_BINARY_DIR}") - file(RELATIVE_PATH rel ${CMAKE_CURRENT_BINARY_DIR} ${infile}) - else() - file(RELATIVE_PATH rel ${CMAKE_CURRENT_SOURCE_DIR} ${infile}) - endif() - else() - file(RELATIVE_PATH rel ${CMAKE_CURRENT_SOURCE_DIR} ${infile}) - endif() - if(WIN32 AND rel MATCHES "^([a-zA-Z]):(.*)$") # absolute path - set(rel "${CMAKE_MATCH_1}_${CMAKE_MATCH_2}") - endif() - set(_outfile "${CMAKE_CURRENT_BINARY_DIR}/${rel}") - string(REPLACE ".." "__" _outfile ${_outfile}) - get_filename_component(outpath ${_outfile} PATH) - get_filename_component(_outfile ${_outfile} NAME_WE) - file(MAKE_DIRECTORY ${outpath}) - set(${outfile} ${outpath}/${prefix}${_outfile}.${ext}) -endmacro() - -function(_QT5_PARSE_QRC_FILE infile _out_depends _rc_depends) - get_filename_component(rc_path ${infile} PATH) - if(EXISTS "${infile}") - file(READ "${infile}" RC_FILE_CONTENTS) - string(REGEX MATCHALL "]*>" "" RC_FILE "${RC_FILE}") - if(NOT IS_ABSOLUTE "${RC_FILE}") - set(RC_FILE "${rc_path}/${RC_FILE}") - endif() - set(RC_DEPENDS ${RC_DEPENDS} "${RC_FILE}") - endforeach() - qt5_make_output_file("${infile}" "" "qrc.depends" out_depends) - configure_file("${infile}" "${out_depends}" COPYONLY) - else() - set(out_depends) - endif() - set(${_out_depends} ${out_depends} PARENT_SCOPE) - set(${_rc_depends} ${RC_DEPENDS} PARENT_SCOPE) -endfunction() - -function(QT5_ADD_RESOURCES outfiles ) - set(options) - set(oneValueArgs) - set(multiValueArgs OPTIONS) - cmake_parse_arguments(_RCC "${options}" "${oneValueArgs}" "${multiValueArgs}" ${ARGN}) - set(rcc_files ${_RCC_UNPARSED_ARGUMENTS}) - set(rcc_options ${_RCC_OPTIONS}) - - if("${rcc_options}" MATCHES "-binary") - message(WARNING "Use qt5_add_binary_resources for binary option") - endif() - - foreach(it ${rcc_files}) - get_filename_component(outfilename ${it} NAME_WE) - get_filename_component(infile ${it} ABSOLUTE) - set(outfile ${CMAKE_CURRENT_BINARY_DIR}/qrc_${outfilename}.cpp) - _QT5_PARSE_QRC_FILE(${infile} _out_depends _rc_depends) - add_custom_command(OUTPUT ${outfile} - COMMAND ${Qt5Core_RCC_EXECUTABLE} - ARGS ${rcc_options} --name ${outfilename} --output ${outfile} ${infile} - MAIN_DEPENDENCY ${infile} - DEPENDS ${_rc_depends} "${out_depends}" VERBATIM) - list(APPEND ${outfiles} ${outfile}) - endforeach() - set(${outfiles} ${${outfiles}} PARENT_SCOPE) -endfunction() diff --git a/nix/nixcrpkgs/pkgs/qt/dbus-null-pointer.patch b/nix/nixcrpkgs/pkgs/qt/dbus-null-pointer.patch deleted file mode 100644 index 8e8543c62..000000000 --- a/nix/nixcrpkgs/pkgs/qt/dbus-null-pointer.patch +++ /dev/null @@ -1,12 +0,0 @@ -diff -ur qtbase-opensource-src-5.9.6-orig/src/platformsupport/linuxaccessibility/dbusconnection.cpp qtbase-opensource-src-5.9.6/src/platformsupport/linuxaccessibility/dbusconnection.cpp ---- qtbase-opensource-src-5.9.6-orig/src/platformsupport/linuxaccessibility/dbusconnection.cpp 2018-06-19 12:42:00.533895696 -0700 -+++ qtbase-opensource-src-5.9.6/src/platformsupport/linuxaccessibility/dbusconnection.cpp 2018-06-19 12:45:03.308744607 -0700 -@@ -75,7 +75,7 @@ - connect(dbusWatcher, SIGNAL(serviceRegistered(QString)), this, SLOT(serviceRegistered())); - - 
// If it is registered already, setup a11y right away -- if (c.interface()->isServiceRegistered(A11Y_SERVICE)) -+ if (c.interface() && c.interface()->isServiceRegistered(A11Y_SERVICE)) - serviceRegistered(); - - // In addition try if there is an xatom exposing the bus address, this allows applications run as root to work diff --git a/nix/nixcrpkgs/pkgs/qt/default.nix b/nix/nixcrpkgs/pkgs/qt/default.nix deleted file mode 100644 index 5a9f4d675..000000000 --- a/nix/nixcrpkgs/pkgs/qt/default.nix +++ /dev/null @@ -1,165 +0,0 @@ -# TODO: look into why were compiling with this impure option on Linux: -# -DDFLT_XKB_CONFIG_ROOT=\"/usr/share/X11/xkb\" - -# TODO: patch qt to not use /bin/pwd, test building it in a sandbox - -{ crossenv, libudev, libxall, at-spi2-headers, dejavu-fonts }: - -let - version = "5.9.6"; - - name = "qtbase-${version}"; - - platform = - let - os_code = - if crossenv.os == "windows" then "win32" - else if crossenv.os == "macos" then "macx" - else if crossenv.os == "linux" then "devices/linux-generic" - else crossenv.os; - compiler_code = - if crossenv.compiler == "gcc" then "g++" - else crossenv.compiler; - in "${os_code}-${compiler_code}"; - - base_src = crossenv.nixpkgs.fetchurl { - url = "https://download.qt.io/official_releases/qt/5.9/${version}/submodules/qtbase-opensource-src-${version}.tar.xz"; - sha256 = "0vz3rgx7bk50jzy78lxv5pff2l8xqmqs9iiz7gc9n6cb4v5j1mpf"; - }; - - base_raw = crossenv.make_derivation { - name = "qtbase-raw-${version}"; - inherit version; - src = base_src; - builder = ./builder.sh; - - patches = [ - # Purity issue: Don't look at the build system using absolute paths. - ./absolute-paths.patch - - # macOS configuration: Don't run tools from /usr/bin, use the right - # compiler, and don't pass redundant options to it (-arch, -isysroot, - # -mmacosx-version-min). - ./macos-config.patch - - # libX11.a depends on libxcb.a. This makes tests.xlib in - # src/gui/configure.json pass, enabling lots of X functionality in Qt. - ./find-x-libs.patch - - # Fix the build error caused by https://bugreports.qt.io/browse/QTBUG-63637 - ./win32-link-object-max.patch - - # The .pc files have incorrect library names without this (e.g. Qt5Cored) - ./pc-debug-name.patch - - # uxtheme.h test is broken, always returns false, and results in QtWidgets - # apps looking bad on Windows. https://stackoverflow.com/q/44784414/28128 - ./dont-test-uxtheme.patch - - # When cross-compiling, Qt uses some heuristics about whether to trust the - # pkg-config executable supplied by the PKG_CONFIG environment variable. - # These heuristics are wrong for us, so disable them, making qt use - # pkg-config-cross. - ./pkg-config-cross.patch - - # When the DBus session bus is not available, Qt tries to dereference a - # null pointer, so Linux applications can't start up. - ./dbus-null-pointer.patch - - # Look for fonts in the same directory as the application by default if - # the QT_QPA_FONTDIR environment variable is not present. Without this - # patch, Qt tries to look for a font directory in the nix store that does - # not exists, and prints warnings. - # You must ship a .ttf, .ttc, .pfa, .pfb, or .otf font file - # with your application (e.g. https://dejavu-fonts.github.io/ ). - # That list of extensions comes from qbasicfontdatabase.cpp. 
- ./font-dir.patch - ]; - - configure_flags = - "-opensource -confirm-license " + - "-xplatform ${platform} " + - "-device-option CROSS_COMPILE=${crossenv.host}- " + - "-release " + # change to -debug if you want debugging symbols - "-static " + - "-pkg-config " + - "-nomake examples " + - "-no-icu " + - "-no-fontconfig " + - "-no-reduce-relocations " + - ( if crossenv.os == "windows" then - "-opengl desktop" - else if crossenv.os == "linux" then - "-qpa xcb " + - "-system-xcb " + - "-no-opengl " + - "-device-option QMAKE_INCDIR_X11=${libxall}/include " + - "-device-option QMAKE_LIBDIR_X11=${libxall}/lib" - else if crossenv.os == "macos" then - "-device-option QMAKE_MAC_SDK.macosx.--show-sdk-path=" + - "${crossenv.sdk} " + - "-device-option QMAKE_MAC_SDK.macosx.--show-sdk-platform-path=" + - "${crossenv.sdk}/does-not-exist " + - "-device-option QMAKE_MAC_SDK.macosx.--show-sdk-version=" + - "${crossenv.macos_version_min} " + - "-device-option QMAKE_XCODE_VERSION=7.0" - else "" ); - - cross_inputs = - if crossenv.os == "linux" then [ - libudev # not sure if this helps, but Qt does look for it - libxall - at-spi2-headers # for accessibility - ] - else []; - }; - - # This wrapper aims to make Qt easier to use by generating CMake package files - # for it. The existing support for CMake in Qt does not handle static - # linking; other projects maintian large, messy patches to fix it, but we - # prefer to generate the CMake files in a clean way from scratch. - base = crossenv.make_derivation { - inherit version name; - os = crossenv.os; - qtbase = base_raw; - cross_inputs = base_raw.cross_inputs; - builder.ruby = ./wrapper_builder.rb; - core_macros = ./core_macros.cmake; - }; - - examples = crossenv.make_derivation { - name = "qtbase-examples-${version}"; - inherit version; - os = crossenv.os; - qtbase = base; - cross_inputs = [ base ]; - dejavu = dejavu-fonts; - builder = ./examples_builder.sh; - }; - - license_fragment = crossenv.native.make_derivation { - name = "qtbase-${version}-license-fragment"; - inherit version; - src = base_src; - builder = ./license_builder.sh; - }; - - license_set = - ( - if crossenv.os == "linux" then - libudev.license_set // - libxall.license_set // - at-spi2-headers.license_set - else - {} - ) // - { "${name}" = license_fragment; }; -in - base // { - recurseForDerivations = true; - inherit base_src; - inherit base_raw; - inherit base; - inherit examples; - inherit license_set; - } diff --git a/nix/nixcrpkgs/pkgs/qt/dont-test-uxtheme.patch b/nix/nixcrpkgs/pkgs/qt/dont-test-uxtheme.patch deleted file mode 100644 index c41620138..000000000 --- a/nix/nixcrpkgs/pkgs/qt/dont-test-uxtheme.patch +++ /dev/null @@ -1,24 +0,0 @@ -diff -ur qtbase-opensource-src-5.9.2-orig/src/widgets/configure.json qtbase-opensource-src-5.9.2/src/widgets/configure.json ---- qtbase-opensource-src-5.9.2-orig/src/widgets/configure.json 2017-10-25 13:52:49.173421900 -0700 -+++ qtbase-opensource-src-5.9.2/src/widgets/configure.json 2017-10-25 13:53:42.891341214 -0700 -@@ -28,11 +28,6 @@ - }, - - "tests": { -- "uxtheme": { -- "label": "uxtheme.h", -- "type": "files", -- "files": [ "uxtheme.h" ] -- } - }, - - "features": { -@@ -57,7 +52,7 @@ - }, - "style-windowsxp": { - "label": "WindowsXP", -- "condition": "features.style-windows && config.win32 && !config.winrt && tests.uxtheme", -+ "condition": "features.style-windows && config.win32 && !config.winrt", - "output": [ "privateFeature", "styles" ] - }, - "style-windowsvista": { diff --git a/nix/nixcrpkgs/pkgs/qt/examples_builder.sh 
b/nix/nixcrpkgs/pkgs/qt/examples_builder.sh deleted file mode 100644 index d5d56e11c..000000000 --- a/nix/nixcrpkgs/pkgs/qt/examples_builder.sh +++ /dev/null @@ -1,88 +0,0 @@ -source $setup - -examples=$qtbase/src/examples - -mkdir build -cd build -mkdir bin moc obj - -cat > obj/plugins.cpp < -#ifdef _WIN32 -Q_IMPORT_PLUGIN (QWindowsIntegrationPlugin); -#endif -#ifdef __linux__ -Q_IMPORT_PLUGIN (QLinuxFbIntegrationPlugin); -Q_IMPORT_PLUGIN (QXcbIntegrationPlugin); -#endif -EOF - -CFLAGS="-std=gnu++11" - -echo "compiling reference to plugins" -$host-g++ $CFLAGS \ - $(pkg-config-cross --cflags Qt5Core) \ - -c obj/plugins.cpp \ - -o obj/plugins.o - -CFLAGS="$CFLAGS -g -I. $(pkg-config-cross --cflags Qt5Widgets)" -LIBS="$(pkg-config-cross --libs Qt5Widgets)" -LDFLAGS="" - -if [ $os = "windows" ]; then - CFLAGS="-mwindows $CFLAGS" -fi - -echo "compiling dynamiclayouts" -$qtbase/bin/moc $examples/widgets/layouts/dynamiclayouts/dialog.h > moc/dynamiclayouts.cpp -$host-g++ $CFLAGS $LDFLAGS \ - $examples/widgets/layouts/dynamiclayouts/dialog.cpp \ - $examples/widgets/layouts/dynamiclayouts/main.cpp \ - moc/dynamiclayouts.cpp \ - obj/plugins.o \ - $LIBS -o bin/dynamiclayouts$exe_suffix - -echo "compiling rasterwindow" -$qtbase/bin/moc $examples/gui/rasterwindow/rasterwindow.h > moc/rasterwindow.cpp -$host-g++ $CFLAGS $LDFLAGS \ - $examples/gui/rasterwindow/rasterwindow.cpp \ - $examples/gui/rasterwindow/main.cpp \ - moc/rasterwindow.cpp \ - obj/plugins.o \ - $LIBS -o bin/rasterwindow$exe_suffix - -echo "compiling analogclock" -$host-g++ $CFLAGS $LDFLAGS \ - -I$examples/gui/rasterwindow/ \ - $examples/gui/analogclock/main.cpp \ - $examples/gui/rasterwindow/rasterwindow.cpp \ - moc/rasterwindow.cpp \ - obj/plugins.o \ - $LIBS -o bin/analogclock$exe_suffix - -# We haven't gotten OpenGL support to work on Linux yet (TODO) -if [ $os != "linux" ]; then - echo "compiling openglwindow" - $qtbase/bin/moc $examples/gui/openglwindow/openglwindow.h > moc/openglwindow.cpp - $host-g++ $CFLAGS $LDFLAGS \ - $examples/gui/openglwindow/main.cpp \ - $examples/gui/openglwindow/openglwindow.cpp \ - moc/openglwindow.cpp \ - obj/plugins.o \ - $LIBS -o bin/openglwindow$exe_suffix -fi - -# TODO: try to compile some stuff with $qtbase/bin/qmake too, make sure that works - -mkdir -p $out/bin - -for prog in analogclock dynamiclayouts openglwindow rasterwindow; do - if [ -f bin/$prog ]; then - $host-strip bin/$prog - cp bin/$prog $out/bin/ - fi -done - -if [ $os = "linux" ]; then - cp $dejavu/ttf/DejaVuSans.ttf $out/bin/ -fi diff --git a/nix/nixcrpkgs/pkgs/qt/find-x-libs.patch b/nix/nixcrpkgs/pkgs/qt/find-x-libs.patch deleted file mode 100644 index 73bd77005..000000000 --- a/nix/nixcrpkgs/pkgs/qt/find-x-libs.patch +++ /dev/null @@ -1,12 +0,0 @@ -diff -ur qtbase-opensource-src-5.9.2-orig/mkspecs/common/linux.conf qtbase-opensource-src-5.9.2/mkspecs/common/linux.conf ---- qtbase-opensource-src-5.9.2-orig/mkspecs/common/linux.conf 2017-10-26 08:10:12.922646692 -0700 -+++ qtbase-opensource-src-5.9.2/mkspecs/common/linux.conf 2017-10-26 21:44:37.695088447 -0700 -@@ -28,7 +28,7 @@ - - QMAKE_LIBS = - QMAKE_LIBS_DYNLOAD = -ldl --QMAKE_LIBS_X11 = -lXext -lX11 -lm -+QMAKE_LIBS_X11 = -lXext -lX11 -lxcb -lXau - QMAKE_LIBS_EGL = -lEGL - QMAKE_LIBS_OPENGL = -lGL - QMAKE_LIBS_OPENGL_ES2 = -lGLESv2 diff --git a/nix/nixcrpkgs/pkgs/qt/font-dir.patch b/nix/nixcrpkgs/pkgs/qt/font-dir.patch deleted file mode 100644 index ab8384764..000000000 --- a/nix/nixcrpkgs/pkgs/qt/font-dir.patch +++ /dev/null @@ -1,11 +0,0 @@ ---- 
qt-5.8.0-orig/src/gui/text/qplatformfontdatabase.cpp -+++ qt-5.8.0/src/gui/text/qplatformfontdatabase.cpp -@@ -396,7 +396,7 @@ - { - QString fontpath = QString::fromLocal8Bit(qgetenv("QT_QPA_FONTDIR")); - if (fontpath.isEmpty()) -- fontpath = QLibraryInfo::location(QLibraryInfo::LibrariesPath) + QLatin1String("/fonts"); -+ fontpath = QCoreApplication::applicationDirPath(); - - return fontpath; - } diff --git a/nix/nixcrpkgs/pkgs/qt/license_builder.sh b/nix/nixcrpkgs/pkgs/qt/license_builder.sh deleted file mode 100644 index f0fec2354..000000000 --- a/nix/nixcrpkgs/pkgs/qt/license_builder.sh +++ /dev/null @@ -1,151 +0,0 @@ -# Last updated for qtbase-opensource-src-5.8.0.tar.xz - -source $setup - -if [ "$version" != "5.9.6" ]; then - echo "You need to update the license fragment builder for Qt $version." - exit 1 -fi - -tar -xf $src -mv qtbase-* qtbase - -# Read the license files here instead of in the big string so it is a fatal -# error if any of them are missing. -license_qt=$(cat qtbase/LICENSE.LGPLv3) -cd qtbase/src/3rdparty -license_android=$(cat android/LICENSE) -license_angle1=$(cat angle/LICENSE) -license_angle2=$(cat angle/TRACEEVENT_LICENSE) -license_angle3=$(cat angle/SYSTEMINFO_LICENSE) -license_dc=$(cat double-conversion/LICENSE) -license_easing=$(cat easing/LICENSE) -license_forkfd=$(cat forkfd/LICENSE) -license_freebsd=$(cat freebsd/LICENSE) -license_freetype=$(cat freetype/docs/GPLv2.TXT) -license_gradle=$(cat gradle/LICENSE-GRADLEW.txt) -license_harfbuzz=$(cat harfbuzz/COPYING) -license_harfbuzz_ng=$(cat harfbuzz-ng/COPYING) -license_ia2=$(cat iaccessible2/LICENSE) -license_libjpeg=$(cat libjpeg/LICENSE) -license_libpng=$(cat libpng/LICENSE) -license_pcre2=$(cat pcre2/LICENCE) -license_pixman=$(cat pixman/LICENSE) -license_rfc6234=$(cat rfc6234/LICENSE) -license_sha3_1=$(cat sha3/BRG_ENDIAN_LICENSE) -license_sha3_2=$(cat sha3/CC0_LICENSE) -license_xcb=$(cat xcb/LICENSE) -license_xkbcommon=$(cat xkbcommon/COPYING) -license_zlib=$(cat zlib/LICENSE) - -cat > $out <Qt - -

-<p>
-  The Qt Toolkit is licensed under the
-  GNU Lesser General Public License Version 3 (LGPLv3) as shown below.
-</p>
-
-<pre>
-$license_qt
-</pre>
-
-<h3>Third-party components bundled with Qt</h3>
-
-<p>
-  This software might include code from third-party comoponents bundled with Qt.
-  The copyright notices of those components are reproduced below.
-</p>
-
-<pre>
-$license_android
-</pre>
-
-<pre>
-$license_angle1
-</pre>
-
-<pre>
-$license_angle2
-</pre>
-
-<pre>
-$license_angle3
-</pre>
-
-<pre>
-$license_dc
-</pre>
-
-<pre>
-$license_easing
-</pre>
-
-<pre>
-$license_forkfd
-</pre>
-
-<pre>
-$license_freebsd
-</pre>
-
-<pre>
-$license_freetype
-</pre>
-
-<pre>
-$license_gradle
-</pre>
-
-<pre>
-$license_harfbuzz
-</pre>
-
-<pre>
-$license_harfbuzz_ng
-</pre>
-
-<pre>
-$license_ia2
-</pre>
-
-<pre>
-$license_libjpeg
-</pre>
-
-<pre>
-$license_libpng
-</pre>
-
-<pre>
-$license_pcre2
-</pre>
-
-<pre>
-$license_pixman
-</pre>
-
-<pre>
-$license_rfc6234
-</pre>
-
-<pre>
-$license_sha3_1
-</pre>
-
-<pre>
-$license_sha3_2
-</pre>
-
-<pre>
-$license_xcb
-</pre>
-
-<pre>
-$license_xkbcommon
-</pre>
-
-<pre>
-$license_zlib
-</pre>
-
-EOF diff --git a/nix/nixcrpkgs/pkgs/qt/macos-config.patch b/nix/nixcrpkgs/pkgs/qt/macos-config.patch deleted file mode 100644 index de8c3a282..000000000 --- a/nix/nixcrpkgs/pkgs/qt/macos-config.patch +++ /dev/null @@ -1,167 +0,0 @@ -diff -ur qtbase-opensource-src-5.9.2-orig/mkspecs/common/clang.conf qtbase-opensource-src-5.9.2-mac/mkspecs/common/clang.conf ---- qtbase-opensource-src-5.9.2-orig/mkspecs/common/clang.conf 2017-11-03 20:37:01.001539490 -0700 -+++ qtbase-opensource-src-5.9.2-mac/mkspecs/common/clang.conf 2017-11-03 20:46:20.159382848 -0700 -@@ -4,8 +4,8 @@ - - QMAKE_COMPILER = gcc clang llvm # clang pretends to be gcc - --QMAKE_CC = clang --QMAKE_CXX = clang++ -+QMAKE_CC = $${CROSS_COMPILE}clang -+QMAKE_CXX = $${CROSS_COMPILE}clang++ - - QMAKE_LINK_C = $$QMAKE_CC - QMAKE_LINK_C_SHLIB = $$QMAKE_CC -diff -ur qtbase-opensource-src-5.9.2-orig/mkspecs/common/clang-mac.conf qtbase-opensource-src-5.9.2-mac/mkspecs/common/clang-mac.conf ---- qtbase-opensource-src-5.9.2-orig/mkspecs/common/clang-mac.conf 2017-11-03 20:37:01.001539490 -0700 -+++ qtbase-opensource-src-5.9.2-mac/mkspecs/common/clang-mac.conf 2017-11-03 20:55:13.878575754 -0700 -@@ -6,8 +6,6 @@ - - QMAKE_XCODE_GCC_VERSION = com.apple.compilers.llvm.clang.1_0 - --QMAKE_CXXFLAGS += -stdlib=libc++ --QMAKE_LFLAGS += -stdlib=libc++ - QMAKE_AR_LTCG = libtool -static -o - - QMAKE_CFLAGS_APPLICATION_EXTENSION = -fapplication-extension -diff -ur qtbase-opensource-src-5.9.2-orig/mkspecs/common/mac.conf qtbase-opensource-src-5.9.2-mac/mkspecs/common/mac.conf ---- qtbase-opensource-src-5.9.2-orig/mkspecs/common/mac.conf 2017-11-03 20:37:01.001539490 -0700 -+++ qtbase-opensource-src-5.9.2-mac/mkspecs/common/mac.conf 2017-11-03 22:03:30.960602142 -0700 -@@ -35,10 +35,10 @@ - - QMAKE_ACTOOL = actool - --QMAKE_DSYMUTIL = dsymutil --QMAKE_STRIP = strip -+QMAKE_DSYMUTIL = $${CROSS_COMPILE}dsymutil -+QMAKE_STRIP = $${CROSS_COMPILE}strip - QMAKE_STRIPFLAGS_LIB += -S -x - --QMAKE_AR = ar cq --QMAKE_RANLIB = ranlib -s --QMAKE_NM = nm -P -+QMAKE_AR = $${CROSS_COMPILE}ar cq -+QMAKE_RANLIB = $${CROSS_COMPILE}ranlib -s -+QMAKE_NM = $${CROSS_COMPILE}nm -P -diff -ur qtbase-opensource-src-5.9.2-orig/mkspecs/features/mac/default_post.prf qtbase-opensource-src-5.9.2-mac/mkspecs/features/mac/default_post.prf ---- qtbase-opensource-src-5.9.2-orig/mkspecs/features/mac/default_post.prf 2017-11-03 20:37:01.008206202 -0700 -+++ qtbase-opensource-src-5.9.2-mac/mkspecs/features/mac/default_post.prf 2017-11-03 21:06:25.247871399 -0700 -@@ -2,29 +2,6 @@ - - !no_objective_c:CONFIG += objective_c - --qt { -- qtConfig(static) { -- # C++11 support means using libc++ instead of libstd++. As the -- # two libraries are incompatible we need to ensure the end user -- # project is built using the same C++11 support/no support as Qt. -- qtConfig(c++11) { -- CONFIG += c++11 -- } else: c++11 { -- warning("Qt was not built with C++11 enabled, disabling feature") -- CONFIG -= c++11 -- } -- -- !c++11 { -- # Explicitly use libstdc++ if C++11 support is not enabled, -- # as otherwise the compiler will choose the standard library -- # based on the deployment target, which for iOS 7 and OS X 10.9 -- # is libc++, and we can't mix and match the two. -- QMAKE_CXXFLAGS += -stdlib=libstdc++ -- QMAKE_LFLAGS += -stdlib=libstdc++ -- } -- } --} -- - # Add the same default rpaths as Xcode does for new projects. - # This is especially important for iOS/tvOS/watchOS where no other option is possible. 
- !no_default_rpath { -@@ -89,10 +66,6 @@ - - arch_flags = $(EXPORT_ARCH_ARGS) - -- QMAKE_CFLAGS += $$arch_flags -- QMAKE_CXXFLAGS += $$arch_flags -- QMAKE_LFLAGS += $$arch_flags -- - QMAKE_PCH_ARCHS = $$VALID_ARCHS - - macos: deployment_target = $$QMAKE_MACOSX_DEPLOYMENT_TARGET -@@ -149,9 +122,6 @@ - else: \ - version_identifier = $$device.deployment_identifier - version_min_flag = -m$${version_identifier}-version-min=$$deployment_target -- QMAKE_CFLAGS += -isysroot $$QMAKE_MAC_SDK_PATH $$version_min_flag -- QMAKE_CXXFLAGS += -isysroot $$QMAKE_MAC_SDK_PATH $$version_min_flag -- QMAKE_LFLAGS += -Wl,-syslibroot,$$QMAKE_MAC_SDK_PATH $$version_min_flag - } - - # Enable precompiled headers for multiple architectures -diff -ur qtbase-opensource-src-5.9.2-orig/mkspecs/features/mac/default_pre.prf qtbase-opensource-src-5.9.2-mac/mkspecs/features/mac/default_pre.prf ---- qtbase-opensource-src-5.9.2-orig/mkspecs/features/mac/default_pre.prf 2017-11-03 20:37:01.008206202 -0700 -+++ qtbase-opensource-src-5.9.2-mac/mkspecs/features/mac/default_pre.prf 2017-11-03 20:46:20.159382848 -0700 -@@ -1,43 +1,6 @@ - CONFIG = asset_catalogs rez $$CONFIG - load(default_pre) - --isEmpty(QMAKE_XCODE_DEVELOPER_PATH) { -- # Get path of Xcode's Developer directory -- QMAKE_XCODE_DEVELOPER_PATH = $$system("/usr/bin/xcode-select --print-path 2>/dev/null") -- isEmpty(QMAKE_XCODE_DEVELOPER_PATH): \ -- error("Xcode path is not set. Please use xcode-select to choose Xcode installation path.") -- -- # Make sure Xcode path is valid -- !exists($$QMAKE_XCODE_DEVELOPER_PATH): \ -- error("Xcode is not installed in $${QMAKE_XCODE_DEVELOPER_PATH}. Please use xcode-select to choose Xcode installation path.") --} -- --isEmpty(QMAKE_XCODEBUILD_PATH): \ -- QMAKE_XCODEBUILD_PATH = $$system("/usr/bin/xcrun -find xcodebuild 2>/dev/null") -- --!isEmpty(QMAKE_XCODEBUILD_PATH) { -- # Make sure Xcode is set up properly -- !system("/usr/bin/xcrun xcodebuild -license check 2>/dev/null"): \ -- error("Xcode not set up properly. 
You need to confirm the license agreement by running 'sudo xcrun xcodebuild -license accept'.") -- -- isEmpty(QMAKE_XCODE_VERSION) { -- # Extract Xcode version using xcodebuild -- xcode_version = $$system("/usr/bin/xcrun xcodebuild -version") -- QMAKE_XCODE_VERSION = $$member(xcode_version, 1) -- isEmpty(QMAKE_XCODE_VERSION): error("Could not resolve Xcode version.") -- unset(xcode_version) -- } --} -- --isEmpty(QMAKE_TARGET_BUNDLE_PREFIX) { -- QMAKE_XCODE_PREFERENCES_FILE = $$(HOME)/Library/Preferences/com.apple.dt.Xcode.plist -- exists($$QMAKE_XCODE_PREFERENCES_FILE): \ -- QMAKE_TARGET_BUNDLE_PREFIX = $$system("/usr/libexec/PlistBuddy -c 'print IDETemplateOptions:bundleIdentifierPrefix' $$QMAKE_XCODE_PREFERENCES_FILE 2>/dev/null") -- -- !isEmpty(_QMAKE_CACHE_):!isEmpty(QMAKE_TARGET_BUNDLE_PREFIX): \ -- cache(QMAKE_TARGET_BUNDLE_PREFIX) --} -- - QMAKE_ASSET_CATALOGS_APP_ICON = AppIcon - - # Make the default debug info format for static debug builds -diff -ur qtbase-opensource-src-5.9.2-orig/mkspecs/features/mac/sdk.prf qtbase-opensource-src-5.9.2-mac/mkspecs/features/mac/sdk.prf ---- qtbase-opensource-src-5.9.2-orig/mkspecs/features/mac/sdk.prf 2017-11-03 20:37:01.008206202 -0700 -+++ qtbase-opensource-src-5.9.2-mac/mkspecs/features/mac/sdk.prf 2017-11-03 20:46:20.159382848 -0700 -@@ -18,7 +18,7 @@ - sdk = $$QMAKE_MAC_SDK - - isEmpty(QMAKE_MAC_SDK.$${sdk}.$${info}) { -- QMAKE_MAC_SDK.$${sdk}.$${info} = $$system("/usr/bin/xcrun --sdk $$sdk $$info 2>/dev/null") -+ QMAKE_MAC_SDK.$${sdk}.$${info} = $$system("xcrun --sdk $$sdk $$info 2>/dev/null") - # --show-sdk-platform-path won't work for Command Line Tools; this is fine - # only used by the XCTest backend to testlib - isEmpty(QMAKE_MAC_SDK.$${sdk}.$${info}):if(!isEmpty(QMAKE_XCODEBUILD_PATH)|!equals(info, "--show-sdk-platform-path")): \ -@@ -50,7 +50,7 @@ - value = $$eval($$tool) - isEmpty(value): next() - -- sysrooted = $$system("/usr/bin/xcrun -sdk $$QMAKE_MAC_SDK -find $$first(value) 2>/dev/null") -+ sysrooted = $$system("xcrun -sdk $$QMAKE_MAC_SDK -find $$first(value) 2>/dev/null") - isEmpty(sysrooted): next() - - $$tool = $$sysrooted $$member(value, 1, -1) diff --git a/nix/nixcrpkgs/pkgs/qt/pc-debug-name.patch b/nix/nixcrpkgs/pkgs/qt/pc-debug-name.patch deleted file mode 100644 index 690e8bea7..000000000 --- a/nix/nixcrpkgs/pkgs/qt/pc-debug-name.patch +++ /dev/null @@ -1,33 +0,0 @@ -From 995313e0795df5500fd84350e80a3f88202b473d Mon Sep 17 00:00:00 2001 -From: Martchus -Date: Sun, 18 Sep 2016 14:01:14 +0200 -Subject: [PATCH 07/30] Prevent debug library names in pkg-config files - -qmake generates the pkgconfig .pc files two times, once for the -release build and once for the debug build (which we're not actually -building in this package). For both generations the exact same -pkgconfig file name is used. This causes references to the debug -build ending up in the .pc files which are unwanted -Prevent this from happening by giving the pkgconfig .pc -files for the debug build an unique file name. 
---- - qmake/generators/makefile.cpp | 3 +++ - 1 file changed, 3 insertions(+) - -diff --git a/qmake/generators/makefile.cpp b/qmake/generators/makefile.cpp -index 182fe79238..a762443fe2 100644 ---- a/qmake/generators/makefile.cpp -+++ b/qmake/generators/makefile.cpp -@@ -3164,6 +3164,9 @@ MakefileGenerator::pkgConfigFileName(bool fixify, bool onlyPrependDestdir) - if (dot != -1) - ret = ret.left(dot); - } -+ if (project->isActiveConfig("debug")) { -+ ret += "d"; -+ } - ret += Option::pkgcfg_ext; - QString subdir = project->first("QMAKE_PKGCONFIG_DESTDIR").toQString(); - if(!subdir.isEmpty()) { --- -2.11.1 - diff --git a/nix/nixcrpkgs/pkgs/qt/pkg-config-cross.patch b/nix/nixcrpkgs/pkgs/qt/pkg-config-cross.patch deleted file mode 100644 index 506df0ff8..000000000 --- a/nix/nixcrpkgs/pkgs/qt/pkg-config-cross.patch +++ /dev/null @@ -1,13 +0,0 @@ -diff -ur qt-orig/configure.pri qt/configure.pri ---- qt-orig/configure.pri 2017-07-27 18:16:48.205591390 -0700 -+++ qt/configure.pri 2017-07-29 13:11:08.957085166 -0700 -@@ -139,7 +139,8 @@ - } - } - -- $$qtConfEvaluate("features.cross_compile") { -+ qtLog("Blindly trusting this pkg-config to be valid."); -+ false { - # cross compiling, check that pkg-config is set up sanely - sysroot = $$config.input.sysroot - diff --git a/nix/nixcrpkgs/pkgs/qt/win32-link-object-max.patch b/nix/nixcrpkgs/pkgs/qt/win32-link-object-max.patch deleted file mode 100644 index c47279b2e..000000000 --- a/nix/nixcrpkgs/pkgs/qt/win32-link-object-max.patch +++ /dev/null @@ -1,16 +0,0 @@ -diff -ur qtbase-opensource-src-5.9.6-orig/mkspecs/win32-g++/qmake.conf qtbase-opensource-src-5.9.6/mkspecs/win32-g++/qmake.conf ---- qtbase-opensource-src-5.9.6-orig/mkspecs/win32-g++/qmake.conf 2018-06-19 12:41:49.061465695 -0700 -+++ qtbase-opensource-src-5.9.6/mkspecs/win32-g++/qmake.conf 2018-06-19 12:42:15.406453120 -0700 -@@ -54,10 +54,8 @@ - QMAKE_LFLAGS_WINDOWS = -Wl,-subsystem,windows - QMAKE_LFLAGS_DLL = -shared - QMAKE_LFLAGS_GCSECTIONS = -Wl,--gc-sections --equals(QMAKE_HOST.os, Windows) { -- QMAKE_LINK_OBJECT_MAX = 10 -- QMAKE_LINK_OBJECT_SCRIPT = object_script --} -+QMAKE_LINK_OBJECT_MAX = 10 -+QMAKE_LINK_OBJECT_SCRIPT = object_script - QMAKE_EXT_OBJ = .o - QMAKE_EXT_RES = _res.o - QMAKE_PREFIX_SHLIB = diff --git a/nix/nixcrpkgs/pkgs/qt/wrapper_builder.rb b/nix/nixcrpkgs/pkgs/qt/wrapper_builder.rb deleted file mode 100644 index 49b3efd8a..000000000 --- a/nix/nixcrpkgs/pkgs/qt/wrapper_builder.rb +++ /dev/null @@ -1,499 +0,0 @@ -require 'pathname' -require 'fileutils' -include FileUtils - -STDOUT.sync = true - -ENV['PATH'] = ENV.fetch('_PATH') - -Os = ENV.fetch('os') -QtVersionString = ENV.fetch('version') -QtVersionMajor = QtVersionString.split('.').first.to_i - -QtBaseDir = Pathname(ENV.fetch('qtbase')) - -OutDir = Pathname(ENV.fetch('out')) -OutPcDir = OutDir + 'lib' + 'pkgconfig' -CMakeDir = OutDir + 'lib' + 'cmake' -OutIncDir = OutDir + 'include' -MocExe = OutDir + 'bin' + 'moc' -RccExe = OutDir + 'bin' + 'rcc' - -DepGraph = {} -DepGraphBack = {} - -DepInfo = {} -DepInfo.default_proc = proc do |hash, name| - hash[name] = find_dep_info(name) -end - -case Os -when "windows" - PrlPrefix = '' -else - PrlPrefix = 'lib' -end - -# Note: These dependencies just came from me fixing errors for specific -# programs. There are likely misisng dependencies in this graph, and there -# might be a few dependencies that could be safely removed because they are -# purely transitive. -def make_dep_graph - # High-level dependencies. 
- add_dep 'Qt5Widgets.x', 'libQt5Widgets.a' - add_dep 'Qt5Widgets.x', 'Qt5Gui.x' - add_dep 'Qt5Gui.x', 'Qt5GuiNoPlugins.x' - add_dep 'Qt5GuiNoPlugins.x', 'libQt5Gui.a' - add_dep 'Qt5GuiNoPlugins.x', 'Qt5Core.x' - add_dep 'Qt5Core.x', 'libQt5Core.a' - - # Include directories. - add_dep 'Qt5Core.x', '-I' + OutIncDir.to_s - add_dep 'Qt5Core.x', '-I' + (OutIncDir + 'QtCore').to_s - add_dep 'Qt5Gui.x', '-I' + (OutIncDir + 'QtGui').to_s - add_dep 'Qt5Widgets.x', '-I' + (OutIncDir + 'QtWidgets').to_s - - # Libraries that Qt depends on. - add_dep 'libQt5Widgets.a', 'libQt5Gui.a' - add_dep 'libQt5FontDatabaseSupport.a', 'libqtfreetype.a' - add_dep 'libQt5Gui.a', 'libQt5Core.a' - add_dep 'libQt5Gui.a', 'libqtlibpng.a' - add_dep 'libQt5Gui.a', 'libqtharfbuzz.a' - add_dep 'libQt5Core.a', 'libqtpcre2.a' - - if Os == 'windows' - add_dep 'Qt5Gui.x', 'qwindows.x' - add_dep 'qwindows.x', 'libqwindows.a' - - add_dep 'libqwindows.a', '-ldwmapi' - add_dep 'libqwindows.a', '-limm32' - add_dep 'libqwindows.a', '-loleaut32' - add_dep 'libqwindows.a', 'libQt5Gui.a' - add_dep 'libqwindows.a', 'libQt5EventDispatcherSupport.a' - add_dep 'libqwindows.a', 'libQt5FontDatabaseSupport.a' - add_dep 'libqwindows.a', 'libQt5ThemeSupport.a' - - add_dep 'libQt5Core.a', '-lole32' - add_dep 'libQt5Core.a', '-luuid' - add_dep 'libQt5Core.a', '-lversion' - add_dep 'libQt5Core.a', '-lwinmm' - add_dep 'libQt5Core.a', '-lws2_32' - - add_dep 'libQt5Gui.a', '-lopengl32' - - add_dep 'libQt5Widgets.a', '-luxtheme' - end - - if Os == 'linux' - add_dep 'Qt5Gui.x', 'qlinuxfb.x' - add_dep 'Qt5Gui.x', 'qxcb.x' - add_dep 'qlinuxfb.x', 'libqlinuxfb.a' - add_dep 'qxcb.x', 'libqxcb.a' - - add_dep 'libqlinuxfb.a', 'libQt5FbSupport.a' - add_dep 'libqlinuxfb.a', 'libQt5InputSupport.a' - - add_dep 'libqxcb.a', 'libQt5XcbQpa.a' - - add_dep 'libQt5DBus.a', 'libQt5Core.a' - add_dep 'libQt5DBus.a', 'libQt5Gui.a' - add_dep 'libQt5DeviceDiscoverySupport.a', 'libudev.pc' - add_dep 'libQt5InputSupport.a', 'libQt5DeviceDiscoverySupport.a' - add_dep 'libQt5LinuxAccessibilitySupport.a', 'libQt5AccessibilitySupport.a' - add_dep 'libQt5LinuxAccessibilitySupport.a', 'libQt5DBus.a' - add_dep 'libQt5LinuxAccessibilitySupport.a', 'xcb-aux.pc' - add_dep 'libQt5ThemeSupport.a', 'libQt5DBus.a' - - add_dep 'libQt5XcbQpa.a', 'libQt5EventDispatcherSupport.a' - add_dep 'libQt5XcbQpa.a', 'libQt5FontDatabaseSupport.a' - add_dep 'libQt5XcbQpa.a', 'libQt5Gui.a' - add_dep 'libQt5XcbQpa.a', 'libQt5LinuxAccessibilitySupport.a' - add_dep 'libQt5XcbQpa.a', 'libQt5ServiceSupport.a' - add_dep 'libQt5XcbQpa.a', 'libQt5ThemeSupport.a' - add_dep 'libQt5XcbQpa.a', 'x11.pc' - add_dep 'libQt5XcbQpa.a', 'x11-xcb.pc' - add_dep 'libQt5XcbQpa.a', 'xcb.pc' - add_dep 'libQt5XcbQpa.a', 'xcb-icccm.pc' - add_dep 'libQt5XcbQpa.a', 'xcb-image.pc' - add_dep 'libQt5XcbQpa.a', 'xcb-keysyms.pc' - add_dep 'libQt5XcbQpa.a', 'xcb-randr.pc' - add_dep 'libQt5XcbQpa.a', 'xcb-renderutil.pc' - add_dep 'libQt5XcbQpa.a', 'xcb-shape.pc' - add_dep 'libQt5XcbQpa.a', 'xcb-shm.pc' - add_dep 'libQt5XcbQpa.a', 'xcb-sync.pc' - add_dep 'libQt5XcbQpa.a', 'xcb-xfixes.pc' - add_dep 'libQt5XcbQpa.a', 'xcb-xinerama.pc' - add_dep 'libQt5XcbQpa.a', 'xcb-xkb.pc' - add_dep 'libQt5XcbQpa.a', 'xi.pc' - end - - if Os == 'macos' - add_dep 'Qt5Gui.x', 'qcocoa.x' - add_dep 'qcocoa.x', 'libqcocoa.a' - - add_dep 'libqcocoa.a', 'libcocoaprintersupport.a' - add_dep 'libqcocoa.a', '-lcups' # Also available: -lcups.2 - add_dep 'libqcocoa.a', 'libQt5AccessibilitySupport.a' - add_dep 'libqcocoa.a', 'libQt5ClipboardSupport.a' - add_dep 
'libqcocoa.a', 'libQt5CglSupport.a' - add_dep 'libqcocoa.a', 'libQt5GraphicsSupport.a' - add_dep 'libqcocoa.a', 'libQt5FontDatabaseSupport.a' - add_dep 'libqcocoa.a', 'libQt5ThemeSupport.a' - add_dep 'libqcocoa.a', 'libQt5PrintSupport.a' - - add_dep 'libqtlibpng.a', '-lz' - - add_dep 'libQt5Core.a', '-lobjc' - add_dep 'libQt5Core.a', '-framework CoreServices' - add_dep 'libQt5Core.a', '-framework CoreText' - add_dep 'libQt5Gui.a', '-framework CoreGraphics' - add_dep 'libQt5Gui.a', '-framework OpenGL' - add_dep 'libQt5Widgets.a', '-framework Carbon' - add_dep 'libQt5Widgets.a', '-framework AppKit' - end - - add_deps_of_pc_files -end - -# Qt depends on some system libraries with .pc files. It tends to only depend -# on these things at link time, not compile time. So use pkg-config with --libs -# to get those dependencies, for use in .cmake files. -def add_deps_of_pc_files - DepGraph.keys.each do |dep| - next if determine_dep_type(dep) != :pc - name = dep.chomp('.pc') - new_deps = `pkg-config-cross --libs #{name}`.split(' ') - raise "Failed to #{dep} libs" if $?.exitstatus != 0 - new_deps.each do |new_dep| - add_dep dep, new_dep - end - end -end - -def add_dep(library, *deps) - a = DepGraph[library] ||= [] - DepGraphBack[library] ||= [] - deps.each do |dep| - DepGraph[dep] ||= [] - a << dep unless a.include? dep - (DepGraphBack[dep] ||= []) << library - end -end - -# Given a name of a dep in the graph, figure out what kind of dep -# it use. -def determine_dep_type(name) - extension = Pathname(name).extname - case - when extension == '.a' then :a - when extension == '.pc' then :pc - when extension == '.x' then :x - when name.start_with?('-I') then :incdirflag - when name.start_with?('-L') then :libdirflag - when name.start_with?('-l') then :ldflag - when name.start_with?('-framework') then :ldflag - end -end - -def find_pkg_config_file(name) - ENV.fetch('PKG_CONFIG_CROSS_PATH').split(':').each do |dir| - path = Pathname(dir) + name - return path if path.exist? - end - nil -end - -def find_qt_library(name) - debug_name = Pathname(name).sub_ext("d.a").to_s - - search_dirs = [ OutDir + 'lib' ] + - (OutDir + 'plugins').children - - search_dirs.each do |dir| - lib = dir + name - return lib if lib.exist? - end - - search_dirs.each do |dir| - lib = dir + debug_name - return lib if lib.exist? - end - - nil -end - -def find_dep_info(name) - case determine_dep_type(name) - when :a then find_qt_library(name) - when :pc then find_pkg_config_file(name) - end -end - -# Given an array of dependencies and a block for retrieving dependencies of an -# dependency, returns an array of dependencies with three guarantees: -# -# 1) Contains all the listed dependencies. -# 2) Has no duplicates. -# 3) For any dependency in the list, all of its dependencies are before it. -# -# Guarantee 3 only holds if the underlying graph has no circul dependencies. If -# there is a circular dependency, it will not be detected, but it will not cause -# an infinite loop either. -def flatten_deps(deps) - work = [].concat(deps) - expanded = {} - output = {} - while !work.empty? - dep = work.last - if expanded[dep] - output[dep] = true - work.pop - else - expanded[dep] = true - deps = yield dep - work.concat(deps) - end - end - output.keys # relies on Ruby's ordered hashes -end - -def canonical_x_file(dep) - return nil if determine_dep_type(dep) != :a - x_files = DepGraphBack.fetch(dep).select do |name| - determine_dep_type(name) == :x - end - if x_files.size > 2 - raise "There is more than one .x file #{dep}." 
- end - x_files.first -end - -# Note: It would be nice to find some solution so that Qt5Widgets.pc does not -# require Qt5GuiNoPlugins, since it already requires Qt5Gui. -def flatten_deps_for_pc_file(pc_file) - flatten_deps(DepGraph[pc_file]) do |dep| - deps = case determine_dep_type(dep) - when :x, :pc then - # Don't expand dependencies for a .pc file because we can just - # refer to them with the Requires line in our .pc file. - [] - else DepGraph.fetch(dep) - end - - # Replace .a files with a canonical .x file if there is one. - deps.map do |name| - substitute = canonical_x_file(name) - substitute = nil if substitute == pc_file - substitute || name - end - end -end - -def flatten_deps_for_cmake_file(cmake_file) - flatten_deps(DepGraph[cmake_file]) do |dep| - DepGraph.fetch(dep) - end -end - -def create_pc_file(name) - requires = [] - libdirs = [] - ldflags = [] - cflags = [] - - deps = flatten_deps_for_pc_file(name) - - deps.each do |dep| - dep = dep.dup - case determine_dep_type(dep) - when :a then - full_path = DepInfo[dep] - raise "Could not find library: #{dep}" if !full_path - libdir = full_path.dirname.to_s - libdir.sub!((OutDir + 'lib').to_s, '${libdir}') - libdir.sub!(OutDir.to_s, '${prefix}') - libname = full_path.basename.to_s - libname.sub!(/\Alib/, '') - libname.sub!(/.a\Z/, '') - libdirs << "-L#{libdir}" - ldflags << "-l#{libname}" - when :x then - dep.chomp!('.x') - requires << dep - when :pc then - dep.chomp!('.pc') - requires << dep - when :ldflag then - ldflags << dep - when :libdirflag then - libdirs << dep - when :incdirflag then - dep.sub!(OutIncDir.to_s, '${includedir}') - cflags << dep - end - end - - r = "" - r << "prefix=#{OutDir}\n" - r << "libdir=${prefix}/lib\n" - r << "includedir=${prefix}/include\n" - r << "Version: #{QtVersionString}\n" - if !libdirs.empty? || !ldflags.empty? - r << "Libs: #{libdirs.reverse.uniq.join(' ')} #{ldflags.reverse.join(' ')}\n" - end - if !cflags.empty? - r << "Cflags: #{cflags.join(' ')}\n" - end - if !requires.empty? - r << "Requires: #{requires.sort.join(' ')}\n" - end - - path = OutPcDir + Pathname(name).sub_ext(".pc") - File.open(path.to_s, 'w') do |f| - f.write r - end -end - -# For .pc files we depend on, add symlinks to the .pc file and any other .pc -# files in the same directory which might be transitive dependencies. -def symlink_pc_file_closure(name) - dep_pc_dir = DepInfo[name].dirname - dep_pc_dir.each_child do |target| - link = OutPcDir + target.basename - - # Skip it if we already made this link. - next if link.symlink? - - # Link directly to the real PC file. 
- target = target.realpath - - ln_s target, link - end -end - -def create_pc_files - mkdir OutPcDir - DepGraph.each_key do |name| - case determine_dep_type(name) - when :x then create_pc_file(name) - when :pc then symlink_pc_file_closure(name) - end - end -end - -def set_property(f, target_name, property_name, value) - if value.is_a?(Array) - value = value.map do |entry| - if entry.to_s.include?(' ') - "\"#{entry}\"" - else - entry - end - end.join(' ') - end - - f.puts "set_property(TARGET #{target_name} " \ - "PROPERTY #{property_name} #{value})" -end - -def set_properties(f, target_name, properties) - properties.each do |property_name, value| - set_property(f, target_name, property_name, value) - end -end - -def import_static_lib(f, target_name, properties) - f.puts "add_library(#{target_name} STATIC IMPORTED)" - set_properties(f, target_name, properties) -end - -def create_cmake_core_files - File.open(CMakeDir + 'core.cmake', 'w') do |f| - f.puts "set(QT_VERSION_MAJOR #{QtVersionMajor})" - f.puts - - f.puts "set(QT_MOC_EXECUTABLE #{MocExe})" - f.puts "add_executable(Qt5::moc IMPORTED)" - f.puts "set_target_properties(Qt5::moc PROPERTIES " \ - "IMPORTED_LOCATION ${QT_MOC_EXECUTABLE})" - f.puts - - f.puts "add_executable(Qt5::rcc IMPORTED)" - f.puts "set_target_properties(Qt5::rcc PROPERTIES " \ - "IMPORTED_LOCATION #{RccExe})" - f.puts "set(Qt5Core_RCC_EXECUTABLE Qt5::rcc)" - f.puts - - f.write File.read(ENV.fetch('core_macros')) - end -end - -def create_cmake_qt5widgets - mkdir CMakeDir + 'Qt5Widgets' - - widgets_a = find_qt_library('libQt5Widgets.a') || raise - - deps = flatten_deps_for_cmake_file('Qt5Widgets.x') - - incdirs = [] - libdirflags = [] - ldflags = [] - deps.each do |dep| - dep = dep.dup - case determine_dep_type(dep) - when :a then - full_path = DepInfo[dep] - raise "Could not find library: #{dep}" if !full_path - libdir = full_path.dirname.to_s - libname = full_path.basename.to_s - libname.sub!(/\Alib/, '') - libname.sub!(/.a\Z/, '') - libdirflags << "-L#{libdir}" - ldflags << "-l#{libname}" - when :ldflag then - ldflags << dep - when :libdirflag then - libdirflags << dep - when :incdirflag then - incdir = dep.sub(/\A-I/, '') - incdirs << incdir - end - end - - File.open(CMakeDir + 'Qt5Widgets' + 'Qt5WidgetsConfig.cmake', 'w') do |f| - import_static_lib f, 'Qt5::Widgets', - IMPORTED_LOCATION: widgets_a, - IMPORTED_LINK_INTERFACE_LANGUAGES: 'CXX', - INTERFACE_LINK_LIBRARIES: libdirflags.reverse.uniq + ldflags.reverse, - INTERFACE_INCLUDE_DIRECTORIES: incdirs, - INTERFACE_COMPILE_DEFINITIONS: 'QT_STATIC' - - f.puts "include(#{CMakeDir + 'core.cmake'})" - end -end - -def main - # Symlink the include, bin, and plugins directories into $out. - mkdir OutDir - ln_s QtBaseDir + 'include', OutDir + 'include' - ln_s QtBaseDir + 'bin', OutDir + 'bin' - ln_s QtBaseDir + 'plugins', OutDir + 'plugins' - ln_s QtBaseDir + 'src', OutDir + 'src' - - # Symlink the .a files and copy the .prl files into $out/lib. - mkdir OutDir + 'lib' - (QtBaseDir + 'lib').each_child do |c| - ln_s c, OutDir + 'lib' if c.extname == '.a' - cp c, OutDir + 'lib' if c.extname == '.prl' - end - - make_dep_graph - - create_pc_files - - mkdir CMakeDir - create_cmake_core_files - create_cmake_qt5widgets -end - -main diff --git a/nix/nixcrpkgs/pkgs/readline/builder.sh b/nix/nixcrpkgs/pkgs/readline/builder.sh deleted file mode 100644 index defaa8b7d..000000000 --- a/nix/nixcrpkgs/pkgs/readline/builder.sh +++ /dev/null @@ -1,32 +0,0 @@ -source $setup - -# This is from the mingw-w64-readline AUR arch package. 
-export bash_cv_wcwidth_broken=no - -tar -xf $src - -cd readline-$version -for patch in $patches_p2; do - echo applying patch $patch - patch -p2 -i $patch -done -for patch in $patches; do - echo applying patch $patch - patch -p1 -i $patch -done -cd .. - -mkdir build -cd build - -../readline-$version/configure \ - --prefix=$out --host=$host \ - --enable-static --disable-shared \ - --with-curses=$curses - -make - -make install - -mkdir $out/license -cp ../readline-$version/COPYING $out/license/LICENSE diff --git a/nix/nixcrpkgs/pkgs/readline/default.nix b/nix/nixcrpkgs/pkgs/readline/default.nix deleted file mode 100644 index b1369e70f..000000000 --- a/nix/nixcrpkgs/pkgs/readline/default.nix +++ /dev/null @@ -1,41 +0,0 @@ -# Note: This has only been tested on Windows, and is using pdcurses -# which only seems to work on Windows. - -{ crossenv, curses }: - -let - fetchurl = crossenv.nixpkgs.fetchurl; -in -crossenv.make_derivation rec { - name = "readline-${version}"; - - version = "7.0"; - - src = fetchurl { - url = "mirror://gnu/readline/readline-${version}.tar.gz"; - sha256 = "0d13sg9ksf982rrrmv5mb6a2p4ys9rvg9r71d6il0vr8hmql63bm"; - }; - - patches_p2 = [ - (fetchurl { - url = "mirror://gnu/readline/readline-7.0-patches/readline70-001"; - sha256 = "0xm3sxvwmss7ddyfb11n6pgcqd1aglnpy15g143vzcf75snb7hcs"; - }) - (fetchurl { - url = "mirror://gnu/readline/readline-7.0-patches/readline70-002"; - sha256 = "0n1dxmqsbjgrfxb1hgk5c6lsraw4ncbnzxlsx7m35nym6lncjiw7"; - }) - (fetchurl { - url = "mirror://gnu/readline/readline-7.0-patches/readline70-003"; - sha256 = "1027kmymniizcy0zbdlrczxfx3clxcdln5yq05q9yzlc6y9slhwy"; - }) - ]; - - patches = [ - ./readline-1.patch - ]; - - builder = ./builder.sh; - - inherit curses; -} diff --git a/nix/nixcrpkgs/pkgs/readline/readline-1.patch b/nix/nixcrpkgs/pkgs/readline/readline-1.patch deleted file mode 100644 index 52938b804..000000000 --- a/nix/nixcrpkgs/pkgs/readline/readline-1.patch +++ /dev/null @@ -1,171 +0,0 @@ -We got this patch from the mingw-w64-readline AUR Arch package. - -This patch originall comes from MXE, and is licensed under the MIT license. - -https://github.com/mxe/mxe/blob/master/src/readline-1.patch - -Copyright (c) 2007-2016 - -Volker Diels-Grabsch -Mark Brand -Tony Theodore -Martin Gerhardy -Tiancheng "Timothy" Gu -Boris Nagaev -... and many other contributors -(contact via the project mailing list or issue tracker) - -Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated -documentation files (the "Software"), to deal in the Software without restriction, including without limitation -the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and -to permit persons to whom the Software is furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all copies or substantial portions of -the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO -THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF -CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER -DEALINGS IN THE SOFTWARE. 
- -From 6896ffa4fc85bf0dfae58e69a860d2076c1d9fd2 Mon Sep 17 00:00:00 2001 -From: Timothy Gu -Date: Tue, 30 Sep 2014 17:16:32 -0700 -Subject: [PATCH 2/2] Handle missing S_IS* macros more gracefully - -diff --git a/colors.c b/colors.c -index 89d9035..ec19844 100644 ---- a/colors.c -+++ b/colors.c -@@ -152,14 +152,22 @@ _rl_print_color_indicator (char *f) - { - colored_filetype = C_FILE; - -+#if defined (S_ISUID) - if ((mode & S_ISUID) != 0 && is_colored (C_SETUID)) - colored_filetype = C_SETUID; -- else if ((mode & S_ISGID) != 0 && is_colored (C_SETGID)) -+ else -+#endif -+#if defined (S_ISGID) -+ if ((mode & S_ISGID) != 0 && is_colored (C_SETGID)) - colored_filetype = C_SETGID; -- else if (is_colored (C_CAP) && 0) //f->has_capability) -+ else -+#endif -+ if (is_colored (C_CAP) && 0) //f->has_capability) - colored_filetype = C_CAP; -+#if defined(S_IXUGO) - else if ((mode & S_IXUGO) != 0 && is_colored (C_EXEC)) - colored_filetype = C_EXEC; -+#endif - else if ((1 < astat.st_nlink) && is_colored (C_MULTIHARDLINK)) - colored_filetype = C_MULTIHARDLINK; - } -@@ -173,8 +181,10 @@ _rl_print_color_indicator (char *f) - colored_filetype = C_STICKY_OTHER_WRITABLE; - else - #endif -+#if defined (S_IWOTH) - if ((mode & S_IWOTH) != 0 && is_colored (C_OTHER_WRITABLE)) - colored_filetype = C_OTHER_WRITABLE; -+#endif - #if defined (S_ISVTX) - else if ((mode & S_ISVTX) != 0 && is_colored (C_STICKY)) - colored_filetype = C_STICKY; -diff --git a/colors.h b/colors.h -index fc926e5..e62edd0 100644 ---- a/colors.h -+++ b/colors.h -@@ -96,7 +96,7 @@ enum indicator_no - }; - - --#if !S_IXUGO -+#if !S_IXUGO && defined(S_IXUSR) && defined(S_IXGRP) && defined(S_IXOTH) - # define S_IXUGO (S_IXUSR | S_IXGRP | S_IXOTH) - #endif - -diff --git a/posixstat.h b/posixstat.h -index 3eb7f29..854a2c9 100644 ---- a/posixstat.h -+++ b/posixstat.h -@@ -78,30 +78,44 @@ - - #if defined (S_IFBLK) && !defined (S_ISBLK) - #define S_ISBLK(m) (((m)&S_IFMT) == S_IFBLK) /* block device */ -+#elif !defined (S_IFBLK) -+#define S_ISBLK(m) 0 - #endif - - #if defined (S_IFCHR) && !defined (S_ISCHR) - #define S_ISCHR(m) (((m)&S_IFMT) == S_IFCHR) /* character device */ -+#elif !defined (S_IFCHR) -+#define S_ISCHR(m) 0 - #endif - - #if defined (S_IFDIR) && !defined (S_ISDIR) - #define S_ISDIR(m) (((m)&S_IFMT) == S_IFDIR) /* directory */ -+#elif !defined (S_IFDIR) -+#define S_ISDIR(m) 0 - #endif - - #if defined (S_IFREG) && !defined (S_ISREG) - #define S_ISREG(m) (((m)&S_IFMT) == S_IFREG) /* file */ -+#elif !defined (S_IFREG) -+#define S_ISREG(m) 0 - #endif - - #if defined (S_IFIFO) && !defined (S_ISFIFO) - #define S_ISFIFO(m) (((m)&S_IFMT) == S_IFIFO) /* fifo - named pipe */ -+#elif !defined (S_IFIFO) -+#define S_ISFIFO(m) 0 - #endif - - #if defined (S_IFLNK) && !defined (S_ISLNK) - #define S_ISLNK(m) (((m)&S_IFMT) == S_IFLNK) /* symbolic link */ -+#elif !defined (S_IFLNK) -+#define S_ISLNK(m) 0 - #endif - - #if defined (S_IFSOCK) && !defined (S_ISSOCK) - #define S_ISSOCK(m) (((m)&S_IFMT) == S_IFSOCK) /* socket */ -+#elif !defined (S_IFSOCK) -+#define S_ISSOCK(m) 0 - #endif - - /* -@@ -137,6 +151,8 @@ - /* These are non-standard, but are used in builtins.c$symbolic_umask() */ - #define S_IRUGO (S_IRUSR | S_IRGRP | S_IROTH) - #define S_IWUGO (S_IWUSR | S_IWGRP | S_IWOTH) -+#if defined(S_IXUSR) && defined(S_IXGRP) && defined(S_IXOTH) - #define S_IXUGO (S_IXUSR | S_IXGRP | S_IXOTH) -+#endif - - #endif /* _POSIXSTAT_H_ */ --- -1.8.3.2 - -diff --git a/histfile.c b/histfile.c ---- a/histfile.c -+++ b/histfile.c -@@ -610,8 +610,6 @@ - user is running 
this, it's a no-op. If the shell is running after sudo - with a shared history file, we don't want to leave the history file - owned by root. */ -- if (rv == 0 && exists) -- r = chown (filename, finfo.st_uid, finfo.st_gid); - - xfree (filename); - FREE (tempname); -@@ -757,8 +755,6 @@ - user is running this, it's a no-op. If the shell is running after sudo - with a shared history file, we don't want to leave the history file - owned by root. */ -- if (rv == 0 && exists) -- mode = chown (histname, finfo.st_uid, finfo.st_gid); - - FREE (histname); - FREE (tempname); diff --git a/nix/nixcrpkgs/pkgs/tic/builder.sh b/nix/nixcrpkgs/pkgs/tic/builder.sh deleted file mode 100644 index 5052b2583..000000000 --- a/nix/nixcrpkgs/pkgs/tic/builder.sh +++ /dev/null @@ -1,15 +0,0 @@ -source $setup - -tar -xf $src -mv pololu-tic-software-* tic - -mkdir build -cd build - -cmake-cross ../tic \ - -DCMAKE_INSTALL_PREFIX=$out \ - -DBUILD_SHARED_LIBS=false - -make - -make install diff --git a/nix/nixcrpkgs/pkgs/tic/default.nix b/nix/nixcrpkgs/pkgs/tic/default.nix deleted file mode 100644 index 6f870d911..000000000 --- a/nix/nixcrpkgs/pkgs/tic/default.nix +++ /dev/null @@ -1,16 +0,0 @@ -{ crossenv, qt, libusbp }: - -crossenv.make_derivation rec { - name = "tic-${version}"; - - version = "e1693cd"; # 1.5.0ish - - src = crossenv.nixpkgs.fetchurl { - url = "https://github.com/pololu/pololu-tic-software/archive/${version}.tar.gz"; - sha256 = "07m75w0walr61yqki7h1ipzbfz7x417g7qnx0p1l6qdz89fyc7i8"; - }; - - builder = ./builder.sh; - - cross_inputs = [ libusbp qt ]; -} diff --git a/nix/nixcrpkgs/pkgs/usbview/builder.sh b/nix/nixcrpkgs/pkgs/usbview/builder.sh deleted file mode 100644 index 3aa4cb901..000000000 --- a/nix/nixcrpkgs/pkgs/usbview/builder.sh +++ /dev/null @@ -1,31 +0,0 @@ -source $setup - -cp --no-preserve=mode -r $src/usb/usbview . - -cd usbview -rm usbschema.hpp xmlhelper.cpp -for patch in $patches; do - echo applying patch $patch - patch -p1 -i $patch -done -cp $my_xmlhelper_c . -cd .. 
- -mkdir build -cd build - -$host-windres ../usbview/uvcview.rc rc.o - -# TODO: after fixing bug with selectany in GCC, remove -DINITGUID - -$host-gcc -mwindows -std=gnu99 -O2 \ - -Iinclude \ - -DNTDDI_VERSION=0x06020000 -D_WIN32_WINNT=0x0602 \ - -DSTRSAFE_NO_DEPRECATE -Doffsetof=__builtin_offsetof \ - ../usbview/*.c rc.o \ - -lcomctl32 -lcomdlg32 -lsetupapi -lshell32 -lshlwapi -lole32 -lgdi32 \ - -o usbview.exe - -mkdir -p $out/bin $out/license -cp usbview.exe $out/bin -cp $src/LICENSE $out/license diff --git a/nix/nixcrpkgs/pkgs/usbview/default.nix b/nix/nixcrpkgs/pkgs/usbview/default.nix deleted file mode 100644 index 7d5260859..000000000 --- a/nix/nixcrpkgs/pkgs/usbview/default.nix +++ /dev/null @@ -1,22 +0,0 @@ -{ crossenv }: - -if crossenv.os != "windows" then "windows only" else - -crossenv.make_derivation rec { - name = "usbview-${version}"; - - version = "2017-05-01"; - - src = crossenv.nixpkgs.fetchFromGitHub { - owner = "Microsoft"; - repo = "Windows-driver-samples"; - rev = "4c5c5e0297c7a61e151f92af702cdac650a14489"; - sha256 = "1drq26bnad98xqn805qx0b6g4y65lmrdj7v40b3jhhzdsp8993pf"; - }; - - patches = [ ./megapatch.patch ]; - - my_xmlhelper_c = ./my_xmlhelper.c; - - builder = ./builder.sh; -} diff --git a/nix/nixcrpkgs/pkgs/usbview/megapatch.patch b/nix/nixcrpkgs/pkgs/usbview/megapatch.patch deleted file mode 100644 index fe3227aee..000000000 --- a/nix/nixcrpkgs/pkgs/usbview/megapatch.patch +++ /dev/null @@ -1,107 +0,0 @@ -diff -ur usbview-orig/usbdesc.h usbview/usbdesc.h ---- usbview-orig/usbdesc.h 2017-04-01 16:00:09.314007997 -0700 -+++ usbview/usbdesc.h 2017-04-01 16:10:23.667341332 -0700 -@@ -81,7 +81,7 @@ - #define USB_OTHER_SPEED_CONFIGURATION_DESCRIPTOR_TYPE 0x07 - #define USB_INTERFACE_POWER_DESCRIPTOR_TYPE 0x08 - #define USB_OTG_DESCRIPTOR_TYPE 0x09 --#define USB_DEBUG_DESCRIPTOR_TYPE 0x0A -+//#define USB_DEBUG_DESCRIPTOR_TYPE 0x0A - #define USB_IAD_DESCRIPTOR_TYPE 0x0B - - // -diff -ur usbview-orig/uvcdesc.h usbview/uvcdesc.h ---- usbview-orig/uvcdesc.h 2017-04-01 16:00:09.314007997 -0700 -+++ usbview/uvcdesc.h 2017-04-01 17:43:09.134007999 -0700 -@@ -15,7 +15,7 @@ - - - // USB Video Device Class Code --#define USB_DEVICE_CLASS_VIDEO 0x0E -+//#define USB_DEVICE_CLASS_VIDEO 0x0E - - // Video sub-classes - #define SUBCLASS_UNDEFINED 0x00 -diff -ur usbview-orig/uvcview.h usbview/uvcview.h ---- usbview-orig/uvcview.h 2017-04-01 16:00:09.314007997 -0700 -+++ usbview/uvcview.h 2017-04-03 20:25:08.145676664 -0700 -@@ -34,10 +33,10 @@ - #include - #include - #include --#include - #include - #include - #include -+#include - #include - #include - #include -@@ -50,6 +49,8 @@ - #include - #include - #include -+#include -+#include - - // This is mostly a private USB Audio descriptor header - #include "usbdesc.h" -@@ -381,7 +382,7 @@ - // ENUM.C - // - --PCHAR ConnectionStatuses[]; -+extern PCHAR ConnectionStatuses[]; - - // - // DISPVID.C -Only in usbview: uvcview.h.orig -diff -ur usbview-orig/uvcview.rc usbview/uvcview.rc ---- usbview-orig/uvcview.rc 2017-04-01 16:00:09.314007997 -0700 -+++ usbview/uvcview.rc 2017-04-01 16:04:07.210674665 -0700 -@@ -22,19 +22,19 @@ - // - // ICON - // --IDI_ICON ICON DISCARDABLE "USB.ICO" --IDI_BADICON ICON DISCARDABLE "BANG.ICO" --IDI_COMPUTER ICON DISCARDABLE "MONITOR.ICO" --IDI_HUB ICON DISCARDABLE "HUB.ICO" --IDI_NODEVICE ICON DISCARDABLE "PORT.ICO" --IDI_NOSSDEVICE ICON DISCARDABLE "SSPORT.ICO" --IDI_SSICON ICON DISCARDABLE "SSUSB.ICO" -+IDI_ICON ICON DISCARDABLE "usb.ico" -+IDI_BADICON ICON DISCARDABLE "bang.ico" -+IDI_COMPUTER ICON 
DISCARDABLE "monitor.ico" -+IDI_HUB ICON DISCARDABLE "hub.ico" -+IDI_NODEVICE ICON DISCARDABLE "port.ico" -+IDI_NOSSDEVICE ICON DISCARDABLE "ssport.ico" -+IDI_SSICON ICON DISCARDABLE "ssusb.ico" - - ////////////////////////////////////////////////////////////////////////////// - // - // Cursor - // --IDC_SPLIT CURSOR DISCARDABLE "SPLIT.CUR" -+IDC_SPLIT CURSOR DISCARDABLE "split.cur" - - ///////////////////////////////////////////////////////////////////////////// - // -@@ -84,7 +84,7 @@ - BEGIN - MENUITEM "&Refresh\tF5", ID_REFRESH - MENUITEM SEPARATOR -- MENUITEM "Save Current &View ..." ID_SAVE -+ MENUITEM "Save Current &View ...", ID_SAVE - MENUITEM "Save As (&txt) ...", ID_SAVEALL - MENUITEM "Save As (&xml) ...\tF2", ID_SAVEXML - MENUITEM SEPARATOR -@@ -130,7 +130,7 @@ - BEGIN - IDS_STANDARD_FONT "Courier" - IDS_STANDARD_FONT_HEIGHT "\13" -- IDS_STANDARD_FONT_WIDTH "\8" -+ IDS_STANDARD_FONT_WIDTH "\08" - END - - STRINGTABLE DISCARDABLE diff --git a/nix/nixcrpkgs/pkgs/usbview/my_xmlhelper.c b/nix/nixcrpkgs/pkgs/usbview/my_xmlhelper.c deleted file mode 100644 index 0cdf29140..000000000 --- a/nix/nixcrpkgs/pkgs/usbview/my_xmlhelper.c +++ /dev/null @@ -1,47 +0,0 @@ -#include "xmlhelper.h" - -EXTERN_C HRESULT InitXmlHelper() -{ - return 0; -} - -EXTERN_C HRESULT ReleaseXmlWriter() -{ - return 0; -} - -EXTERN_C HRESULT SaveXml(LPTSTR szfileName, DWORD dwCreationDisposition) -{ - MessageBox(NULL, - "Sorry, XML saving is not supported in this build.", - "XML not supported", - MB_OK | MB_ICONEXCLAMATION); - return 0; -} - -EXTERN_C HRESULT XmlAddHostController( - PSTR hcName, - PUSBHOSTCONTROLLERINFO hcInfo - ) -{ - return 0; -} - -EXTERN_C HRESULT XmlAddRootHub(PSTR rhName, PUSBROOTHUBINFO rhInfo) -{ - return 0; -} - -EXTERN_C HRESULT XmlAddExternalHub(PSTR ehName, PUSBEXTERNALHUBINFO ehInfo) -{ - return 0; -} - -EXTERN_C HRESULT XmlAddUsbDevice(PSTR devName, PUSBDEVICEINFO deviceInfo) -{ - return 0; -} - -EXTERN_C VOID XmlNotifyEndOfNodeList(PVOID pContext) -{ -} diff --git a/nix/nixcrpkgs/pkgs/xcb-proto/builder.sh b/nix/nixcrpkgs/pkgs/xcb-proto/builder.sh deleted file mode 100644 index e83c5bbb3..000000000 --- a/nix/nixcrpkgs/pkgs/xcb-proto/builder.sh +++ /dev/null @@ -1,14 +0,0 @@ -source $setup - -tar -xf $src -ls -mv xcb-proto-* xcb-proto - -mkdir build -cd build - -../xcb-proto/configure --prefix=$out - -make - -make install diff --git a/nix/nixcrpkgs/pkgs/xcb-proto/default.nix b/nix/nixcrpkgs/pkgs/xcb-proto/default.nix deleted file mode 100644 index b9cdc0b1f..000000000 --- a/nix/nixcrpkgs/pkgs/xcb-proto/default.nix +++ /dev/null @@ -1,28 +0,0 @@ -{ crossenv }: - -let - version = "1.12"; - - name = "xcb-proto-${version}"; - - src = crossenv.nixpkgs.fetchurl { - url = "https://xcb.freedesktop.org/dist/xcb-proto-${version}.tar.bz2"; - sha256 = "01j91946q8f34l1mbvmmgvyc393sm28ym4lxlacpiav4qsjan8jr"; - }; - - lib = crossenv.native.make_derivation rec { - inherit version name src; - builder = ./builder.sh; - native_inputs = [ crossenv.nixpkgs.python2 ]; - }; - - license = crossenv.native.make_derivation { - name = "${name}-license"; - inherit src; - builder = ./license_builder.sh; - }; - - license_set = { "${name}" = license; }; - -in - lib // { inherit license_set; } diff --git a/nix/nixcrpkgs/pkgs/xcb-proto/license_builder.sh b/nix/nixcrpkgs/pkgs/xcb-proto/license_builder.sh deleted file mode 100644 index 2ea711435..000000000 --- a/nix/nixcrpkgs/pkgs/xcb-proto/license_builder.sh +++ /dev/null @@ -1,14 +0,0 @@ -source $setup - -tar -xf $src -mv xcb-proto-* xcb-proto - -license=$(cat 
xcb-proto/COPYING) - -cat > $out <xcb-proto - -
-$license
-</pre>
-EOF diff --git a/nix/nixcrpkgs/pkgs/xcb-util-image/default.nix b/nix/nixcrpkgs/pkgs/xcb-util-image/default.nix deleted file mode 100644 index 14a5b5458..000000000 --- a/nix/nixcrpkgs/pkgs/xcb-util-image/default.nix +++ /dev/null @@ -1,40 +0,0 @@ -{ crossenv, libxcb, xcb-util }: - -let - version = "0.4.0"; - - name = "xcb-util-image-${version}"; - - src = crossenv.nixpkgs.fetchurl { - url = "https://xcb.freedesktop.org/dist/xcb-util-image-${version}.tar.bz2"; - sha256 = "1z1gxacg7q4cw6jrd26gvi5y04npsyavblcdad1xccc8swvnmf9d"; - }; - - lib = crossenv.make_derivation rec { - inherit version name src; - builder = ./util_image_builder.sh; - - configure_flags = - "--host=${crossenv.host} " + - "--enable-static " + - "--disable-shared"; - - cross_inputs = [ libxcb xcb-util ]; - - inherit libxcb; - libxcb_util = xcb-util; - }; - - license = crossenv.native.make_derivation { - name = "${name}-license"; - inherit src; - builder = ./license_builder.sh; - }; - - license_set = - libxcb.license_set // - xcb-util.license_set // - { "${name}" = license; }; - -in - lib // { inherit license_set; } diff --git a/nix/nixcrpkgs/pkgs/xcb-util-image/license_builder.sh b/nix/nixcrpkgs/pkgs/xcb-util-image/license_builder.sh deleted file mode 100644 index 9cc75651e..000000000 --- a/nix/nixcrpkgs/pkgs/xcb-util-image/license_builder.sh +++ /dev/null @@ -1,14 +0,0 @@ -source $setup - -tar -xf $src -mv xcb-util-image-* xcb-util-image - -license=$(cat xcb-util-image/COPYING) - -cat > $out <xcb-util-image - -
-$license
-</pre>
-EOF diff --git a/nix/nixcrpkgs/pkgs/xcb-util-image/util_image_builder.sh b/nix/nixcrpkgs/pkgs/xcb-util-image/util_image_builder.sh deleted file mode 100644 index 3b5d1e6cf..000000000 --- a/nix/nixcrpkgs/pkgs/xcb-util-image/util_image_builder.sh +++ /dev/null @@ -1,20 +0,0 @@ -source $setup - -tar -xf $src -mv xcb-* util - -mkdir build -cd build - -PKG_CONFIG=pkg-config-cross \ -../util/configure --prefix=$out $configure_flags - -make - -make install - -# xcb-util-image-0.4.0/image/xcb_image.c includes -echo "Requires: xcb-aux" >> $out/lib/pkgconfig/xcb-image.pc -ln -sf $libxcb/lib/pkgconfig/*.pc $out/lib/pkgconfig/ -ln -sf $libxcb_util/lib/pkgconfig/*.pc $out/lib/pkgconfig/ - diff --git a/nix/nixcrpkgs/pkgs/xcb-util-keysyms/default.nix b/nix/nixcrpkgs/pkgs/xcb-util-keysyms/default.nix deleted file mode 100644 index 6d5b8298f..000000000 --- a/nix/nixcrpkgs/pkgs/xcb-util-keysyms/default.nix +++ /dev/null @@ -1,37 +0,0 @@ -{ crossenv, libxcb }: - -let - version = "0.4.0"; - - name = "xcb-util-keysyms"; - - src = crossenv.nixpkgs.fetchurl { - url = "https://xcb.freedesktop.org/dist/xcb-util-keysyms-${version}.tar.bz2"; - sha256 = "1nbd45pzc1wm6v5drr5338j4nicbgxa5hcakvsvm5pnyy47lky0f"; - }; - - lib = crossenv.make_derivation rec { - inherit version name src; - - builder = ./util_keysyms_builder.sh; - - configure_flags = - "--host=${crossenv.host} " + - "--enable-static " + - "--disable-shared"; - - cross_inputs = [ libxcb ]; - }; - - license = crossenv.native.make_derivation { - name = "${name}-license"; - inherit src; - builder = ./license_builder.sh; - }; - - license_set = - libxcb.license_set // - { "${name}" = license; }; - -in - lib // { inherit license_set; } diff --git a/nix/nixcrpkgs/pkgs/xcb-util-keysyms/license_builder.sh b/nix/nixcrpkgs/pkgs/xcb-util-keysyms/license_builder.sh deleted file mode 100644 index 66175097f..000000000 --- a/nix/nixcrpkgs/pkgs/xcb-util-keysyms/license_builder.sh +++ /dev/null @@ -1,14 +0,0 @@ -source $setup - -tar -xf $src -mv xcb-util-keysyms-* xcb-util-keysyms - -license=$(head -n31 xcb-util-keysyms/keysyms/keysyms.c) - -cat > $out <xcb-util-keysyms - -
-$license
-</pre>
-EOF diff --git a/nix/nixcrpkgs/pkgs/xcb-util-keysyms/util_keysyms_builder.sh b/nix/nixcrpkgs/pkgs/xcb-util-keysyms/util_keysyms_builder.sh deleted file mode 100644 index eaa898225..000000000 --- a/nix/nixcrpkgs/pkgs/xcb-util-keysyms/util_keysyms_builder.sh +++ /dev/null @@ -1,14 +0,0 @@ -source $setup - -tar -xf $src -mv xcb-* util - -mkdir build -cd build - -PKG_CONFIG=pkg-config-cross \ -../util/configure --prefix=$out $configure_flags - -make - -make install diff --git a/nix/nixcrpkgs/pkgs/xcb-util-renderutil/default.nix b/nix/nixcrpkgs/pkgs/xcb-util-renderutil/default.nix deleted file mode 100644 index f20b271e3..000000000 --- a/nix/nixcrpkgs/pkgs/xcb-util-renderutil/default.nix +++ /dev/null @@ -1,40 +0,0 @@ -{ crossenv, libxcb }: - -let - version = "0.3.9"; - - name = "xcb-util-renderutil"; # TODO: add -${version} (mass rebuild) - - src = crossenv.nixpkgs.fetchurl { - url = "https://xcb.freedesktop.org/dist/xcb-util-renderutil-${version}.tar.bz2"; - sha256 = "0nza1csdvvxbmk8vgv8vpmq7q8h05xrw3cfx9lwxd1hjzd47xsf6"; - }; - - lib = crossenv.make_derivation { - inherit version name src; - - # TODO: rename all xcb-util builders to builder.sh (mass rebuild) - builder = ./util_renderutil_builder.sh; - - configure_flags = - "--host=${crossenv.host} " + - "--enable-static " + - "--disable-shared"; - - cross_inputs = [ libxcb ]; - - xcb = libxcb; - }; - - license = crossenv.native.make_derivation { - name = "${name}-license"; - inherit src; - builder = ./license_builder.sh; - }; - - license_set = - libxcb.license_set // - { "${name}" = license; }; - -in - lib // { inherit license_set; } diff --git a/nix/nixcrpkgs/pkgs/xcb-util-renderutil/license_builder.sh b/nix/nixcrpkgs/pkgs/xcb-util-renderutil/license_builder.sh deleted file mode 100644 index 96f60bcf9..000000000 --- a/nix/nixcrpkgs/pkgs/xcb-util-renderutil/license_builder.sh +++ /dev/null @@ -1,14 +0,0 @@ -source $setup - -tar -xf $src -mv xcb-util-renderutil-* xcb-util-renderutil - -license=$(cat xcb-util-renderutil/COPYING) - -cat > $out <xcb-util-renderutil - -
-$license
-</pre>
-EOF diff --git a/nix/nixcrpkgs/pkgs/xcb-util-renderutil/util_renderutil_builder.sh b/nix/nixcrpkgs/pkgs/xcb-util-renderutil/util_renderutil_builder.sh deleted file mode 100644 index 4540eae4c..000000000 --- a/nix/nixcrpkgs/pkgs/xcb-util-renderutil/util_renderutil_builder.sh +++ /dev/null @@ -1,16 +0,0 @@ -source $setup - -tar -xf $src -mv xcb-* util - -mkdir build -cd build - -PKG_CONFIG=pkg-config-cross \ -../util/configure --prefix=$out $configure_flags - -make - -make install - -ln -s $xcb/lib/pkgconfig/{xcb,xcb-render}.pc $out/lib/pkgconfig/ diff --git a/nix/nixcrpkgs/pkgs/xcb-util-wm/default.nix b/nix/nixcrpkgs/pkgs/xcb-util-wm/default.nix deleted file mode 100644 index 1390a4abd..000000000 --- a/nix/nixcrpkgs/pkgs/xcb-util-wm/default.nix +++ /dev/null @@ -1,39 +0,0 @@ -{ crossenv, libxcb }: - -let - version = "0.4.1"; - - name = "xcb-util-wm-${version}"; - - src = crossenv.nixpkgs.fetchurl { - url = "https://xcb.freedesktop.org/dist/xcb-util-wm-${version}.tar.bz2"; - sha256 = "0gra7hfyxajic4mjd63cpqvd20si53j1q3rbdlkqkahfciwq3gr8"; - }; - - lib = crossenv.make_derivation rec { - inherit version name src; - - builder = ./util_wm_builder.sh; - - configure_flags = - "--host=${crossenv.host} " + - "--enable-static " + - "--disable-shared"; - - cross_inputs = [ libxcb ]; - - native_inputs = [ crossenv.nixpkgs.m4 ]; - }; - - license = crossenv.native.make_derivation { - name = "${name}-license"; - inherit src; - builder = ./license_builder.sh; - }; - - license_set = - libxcb.license_set // - { "${name}" = license; }; - -in - lib // { inherit license_set; } diff --git a/nix/nixcrpkgs/pkgs/xcb-util-wm/license_builder.sh b/nix/nixcrpkgs/pkgs/xcb-util-wm/license_builder.sh deleted file mode 100644 index adbaa5ed5..000000000 --- a/nix/nixcrpkgs/pkgs/xcb-util-wm/license_builder.sh +++ /dev/null @@ -1,14 +0,0 @@ -source $setup - -tar -xf $src -mv xcb-util-wm-* xcb-util-wm - -license=$(cat xcb-util-wm/COPYING) - -cat > $out <xcb-util-wm - -
-$license
-</pre>
-EOF diff --git a/nix/nixcrpkgs/pkgs/xcb-util-wm/util_wm_builder.sh b/nix/nixcrpkgs/pkgs/xcb-util-wm/util_wm_builder.sh deleted file mode 100644 index eaa898225..000000000 --- a/nix/nixcrpkgs/pkgs/xcb-util-wm/util_wm_builder.sh +++ /dev/null @@ -1,14 +0,0 @@ -source $setup - -tar -xf $src -mv xcb-* util - -mkdir build -cd build - -PKG_CONFIG=pkg-config-cross \ -../util/configure --prefix=$out $configure_flags - -make - -make install diff --git a/nix/nixcrpkgs/pkgs/xcb-util/default.nix b/nix/nixcrpkgs/pkgs/xcb-util/default.nix deleted file mode 100644 index 48cadb58d..000000000 --- a/nix/nixcrpkgs/pkgs/xcb-util/default.nix +++ /dev/null @@ -1,37 +0,0 @@ -{ crossenv, libxcb }: - -let - version = "0.4.0"; - - name = "xcb-util-${version}"; - - src = crossenv.nixpkgs.fetchurl { - url = "https://xcb.freedesktop.org/dist/xcb-util-${version}.tar.bz2"; - sha256 = "1sahmrgbpyki4bb72hxym0zvxwnycmswsxiisgqlln9vrdlr9r26"; - }; - - lib = crossenv.make_derivation rec { - inherit version name src; - - builder = ./util_builder.sh; - - configure_flags = - "--host=${crossenv.host} " + - "--enable-static " + - "--disable-shared"; - - cross_inputs = [ libxcb ]; - }; - - license = crossenv.native.make_derivation { - name = "${name}-license"; - inherit src; - builder = ./license_builder.sh; - }; - - license_set = - libxcb.license_set // - { "${name}" = license; }; - -in - lib // { inherit license_set; } diff --git a/nix/nixcrpkgs/pkgs/xcb-util/license_builder.sh b/nix/nixcrpkgs/pkgs/xcb-util/license_builder.sh deleted file mode 100644 index 7d65ade3b..000000000 --- a/nix/nixcrpkgs/pkgs/xcb-util/license_builder.sh +++ /dev/null @@ -1,14 +0,0 @@ -source $setup - -tar -xf $src -mv xcb-util-* xcb-util - -license=$(cat xcb-util/COPYING) - -cat > $out <xcb-util - -
-$license
-</pre>
-EOF diff --git a/nix/nixcrpkgs/pkgs/xcb-util/util_builder.sh b/nix/nixcrpkgs/pkgs/xcb-util/util_builder.sh deleted file mode 100644 index 6264407a6..000000000 --- a/nix/nixcrpkgs/pkgs/xcb-util/util_builder.sh +++ /dev/null @@ -1,14 +0,0 @@ -source $setup - -tar -xf $src -mv xcb-util-* util - -mkdir build -cd build - -PKG_CONFIG=pkg-config-cross \ -../util/configure --prefix=$out $configure_flags - -make - -make install diff --git a/nix/nixcrpkgs/pkgs/xextproto/builder.sh b/nix/nixcrpkgs/pkgs/xextproto/builder.sh deleted file mode 100644 index 6bf1c02c5..000000000 --- a/nix/nixcrpkgs/pkgs/xextproto/builder.sh +++ /dev/null @@ -1,13 +0,0 @@ -source $setup - -tar -xf $src -mv xextproto-* xextproto - -mkdir build -cd build - -../xextproto/configure --prefix=$out - -make - -make install diff --git a/nix/nixcrpkgs/pkgs/xextproto/default.nix b/nix/nixcrpkgs/pkgs/xextproto/default.nix deleted file mode 100644 index 82b0a626e..000000000 --- a/nix/nixcrpkgs/pkgs/xextproto/default.nix +++ /dev/null @@ -1,27 +0,0 @@ -{ crossenv }: - -let - version = "7.3.0"; - - name = "xextproto-${version}"; - - src = crossenv.nixpkgs.fetchurl { - url = "https://xorg.freedesktop.org/releases/individual/proto/xextproto-${version}.tar.bz2"; - sha256 = "1c2vma9gqgc2v06rfxdiqgwhxmzk2cbmknwf1ng3m76vr0xb5x7k"; - }; - - lib = crossenv.native.make_derivation rec { - inherit version name src; - builder = ./builder.sh; - }; - - license = crossenv.native.make_derivation { - name = "${name}-license"; - inherit src; - builder = ./license_builder.sh; - }; - - license_set = { "${name}" = license; }; - -in - lib // { inherit license_set; } diff --git a/nix/nixcrpkgs/pkgs/xextproto/license_builder.sh b/nix/nixcrpkgs/pkgs/xextproto/license_builder.sh deleted file mode 100644 index 9fbf3543d..000000000 --- a/nix/nixcrpkgs/pkgs/xextproto/license_builder.sh +++ /dev/null @@ -1,14 +0,0 @@ -source $setup - -tar -xf $src -mv xextproto-* xextproto - -license=$(cat xextproto/COPYING) - -cat > $out <xextproto - -
-$license
-</pre>
-EOF diff --git a/nix/nixcrpkgs/pkgs/xorg-macros/builder.sh b/nix/nixcrpkgs/pkgs/xorg-macros/builder.sh deleted file mode 100644 index f940d965f..000000000 --- a/nix/nixcrpkgs/pkgs/xorg-macros/builder.sh +++ /dev/null @@ -1,18 +0,0 @@ -source $setup - -tar -xf $src -ls -mv util-macros-* macros - -mkdir build -cd build - -../macros/configure --prefix=$out - -make - -make install - -# The .pc files gets installed to /share/pkgconfig, but we want to see it in -# /lib/pkgconfig. -ln -s share $out/lib diff --git a/nix/nixcrpkgs/pkgs/xorg-macros/default.nix b/nix/nixcrpkgs/pkgs/xorg-macros/default.nix deleted file mode 100644 index 2f5c8508f..000000000 --- a/nix/nixcrpkgs/pkgs/xorg-macros/default.nix +++ /dev/null @@ -1,27 +0,0 @@ -{ crossenv }: - -let - version = "1.19.1"; - - name = "xorg-macros-${version}"; - - src = crossenv.nixpkgs.fetchurl { - url = "https://www.x.org/releases/individual/util/util-macros-1.19.1.tar.gz"; - sha256 = "1f27cmbxq0kdyvqsplxpsi9pxm5qy45lcagxr9gby2hy3pjd0aj7"; - }; - - lib = crossenv.native.make_derivation { - inherit version name src; - builder = ./builder.sh; - }; - - license = crossenv.native.make_derivation { - name = "${name}-license"; - inherit src; - builder = ./license_builder.sh; - }; - - license_set = { "${name}" = license; }; - -in - lib // { inherit license_set; } diff --git a/nix/nixcrpkgs/pkgs/xorg-macros/license_builder.sh b/nix/nixcrpkgs/pkgs/xorg-macros/license_builder.sh deleted file mode 100644 index fb723a09d..000000000 --- a/nix/nixcrpkgs/pkgs/xorg-macros/license_builder.sh +++ /dev/null @@ -1,14 +0,0 @@ -source $setup - -tar -xf $src -mv util-macros-* xorg-macros - -license=$(cat xorg-macros/COPYING) - -cat > $out <xorg-macros - -
-$license
-</pre>
-EOF diff --git a/nix/nixcrpkgs/pkgs/xproto/builder.sh b/nix/nixcrpkgs/pkgs/xproto/builder.sh deleted file mode 100644 index 7f6b13edb..000000000 --- a/nix/nixcrpkgs/pkgs/xproto/builder.sh +++ /dev/null @@ -1,16 +0,0 @@ -source $setup - -tar -xf $src -mv xproto-* xproto - -cp $gnu_config/{config.guess,config.sub} xproto - -mkdir build -cd build - -PKG_CONFIG=pkg-config-cross \ -../xproto/configure --prefix=$out $configure_flags - -make - -make install diff --git a/nix/nixcrpkgs/pkgs/xproto/default.nix b/nix/nixcrpkgs/pkgs/xproto/default.nix deleted file mode 100644 index 6b16b0b4e..000000000 --- a/nix/nixcrpkgs/pkgs/xproto/default.nix +++ /dev/null @@ -1,38 +0,0 @@ -{ crossenv, xorg-macros }: - -let - version = "7.0.31"; - - name = "xproto-${version}"; - - src = crossenv.nixpkgs.fetchurl { - url = "https://www.x.org/releases/individual/proto/xproto-${version}.tar.gz"; - sha256 = "1is3xl0zjk4l0d8d0zinkfbfapgdby2i56jjfp6caibvwam5wxbd"; - }; - - lib = crossenv.make_derivation { - inherit version name src; - - builder = ./builder.sh; - - configure_flags = - "--host=${crossenv.host} " + - "--enable-static " + - "--disable-shared"; - - cross_inputs = [ xorg-macros ]; - - # Need the latest version of config.sub so we can support musl. - gnu_config = crossenv.native.gnu_config; - }; - - license = crossenv.native.make_derivation { - name = "${name}-license"; - inherit src; - builder = ./license_builder.sh; - }; - - license_set = xorg-macros.license_set // { "${name}" = license; }; - -in - lib // { inherit license_set; } diff --git a/nix/nixcrpkgs/pkgs/xproto/license_builder.sh b/nix/nixcrpkgs/pkgs/xproto/license_builder.sh deleted file mode 100644 index 4ed0509f1..000000000 --- a/nix/nixcrpkgs/pkgs/xproto/license_builder.sh +++ /dev/null @@ -1,14 +0,0 @@ -source $setup - -tar -xf $src -mv xproto-* xproto - -license=$(cat xproto/COPYING) - -cat > $out <xproto - -
-$license
-</pre>
-EOF diff --git a/nix/nixcrpkgs/pkgs/xtrans/builder.sh b/nix/nixcrpkgs/pkgs/xtrans/builder.sh deleted file mode 100644 index 354670cb0..000000000 --- a/nix/nixcrpkgs/pkgs/xtrans/builder.sh +++ /dev/null @@ -1,16 +0,0 @@ -source $setup - -tar -xf $src -mv xtrans-* xtrans - -mkdir build -cd build - -../xtrans/configure --prefix $out - -make - -make install - -# So we can find the pkgconfig files in lib/pkgconfig -ln -s share $out/lib diff --git a/nix/nixcrpkgs/pkgs/xtrans/default.nix b/nix/nixcrpkgs/pkgs/xtrans/default.nix deleted file mode 100644 index 44daf818a..000000000 --- a/nix/nixcrpkgs/pkgs/xtrans/default.nix +++ /dev/null @@ -1,27 +0,0 @@ -{ crossenv }: - -let - version = "1.3.5"; - - name = "xtrans-${version}"; - - src = crossenv.nixpkgs.fetchurl { - url = "https://xorg.freedesktop.org/releases/individual/lib/xtrans-${version}.tar.bz2"; - sha256 = "00c3ph17acnsch3gbdmx33b9ifjnl5w7vx8hrmic1r1cjcv3pgdd"; - }; - - lib = crossenv.native.make_derivation rec { - inherit version name src; - builder = ./builder.sh; - }; - - license = crossenv.native.make_derivation { - name = "${name}-license"; - inherit src; - builder = ./license_builder.sh; - }; - - license_set = { "${name}" = license; }; - -in - lib // { inherit license_set; } diff --git a/nix/nixcrpkgs/pkgs/xtrans/license_builder.sh b/nix/nixcrpkgs/pkgs/xtrans/license_builder.sh deleted file mode 100644 index 0b43e8b78..000000000 --- a/nix/nixcrpkgs/pkgs/xtrans/license_builder.sh +++ /dev/null @@ -1,14 +0,0 @@ -source $setup - -tar -xf $src -mv xtrans-* xtrans - -license=$(cat xtrans/COPYING) - -cat > $out <xtrans - -
-$license
-</pre>
-EOF diff --git a/nix/nixcrpkgs/pkgs/zlib/builder.sh b/nix/nixcrpkgs/pkgs/zlib/builder.sh deleted file mode 100644 index b7906b5a2..000000000 --- a/nix/nixcrpkgs/pkgs/zlib/builder.sh +++ /dev/null @@ -1,15 +0,0 @@ -source $setup - -tar -xf $src - -mkdir build -cd build - -sed -i 's$Darwin. | darwin.$Ignore* | ignore*$' ../zlib-$version/configure - -CHOST=$host \ -../zlib-$version/configure --prefix=$out --static - -make - -make install diff --git a/nix/nixcrpkgs/pkgs/zlib/default.nix b/nix/nixcrpkgs/pkgs/zlib/default.nix deleted file mode 100644 index 11242407a..000000000 --- a/nix/nixcrpkgs/pkgs/zlib/default.nix +++ /dev/null @@ -1,14 +0,0 @@ -{ crossenv }: - -crossenv.make_derivation rec { - name = "zlib-${version}"; - - version = "1.2.11"; - - src = crossenv.nixpkgs.fetchurl { - url = "https://zlib.net/zlib-${version}.tar.gz"; - sha256 = "18dighcs333gsvajvvgqp8l4cx7h1x7yx9gd5xacnk80spyykrf3"; - }; - - builder = ./builder.sh; -} diff --git a/nix/nixcrpkgs/pretend_stdenv/setup b/nix/nixcrpkgs/pretend_stdenv/setup deleted file mode 100644 index a0f3af700..000000000 --- a/nix/nixcrpkgs/pretend_stdenv/setup +++ /dev/null @@ -1 +0,0 @@ -export PATH=$_PATH diff --git a/nix/nixcrpkgs/support/derivations.txt b/nix/nixcrpkgs/support/derivations.txt deleted file mode 100644 index d5d9f09af..000000000 --- a/nix/nixcrpkgs/support/derivations.txt +++ /dev/null @@ -1,58 +0,0 @@ -define all = win32,win64,linux32,linux64,linux-rpi,mac -define windows = win32,win64 -define linux = linux32,linux64,linux-rpi - -# Cross-compiler toolchains -{$windows,$linux}.gcc slow=1 -mac.toolchain slow=1 - -# Packages -{$windows}.angle{,.examples} slow=1 -{$windows}.angle.examples -omni.at-spi2-headers -{$all}.avrdude -omni.dejavu-fonts -{$windows}.devcon -{$all}.expat -omni.fixesproto -{$windows}.gdb -{$all}.hello -{$all}.hello_cpp -omni.inputproto -{$all}.ion -omni.kbproto -{$linux}.libudev -{$all}.libusb -{$all}.libusbp{,.examples} -{$linux}.libx11 -{$linux}.libxall -{$all}.libxau -{$all}.libxcb{,.examples} -{$linux}.libxext -{$linux}.libxfixes -{$linux}.libxi -{$all}.openocd -{$all}.pavr2 -{$windows}.pdcurses{,.examples} -{$all}.p-load -{$all}.qt slow=1 -{$all}.qt.examples -{$windows}.readline -{$all}.tic -{$windows}.usbview -omni.xcb-proto -{$all}.xcb-util -{$all}.xcb-util-image -{$all}.xcb-util-keysyms -{$all}.xcb-util-renderutil -{$all}.xcb-util-wm -omni.xextproto -omni.xorg-macros -{$all}.xproto -omni.xtrans -{$all}.zlib - -# Derivations we care about at Pololu -{win32,linux32,linux-rpi,mac}.{p-load,pavr2,tic} priority=1 - -# TODO: Test building the license_sets somehow too. diff --git a/nix/nixcrpkgs/support/expand_brackets.rb b/nix/nixcrpkgs/support/expand_brackets.rb deleted file mode 100644 index fccee4510..000000000 --- a/nix/nixcrpkgs/support/expand_brackets.rb +++ /dev/null @@ -1,43 +0,0 @@ -def expand_brackets_core(str, depth) - finished_parts = [] - active_parts = [+''] - while true - if str.empty? - raise AnticipatedError, "Unmatched opening brace" if depth > 0 - break - elsif str.start_with?('}') - str.slice!(0) - raise AnticipatedError, "Unmatched closing brace" if depth == 0 - break - elsif str.start_with?('{') - # Recurse, which removes everything up to and - # including the matching closing brace. - str.slice!(0) - options = expand_brackets_core(str, depth + 1) - raise if options.empty? 
- active_parts = active_parts.flat_map { |p1| - options.map { |p2| p1 + p2 } - } - elsif str.start_with?(',') - raise AnticipatedError, "Comma at top level" if depth == 0 - # Remove the comma, mark the parts we are working - # on as finished, and start a new part. - str.slice!(0) - finished_parts += active_parts - active_parts = [''] - else - part_length = str.index(/[{},]|$/) - raise if part_length < 1 - part = str.slice!(0, part_length) - active_parts.each do |s| - s.insert(-1, part) - end - end - end - finished_parts + active_parts -end - -# Expands something like "{a,b}{,.x}" to ["a", "a.x", "b", "b.x"] -def expand_brackets(str) - expand_brackets_core(str.dup, 0) -end diff --git a/nix/nixcrpkgs/support/graph.rb b/nix/nixcrpkgs/support/graph.rb deleted file mode 100644 index b1db81137..000000000 --- a/nix/nixcrpkgs/support/graph.rb +++ /dev/null @@ -1,74 +0,0 @@ -def print_graph(graph) - graph.each do |parent, children| - puts "#{parent} ->" - children.each do |child| - puts " #{child}" - end - end -end - -def check_graph!(graph) - graph.each do |parent, children| - children.each do |child| - if !graph.key?(child) - raise "Graph is missing an entry for #{child}" - end - end - end -end - -def depth_first_search_exclude_start(graph, start) - stack = [graph.fetch(start).to_a.reverse] - visited = Set.new - until stack.empty? - node = stack.last.pop - if node.nil? - stack.pop - next - end - next if visited.include?(node) - visited << node - stack << graph.fetch(node).to_a.reverse - yield node - end -end - -def transitive_closure(graph) - tc = {} - graph.each_key do |node| - tc[node] = enum_for(:depth_first_search_exclude_start, graph, node).to_a - end - tc -end - -def restricted_transitive_closure(graph, allowed) - rtc = {} - graph.each_key do |node| - next if !allowed.include?(node) - reached_nodes = [] - depth_first_search_exclude_start(graph, node) do |reached_node| - next if !allowed.include?(reached_node) - reached_nodes << reached_node - end - rtc[node] = reached_nodes - end - rtc -end - -def transitive_reduction(graph) - tr = {} - graph.each do |start_node, nodes| - nodes_with_max_distance_1 = Set.new(nodes) - distance = 1 - until nodes.empty? - nodes = Set.new nodes.flat_map &graph.method(:fetch) - nodes_with_max_distance_1 -= nodes - distance += 1 - if distance > graph.size - raise "Cycle detected: this algorithm only works with DAGs." - end - end - tr[start_node] = nodes_with_max_distance_1.to_a - end - tr -end diff --git a/nix/nixcrpkgs/support/manage b/nix/nixcrpkgs/support/manage deleted file mode 100755 index a21f3ec9f..000000000 --- a/nix/nixcrpkgs/support/manage +++ /dev/null @@ -1,541 +0,0 @@ -#!/usr/bin/env ruby - -# This part of hte code is under construction. It will eventually be a script that -# helps us check that the derivations we care about are all building, -# and prints the status of those builds. - - -# This requires Ruby 2.5.0 or later because it uses a new syntax for rescuing -# exceptions in a block with needing to make an extra begin/end pair. - -require 'open3' -require 'pathname' -require 'set' -require 'sqlite3' # gem install sqlite3 -require_relative 'graph' -require_relative 'expand_brackets' - -ResultsDir = Pathname('support/results') - -class AnticipatedError < RuntimeError -end - -# Don't automatically change directory because maybe people want to test one -# nixcrpkgs repository using the test script from another one. But do give an -# early, friendly warning if they are running in the wrong directory. -def check_directory! 
- return if File.directory?('pretend_stdenv') - $stderr.puts "You should run this script from the nixcrpkgs directory." - dir = Pathname(__FILE__).parent.parent - $stderr.puts "Try running these commands:\n cd #{dir}\n test/test.rb" - exit 1 -end - -def substitute_definitions(defs, str) - str.gsub(/\$([\w-]+)/) do |x| - defs.fetch($1) - end -end - -def parse_derivation_list(filename) - defs = {} - all_paths = Set.new - all_attrs = {} - File.foreach(filename).with_index do |line, line_index| - line.strip! - - # Handle empty lines and comments. - next if line.empty? || line.start_with?('#') - - # Handle variable definitions (e.g. "define windows = win32,win64"). - if line.start_with?('define') - md = line.match(/^define\s+([\w-]+)\s*=\s*(.*)$/) - if !md - raise AnticipatedError, "Invalid definition syntax." - end - name, value = md[1], md[2] - defs[name] = value - next - end - - # Expand variable definitions (e.g. $windows expands to "win32,win64"). - line = substitute_definitions(defs, line) - - # Figure out which parts of the line are attribute paths with brackets and - # which are attributes. - items = line.split(' ') - attr_defs, path_items = items.partition { |p| p.include?('=') } - - # Expand any brackets in the attribute paths to get the complete list of - # paths specified on this line. - paths = path_items.flat_map { |p| expand_brackets(p) }.map(&:to_sym) - - # Process attribute definitions on the line, like "priority=1". - attrs = {} - attr_defs.each do |attr_def| - md = attr_def.match(/^(\w+)=(\d+)$/) - if !md - raise AnticipatedError, "Invalid attribute definition: #{attr_def.inspect}." - end - name, value = md[1], md[2] - case name - when 'priority', 'slow' - attrs[name.to_sym] = value.to_i - else - raise AnticipatedError, "Unrecognized attribute: #{name.inspect}." - end - end - - # Record the paths for this line and the attributes for those paths, - # overriding previous attributes values if necessary. - all_paths += paths - if !attrs.empty? - paths.each do |path| - (all_attrs[path] ||= {}).merge!(attrs) - end - end - rescue AnticipatedError => e - raise AnticipatedError, "#{filename}:#{line_index + 1}: error: #{e}" - end - - if all_paths.empty? - raise AnticipatedError, "#{filename} specifies no paths" - end - - all_paths.each do |path| - if !path.match?(/^[\w.-]+$/) - raise "Invalid characters in path name: #{path}" - end - end - - { defs: defs, paths: all_paths.to_a, attrs: all_attrs } -end - -# Make a hash holding the priority of each Nix attribute path we want to build. -# This routine determines the default priority. -def make_path_priority_map(settings) - attrs = settings.fetch(:attrs) - m = {} - settings.fetch(:paths).each do |path| - m[path] = attrs.fetch(path, {}).fetch(:priority, 0) - end - m -end - -# Make a hash holding the relative build time of each Nix attribute path we want -# to build. This routine detrmines the default time, and what "slow" means. -def make_path_time_map(settings) - attrs = settings.fetch(:attrs) - m = {} - settings.fetch(:paths).each do |path| - m[path] = attrs.fetch(path, {})[:slow] ? 100 : 1 - end - m -end - -def instantiate_drvs(paths) - cmd = 'nix-instantiate ' + paths.map { |p| "-A #{p}" }.join(' ') - stdout_str, stderr_str, status = Open3.capture3(cmd) - if !status.success? - $stderr.puts stderr_str - raise AnticipatedError, "Failed to instantiate derivations." 
- end - paths.zip(stdout_str.split.map(&:to_sym)).to_h -end - -# We want there to be a one-to-one mapping between paths in the derivations.txt -# list and derivations, so we can make a graph of dependencies of the -# derivations and each derivation in the graph will have a unique path in the -# derivations.txt list. -def check_paths_are_unique!(path_drv_map) - set = Set.new - path_drv_map.each do |key, drv| - if set.include?(drv) - raise AnticipatedError, "The derivation #{key} is the same as " \ - "other derivations in the list. Maybe use the 'omni' namespace." - end - set << drv - end -end - -# Makes a new map that has the same keys as map1, and the values -# have all been mapped by map2. -# -# Requires map2 to have a key for every value in map1. -def map_compose(map1, map2) - map1.transform_values &map2.method(:fetch) -end - -# Like map_compose, but excludes keys from map1 where the corresponding map1 -# value is not a key of map2. -def map_join(map1, map2) - r = {} - map1.each do |key, value| - if map2.key?(value) - r[key] = map2.fetch(value) - end - end - r -end - -def nix_db - return $db if $db - $db = SQLite3::Database.new '/nix/var/nix/db/db.sqlite', readonly: true -end - -# Given an array of derivations (paths to .drv files in /nix), this function -# queries the Nix database and returns hash table mapping derivations to -# a boolean that is true if they have already been built. -def get_build_status(drvs) - drv_list_str = drvs.map { |d| "\"#{d}\"" }.join(", ") - query = < 0 - more_attrs << " penwidth=3" - end - - # Draw slow nodes as a double octagon. - if path_time_map.fetch(path) > 10 - more_attrs << " shape=doubleoctagon" - end - f.puts "\"#{path}\" [label=\"#{component}\"#{more_attrs}]" - end - f.puts "}" - end - - # Output dependencies between nodes. - visible_paths.each do |path| - path_graph.fetch(path).each do |dep| - next if decompose.(dep).first == 'omni' - f.puts "\"#{path}\" -> \"#{dep}\"" - end - end - f.puts "}" - end -end - -def make_build_plan(path_state) - path_graph = path_state.fetch(:graph) - path_priority_map = path_state.fetch(:priority_map) - path_time_map = path_state.fetch(:time_map) - path_built_map = path_state.fetch(:built_map) - - # It's handy to be able to get all the dependencies of a node in one step, and - # we will use that frequently to calculate how expensive it is to build a - # node and to make the toplogical sort. - path_graph = transitive_closure(path_graph).freeze - - # The paths we need to build. In the future we could filter this by priority. - required_paths = Set.new(path_graph.keys).freeze - - # built_paths: The set of paths that are already built. We will mutate this - # as we simulate our build plan. - built_paths = Set.new - path_built_map.each do |path, built| - built_paths << path if built - end - - # List of paths to build. Each path should only be built once all the paths it - # depends on are built. I know nix-build can take care of that for us, but it's - # nice to see the precise order of what is going to be built so we can tell when - # slow things will get built. - build_plan = [] - - # Computes the time to build a path, taking into account what has already been - # built. - calculate_time = lambda do |path| - deps = path_graph.fetch(path) + [path] - deps.reject! &built_paths.method(:include?) - deps.map(&path_time_map.method(:fetch)).sum - end - - # Adds plans to build this path and all of its unbuilt depedencies. 
- add_to_build_plan = lambda do |path| - deps = path_graph.fetch(path) + [path] - - # Remove dependencies that are already built. - deps.reject! &built_paths.method(:include?) - - # Topological sort - deps.sort! do |p1, p2| - case - when path_graph.fetch(p1).include?(p2) then 1 - when path_graph.fetch(p2).include?(p1) then -1 - else 0 - end - end - - deps.each do |path| - build_plan << path - built_paths << path - end - end - - while true - unbuilt_required_paths = required_paths - built_paths - break if unbuilt_required_paths.empty? - - # Find the maximum priority of the unbuilt required paths. - max_priority = nil - unbuilt_required_paths.each do |path| - priority = path_priority_map.fetch(path) - if !max_priority || priority > max_priority - max_priority = priority - end - end - - top_priority_paths = unbuilt_required_paths.select do |path| - path_priority_map.fetch(path) == max_priority - end - - target = top_priority_paths.min_by(&calculate_time) - - add_to_build_plan.(target) - end - - build_plan -end - -# Updates the 'support/results' directory, which holds -# symbolic links to all the derivations defined by nixcrpkgs and -# listed in support/derivations.txt which have already been built. -# -# Intended use: -# ln -s $PWD/support/results /nix/var/nix/gcroots/nixcrpkgs-results -# support/manage results -# nix-collect-garbage -def update_results_dir(path_valid_results_map) - ResultsDir.mkdir if !ResultsDir.directory? - ResultsDir.children.each do |p| - p.unlink - end - modern_links = Set.new - path_valid_results_map.each do |path, results_map| - results_map.each do |id, result| - suffix = id == :out ? '' : ".#{id}" - link_name = "#{path}#{suffix}" - (ResultsDir + link_name).make_symlink(result) - modern_links << link_name - end - end -end - -def build_paths(path_graph, path_built_map, build_plan, keep_going: true) - path_built_map = path_built_map.dup - path_graph = transitive_closure(path_graph) - build_plan.each do |path| - if !path_graph.fetch(path).all?(&path_built_map.method(:fetch)) - # One of the dependencies of this path has not been built, presumably - # because there was an error. - puts "# skipping #{path}" - next - end - - print "nix-build -A #{path}" - system("nix-build -A #{path} > /dev/null 2> /dev/null") - - if $?.success? - path_built_map[path] = true - puts - else - puts " # failed" - return false if !keep_going - end - end - true -end - -def parse_args(argv) - action = case argv.first - when 'graph' then :graph - when 'results' then :results - when 'build' then :build - when 'plan' then :plan - when 'stats', nil then :stats - else raise AnticipatedError, "Invalid action: #{argv.first.inspect}" - end - - { action: action } -end - -begin - check_directory! 
- args = parse_args(ARGV) - action = args.fetch(:action) - - settings = parse_derivation_list('support/derivations.txt') - - path_drv_map = instantiate_drvs(settings.fetch(:paths)) - check_paths_are_unique!(path_drv_map) - - drvs = path_drv_map.values.uniq - drv_built_map = get_build_status(drvs) - - if [:graph, :build, :plan].include?(action) - global_drv_graph = get_drv_graph - drv_graph = graph_restrict_nodes(global_drv_graph, drvs) - path_state = { - graph: graph_unmap(drv_graph, path_drv_map).freeze, - priority_map: make_path_priority_map(settings).freeze, - time_map: make_path_time_map(settings).freeze, - built_map: map_compose(path_drv_map, drv_built_map).freeze, - }.freeze - end - - if action == :graph - output_graphviz(path_state) - end - - if [:build, :plan].include?(action) - build_plan = make_build_plan(path_state) - end - - if action == :plan - puts "Build plan:" - build_plan.each do |path| - puts "nix-build -A #{path}" - end - end - - if action == :build - success = build_paths(path_state[:graph], path_state[:built_map], build_plan) - exit(1) if !success - end - - if action == :results || action == :build - drv_valid_results_map = get_valid_results(drvs) - path_valid_results_map = map_join(path_drv_map, drv_valid_results_map).freeze - update_results_dir(path_valid_results_map) - end - - if action == :stats - print_stats(drv_built_map) - end -rescue AnticipatedError => e - $stderr.puts e -end diff --git a/nix/nixcrpkgs/top.nix b/nix/nixcrpkgs/top.nix deleted file mode 100644 index 5e1aa63a6..000000000 --- a/nix/nixcrpkgs/top.nix +++ /dev/null @@ -1,72 +0,0 @@ -{ osx_sdk, nixpkgs }: - -rec { - inherit nixpkgs; - - # Some native build tools. - native = import ./native { inherit nixpkgs; }; - - # Cross-compiling environments for each target system. - crossenvs = { - i686-w64-mingw32 = import ./mingw-w64 { inherit native; arch = "i686"; }; - x86_64-w64-mingw32 = import ./mingw-w64 { inherit native; arch = "x86_64"; }; - i686-linux-musl = import ./linux { inherit native; arch = "i686"; }; - x86_64-linux-musl = import ./linux { inherit native; arch = "x86_64"; }; - armv6-linux-musl = import ./linux { - inherit native; - arch = "armv6"; - gcc_options = "--with-fpu=vfp --with-float=hard "; - }; - macos = import ./macos { inherit osx_sdk native; }; - }; - - pkgFun = crossenv: import ./pkgs.nix { inherit crossenv; } // crossenv; - - # Sets of packages for each target system. - i686-w64-mingw32 = pkgFun crossenvs.i686-w64-mingw32; - x86_64-w64-mingw32 = pkgFun crossenvs.x86_64-w64-mingw32; - i686-linux-musl = pkgFun crossenvs.i686-linux-musl; - x86_64-linux-musl = pkgFun crossenvs.x86_64-linux-musl; - armv6-linux-musl = pkgFun crossenvs.armv6-linux-musl; - macos = pkgFun crossenvs.macos; - - # omni is convenient name for packages that are used for cross-compiling but - # are actually the same on all platforms. You can just refer to it by - # 'omni.package_name' instead of 'some_platform.package_name'. - omni = pkgFun { inherit native nixpkgs; }; - - # Handy aliases. - win32 = i686-w64-mingw32; - win64 = x86_64-w64-mingw32; - linux32 = i686-linux-musl; - linux-x86 = i686-linux-musl; - linux-i686 = i686-linux-musl; - linux64 = x86_64-linux-musl; - linux-x86_64 = x86_64-linux-musl; - linux-rpi = armv6-linux-musl; - rpi = armv6-linux-musl; - mac = macos; - - # filter is a function that can be applied to a local directory to filter out - # files that are likely to change frequently without affecting the build, - # causing unneeded rebuilds. 
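A minimal sketch of how a source filter like the one described in the comment above is applied; the call sites live outside this hunk, and the source path below is illustrative only:

    let
      lib = (import <nixpkgs> { }).lib;
      # Drop VCS metadata, result* symlinks, and Nix files so that edits to
      # build scaffolding never change the source hash and force a rebuild.
      keep = name: type:
        let bn = baseNameOf (toString name);
        in !(bn == ".git" || lib.hasPrefix "result" bn || lib.hasSuffix ".nix" bn);
    in builtins.filterSource keep ./pkg/ent   # illustrative source directory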
- filter_func = name: type: let bn = baseNameOf (toString name); in !( - (type == "directory" && bn == ".git") || - (type == "symlink" && nixpkgs.lib.hasPrefix "result" bn) || - (type == "directory" && bn == "nix") || - (type == "directory" && bn == "build") || - nixpkgs.lib.hasSuffix ".nix" bn || - nixpkgs.lib.hasSuffix "~" bn - ); - filter = builtins.filterSource filter_func; - - # bundle is a function that takes a set of derivations and makes a - # derivation for a bundle that has symbolic links in it to each of - # the input derivations. - bundle = drvs: native.make_derivation rec { - name = "bundle"; - builder = ./bundle_builder.sh; - names = builtins.attrNames drvs; - dirs = builtins.attrValues drvs; - }; -} diff --git a/nix/nixpkgs.nix b/nix/nixpkgs.nix deleted file mode 100644 index fd2d2cd4a..000000000 --- a/nix/nixpkgs.nix +++ /dev/null @@ -1,13 +0,0 @@ -let - - rev = "61c3169a0e17d789c566d5b241bfe309ce4a6275"; - hash = "0qbycg7wkb71v20rchlkafrjfpbk2fnlvvbh3ai9pyfisci5wxvq"; - pkgs = builtins.fetchTarball { - name = "nixpkgs-2019-01-15"; - url = "https://github.com/nixos/nixpkgs/archive/${rev}.tar.gz"; - sha256 = hash; - }; - -in - -import pkgs {} diff --git a/nix/ops/boot-ship.nix b/nix/ops/boot-ship.nix deleted file mode 100644 index b54a7a90a..000000000 --- a/nix/ops/boot-ship.nix +++ /dev/null @@ -1,19 +0,0 @@ -{ pkgs ? import ../nixpkgs.nix -, debug ? false -, ship ? "zod" -, pill ? ../../bin/solid.pill -}: - -let - - deps = import ../deps { inherit pkgs; }; - tlon = import ../pkgs { inherit pkgs; }; - arvo = tlon.arvo; - urbit = tlon.urbit; - herb = tlon.herb; - -in - -import ./fakeship { - inherit pkgs arvo pill ship urbit herb; -} diff --git a/nix/ops/brass/builder.sh b/nix/ops/brass/builder.sh deleted file mode 100755 index 75f76e872..000000000 --- a/nix/ops/brass/builder.sh +++ /dev/null @@ -1,22 +0,0 @@ -source $stdenv/setup - -set -ex - -cp -r $PIER ./pier -chmod -R u+rw ./pier - -$URBIT -d ./pier - -cleanup () { - if [ -e ./pier/.vere.lock ] - then kill $(< ./pier/.vere.lock) || true; - fi -} - -trap cleanup EXIT - -herb ./pier -P brass.pill -d '+brass' - -mv brass.pill $out - -set +x diff --git a/nix/ops/brass/default.nix b/nix/ops/brass/default.nix deleted file mode 100644 index ea83c4ff7..000000000 --- a/nix/ops/brass/default.nix +++ /dev/null @@ -1,11 +0,0 @@ -{ pkgs, herb, urbit, pier, arvo }: - -pkgs.stdenv.mkDerivation rec { - name = "brass"; - builder = ./builder.sh; - buildInputs = [ herb pkgs.coreutils ]; - - URBIT = urbit.meta.exe; - PIER = pier; - ARVO = arvo; -} diff --git a/nix/ops/default.nix b/nix/ops/default.nix deleted file mode 100644 index d048d783e..000000000 --- a/nix/ops/default.nix +++ /dev/null @@ -1,86 +0,0 @@ -{ pkgs ? import ../nixpkgs.nix, debug ? 
false }: - -let - - tlon = import ../pkgs { inherit pkgs; }; - arvo = tlon.arvo; - arvo-ropsten = tlon.arvo-ropsten; - herb = tlon.herb; - urbit = if debug then tlon.urbit-debug else tlon.urbit; - - bootbrass = ../../bin/brass.pill; - bootsolid = ../../bin/solid.pill; - - rawzod = import ./fakeship { - inherit pkgs herb urbit; - pill = bootsolid; - ship = "zod"; - arvo = null; - }; - - ropzod = import ./fakeship { - inherit pkgs herb urbit; - pill = bootsolid; - ship = "zod"; - arvo = arvo-ropsten; - }; - - zod = import ./fakeship { - inherit pkgs herb urbit arvo; - pill = bootsolid; - ship = "zod"; - }; - -in - -rec { - - bus = import ./fakeship { - inherit pkgs herb urbit arvo; - pill = bootsolid; - ship = "bus"; - }; - - test = import ./test { - inherit pkgs herb urbit; - ship = bus; - }; - - solid = import ./solid { - inherit pkgs herb urbit arvo; - pier = rawzod; - }; - - brass = import ./brass { - inherit pkgs herb urbit arvo; - pier = zod; - }; - - brass-ropsten = import ./brass { - inherit pkgs herb urbit; - arvo = arvo-ropsten; - pier = ropzod; - }; - - ivory = import ./ivory { - inherit pkgs herb urbit arvo; - pier = zod; - }; - - ivory-ropsten = import ./ivory { - inherit pkgs herb urbit; - arvo = arvo-ropsten; - pier = ropzod; - }; - - image = import ./image { - inherit pkgs herb urbit solid; - }; - - image-ropsten = import ./image { - inherit pkgs herb urbit; - brass = brass-ropsten; - ivory = ivory-ropsten; - }; - -} diff --git a/nix/ops/fakeship/builder.sh b/nix/ops/fakeship/builder.sh deleted file mode 100755 index 19ef10b87..000000000 --- a/nix/ops/fakeship/builder.sh +++ /dev/null @@ -1,26 +0,0 @@ -source $stdenv/setup - -set -ex - -if [ -z "$ARVO" ] -then - $URBIT -d -F $SHIP -B "$PILL" $out -else - $URBIT -d -F $SHIP -A "$ARVO" -B "$PILL" $out -fi - -check () { - [ 3 -eq "$(herb $out -d 3)" ] -} - -if check && sleep 10 && check -then - echo "Boot success." >&2 - herb $out -p hood -d '+hood/exit' || true -else - echo "Boot failure." >&2 - kill $(< $out/.vere.lock) || true - exit 1 -fi - -set +x diff --git a/nix/ops/fakeship/default.nix b/nix/ops/fakeship/default.nix deleted file mode 100644 index 8ebfa4a5e..000000000 --- a/nix/ops/fakeship/default.nix +++ /dev/null @@ -1,11 +0,0 @@ -{ pkgs, herb, urbit, pill, ship, arvo }: - -pkgs.stdenv.mkDerivation { - name = "fake" + ship; - builder = ./builder.sh; - buildInputs = [ herb ]; - URBIT = urbit.meta.exe; - ARVO = arvo; - PILL = pill; - SHIP = ship; -} diff --git a/nix/ops/image/default.nix b/nix/ops/image/default.nix deleted file mode 100644 index ecfc97d26..000000000 --- a/nix/ops/image/default.nix +++ /dev/null @@ -1,47 +0,0 @@ -{ pkgs -, herb -, urbit -, solid ? null -, brass ? null -, ivory ? 
null -}: - -let - link = pill: path: - if pill == null then "" - else "${pkgs.coreutils}/bin/ln -sf ${pill} ${path}"; - -in pkgs.dockerTools.buildImage { - name = urbit.meta.name; - - runAsRoot = '' - #!${pkgs.stdenv.shell} - - set -euo pipefail - - ${pkgs.dockerTools.shadowSetup} - - mkdir -p /share /data /tmp - - ${link solid "/share/solid.pill"} - ${link brass "/share/brass.pill"} - ${link ivory "/share/ivory.pill"} - ''; - - contents = [ urbit herb ]; - - config = { - Entrypoint = [ urbit.meta.name ]; - - WorkingDir = "/data"; - - Volumes = { - "/data" = {}; - }; - - ExposedPorts = { - "80/tcp" = {}; - "443/tcp" = {}; - }; - }; -} diff --git a/nix/ops/ivory/builder.sh b/nix/ops/ivory/builder.sh deleted file mode 100755 index 1e929a84f..000000000 --- a/nix/ops/ivory/builder.sh +++ /dev/null @@ -1,22 +0,0 @@ -source $stdenv/setup - -set -ex - -cp -r $PIER ./pier -chmod -R u+rw ./pier - -$URBIT -d ./pier - -cleanup () { - if [ -e ./pier/.vere.lock ] - then kill $(< ./pier/.vere.lock) || true; - fi -} - -trap cleanup EXIT - -herb ./pier -P ivory.pill -d '+ivory' - -mv ivory.pill $out - -set +x diff --git a/nix/ops/ivory/default.nix b/nix/ops/ivory/default.nix deleted file mode 100644 index 36532b7b5..000000000 --- a/nix/ops/ivory/default.nix +++ /dev/null @@ -1,11 +0,0 @@ -{ pkgs, herb, urbit, pier, arvo }: - -pkgs.stdenv.mkDerivation rec { - name = "ivory"; - builder = ./builder.sh; - buildInputs = [ herb pkgs.coreutils ]; - - URBIT = urbit.meta.exe; - PIER = pier; - ARVO = arvo; -} diff --git a/nix/ops/solid/builder.sh b/nix/ops/solid/builder.sh deleted file mode 100755 index f5b39a057..000000000 --- a/nix/ops/solid/builder.sh +++ /dev/null @@ -1,82 +0,0 @@ -source $stdenv/setup - -set -ex - -cp -r $PIER ./pier -chmod -R u+rw ./pier - -$URBIT -d ./pier - -shutdown () { - if [ -e ./pier/.vere.lock ] - then kill $(< ./pier/.vere.lock) || true; - fi -} - -trap shutdown EXIT - -# update pill strategy to ensure correct staging -# - -herb ./pier -p hood -d "+hood/mount /=home=" - -until [ -d ./pier/home ] -do - sleep 1 -done - -# update :lens, :dojo and dependencies -# -# XX reduce this list -# -cp $ARVO/app/lens.hoon ./pier/home/app/ 2>/dev/null || true -cp $ARVO/app/dojo.hoon ./pier/home/app/ 2>/dev/null || true -cp $ARVO/lib/base64.hoon ./pier/home/lib/ 2>/dev/null || true -cp $ARVO/lib/server.hoon ./pier/home/lib/ 2>/dev/null || true -cp $ARVO/lib/sole.hoon ./pier/home/lib/ 2>/dev/null || true -cp $ARVO/lib/xray.hoon ./pier/home/lib/ 2>/dev/null || true -cp $ARVO/lib/pprint.hoon ./pier/home/lib/ 2>/dev/null || true -mkdir -p ./pier/home/mar/lens/ -cp $ARVO/mar/lens/* ./pier/home/mar/lens/ 2>/dev/null || true - -cp $ARVO/sur/lens.hoon ./pier/home/sur/ 2>/dev/null || true -cp $ARVO/sur/sole.hoon ./pier/home/sur/ 2>/dev/null || true - -# update +solid and its dependencies -# -cp $ARVO/lib/pill.hoon ./pier/home/lib/ 2>/dev/null || true -cp $ARVO/gen/solid.hoon ./pier/home/gen/ 2>/dev/null || true - -chmod -R u+rw ./pier/home/ - -herb ./pier -p hood -d "+hood/commit %home" -herb ./pier -p hood -d "+hood/unmount %home" - -# XX horrible hack to ensure the update is applied first -# -sleep 10 - -# stage new desk for pill contents -# -herb ./pier -p hood -d '+hood/merge %stage our %home' -herb ./pier -p hood -d "+hood/mount /=stage=" - -until [ -d ./pier/stage ] -do - sleep 1 -done - -rm -rf ./pier/stage -cp -r $ARVO ./pier/stage -chmod -R u+rw ./pier/stage - -herb ./pier -p hood -d "+hood/commit %stage" -herb ./pier -p hood -d "+hood/unmount %stage" - -herb ./pier -P solid.pill -d '+solid 
/=stage=/sys, =dub &' - -herb ./pier -p hood -d '+hood/exit' || true - -mv solid.pill $out - -set +x diff --git a/nix/ops/solid/default.nix b/nix/ops/solid/default.nix deleted file mode 100644 index 050763c3b..000000000 --- a/nix/ops/solid/default.nix +++ /dev/null @@ -1,11 +0,0 @@ -{ pkgs, herb, urbit, pier, arvo }: - -pkgs.stdenv.mkDerivation rec { - name = "solid"; - builder = ./builder.sh; - buildInputs = [ herb pkgs.coreutils ]; - - URBIT = urbit.meta.exe; - PIER = pier; - ARVO = arvo; -} diff --git a/nix/ops/test/builder.sh b/nix/ops/test/builder.sh deleted file mode 100644 index 2746797ab..000000000 --- a/nix/ops/test/builder.sh +++ /dev/null @@ -1,80 +0,0 @@ -source $stdenv/setup - -set -ex - -cp -r $SHIP ./ship -chmod -R u+rw ./ship - -$URBIT -d ./ship 2> urbit-output - -tail -f urbit-output >&2 & -tailproc=$! - -shutdown () { - if [ -e ./ship/.vere.lock ] - then kill $(< ./ship/.vere.lock) || true; - fi - - kill "$tailproc" || true; -} - -trap shutdown EXIT - -herb ./ship -p hood -d '+hood/mass' - -# Run the unit tests and then print scrollback -herb ./ship -d '~& ~ ~& %test-unit-start ~' -herb ./ship -d '####-test %/tests' -herb ./ship -d '~& ~ ~& %test-unit-end ~' - -# Start and run the test app -herb ./ship -p hood -d '+hood/start %test' - -herb ./ship -d '~& ~ ~& %test-agents-start ~' -herb ./ship -p test -d '%agents' -herb ./ship -d '~& ~ ~& %test-agents-end ~' - -herb ./ship -d '~& ~ ~& %test-generators-start ~' -herb ./ship -p test -d '%generators' -herb ./ship -d '~& ~ ~& %test-generators-end ~' - -herb ./ship -d '~& ~ ~& %test-marks-start ~' -herb ./ship -p test -d '%marks' -herb ./ship -d '~& ~ ~& %test-marks-end ~' - -# compact the loom, comparing memory use before and after -herb ./ship -p hood -d '+hood/mass' - -herb ./ship -d '~& ~ ~& %pack-start ~' -herb ./ship -p hood -d '+hood/pack' -herb ./ship -d '~& ~ ~& %pack-end ~' - -herb ./ship -p hood -d '+hood/mass' - -shutdown - -# Collect output - -cp urbit-output test-output-unit -cp urbit-output test-output-agents -cp urbit-output test-output-generators -cp urbit-output test-output-marks -rm urbit-output - -sed -i '0,/test-unit-start/d' test-output-unit -sed -i '/test-unit-end/,$d' test-output-unit - -sed -i '0,/test-agents-start/d' test-output-agents -sed -i '/test-agents-end/,$d' test-output-agents - -sed -i '0,/test-generators-start/d' test-output-generators -sed -i '/test-generators-end/,$d' test-output-generators - -sed -i '0,/test-marks-start/d' test-output-marks -sed -i '/test-marks-end/,$d' test-output-marks - -mkdir $out - -cp -r test-output-* $out/ - -set +x diff --git a/nix/ops/test/default.nix b/nix/ops/test/default.nix deleted file mode 100644 index e901d2dc5..000000000 --- a/nix/ops/test/default.nix +++ /dev/null @@ -1,10 +0,0 @@ -{ pkgs, herb, urbit, ship }: - -pkgs.stdenv.mkDerivation rec { - name = "test"; - builder = ./builder.sh; - buildInputs = [ herb ]; - - URBIT = urbit.meta.exe; - SHIP = ship; -} diff --git a/nix/overlays/arm.nix b/nix/overlays/arm.nix new file mode 100644 index 000000000..62bb1eb0d --- /dev/null +++ b/nix/overlays/arm.nix @@ -0,0 +1,13 @@ +final: prev: + +let + + isAarch64 = prev.stdenv.hostPlatform.isAarch64; + +in prev.lib.optionalAttrs isAarch64 { + libsigsegv = prev.libsigsegv.overrideAttrs (attrs: { + preConfigure = (old.preConfigure or "") + '' + sed -i 's/^CFG_FAULT=$/CFG_FAULT=fault-linux-arm.h/' configure + ''; + }); +} diff --git a/nix/overlays/musl.nix b/nix/overlays/musl.nix new file mode 100644 index 000000000..857cb7504 --- /dev/null +++ b/nix/overlays/musl.nix 
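The arm.nix overlay above, like the musl, native, and static overlays that follow, is a standard `final: prev:` overlay. A short sketch of how these files are presumably composed into a package set; the entry point and the ordering are assumptions, since the actual wiring is not part of this hunk:

    let
      pkgs = import <nixpkgs> {
        overlays = [
          (import ./nix/overlays/native.nix)
          (import ./nix/overlays/musl.nix)   # a no-op unless hostPlatform.isMusl
          (import ./nix/overlays/arm.nix)    # a no-op unless hostPlatform.isAarch64
          (import ./nix/overlays/static.nix)
        ];
      };
    in pkgs.libsigsegv   # picks up the overridden preConfigure when an overlay applies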
@@ -0,0 +1,28 @@ +final: prev: + +let + + isMusl = prev.stdenv.hostPlatform.isMusl; + + optionalList = xs: if xs == null then [ ] else xs; + + overrideStdenv = pkg: pkg.override { stdenv = prev.gcc9Stdenv; }; + +in prev.lib.optionalAttrs isMusl { + libsigsegv = prev.libsigsegv.overrideAttrs (attrs: { + preConfigure = (attrs.preConfigure or "") + '' + sed -i 's/^CFG_FAULT=$/CFG_FAULT=fault-linux-i386.h/' configure + ''; + }); + + secp256k1 = prev.secp256k1.overrideAttrs (attrs: { + nativeBuildInputs = (attrs.nativeBuildInputs or [ ]) + ++ [ prev.buildPackages.stdenv.cc ]; + }); + + rhash = overrideStdenv prev.rhash; + + numactl = overrideStdenv prev.numactl; + + lmdb = overrideStdenv prev.lmdb; +} diff --git a/nix/overlays/native.nix b/nix/overlays/native.nix new file mode 100644 index 000000000..2ab55e375 --- /dev/null +++ b/nix/overlays/native.nix @@ -0,0 +1,28 @@ +final: prev: + +let + + optionalList = xs: if xs == null then [ ] else xs; + +in { + h2o = prev.h2o.overrideAttrs (_attrs: { + version = final.sources.h2o.rev; + src = final.sources.h2o; + outputs = [ "out" "dev" "lib" ]; + }); + + libsigsegv = prev.libsigsegv.overrideAttrs (attrs: { + patches = optionalList attrs.patches ++ [ + ../pkgs/libsigsegv/disable-stackvma_fault-linux-arm.patch + ../pkgs/libsigsegv/disable-stackvma_fault-linux-i386.patch + ]; + }); + + curlMinimal = prev.curl.override { + http2Support = false; + scpSupport = false; + gssSupport = false; + ldapSupport = false; + brotliSupport = false; + }; +} diff --git a/nix/overlays/static.nix b/nix/overlays/static.nix new file mode 100644 index 000000000..fa0cfdd89 --- /dev/null +++ b/nix/overlays/static.nix @@ -0,0 +1,37 @@ +final: prev: + +let + + # https://github.com/NixOS/nixpkgs/pull/97047/files + # Will make pkgs.stdenv.isStatic available indepedent of the platform. + # isStatic = prev.stdenv.hostPlatform.isStatic; + + configureFlags = attrs: { + configureFlags = (attrs.configureFlags or [ ]) + ++ [ "--disable-shared" "--enable-static" ]; + }; + + enableStatic = pkg: pkg.overrideAttrs configureFlags; + +in { + gmp = enableStatic prev.gmp; + + curlMinimal = enableStatic prev.curlMinimal; + + libuv = enableStatic prev.libuv; + + libffi = enableStatic prev.libffi; + + secp256k1 = enableStatic prev.secp256k1; + + lmdb = prev.lmdb.overrideAttrs (old: + configureFlags old // { + # Why remove the so version? It's easier than preventing it from being + # built with lmdb's custom Makefiles, and it can't exist in the output + # because otherwise the linker will preferentially choose the .so over + # the .a. + postInstall = '' + rm $out/lib/liblmdb.so + ''; + }); +} diff --git a/nix/pkgs/argon2u/default.nix b/nix/pkgs/argon2u/default.nix new file mode 100644 index 000000000..60c7a1089 --- /dev/null +++ b/nix/pkgs/argon2u/default.nix @@ -0,0 +1,30 @@ +{ stdenv, sources, enableParallelBuilding ? 
true }: + +stdenv.mkDerivation { + pname = "argon2u"; + version = sources.argon2u.rev; + src = sources.argon2u; + + postPatch = '' + substituteInPlace Makefile --replace 'ar rcs' '$(AR) rcs' + ''; + + buildPhase = '' + make libargon2.a + ''; + + installPhase = '' + mkdir -p $out/{lib,include} + cp libargon2.a $out/lib/ + cp include/argon2.h $out/include/ + cp ./src/blake2/*.h $out/include/ + ''; + + makeFlags = [ + "AR=${stdenv.cc.targetPrefix}ar" # Fix cross-compilation + ]; + + NO_THREADS = true; + + inherit enableParallelBuilding; +} diff --git a/nix/pkgs/arvo-ropsten/builder.sh b/nix/pkgs/arvo-ropsten/builder.sh deleted file mode 100644 index af133a2f2..000000000 --- a/nix/pkgs/arvo-ropsten/builder.sh +++ /dev/null @@ -1,26 +0,0 @@ -source $stdenv/setup - -cp -r $src tmp -chmod -R u+w tmp - -ZUSE=tmp/sys/zuse.hoon -AMES=tmp/sys/vane/ames.hoon -ACME=tmp/app/acme.hoon - -# replace the mainnet azimuth contract with the ropsten contract -sed --in-place \ - 's/\(\+\+ contracts \)mainnet\-contracts/\1ropsten-contracts/' \ - $ZUSE - -# increment the %ames protocol version -sed -r --in-place \ - 's_^(=/ protocol\-version=\?\(.*\) %)([0-7])_echo "\1$(echo "(\2+1) % 8" | bc)"_e' \ - $AMES - -# use the staging API in :acme -sed --in-place \ - 's_https://acme-v02.api.letsencrypt.org/directory_https://acme-staging-v02.api.letsencrypt.org/directory_' \ - $ACME - -cp -r tmp $out -chmod -R u+w $out diff --git a/nix/pkgs/arvo-ropsten/default.nix b/nix/pkgs/arvo-ropsten/default.nix deleted file mode 100644 index 9f4bb7e7d..000000000 --- a/nix/pkgs/arvo-ropsten/default.nix +++ /dev/null @@ -1,8 +0,0 @@ -{ pkgs }: - -pkgs.stdenv.mkDerivation { - name = "arvo-ropsten"; - buildInputs = [ pkgs.bc ]; - builder = ./builder.sh; - src = pkgs.buildRustCrateHelpers.exclude [ ".git" ] ../../../pkg/arvo; -} diff --git a/nix/pkgs/arvo/builder.sh b/nix/pkgs/arvo/builder.sh deleted file mode 100644 index 3a0c49ad2..000000000 --- a/nix/pkgs/arvo/builder.sh +++ /dev/null @@ -1,4 +0,0 @@ -source $stdenv/setup - -cp -r $src/ $out -chmod -R u+w $out diff --git a/nix/pkgs/arvo/default.nix b/nix/pkgs/arvo/default.nix index 8b31237cf..c9204a01f 100644 --- a/nix/pkgs/arvo/default.nix +++ b/nix/pkgs/arvo/default.nix @@ -1,10 +1,46 @@ -{ pkgs }: +{ lib, stdenvNoCC, bc }: -pkgs.stdenv.mkDerivation { +stdenvNoCC.mkDerivation { name = "arvo"; - builder = ./builder.sh; - src = pkgs.buildRustCrateHelpers.exclude [ ".git" ] ../../../pkg/arvo; - meta = { - priority = 0; - }; + src = lib.cleanSource ../../../pkg/arvo; + + buildInputs = [ bc ]; + + outputs = [ "out" "ropsten" ]; + + phases = [ "mainnetPhase" "ropstenPhase" ]; + + mainnetPhase = '' + cp -r $src/ $out + chmod -R u+w $out + ''; + + ropstenPhase = '' + cp -r $src tmp + chmod -R u+w tmp + + ZUSE=tmp/sys/zuse.hoon + AMES=tmp/sys/vane/ames.hoon + ACME=tmp/app/acme.hoon + + # Replace the mainnet azimuth contract with the ropsten contract + sed --in-place \ + 's/\(\+\+ contracts \)mainnet\-contracts/\1ropsten-contracts/' \ + $ZUSE + + # Increment the %ames protocol version + sed -r --in-place \ + 's_^(=/ protocol\-version=\?\(.*\) %)([0-7])_echo "\1$(echo "(\2+1) % 8" | bc)"_e' \ + $AMES + + # Use the staging API in :acme + sed --in-place \ + 's_https://acme-v02.api.letsencrypt.org/directory_https://acme-staging-v02.api.letsencrypt.org/directory_' \ + $ACME + + cp -r tmp $ropsten + chmod -R u+w $ropsten + ''; + + preferLocalBuild = true; } diff --git a/nix/pkgs/ca-bundle/default.nix b/nix/pkgs/ca-bundle/default.nix new file mode 100644 index 000000000..ddef1fb3e --- /dev/null +++ 
b/nix/pkgs/ca-bundle/default.nix @@ -0,0 +1,29 @@ +{ stdenvNoCC, xxd, cacert }: + +stdenvNoCC.mkDerivation { + name = "ca-bundle"; + + nativeBuildInputs = [ cacert xxd ]; + + phases = [ "installPhase" ]; + + installPhase = '' + set -euo pipefail + + if ! [ -f "$SSL_CERT_FILE" ]; then + header "$SSL_CERT_FILE doesn't exist" + exit 1 + fi + + mkdir include + + cat $SSL_CERT_FILE > include/ca-bundle.crt + xxd -i include/ca-bundle.crt > ca-bundle.h + + mkdir -p $out/include + + mv ca-bundle.h $out/include + ''; + + preferLocalBuild = true; +} diff --git a/nix/pkgs/default.nix b/nix/pkgs/default.nix deleted file mode 100644 index ba1129dd3..000000000 --- a/nix/pkgs/default.nix +++ /dev/null @@ -1,33 +0,0 @@ -{ pkgs ? import ../nixpkgs.nix }: - -let - - deps = import ../deps { inherit pkgs; }; - - ent = import ./ent { inherit pkgs; }; - arvo = import ./arvo { inherit pkgs; }; - arvo-ropsten = import ./arvo-ropsten { inherit pkgs; }; - herb = import ../../pkg/herb { inherit pkgs; }; - - ge-additions = import ./ge-additions { - inherit pkgs; - inherit (deps) ed25519; - }; - - libaes_siv = import ./libaes_siv { - inherit pkgs; - }; - - mkUrbit = { debug }: - import ./urbit { - inherit pkgs ent debug ge-additions libaes_siv; - inherit (deps) argon2 murmur3 uv ed25519 scrypt softfloat3; - inherit (deps) secp256k1 h2o ivory-header ca-header; - }; - - urbit = mkUrbit { debug = false; }; - urbit-debug = mkUrbit { debug = true; }; - -in - -{ inherit ent ge-additions libaes_siv arvo arvo-ropsten herb urbit urbit-debug; } diff --git a/nix/pkgs/ed25519/default.nix b/nix/pkgs/ed25519/default.nix new file mode 100644 index 000000000..7edd85223 --- /dev/null +++ b/nix/pkgs/ed25519/default.nix @@ -0,0 +1,21 @@ +{ stdenv, sources }: + +stdenv.mkDerivation { + pname = "ed25519"; + version = sources.ed25519.rev; + src = sources.ed25519; + + buildPhase = '' + CFLAGS="-O3 -Wall -I$src/src" + + for f in $(find src -type f -name '*.c'); do + $CC $CFLAGS -c $f -o "''${f//\//_}.o" + done + ''; + + installPhase = '' + mkdir -p $out/{lib,include} + $AR rcs $out/lib/libed25519.a *.o + cp $src/src/*.h $out/include/ + ''; +} diff --git a/nix/pkgs/ent/builder.sh b/nix/pkgs/ent/builder.sh deleted file mode 100644 index 464f62e0b..000000000 --- a/nix/pkgs/ent/builder.sh +++ /dev/null @@ -1,8 +0,0 @@ -source $stdenv/setup - -cp -r $src ./src -chmod -R u+w ./src -cd ./src - -bash ./configure -PREFIX=$out make install diff --git a/nix/pkgs/ent/cross.nix b/nix/pkgs/ent/cross.nix deleted file mode 100644 index e84d7a01e..000000000 --- a/nix/pkgs/ent/cross.nix +++ /dev/null @@ -1,10 +0,0 @@ -{ env_name, env, deps }: - -env.make_derivation rec { - name = "ent-7506f"; - builder = ./builder.sh; - src = ../../../pkg/ent; - - CC = "${env.host}-gcc"; - AR = "${env.host}-ar"; -} diff --git a/nix/pkgs/ent/default.nix b/nix/pkgs/ent/default.nix index ce7c2088c..ee13758f5 100644 --- a/nix/pkgs/ent/default.nix +++ b/nix/pkgs/ent/default.nix @@ -1,7 +1,14 @@ -{ pkgs }: +{ lib, stdenv, enableParallelBuilding ? 
true }: -pkgs.stdenv.mkDerivation rec { - name = "ent-7506f"; - builder = ./builder.sh; - src = ../../../pkg/ent; +stdenv.mkDerivation { + name = "ent"; + src = lib.cleanSource ../../../pkg/ent; + + postPatch = '' + patchShebangs ./configure + ''; + + installFlags = [ "PREFIX=$(out)" ]; + + inherit enableParallelBuilding; } diff --git a/nix/pkgs/ge-additions/builder.sh b/nix/pkgs/ge-additions/builder.sh deleted file mode 100644 index 5a0404377..000000000 --- a/nix/pkgs/ge-additions/builder.sh +++ /dev/null @@ -1,7 +0,0 @@ -source $stdenv/setup - -cp -r $src ./src -chmod -R u+w ./src -cd ./src - -PREFIX=$out make install diff --git a/nix/pkgs/ge-additions/cross.nix b/nix/pkgs/ge-additions/cross.nix deleted file mode 100644 index f16afad0b..000000000 --- a/nix/pkgs/ge-additions/cross.nix +++ /dev/null @@ -1,12 +0,0 @@ -{ env_name, env, deps }: - -env.make_derivation rec { - name = "ge-additions"; - builder = ./release.sh; - src = ../../../pkg/ge-additions; - - cross_inputs = [ deps.ed25519 ]; - - CC = "${env.host}-gcc"; - AR = "${env.host}-ar"; -} diff --git a/nix/pkgs/ge-additions/default.nix b/nix/pkgs/ge-additions/default.nix index e77098cff..e317972d6 100644 --- a/nix/pkgs/ge-additions/default.nix +++ b/nix/pkgs/ge-additions/default.nix @@ -1,9 +1,13 @@ -{ pkgs, ed25519 }: +{ lib, stdenv, ed25519, enableParallelBuilding ? true }: -pkgs.stdenv.mkDerivation rec { - name = "ge-additions"; - builder = ./builder.sh; - src = ../../../pkg/ge-additions; +stdenv.mkDerivation { + name = "ge-additions"; + src = lib.cleanSource ../../../pkg/ge-additions; - nativeBuildInputs = [ ed25519 ]; + buildInputs = [ ed25519 ]; + + installFlags = [ "PREFIX=$(out)" ]; + + inherit enableParallelBuilding; } + diff --git a/nix/pkgs/ge-additions/release.sh b/nix/pkgs/ge-additions/release.sh deleted file mode 100644 index aaa54b5e1..000000000 --- a/nix/pkgs/ge-additions/release.sh +++ /dev/null @@ -1,13 +0,0 @@ -source $setup - -cp -r $src ./src -chmod -R u+w ./src -cd ./src - -for dep in $cross_inputs; do - export CFLAGS="${CFLAGS-} -I$dep/include" - export LDFLAGS="${LDFLAGS-} -L$dep/lib" -done - -PREFIX=$out make install - diff --git a/nix/pkgs/herb/default.nix b/nix/pkgs/herb/default.nix new file mode 100644 index 000000000..ac9ae8dca --- /dev/null +++ b/nix/pkgs/herb/default.nix @@ -0,0 +1,29 @@ +{ lib, stdenvNoCC, python }: + +# Avoid using `python.withPackages` as it creates a wrapper script to set +# PYTHONPATH, and the script is used verbatim as a python shebang. +# +# Unfortunately Darwin does not allow scripts as a shebang - so to get a +# cross platform python interpreter with appropriate site-packages setup +# we use `wrapPython/Packages` which handles these cases correctly. + +stdenvNoCC.mkDerivation { + name = "herb"; + src = ../../../pkg/herb/herb; + + nativeBuildInputs = [ python.pkgs.wrapPython ]; + buildInputs = [ python python.pkgs.requests ]; + pythonPath = [ python.pkgs.requests ]; + + phases = [ "installPhase" "fixupPhase" ]; + + installPhase = '' + mkdir -p $out/bin + cp $src $out/bin/herb + chmod +x $out/bin/herb + ''; + + postFixup = '' + wrapPythonPrograms + ''; +} diff --git a/nix/pkgs/hs/default.nix b/nix/pkgs/hs/default.nix new file mode 100644 index 000000000..91a2102e5 --- /dev/null +++ b/nix/pkgs/hs/default.nix @@ -0,0 +1,87 @@ +{ lib, stdenv, darwin, haskell-nix, gmp, zlib, libffi, brass +, enableStatic ? 
stdenv.hostPlatform.isStatic }: + +haskell-nix.stackProject { + compiler-nix-name = "ghc884"; + index-state = "2020-09-24T00:00:00Z"; + + # This is incredibly difficult to get right, almost everything goes wrong. + # See: https://github.com/input-output-hk/haskell.nix/issues/496 + src = haskell-nix.haskellLib.cleanSourceWith { + # Otherwise this depends on the name in the parent directory, which + # reduces caching, and is particularly bad on Hercules. + # See: https://github.com/hercules-ci/support/issues/40 + name = "urbit-hs"; + src = ../../../pkg/hs; + }; + + modules = [{ + # This corresponds to the set of packages (boot libs) that ship with GHC. + # We declare them yere to ensure any dependency gets them from GHC itself + # rather than trying to re-install them into the package database. + nonReinstallablePkgs = [ + "Cabal" + "Win32" + "array" + "base" + "binary" + "bytestring" + "containers" + "deepseq" + "directory" + "filepath" + "ghc" + "ghc-boot" + "ghc-boot-th" + "ghc-compact" + "ghc-heap" + "ghc-prim" + "ghci" + "ghcjs-prim" + "ghcjs-th" + "haskeline" + "hpc" + "integer-gmp" + "integer-simple" + "mtl" + "parsec" + "pretty" + "process" + "rts" + "stm" + "template-haskell" + "terminfo" + "text" + "time" + "transformers" + "unix" + "xhtml" + ]; + + # Override various project-local flags and build configuration. + packages = { + urbit-king.components.exes.urbit-king = { + enableStatic = enableStatic; + enableShared = !enableStatic; + + configureFlags = lib.optionals enableStatic [ + "--ghc-option=-optl=-L${gmp}/lib" + "--ghc-option=-optl=-L${libffi}/lib" + "--ghc-option=-optl=-L${zlib}/lib" + ] ++ lib.optionals (enableStatic && stdenv.isDarwin) + [ "--ghc-option=-optl=-L${darwin.libiconv}/lib" ]; + + postInstall = lib.optionalString (enableStatic && stdenv.isDarwin) '' + find "$out/bin" -type f -exec \ + install_name_tool -change \ + ${stdenv.cc.libc}/lib/libSystem.B.dylib \ + /usr/lib/libSystem.B.dylib {} \; + ''; + }; + + urbit-king.components.tests.urbit-king-tests.testFlags = + [ "--brass-pill=${brass.lfs}" ]; + }; + }]; +} + diff --git a/nix/pkgs/libaes_siv/builder.sh b/nix/pkgs/libaes_siv/builder.sh deleted file mode 100644 index 5a0404377..000000000 --- a/nix/pkgs/libaes_siv/builder.sh +++ /dev/null @@ -1,7 +0,0 @@ -source $stdenv/setup - -cp -r $src ./src -chmod -R u+w ./src -cd ./src - -PREFIX=$out make install diff --git a/nix/pkgs/libaes_siv/cross.nix b/nix/pkgs/libaes_siv/cross.nix deleted file mode 100644 index f78240058..000000000 --- a/nix/pkgs/libaes_siv/cross.nix +++ /dev/null @@ -1,12 +0,0 @@ -{ env_name, env, deps }: - -env.make_derivation rec { - name = "libaes_siv"; - builder = ./release.sh; - src = ../../../pkg/libaes_siv; - - cross_inputs = [ env.openssl ]; - - CC = "${env.host}-gcc"; - AR = "${env.host}-ar"; -} diff --git a/nix/pkgs/libaes_siv/default.nix b/nix/pkgs/libaes_siv/default.nix index fbdda7ebe..8b298f72f 100644 --- a/nix/pkgs/libaes_siv/default.nix +++ b/nix/pkgs/libaes_siv/default.nix @@ -1,9 +1,12 @@ -{ pkgs }: +{ lib, stdenv, openssl, enableParallelBuilding ? 
true }: -pkgs.stdenv.mkDerivation rec { - name = "libaes_siv"; - builder = ./builder.sh; - src = ../../../pkg/libaes_siv; +stdenv.mkDerivation { + name = "libaes_siv"; + src = lib.cleanSource ../../../pkg/libaes_siv; - nativeBuildInputs = [ pkgs.openssl ]; + buildInputs = [ openssl ]; + + installFlags = [ "PREFIX=$(out)" ]; + + inherit enableParallelBuilding; } diff --git a/nix/pkgs/libaes_siv/release.sh b/nix/pkgs/libaes_siv/release.sh deleted file mode 100644 index aaa54b5e1..000000000 --- a/nix/pkgs/libaes_siv/release.sh +++ /dev/null @@ -1,13 +0,0 @@ -source $setup - -cp -r $src ./src -chmod -R u+w ./src -cd ./src - -for dep in $cross_inputs; do - export CFLAGS="${CFLAGS-} -I$dep/include" - export LDFLAGS="${LDFLAGS-} -L$dep/lib" -done - -PREFIX=$out make install - diff --git a/nix/pkgs/libscrypt/default.nix b/nix/pkgs/libscrypt/default.nix new file mode 100644 index 000000000..55ed88f2c --- /dev/null +++ b/nix/pkgs/libscrypt/default.nix @@ -0,0 +1,35 @@ +{ stdenv, sources }: + +stdenv.mkDerivation { + pname = "libscrypt"; + version = sources.libscrypt.rev; + src = sources.libscrypt; + + buildPhase = '' + sources=" \ + crypto_scrypt-check \ + crypto_scrypt-hash \ + crypto_scrypt-hexconvert \ + crypto_scrypt-nosse \ + crypto-mcf \ + crypto-scrypt-saltgen \ + slowequals \ + sha256 \ + b64 \ + " + + CFLAGS="-I$src -Wall -ffast-math -O3 -D_FORTIFY_SOURCE=2 -fstack-protector" + + for s in $sources; do + $CC $CFLAGS -c $src/$s.c -o $s.o + done + + $AR rcs libscrypt.a *.o + ''; + + installPhase = '' + mkdir -p $out/{lib,include} + cp libscrypt.a $out/lib + cp $src/*.h $out/include/ + ''; +} diff --git a/nix/pkgs/libsigsegv/disable-stackvma_fault-linux-arm.patch b/nix/pkgs/libsigsegv/disable-stackvma_fault-linux-arm.patch new file mode 100644 index 000000000..56d2b4b75 --- /dev/null +++ b/nix/pkgs/libsigsegv/disable-stackvma_fault-linux-arm.patch @@ -0,0 +1,11 @@ +--- a/src/fault-linux-arm.h ++++ b/src/fault-linux-arm.h +@@ -17,6 +17,8 @@ + + #include "fault-posix-ucontext.h" + ++#define HAVE_STACKVMA 0 ++ + #if defined(__aarch64__) || defined(__ARM_64BIT_STATE) || defined(__ARM_PCS_AAPCS64) /* 64-bit */ + + /* See glibc/sysdeps/unix/sysv/linux/aarch64/sys/ucontext.h. diff --git a/nix/pkgs/libsigsegv/disable-stackvma_fault-linux-i386.patch b/nix/pkgs/libsigsegv/disable-stackvma_fault-linux-i386.patch new file mode 100644 index 000000000..9d7d03bd1 --- /dev/null +++ b/nix/pkgs/libsigsegv/disable-stackvma_fault-linux-i386.patch @@ -0,0 +1,11 @@ +--- a/src/fault-linux-i386.h ++++ b/src/fault-linux-i386.h +@@ -18,6 +18,8 @@ + + #include "fault-posix-ucontext.h" + ++#define HAVE_STACKVMA 0 ++ + #if defined __x86_64__ + /* 64 bit registers */ + diff --git a/nix/pkgs/murmur3/default.nix b/nix/pkgs/murmur3/default.nix new file mode 100644 index 000000000..abac84c95 --- /dev/null +++ b/nix/pkgs/murmur3/default.nix @@ -0,0 +1,17 @@ +{ stdenv, sources }: + +stdenv.mkDerivation { + pname = "murmur3"; + version = sources.murmur3.rev; + src = sources.murmur3; + + buildPhase = '' + $CC -fPIC -O3 -o murmur3.o -c $src/murmur3.c + ''; + + installPhase = '' + mkdir -p $out/{lib,include} + $AR rcs $out/lib/libmurmur3.a murmur3.o + cp $src/*.h $out/include/ + ''; +} diff --git a/nix/pkgs/pill/brass.nix b/nix/pkgs/pill/brass.nix new file mode 100644 index 000000000..54a816df0 --- /dev/null +++ b/nix/pkgs/pill/brass.nix @@ -0,0 +1,22 @@ +{ lib, stdenvNoCC, fetchGitHubLFS, bootFakeShip, solid, urbit, arvo, herb +, withRopsten ? 
false }: + +let + + lfs = fetchGitHubLFS { src = ../../../bin/brass.pill; }; + +in { + build = import ./builder.nix { + inherit stdenvNoCC urbit herb; + + name = "brass" + lib.optionalString withRopsten "-ropsten"; + builder = ./brass.sh; + arvo = if withRopsten then arvo.ropsten else arvo; + pier = bootFakeShip { + inherit urbit herb; + + pill = solid.lfs; + ship = "zod"; + }; + }; +} // lib.optionalAttrs (!withRopsten) { inherit lfs; } diff --git a/nix/pkgs/pill/brass.sh b/nix/pkgs/pill/brass.sh new file mode 100644 index 000000000..114578d6b --- /dev/null +++ b/nix/pkgs/pill/brass.sh @@ -0,0 +1,23 @@ +source $stdenv/setup + +set -euo pipefail + +cp -r $src ./pier +chmod -R u+rw ./pier + +urbit -d ./pier + +cleanup () { + if [ -f ./pier/.vere.lock ]; then + kill $(< ./pier/.vere.lock) || true + fi +} + +trap cleanup EXIT + +header "running herb +brass" + +herb ./pier -P brass.pill -d '+brass' +herb ./pier -p hood -d '+hood/exit' + +stopNest diff --git a/nix/pkgs/pill/builder.nix b/nix/pkgs/pill/builder.nix new file mode 100644 index 000000000..f97d0f81e --- /dev/null +++ b/nix/pkgs/pill/builder.nix @@ -0,0 +1,16 @@ +{ stdenvNoCC, urbit, arvo, herb, name, builder, pier }: + +stdenvNoCC.mkDerivation { + name = "${name}.pill"; + src = pier; + buildInputs = [ urbit herb ]; + dontUnpack = true; + + buildPhase = builtins.readFile builder; + + installPhase = '' + mv ${name}.pill $out + ''; + + ARVO = arvo; +} diff --git a/nix/pkgs/pill/ivory.nix b/nix/pkgs/pill/ivory.nix new file mode 100644 index 000000000..354cfbd39 --- /dev/null +++ b/nix/pkgs/pill/ivory.nix @@ -0,0 +1,42 @@ +{ lib, stdenvNoCC, fetchGitHubLFS, bootFakeShip, solid, urbit, arvo, herb, xxd +, withRopsten ? false }: + +let + + lfs = fetchGitHubLFS { src = ../../../bin/ivory.pill; }; + +in { + build = import ./builder.nix { + inherit stdenvNoCC urbit herb; + + name = "ivory" + lib.optionalString withRopsten "-ropsten"; + builder = ./ivory.sh; + arvo = if withRopsten then arvo.ropsten else arvo; + pier = bootFakeShip { + inherit urbit herb; + + pill = solid.lfs; + ship = "zod"; + }; + }; + + # The hexdump of the `.lfs` pill contents as a C header. 
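+  # (Illustrative sketch only, not part of the build: `xxd -i` on the copied
+  # pill file emits C source that embeds the bytes as an array plus a length,
+  # named after the input file, roughly:
+  #
+  #   unsigned char u3_Ivory_pill[] = { 0x.., 0x.., ... };
+  #   unsigned int  u3_Ivory_pill_len = <byte count>;
+  #
+  # Downstream C code can then include the generated ivory.h to compile the
+  # pill contents directly into a binary.)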
+ header = stdenvNoCC.mkDerivation { + name = "ivory-header"; + src = lfs; + nativeBuildInputs = [ xxd ]; + phases = [ "installPhase" ]; + + installPhase = '' + file=u3_Ivory.pill + + header "writing $file" + + mkdir -p $out/include + cat $src > $file + xxd -i $file > $out/include/ivory.h + ''; + + preferLocalBuild = true; + }; +} // lib.optionalAttrs (!withRopsten) { inherit lfs; } diff --git a/nix/pkgs/pill/ivory.sh b/nix/pkgs/pill/ivory.sh new file mode 100644 index 000000000..fc5ced9a6 --- /dev/null +++ b/nix/pkgs/pill/ivory.sh @@ -0,0 +1,23 @@ +source $stdenv/setup + +set -euo pipefail + +cp -r $src ./pier +chmod -R u+rw ./pier + +urbit -d ./pier + +cleanup () { + if [ -f ./pier/.vere.lock ]; then + kill $(< ./pier/.vere.lock) || true + fi +} + +trap cleanup EXIT + +header "running herb +ivory" + +herb ./pier -P ivory.pill -d '+ivory' +herb ./pier -p hood -d '+hood/exit' + +stopNest diff --git a/nix/pkgs/pill/solid.nix b/nix/pkgs/pill/solid.nix new file mode 100644 index 000000000..17bcd4433 --- /dev/null +++ b/nix/pkgs/pill/solid.nix @@ -0,0 +1,23 @@ +{ stdenvNoCC, fetchGitHubLFS, bootFakeShip, solid, urbit, arvo, herb }: + +let + + lfs = fetchGitHubLFS { src = ../../../bin/solid.pill; }; + +in { + inherit lfs; + + build = import ./builder.nix { + inherit stdenvNoCC urbit arvo herb; + + name = "solid"; + builder = ./solid.sh; + pier = bootFakeShip { + inherit urbit herb; + + arvo = null; + pill = solid.lfs; + ship = "zod"; + }; + }; +} diff --git a/nix/pkgs/pill/solid.sh b/nix/pkgs/pill/solid.sh new file mode 100644 index 000000000..1e395f09f --- /dev/null +++ b/nix/pkgs/pill/solid.sh @@ -0,0 +1,73 @@ +source $stdenv/setup + +set -euo pipefail + +ARVO=${ARVO?:ARVO location is unset} + +cp -r $src ./pier +chmod -R u+rw ./pier + +urbit -d ./pier + +cleanup () { + if [ -f ./pier/.vere.lock ]; then + kill $(< ./pier/.vere.lock) || true + fi +} + +trap cleanup EXIT + +# Update pill strategy to ensure correct staging +herb ./pier -p hood -d "+hood/mount /=home=" + +until [ -d ./pier/home ]; do + sleep 1 +done + +# Update :lens, :dojo and dependencies +# FIXME: reduce this list +cp $ARVO/app/lens.hoon ./pier/home/app/ +cp $ARVO/app/dojo.hoon ./pier/home/app/ +cp $ARVO/lib/base64.hoon ./pier/home/lib/ +cp $ARVO/lib/server.hoon ./pier/home/lib/ +cp $ARVO/lib/sole.hoon ./pier/home/lib/ +cp $ARVO/lib/xray.hoon ./pier/home/lib/ +cp $ARVO/lib/pprint.hoon ./pier/home/lib/ + +mkdir -p ./pier/home/mar/lens/ + +cp $ARVO/mar/lens/* ./pier/home/mar/lens/ + +cp $ARVO/sur/lens.hoon ./pier/home/sur/ +cp $ARVO/sur/sole.hoon ./pier/home/sur/ + +# Update +solid and its dependencies +cp $ARVO/lib/pill.hoon ./pier/home/lib/ +cp $ARVO/gen/solid.hoon ./pier/home/gen/ + +chmod -R u+rw ./pier/home/ + +herb ./pier -p hood -d "+hood/commit %home" +herb ./pier -p hood -d "+hood/unmount %home" + +# FIXME: horrible hack to ensure the update is applied first +sleep 10 + +# Stage new desk for pill contents +herb ./pier -p hood -d '+hood/merge %stage our %home' +herb ./pier -p hood -d "+hood/mount /=stage=" + +until [ -d ./pier/stage ]; do + sleep 1 +done + +rm -rf ./pier/stage +cp -r $ARVO ./pier/stage +chmod -R u+rw ./pier/stage + +herb ./pier -p hood -d "+hood/commit %stage" +herb ./pier -p hood -d "+hood/unmount %stage" +herb ./pier -P solid.pill -d '+solid /=stage=/sys, =dub &' +herb ./pier -p hood -d '+hood/exit' + +stopNest diff --git a/nix/pkgs/softfloat3/default.nix b/nix/pkgs/softfloat3/default.nix new file mode 100644 index 000000000..cae76a366 --- /dev/null +++ b/nix/pkgs/softfloat3/default.nix @@ -0,0 +1,28 
@@ +{ stdenv, sources, enableParallelBuilding ? true }: + +stdenv.mkDerivation { + pname = "softfloat3"; + version = sources.softfloat3.rev; + src = sources.softfloat3; + + postPatch = '' + for f in $(find build -type f -name 'Makefile'); do + substituteInPlace $f \ + --replace 'gcc' '$(CC)' \ + --replace 'ar crs' '$(AR) crs' + done + ''; + + preBuild = '' + cd build/Linux-x86_64-GCC + ''; + + installPhase = '' + mkdir -p $out/{lib,include} + cp $src/source/include/*.h $out/include/ + cp softfloat.a $out/lib/libsoftfloat3.a + ''; + + inherit enableParallelBuilding; +} + diff --git a/nix/pkgs/urbit/default.nix b/nix/pkgs/urbit/default.nix index 19befa1ef..c92543964 100644 --- a/nix/pkgs/urbit/default.nix +++ b/nix/pkgs/urbit/default.nix @@ -1,61 +1,76 @@ -{ - pkgs, - debug, - argon2, ed25519, ent, ge-additions, libaes_siv, h2o, murmur3, scrypt, secp256k1, softfloat3, uv, ivory-header, ca-header -}: +{ lib, stdenv, coreutils, pkgconfig, argon2u, cacert, ca-bundle, curlMinimal +, ed25519, ent, ge-additions, gmp, h2o, herb, ivory, libaes_siv, libscrypt +, libsigsegv, libuv, lmdb, murmur3, openssl, secp256k1, softfloat3, zlib +, enableStatic ? stdenv.hostPlatform.isStatic, enableDebug ? false +, doCheck ? true, enableParallelBuilding ? true, dontStrip ? true }: let - name = - if debug then "urbit-debug" else "urbit"; + src = lib.cleanSource ../../../pkg/urbit; - meta = rec { - inherit debug; - bin = "${urbit}/bin/${name}"; - flags = if debug then [ "-g" ] else []; - exe = ''${meta.bin} ${pkgs.lib.strings.concatStringsSep " " meta.flags}''; + version = builtins.readFile "${src}/version"; + +in stdenv.mkDerivation { + inherit src version; + + pname = "urbit" + lib.optionalString enableDebug "-debug" + + lib.optionalString enableStatic "-static"; + + nativeBuildInputs = [ pkgconfig ]; + + buildInputs = [ + argon2u + cacert + ca-bundle + curlMinimal + ed25519 + ent + ge-additions + gmp + h2o + ivory.header + libaes_siv + libscrypt + libsigsegv + libuv + lmdb + murmur3 + openssl + secp256k1 + softfloat3 + zlib + ]; + + checkInputs = [ herb ]; + + # Ensure any `/usr/bin/env bash` shebang is patched. 
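+  # (patchShebangs resolves `#!/usr/bin/env ...` style interpreter lines to
+  # absolute store paths taken from the build environment, roughly:
+  #
+  #   #!/usr/bin/env bash  ->  #!/nix/store/<hash>-bash-<version>/bin/bash
+  #
+  # so ./configure still runs inside the Nix sandbox, where /usr/bin/env is
+  # not available. The store path shown is only an example.)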
+ postPatch = '' + patchShebangs ./configure + ''; + + checkTarget = "test"; + + installPhase = '' + mkdir -p $out/bin + cp ./build/urbit $out/bin/urbit + cp ./build/urbit-worker $out/bin/urbit-worker + ''; + + CFLAGS = [ (if enableDebug then "-O0" else "-O3") "-g" ] + ++ lib.optionals (!enableDebug) [ "-Werror" ] + ++ lib.optionals enableStatic [ "-static" ]; + + MEMORY_DEBUG = enableDebug; + CPU_DEBUG = enableDebug; + EVENT_TIME_DEBUG = false; + + # See https://github.com/NixOS/nixpkgs/issues/18995 + hardeningDisable = lib.optionals enableDebug [ "all" ]; + + inherit enableParallelBuilding doCheck dontStrip; + + meta = { + debug = enableDebug; + arguments = lib.optionals enableDebug [ "-g" ]; }; - - sigseg = - pkgs.libsigsegv.overrideAttrs (oldAttrs: rec { - patches = [ ./libsigsegv_fix.patch ]; - }); - - deps = - with pkgs; - [ curl gmp sigseg openssl zlib lmdb ]; - - vendor = - [ argon2 softfloat3 ed25519 ent ge-additions libaes_siv h2o scrypt uv murmur3 secp256k1 ivory-header ca-header ]; - - urbit = pkgs.stdenv.mkDerivation { - inherit name meta; - exename = name; - src = ../../../pkg/urbit; - nativeBuildInputs = deps ++ vendor; - - configurePhase = '' - bash ./configure - ''; - - installPhase = '' - make all -j8 - make test - - mkdir -p $out/bin - cp ./build/urbit $out/bin/$exename - cp ./build/urbit-worker $out/bin/$exename-worker - ''; - - # See https://github.com/NixOS/nixpkgs/issues/18995 - hardeningDisable = if debug then [ "all" ] else []; - - CFLAGS = "-O3 -g -Werror"; - MEMORY_DEBUG = debug; - CPU_DEBUG = debug; - EVENT_TIME_DEBUG = false; - }; - -in - -urbit +} diff --git a/nix/pkgs/urbit/libsigsegv_fix.patch b/nix/pkgs/urbit/libsigsegv_fix.patch deleted file mode 100644 index a3b630860..000000000 --- a/nix/pkgs/urbit/libsigsegv_fix.patch +++ /dev/null @@ -1,10 +0,0 @@ ---- a/src/fault-linux-i386.h 2020-06-25 23:46:02.099235491 +0000 -+++ b/src/fault-linux-i386.h 2020-06-25 23:45:48.679156892 +0000 -@@ -18,6 +18,7 @@ - - #include "fault-posix-ucontext.h" - -+#define HAVE_STACKVMA 0 - #if defined __x86_64__ - /* 64 bit registers */ - diff --git a/nix/pkgs/urbit/release.nix b/nix/pkgs/urbit/release.nix deleted file mode 100644 index 9d486f3df..000000000 --- a/nix/pkgs/urbit/release.nix +++ /dev/null @@ -1,36 +0,0 @@ -{ env_name, env, deps }: - -{ - ent, - name ? "urbit", - debug ? 
false, - ge-additions, - libaes_siv -}: - -let - - crossdeps = - with env; - [ curl libgmp libsigsegv openssl zlib lmdb ]; - - vendor = - with deps; - [ argon2 softfloat3 ed25519 ge-additions libaes_siv h2o scrypt uv murmur3 secp256k1 ivory-header ca-header ]; - -in - -env.make_derivation { - CFLAGS = if debug then "-O0 -g" else "-O3 -g"; - # binary stripping disabled - # LDFLAGS = if debug then "" else "-s"; - MEMORY_DEBUG = debug; - CPU_DEBUG = debug; - EVENT_TIME_DEBUG = false; - - name = "${name}-${env_name}"; - exename = name; - src = ../../../pkg/urbit; - cross_inputs = crossdeps ++ vendor ++ [ ent ]; - builder = ./release.sh; -} diff --git a/nix/pkgs/urbit/release.sh b/nix/pkgs/urbit/release.sh deleted file mode 100644 index 4bff817e4..000000000 --- a/nix/pkgs/urbit/release.sh +++ /dev/null @@ -1,21 +0,0 @@ -source $setup - -cp -r $src ./src -chmod -R u+w ./src -cd src - -for dep in $cross_inputs; do - export CFLAGS="${CFLAGS-} -I$dep/include" - export LDFLAGS="${LDFLAGS-} -L$dep/lib" -done - -CC=$host-gcc \ -PKG_CONFIG=pkg-config-cross \ -HOST=$host \ -bash ./configure - -make build/urbit build/urbit-worker -j8 - -mkdir -p $out/bin -cp ./build/urbit $out/bin/$exename -cp ./build/urbit-worker $out/bin/$exename-worker diff --git a/nix/pkgs/urbit/shell.nix b/nix/pkgs/urbit/shell.nix deleted file mode 100644 index dee99d962..000000000 --- a/nix/pkgs/urbit/shell.nix +++ /dev/null @@ -1,16 +0,0 @@ -let - - pkgs = import ../../nixpkgs.nix; - deps = import ../../deps { inherit pkgs; }; - tlon = import ../../pkgs { inherit pkgs; }; - -in - -import ./default.nix { - inherit pkgs; - debug = false; - inherit (tlon) - ent ge-additions libaes_siv; - inherit (deps) - argon2 ed25519 h2o murmur3 scrypt secp256k1 softfloat3 uv ivory-header ca-header; -} diff --git a/nix/release.nix b/nix/release.nix deleted file mode 100644 index 2ccd1098b..000000000 --- a/nix/release.nix +++ /dev/null @@ -1,54 +0,0 @@ -let - - nixpkgs = import ./nixpkgs.nix; - nixcrpkgs = import ./nixcrpkgs.nix; - crossdeps = import ./crossdeps.nix; - - release = - env_name: env: { - inherit env env_name; - deps = crossdeps env; - }; - - linux64 = release "linux64" nixcrpkgs.linux64; - darwin = release "darwin" nixcrpkgs.mac; - - ent = env: - import ./pkgs/ent/cross.nix env; - - ge-additions = env: - import ./pkgs/ge-additions/cross.nix env; - - libaes_siv = env: - import ./pkgs/libaes_siv/cross.nix env; - - urbit = { env, debug }: - import ./pkgs/urbit/release.nix env { - inherit debug; - name = if debug then "urbit-debug" else "urbit"; - ent = ent env; - ge-additions = ge-additions env; - libaes_siv = libaes_siv env; - }; - - builds-for-platform = plat: - plat.deps // { - inherit (plat.env) curl libgmp libsigsegv openssl zlib lmdb; - inherit (plat.env) cmake_toolchain; - ent = ent plat; - ge-additions = ge-additions plat; - libaes_siv = libaes_siv plat; - urbit = urbit { env = plat; debug = false; }; - urbit-debug = urbit { env = plat; debug = true; }; - }; - - darwin_extra = { - inherit (darwin.env) ranlib ld sdk ar toolchain tapi strip; - }; - -in - -{ - linux64 = builds-for-platform linux64; - darwin = darwin_extra // builds-for-platform darwin; -} diff --git a/nix/sources.json b/nix/sources.json new file mode 100644 index 000000000..abe295cd0 --- /dev/null +++ b/nix/sources.json @@ -0,0 +1,134 @@ +{ + "argon2u": { + "branch": "master", + "description": "With argon2u. 
Based off https://github.com/P-H-C/phc-winner-argon2", + "homepage": "", + "owner": "urbit", + "repo": "argon2", + "rev": "4da94a611ee62bad87ab2b131ffda3bcc0723d9c", + "sha256": "0bqq1hg367l4jkb6cqhxlblpvdbwz3l586qsfakwzfd9wdvnm3yc", + "type": "tarball", + "url": "https://github.com/urbit/argon2/archive/4da94a611ee62bad87ab2b131ffda3bcc0723d9c.tar.gz", + "url_template": "https://github.com///archive/.tar.gz" + }, + "ed25519": { + "branch": "master", + "description": "Submodule included by Urbit", + "homepage": null, + "owner": "urbit", + "repo": "ed25519", + "rev": "76385f2ebbbc9580a9c236952d68d11d73a6135c", + "sha256": "0s1spif4s9lgcwcny3fl2fvpbw6acqn3s8r6qxnrmkd9icgyw4cp", + "type": "tarball", + "url": "https://github.com/urbit/ed25519/archive/76385f2ebbbc9580a9c236952d68d11d73a6135c.tar.gz", + "url_template": "https://github.com///archive/.tar.gz" + }, + "h2o": { + "branch": "master", + "description": "H2O - the optimized HTTP/1, HTTP/2, HTTP/3 server", + "homepage": "https://h2o.examp1e.net", + "owner": "h2o", + "repo": "h2o", + "rev": "v2.2.4", + "sha256": "0176x0bzjry19zs074a9i5vhncc842xikmx43wj61jky318nq4w4", + "type": "tarball", + "url": "https://github.com/h2o/h2o/archive/v2.2.4.tar.gz", + "url_template": "https://github.com///archive/.tar.gz" + }, + "hackage.nix": { + "branch": "master", + "description": "Automatically generated Nix expressions for Hackage", + "homepage": "", + "owner": "input-output-hk", + "repo": "hackage.nix", + "rev": "ed4d2759c9e6ca8133a4170f99fabdd76f30f51a", + "sha256": "1n5fk8zsxnbca96zk4ikh74iz3lzh35m302q65zk1rx3nmy4027d", + "type": "tarball", + "url": "https://github.com/input-output-hk/hackage.nix/archive/ed4d2759c9e6ca8133a4170f99fabdd76f30f51a.tar.gz", + "url_template": "https://github.com///archive/.tar.gz" + }, + "haskell.nix": { + "branch": "master", + "description": "Alternative Haskell Infrastructure for Nixpkgs", + "homepage": "https://input-output-hk.github.io/haskell.nix", + "owner": "input-output-hk", + "repo": "haskell.nix", + "rev": "bbb34dcdf7b90d478002f91713531f418ddf1b53", + "sha256": "1qq397j8vnlp5npk8r675fzjfimg74fcvrkxcdgx7vj48315bh2w", + "type": "tarball", + "url": "https://github.com/input-output-hk/haskell.nix/archive/bbb34dcdf7b90d478002f91713531f418ddf1b53.tar.gz", + "url_template": "https://github.com///archive/.tar.gz" + }, + "libscrypt": { + "branch": "master", + "description": null, + "homepage": null, + "owner": "urbit", + "repo": "libscrypt", + "rev": "029693ff1cbe4f69d3a2da87d0f4f034f92cc0c2", + "sha256": "17pcxypzjmmrvacw45cacvibm6mlr9ip30hy30l1appsnywx679n", + "type": "tarball", + "url": "https://github.com/urbit/libscrypt/archive/029693ff1cbe4f69d3a2da87d0f4f034f92cc0c2.tar.gz", + "url_template": "https://github.com///archive/.tar.gz" + }, + "murmur3": { + "branch": "master", + "description": null, + "homepage": null, + "owner": "urbit", + "repo": "murmur3", + "rev": "71a75d57ca4e7ca0f7fc2fd84abd93595b0624ca", + "sha256": "0k7jq2nb4ad9ajkr6wc4w2yy2f2hkwm3nkbj2pklqgwsg6flxzwg", + "type": "tarball", + "url": "https://github.com/urbit/murmur3/archive/71a75d57ca4e7ca0f7fc2fd84abd93595b0624ca.tar.gz", + "url_template": "https://github.com///archive/.tar.gz" + }, + "niv": { + "branch": "master", + "description": "Easy dependency management for Nix projects", + "homepage": "https://github.com/nmattia/niv", + "owner": "nmattia", + "repo": "niv", + "rev": "9d35b9e4837ab88517210b1701127612c260eccf", + "sha256": "0q50xhnm8g2yfyakrh0nly4swyygxpi0a8cb9gp65wcakcgvzvdh", + "type": "tarball", + "url": 
"https://github.com/nmattia/niv/archive/9d35b9e4837ab88517210b1701127612c260eccf.tar.gz", + "url_template": "https://github.com///archive/.tar.gz" + }, + "nixpkgs": { + "branch": "master", + "description": "Nix Packages collection", + "homepage": null, + "owner": "nixos", + "repo": "nixpkgs", + "rev": "166ab9d237409c4b74b1f8ca31476ead35e8fe53", + "sha256": "13i43kvbkdl3dh8b986j6mxbn355mqjhcxrd8cni8zfx1z0wrscr", + "type": "tarball", + "url": "https://github.com/nixos/nixpkgs/archive/166ab9d237409c4b74b1f8ca31476ead35e8fe53.tar.gz", + "url_template": "https://github.com///archive/.tar.gz" + }, + "softfloat3": { + "branch": "master", + "description": null, + "homepage": null, + "owner": "urbit", + "repo": "berkeley-softfloat-3", + "rev": "ec4c7e31b32e07aad80e52f65ff46ac6d6aad986", + "sha256": "1lz4bazbf7lns1xh8aam19c814a4n4czq5xsq5rmi9sgqw910339", + "type": "tarball", + "url": "https://github.com/urbit/berkeley-softfloat-3/archive/ec4c7e31b32e07aad80e52f65ff46ac6d6aad986.tar.gz", + "url_template": "https://github.com///archive/.tar.gz" + }, + "stackage.nix": { + "branch": "master", + "description": "Automatically generated Nix expressions of Stackage snapshots", + "homepage": "", + "owner": "input-output-hk", + "repo": "stackage.nix", + "rev": "08312f475f4f5f3b6578e7a78dc501de6fea8792", + "sha256": "15j1l6616kfv7351jxwgb9kj6y8227fcm87nxwabmbn1q6a8q2kf", + "type": "tarball", + "url": "https://github.com/input-output-hk/stackage.nix/archive/08312f475f4f5f3b6578e7a78dc501de6fea8792.tar.gz", + "url_template": "https://github.com///archive/.tar.gz" + } +} diff --git a/nix/sources.nix b/nix/sources.nix new file mode 100644 index 000000000..0286dec14 --- /dev/null +++ b/nix/sources.nix @@ -0,0 +1,154 @@ +# This file has been generated by Niv. + +let + + # + # The fetchers. fetch_ fetches specs of type . + # + + fetch_file = pkgs: spec: + if spec.builtin or true then + builtins_fetchurl { inherit (spec) url sha256; } + else + pkgs.fetchurl { inherit (spec) url sha256; }; + + fetch_tarball = pkgs: name: spec: + let + ok = str: !builtins.isNull (builtins.match "[a-zA-Z0-9+-._?=]" str); + # sanitize the name, though nix will still fail if name starts with period + name' = stringAsChars (x: if !ok x then "-" else x) "${name}-src"; + in if spec.builtin or true then + builtins_fetchTarball { + name = name'; + inherit (spec) url sha256; + } + else + pkgs.fetchzip { + name = name'; + inherit (spec) url sha256; + }; + + fetch_git = spec: + builtins.fetchGit { + url = spec.repo; + inherit (spec) rev ref; + }; + + fetch_local = spec: spec.path; + + fetch_builtin-tarball = name: + throw '' + [${name}] The niv type "builtin-tarball" is deprecated. You should instead use `builtin = true`. + $ niv modify ${name} -a type=tarball -a builtin=true''; + + fetch_builtin-url = name: + throw '' + [${name}] The niv type "builtin-url" will soon be deprecated. You should instead use `builtin = true`. + $ niv modify ${name} -a type=file -a builtin=true''; + + # + # Various helpers + # + + # The set of packages used when specs are fetched using non-builtins. + mkPkgs = sources: + let + sourcesNixpkgs = + import (builtins_fetchTarball { inherit (sources.nixpkgs) url sha256; }) + { }; + hasNixpkgsPath = builtins.any (x: x.prefix == "nixpkgs") builtins.nixPath; + hasThisAsNixpkgsPath = == ./.; + in if builtins.hasAttr "nixpkgs" sources then + sourcesNixpkgs + else if hasNixpkgsPath && !hasThisAsNixpkgsPath then + import { } + else + abort '' + Please specify either (through -I or NIX_PATH=nixpkgs=...) 
or + add a package called "nixpkgs" to your sources.json. + ''; + + # The actual fetching function. + fetch = pkgs: name: spec: + + if !builtins.hasAttr "type" spec then + abort "ERROR: niv spec ${name} does not have a 'type' attribute" + else if spec.type == "file" then + fetch_file pkgs spec + else if spec.type == "tarball" then + fetch_tarball pkgs name spec + else if spec.type == "git" then + fetch_git spec + else if spec.type == "local" then + fetch_local spec + else if spec.type == "builtin-tarball" then + fetch_builtin-tarball name + else if spec.type == "builtin-url" then + fetch_builtin-url name + else + abort + "ERROR: niv spec ${name} has unknown type ${builtins.toJSON spec.type}"; + + # Ports of functions for older nix versions + + # a Nix version of mapAttrs if the built-in doesn't exist + mapAttrs = builtins.mapAttrs or (f: set: + with builtins; + listToAttrs (map (attr: { + name = attr; + value = f attr set.${attr}; + }) (attrNames set))); + + # https://github.com/NixOS/nixpkgs/blob/0258808f5744ca980b9a1f24fe0b1e6f0fecee9c/lib/lists.nix#L295 + range = first: last: + if first > last then + [ ] + else + builtins.genList (n: first + n) (last - first + 1); + + # https://github.com/NixOS/nixpkgs/blob/0258808f5744ca980b9a1f24fe0b1e6f0fecee9c/lib/strings.nix#L257 + stringToCharacters = s: + map (p: builtins.substring p 1 s) (range 0 (builtins.stringLength s - 1)); + + # https://github.com/NixOS/nixpkgs/blob/0258808f5744ca980b9a1f24fe0b1e6f0fecee9c/lib/strings.nix#L269 + stringAsChars = f: s: concatStrings (map f (stringToCharacters s)); + concatStrings = builtins.concatStringsSep ""; + + # fetchTarball version that is compatible between all the versions of Nix + builtins_fetchTarball = { url, name, sha256 }@attrs: + let inherit (builtins) lessThan nixVersion fetchTarball; + in if lessThan nixVersion "1.12" then + fetchTarball { inherit name url; } + else + fetchTarball attrs; + + # fetchurl version that is compatible between all the versions of Nix + builtins_fetchurl = { url, sha256 }@attrs: + let inherit (builtins) lessThan nixVersion fetchurl; + in if lessThan nixVersion "1.12" then + fetchurl { inherit url; } + else + fetchurl attrs; + + # Create the final "sources" from the config + mkSources = config: + mapAttrs (name: spec: + if builtins.hasAttr "outPath" spec then + abort + "The values in sources.json should not have an 'outPath' attribute" + else + spec // { outPath = fetch config.pkgs name spec; }) config.sources; + + # The "config" used by the fetchers + mkConfig = { sourcesFile ? ./sources.json + , sources ? builtins.fromJSON (builtins.readFile sourcesFile) + , pkgs ? mkPkgs sources }: rec { + # The sources, i.e. the attribute set of spec name to spec + inherit sources; + + # The "pkgs" (evaluated nixpkgs) to use for e.g. non-builtin fetchers + inherit pkgs; + }; +in mkSources (mkConfig { }) // { + __functor = _: settings: mkSources (mkConfig settings); +} diff --git a/pkg/arvo/app/chat-cli.hoon b/pkg/arvo/app/chat-cli.hoon index ca063af8f..d5cef8823 100644 --- a/pkg/arvo/app/chat-cli.hoon +++ b/pkg/arvo/app/chat-cli.hoon @@ -10,7 +10,7 @@ :: and trust it to take care of the rest. 
:: /- view=chat-view, hook=chat-hook, *group, - *permission-store, *group-store, *invite-store, + *permission-store, *group-store, inv=invite-store, sole /+ shoe, default-agent, verb, dbug, store=chat-store, group-store, grpl=group, resource @@ -27,7 +27,7 @@ +$ state-2 $: %2 grams=(list mail) :: all messages - known=(set [target serial]) :: known message lookup + known=(set [target serial:store]) :: known message lookup count=@ud :: (lent grams) bound=(map target glyph) :: bound circle glyphs binds=(jug glyph target) :: circle glyph lookup @@ -54,7 +54,7 @@ :: +$ state-0 $: grams=(list [[=ship =path] envelope:store]) :: all messages - known=(set [[=ship =path] serial]) :: known message lookup + known=(set [[=ship =path] serial:store]) :: known message lookup count=@ud :: (lent grams) bound=(map [=ship =path] glyph) :: bound circle glyphs binds=(jug glyph [=ship =path]) :: circle glyph lookup @@ -161,7 +161,7 @@ %fact ?+ p.cage.sign ~|([%chat-cli-bad-sub-mark wire p.cage.sign] !!) %chat-update (diff-chat-update:tc wire !<(update:store q.cage.sign)) - %invite-update (handle-invite-update:tc !<(invite-update q.cage.sign)) + %invite-update (handle-invite-update:tc !<(update:inv q.cage.sign)) == == [cards this] @@ -224,9 +224,9 @@ grams ~ ::NOTE this only impacts historic message lookup in chat-cli :: known - ^- (set [target serial]) + ^- (set [target serial:store]) %- ~(run in known.u.old) - |= [t=[ship path] s=serial] + |= [t=[ship path] s=serial:store] [`target`[| t] s] :: bound @@ -324,7 +324,7 @@ :: +handle-invite-update: get new invites :: ++ handle-invite-update - |= upd=invite-update + |= upd=update:inv ^- (quip card _state) ?+ -.upd [~ state] %invite [[(show-invite:sh-out invite.upd) ~] state] @@ -722,12 +722,11 @@ %poke %invite-action :: - !> - ^- invite-action - :^ %invite /chat + !> ^- action:inv + :^ %invite %chat (shax (jam [our-self where] who)) - ^- invite - [our-self %chat-hook where who ''] + ^- invite:inv + [our-self %chat-hook (de-path:resource where) who ''] == :: +set-target: set audience, update prompt :: @@ -865,7 +864,7 @@ |= =letter:store ^- (quip card _state) ~! 
bowl - =/ =serial (shaf %msg-uid eny.bowl) + =/ =serial:store (shaf %msg-uid eny.bowl) :_ state ^- (list card) %+ turn ~(tap in audience) @@ -1132,11 +1131,9 @@ :: +show-invite: print incoming invite notification :: ++ show-invite - |= invite + |= invite:inv ^- card - %- note - %+ weld "invited to: " - ~(phat tr (path-to-target path)) + (note "invited to: {(scow %p entity.resource)} {(trip name.resource)}") -- :: :: +tr: render targets diff --git a/pkg/arvo/app/chat-hook.hoon b/pkg/arvo/app/chat-hook.hoon index 3dff6c5d6..340d1e6c1 100644 --- a/pkg/arvo/app/chat-hook.hoon +++ b/pkg/arvo/app/chat-hook.hoon @@ -2,7 +2,7 @@ :: mirror chat data from foreign to local based on read permissions :: allow sending chat messages to foreign paths based on write perms :: -/- *permission-store, *invite-store, *metadata-store, +/- *permission-store, inv=invite-store, *metadata-store, *permission-hook, *group-store, *permission-group-hook, ::TMP for upgrade hook=chat-hook, view=chat-view, @@ -52,7 +52,7 @@ +$ poke $% [%chat-action action:store] [%permission-action permission-action] - [%invite-action invite-action] + [%invite-action action:inv] [%chat-view-action action:view] == :: @@ -77,7 +77,7 @@ ++ on-init ^- (quip card _this) :_ this(invite-created %.y) - :~ (invite-poke:cc [%create /chat]) + :~ (invite-poke:cc [%create %chat]) [%pass /invites %agent [our.bol %invite-store] %watch /invitatory/chat] watch-groups:cc == @@ -406,7 +406,7 @@ :: %invite-update =^ cards state - (fact-invite-update:cc wire !<(invite-update q.cage.sign)) + (fact-invite-update:cc wire !<(update:inv q.cage.sign)) [cards this] :: %group-update @@ -719,15 +719,18 @@ == :: ++ fact-invite-update - |= [wir=wire fact=invite-update] + |= [wir=wire fact=update:inv] ^- (quip card _state) :_ state ?+ -.fact ~ %accepted - =/ ask-history ?~((chat-scry path.invite.fact) %.y %.n) - =* shp ship.invite.fact - =* app-path path.invite.fact - ~[(chat-view-poke [%join shp app-path ask-history])] + =* resource resource.invite.fact + =/ =path [(scot %p entity.resource) name.resource ~] + :_ ~ + %- chat-view-poke + :^ %join ship.invite.fact + path + ?=(~ (chat-scry path)) == :: ++ fact-group-update @@ -919,9 +922,9 @@ [%pass / %agent [our.bol %chat-view] %poke %chat-view-action !>(act)] :: ++ invite-poke - |= act=invite-action + |= =action:inv ^- card - [%pass / %agent [our.bol %invite-store] %poke %invite-action !>(act)] + [%pass / %agent [our.bol %invite-store] %poke %invite-action !>(action)] :: ++ sec-to-perm |= [pax=path =kind] @@ -936,9 +939,9 @@ [%mailbox pax] :: ++ invite-scry - |= uid=serial - ^- (unit invite) - %^ scry (unit invite) + |= uid=serial:inv + ^- (unit invite:inv) + %^ scry (unit invite:inv) %invite-store /invite/chat/(scot %uv uid) :: diff --git a/pkg/arvo/app/chat-view.hoon b/pkg/arvo/app/chat-view.hoon index f12a64edb..648f42082 100644 --- a/pkg/arvo/app/chat-view.hoon +++ b/pkg/arvo/app/chat-view.hoon @@ -6,7 +6,7 @@ /- *permission-store, *permission-hook, *group, - *invite-store, + inv=invite-store, *metadata-store, group-hook, *permission-group-hook, @@ -220,8 +220,7 @@ ~& %chat-already-exists ~ %- zing - :~ (create-chat app-path.act allow-history.act) - %- create-group + :~ %- create-group :* group-path.act app-path.act policy.act @@ -231,6 +230,7 @@ managed.act == (create-metadata title.act description.act group-path.act app-path.act) + (create-chat app-path.act allow-history.act) == :: %delete @@ -407,13 +407,14 @@ ^- card =/ managed=? 
!=(ship+app-path group-path) - =/ =invite + =/ =invite:inv :* our.bol ?:(managed %contact-hook %chat-hook) - ?:(managed group-path app-path) + (de-path:resource ?:(managed group-path ship+app-path)) ship '' == - =/ act=invite-action [%invite ?:(managed /contacts /chat) (shaf %msg-uid eny.bol) invite] + =/ act=action:inv + [%invite ?:(managed %contacts %chat) (shaf %msg-uid eny.bol) invite] [%pass / %agent [our.bol %invite-hook] %poke %invite-action !>(act)] :: ++ chat-scry @@ -487,8 +488,8 @@ (en-path:resource rid) ?> ?=(^ path) :~ (group-pull-hook-poke %add ship rid) - (chat-hook-poke %add-synced ship t.path ask-history) (metadata-hook-poke %add-synced ship path) + (chat-hook-poke %add-synced ship t.path ask-history) == :: ++ diff-chat-update diff --git a/pkg/arvo/app/contact-hook.hoon b/pkg/arvo/app/contact-hook.hoon index ccdb41010..087120183 100644 --- a/pkg/arvo/app/contact-hook.hoon +++ b/pkg/arvo/app/contact-hook.hoon @@ -4,7 +4,7 @@ /- group-hook, *contact-hook, *contact-view, - *invite-store, + inv=invite-store, *metadata-hook, *metadata-store, *group @@ -44,7 +44,7 @@ ++ on-init ^- (quip card _this) :_ this(invite-created %.y) - :~ (invite-poke:cc [%create /contacts]) + :~ (invite-poke:cc [%create %contacts]) [%pass /inv %agent [our.bol %invite-store] %watch /invitatory/contacts] [%pass /group %agent [our.bol %group-store] %watch /groups] == @@ -467,20 +467,10 @@ (contact-poke [%delete path]) (contact-poke [%remove path ship]) == - :: - ++ send-invite-poke - |= [=path =ship] - ^- card - =/ =invite - :* our.bol %contact-hook - path ship '' - == - =/ act=invite-action [%invite /contacts (shaf %msg-uid eny.bol) invite] - [%pass / %agent [our.bol %invite-hook] %poke %invite-action !>(act)] -- :: ++ invite-poke - |= act=invite-action + |= act=action:inv ^- card [%pass / %agent [our.bol %invite-store] %poke %invite-action !>(act)] :: diff --git a/pkg/arvo/app/contact-view.hoon b/pkg/arvo/app/contact-view.hoon index 9fab4648c..2abbbb0f6 100644 --- a/pkg/arvo/app/contact-view.hoon +++ b/pkg/arvo/app/contact-view.hoon @@ -5,7 +5,7 @@ :: /- group-hook, - *invite-store, + inv=invite-store, *contact-hook, *metadata-store, *metadata-hook, @@ -161,27 +161,22 @@ %+ turn ~(tap in pending.policy.act) |= =ship - (send-invite our.bol %contacts path ship '') + (send-invite our.bol %contacts rid ship '') == :: %join - =/ =path - (en-path:resource resource.act) =/ =cage :- %group-update !> ^- update:group-store [%add-members resource.act (sy our.bol ~)] =/ =wire - [%join-group path] + [%join-group (en-path:resource resource.act)] [%pass wire %agent [entity.resource.act %group-push-hook] %poke cage]~ :: %invite =* rid resource.act - =/ =path - (en-path:resource rid) - =/ =group - (need (scry-group:grp rid)) - :- (send-invite entity.rid %contacts path ship.act text.act) + =/ =group (need (scry-group:grp rid)) + :- (send-invite entity.rid %contacts rid ship.act text.act) ?. 
?=(%invite -.policy.group) ~ ~[(add-pending rid ship.act)] :: @@ -276,12 +271,12 @@ [%pass / %agent [entity.rid app] %poke cage] :: ++ send-invite - |= =invite + |= =invite:inv ^- card =/ =cage :- %invite-action - !> ^- invite-action - [%invite /contacts (shaf %invite-uid eny.bol) invite] + !> ^- action:inv + [%invite %contacts (shaf %invite-uid eny.bol) invite] [%pass / %agent [recipient.invite %invite-hook] %poke cage] :: ++ contact-poke diff --git a/pkg/arvo/app/glob.hoon b/pkg/arvo/app/glob.hoon index c031faa9b..472f93833 100644 --- a/pkg/arvo/app/glob.hoon +++ b/pkg/arvo/app/glob.hoon @@ -5,7 +5,7 @@ /- glob /+ default-agent, verb, dbug |% -++ hash 0v2.1vtfh.0l23v.30s7f.n57l9.dpjvi +++ hash 0v4.fpa4r.s6dtc.h8tps.62jv0.qn0fj +$ state-0 [%0 hash=@uv glob=(unit (each glob:glob tid=@ta))] +$ all-states $% state-0 @@ -89,7 +89,7 @@ =+ .^(=map=tube:clay %cc (weld home /map/mime)) =+ .^(arch %cy (weld home /app/landscape/js/bundle)) =/ bundle-hash=@t - %- need + %- need ^- (unit @t) %- ~(rep by dir) |= [[file=@t ~] out=(unit @t)] diff --git a/pkg/arvo/app/graph-pull-hook.hoon b/pkg/arvo/app/graph-pull-hook.hoon index 5971ef948..70ed3e22e 100644 --- a/pkg/arvo/app/graph-pull-hook.hoon +++ b/pkg/arvo/app/graph-pull-hook.hoon @@ -20,6 +20,7 @@ +* this . def ~(. (default-agent this %|) bowl) dep ~(. (default:pull-hook this config) bowl) + gra ~(. graph bowl) :: ++ on-init on-init:def ++ on-save !>(~) @@ -35,6 +36,7 @@ |= [=resource =tang] ^- (quip card _this) :_ this + ?. (~(has in get-keys:gra) resource) ~ =- [%pass /pull-nack %agent [our.bowl %graph-store] %poke %graph-update -]~ !> ^- update:store [%0 now.bowl [%archive-graph resource]] @@ -42,7 +44,7 @@ ++ on-pull-kick |= =resource ^- (unit path) - =/ maybe-time (peek-update-log:graph resource) + =/ maybe-time (peek-update-log:gra resource) ?~ maybe-time `/ `/(scot %da u.maybe-time) -- diff --git a/pkg/arvo/app/graph-push-hook.hoon b/pkg/arvo/app/graph-push-hook.hoon index 5b4ea54b3..e4d409754 100644 --- a/pkg/arvo/app/graph-push-hook.hoon +++ b/pkg/arvo/app/graph-push-hook.hoon @@ -32,6 +32,22 @@ ?| (is-member:grp src.bowl i.group-paths) (is-admin:grp src.bowl i.group-paths) == +:: +++ is-allowed-remove + |= [=resource:res indices=(set index:store) =bowl:gall] + ^- ? + =/ gra ~(. graph bowl) + ?. (is-allowed resource bowl %.n) + %.n + %+ levy + ~(tap in indices) + |= =index:store + ^- ? 
+ =/ =node:store + (got-node:gra resource index) + ?| =(author.post.node src.bowl) + (is-allowed resource bowl %.y) + == -- :: %- agent:dbug @@ -63,7 +79,7 @@ %add-graph (is-allowed resource.q.update bowl %.y) %remove-graph (is-allowed resource.q.update bowl %.y) %add-nodes (is-allowed resource.q.update bowl %.n) - %remove-nodes (is-allowed resource.q.update bowl %.y) + %remove-nodes (is-allowed-remove resource.q.update indices.q.update bowl) %add-signatures (is-allowed resource.uid.q.update bowl %.n) %remove-signatures (is-allowed resource.uid.q.update bowl %.y) %archive-graph (is-allowed resource.q.update bowl %.y) @@ -108,6 +124,8 @@ (get-graph:gra resource) :: resubscribe :: + ?~ (get-update-log:gra resource) + (get-graph:gra resource) =/ =time (slav %da i.path) =/ =update-log:store (get-update-log-subset:gra resource time) [%0 now.bowl [%run-updates resource update-log]] diff --git a/pkg/arvo/app/graph-store.hoon b/pkg/arvo/app/graph-store.hoon index 34f4d9040..ae47b6158 100644 --- a/pkg/arvo/app/graph-store.hoon +++ b/pkg/arvo/app/graph-store.hoon @@ -7,14 +7,20 @@ +$ card card:agent:gall +$ versioned-state $% state-0 + state-1 + state-2 == :: +$ state-0 [%0 network:store] ++$ state-1 [%1 network:store] ++$ state-2 [%2 network:store] +:: ++ orm orm:store ++ orm-log orm-log:store ++$ debug-input [%validate-graph =resource:store] -- :: -=| state-0 +=| state-2 =* state - :: %- agent:dbug @@ -27,9 +33,160 @@ ++ on-init [~ this] ++ on-save !>(state) ++ on-load - |= old=vase + |= =old=vase ^- (quip card _this) - [~ this(state !<(state-0 old))] + =+ !<(old=versioned-state old-vase) + =| cards=(list card) + |^ + ?- -.old + %0 + %_ $ + -.old %1 + :: + validators.old + (~(put in validators.old) %graph-validator-link) + :: + cards + %+ weld cards + %+ turn + ~(tap in (~(put in validators.old) %graph-validator-link)) + |= validator=@t + ^- card + =/ =wire /validator/[validator] + =/ =rave:clay [%sing %b [%da now.bowl] /[validator]] + [%pass wire %arvo %c %warp our.bowl [%home `rave]] + :: + graphs.old + %- ~(run by graphs.old) + |= [=graph:store q=(unit mark)] + ^- [graph:store (unit mark)] + :- (convert-unix-timestamped-graph graph) + ?^ q q + `%graph-validator-link + :: + update-logs.old + %- ~(run by update-logs.old) + |=(a=* *update-log:store) + == + :: + %1 + %_ $ + -.old %2 + graphs.old (~(run by graphs.old) change-revision-graph) + :: + update-logs.old + %- ~(run by update-logs.old) + |=(a=* *update-log:store) + == + :: + %2 [cards this(state old)] + == + :: + ++ change-revision-graph + |= [=graph:store q=(unit mark)] + ^- [graph:store (unit mark)] + |^ + :_ q + ?+ q graph + [~ %graph-validator-link] convert-links + [~ %graph-validator-publish] convert-publish + == + :: + ++ convert-links + %+ gas:orm *graph:store + %+ turn (tap:orm graph) + |= [=atom =node:store] + ^- [^atom node:store] + :: top-level + :: + :+ atom post.node + ?: ?=(%empty -.children.node) + [%empty ~] + :- %graph + %+ gas:orm *graph:store + %+ turn (tap:orm p.children.node) + |= [=^atom =node:store] + ^- [^^atom node:store] + :: existing comments get turned into containers for revisions + :: + :^ atom + post.node(contents ~, hash ~) + %graph + %+ gas:orm *graph:store + :_ ~ :- %1 + :_ [%empty ~] + post.node(index (snoc index.post.node atom), hash ~) + :: + ++ convert-publish + %+ gas:orm *graph:store + %+ turn (tap:orm graph) + |= [=atom =node:store] + ^- [^atom node:store] + :: top-level + :: + :+ atom post.node + ?: ?=(%empty -.children.node) + [%empty ~] + :- %graph + %+ gas:orm *graph:store + %+ turn (tap:orm 
p.children.node) + |= [=^atom =node:store] + ^- [^^atom node:store] + :: existing container for publish note revisions + :: + ?+ atom !! + %1 [atom node] + %2 + :+ atom post.node + ?: ?=(%empty -.children.node) + [%empty ~] + :- %graph + %+ gas:orm *graph:store + %+ turn (tap:orm p.children.node) + |= [=^^atom =node:store] + ^- [^^^atom node:store] + :+ atom post.node(contents ~, hash ~) + :- %graph + %+ gas:orm *graph:store + :_ ~ :- %1 + :_ [%empty ~] + post.node(index (snoc index.post.node atom), hash ~) + == + -- + :: + ++ maybe-unix-to-da + |= =atom + ^- @ + :: (bex 127) is roughly 226AD + ?. (lte atom (bex 127)) + atom + (add ~1970.1.1 (div (mul ~s1 atom) 1.000)) + :: + ++ convert-unix-timestamped-node + |= =node:store + ^- node:store + =. index.post.node + (convert-unix-timestamped-index index.post.node) + ?. ?=(%graph -.children.node) + node + :+ post.node + %graph + (convert-unix-timestamped-graph p.children.node) + :: + ++ convert-unix-timestamped-index + |= =index:store + (turn index maybe-unix-to-da) + :: + ++ convert-unix-timestamped-graph + |= =graph:store + %+ gas:orm *graph:store + %+ turn + (tap:orm graph) + |= [=atom =node:store] + ^- [^atom node:store] + :- (maybe-unix-to-da atom) + (convert-unix-timestamped-node node) + -- :: ++ on-watch ~/ %graph-store-watch @@ -60,6 +217,7 @@ =^ cards state ?+ mark (on-poke:def mark vase) %graph-update (graph-update !<(update:store vase)) + %noun (debug !<(debug-input vase)) == [cards this] :: @@ -68,6 +226,7 @@ ^- (quip card _state) |^ ?> ?=(%0 -.update) + =? p.update =(p.update *time) now.bowl ?- -.q.update %add-graph (add-graph +.q.update) %remove-graph (remove-graph +.q.update) @@ -86,23 +245,30 @@ == :: ++ add-graph - |= [=resource:store =graph:store mark=(unit mark:store)] + |= $: =resource:store + =graph:store + mark=(unit mark:store) + overwrite=? + == ^- (quip card _state) - ?< (~(has by archive) resource) - ?< (~(has by graphs) resource) + ?> ?| overwrite + ?& !(~(has by archive) resource) + !(~(has by graphs) resource) + == == ?> (validate-graph graph mark) :_ %_ state graphs (~(put by graphs) resource [graph mark]) update-logs (~(put by update-logs) resource (gas:orm-log ~ ~)) + archive (~(del by archive) resource) validators ?~ mark validators (~(put in validators) u.mark) == %- zing - :~ (give [/updates /keys ~] [%add-graph resource graph mark]) + :~ (give [/updates /keys ~] [%add-graph resource graph mark overwrite]) ?~ mark ~ ?: (~(has in validators) u.mark) ~ - =/ wire (weld /graph (en-path:res resource)) + =/ wire /validator/[u.mark] =/ =rave:clay [%sing %b [%da now.bowl] /[u.mark]] [%pass wire %arvo %c %warp our.bowl [%home `rave]]~ == @@ -395,52 +561,57 @@ ^- (quip card _state) ?< (~(has by archive) resource) ?> (~(has by graphs) resource) - :_ state - %+ turn (tap:orm-log update-log) - |= [=time update=logged-update:store] - ^- card - ?> ?=(%0 -.update) - :* %pass - /run-updates/(scot %da time) - %agent - [our.bowl %graph-store] - %poke - :- %graph-update - !> - ^- update:store - ?- -.q.update - %add-nodes update(resource.q resource) - %remove-nodes update(resource.q resource) - %add-signatures update(resource.uid.q resource) - %remove-signatures update(resource.uid.q resource) - == - == - :: - ++ validate-graph - |= [=graph:store mark=(unit mark:store)] - ^- ? 
- ?~ mark %.y - ?~ graph %.y - =/ =dais:clay - .^ =dais:clay - %cb - /(scot %p our.bowl)/[q.byk.bowl]/(scot %da now.bowl)/[u.mark] + =/ updates=(list [=time upd=logged-update:store]) + (tap:orm-log update-log) + =| cards=(list card) + |- ^- (quip card _state) + ?~ updates + [cards state] + =* update upd.i.updates + =^ crds state + %- graph-update + ^- update:store + ?- -.q.update + %add-nodes update(resource.q resource) + %remove-nodes update(resource.q resource) + %add-signatures update(resource.uid.q resource) + %remove-signatures update(resource.uid.q resource) == - %+ roll (tap:orm graph) - |= [[=atom =node:store] out=?] - ?& out - =(%& -:(mule |.((vale:dais [atom post.node])))) - ?- -.children.node - %empty %.y - %graph ^$(graph p.children.node) - == - == + $(cards (weld cards crds), updates t.updates) :: ++ give |= [paths=(list path) update=update-0:store] ^- (list card) [%give %fact paths [%graph-update !>([%0 now.bowl update])]]~ -- + :: + ++ debug + |= =debug-input + ^- (quip card _state) + =/ [=graph:store mark=(unit mark:store)] + (~(got by graphs) resource.debug-input) + ?> (validate-graph graph mark) + [~ state] + :: + ++ validate-graph + |= [=graph:store mark=(unit mark:store)] + ^- ? + ?~ mark %.y + ?~ graph %.y + =/ =dais:clay + .^ =dais:clay + %cb + /(scot %p our.bowl)/[q.byk.bowl]/(scot %da now.bowl)/[u.mark] + == + %+ roll (tap:orm graph) + |= [[=atom =node:store] out=?] + ?& out + =(%& -:(mule |.((vale:dais [atom post.node])))) + ?- -.children.node + %empty %.y + %graph ^$(graph p.children.node) + == + == -- :: ++ on-peek @@ -450,6 +621,14 @@ |^ ?> (team:title our.bowl src.bowl) ?+ path (on-peek:def path) + [%x %graph-mark @ @ ~] + =/ =ship (slav %p i.t.t.path) + =/ =term i.t.t.t.path + =/ result=(unit marked-graph:store) + (~(get by graphs) [ship term]) + ?~ result [~ ~] + ``noun+!>(q.u.result) + :: [%x %keys ~] :- ~ :- ~ :- %graph-update !>(`update:store`[%0 now.bowl [%keys ~(key by graphs)]]) @@ -472,7 +651,7 @@ !> ^- update:store :+ %0 now.bowl - [%add-graph [ship term] `graph:store`p.u.result q.u.result] + [%add-graph [ship term] `graph:store`p.u.result q.u.result %.y] :: :: note: near-duplicate of /x/graph :: @@ -488,7 +667,7 @@ !> ^- update:store :+ %0 now.bowl - [%add-graph [ship term] `graph:store`p.u.result q.u.result] + [%add-graph [ship term] `graph:store`p.u.result q.u.result %.y] :: [%x %graph-subset @ @ @ @ ~] =/ =ship (slav %p i.t.t.path) @@ -513,7 +692,7 @@ =/ =ship (slav %p i.t.t.path) =/ =term i.t.t.t.path =/ =index:store - (turn t.t.t.t.path |=(=cord (slav %ud cord))) + (turn t.t.t.t.path (cury slav %ud)) =/ node=(unit node:store) (get-node ship term index) ?~ node [~ ~] :- ~ :- ~ :- %graph-update @@ -543,7 +722,7 @@ :+ %add-nodes [ship term] %- ~(gas by *(map index:store node:store)) - %+ turn (tap:orm `graph:store`(subset:orm p.children.u.node start end)) + %+ turn (tap:orm `graph:store`(subset:orm p.children.u.node end start)) |= [=atom =node:store] ^- [index:store node:store] [(snoc index atom) node] @@ -556,7 +735,8 @@ =/ end=(unit time) (slaw %da i.t.t.t.t.t.path) =/ update-log=(unit update-log:store) (~(get by update-logs) [ship term]) ?~ update-log [~ ~] - ``noun+!>((subset:orm-log u.update-log start end)) + :: orm-log is ordered backwards, so swap start and end + ``noun+!>((subset:orm-log u.update-log end start)) :: [%x %update-log @ @ ~] =/ =ship (slav %p i.t.t.path) @@ -600,15 +780,15 @@ ++ on-arvo |= [=wire =sign-arvo] ^- (quip card _this) - ?+ -.sign-arvo (on-arvo:def wire sign-arvo) - %c + ?+ wire (on-arvo:def wire sign-arvo) + :: + :: 
old wire, do nothing + [%graph *] [~ this] + :: + [%validator @ ~] :_ this - ?> ?=([%graph @ *] wire) - =/ =resource:store (de-path:res t.wire) - =/ gra=(unit marked-graph:store) (~(get by graphs) resource) - ?~ gra ~ - ?~ q.u.gra ~ - =/ =rave:clay [%next %b [%da now.bowl] /[u.q.u.gra]] + =* validator i.t.wire + =/ =rave:clay [%next %b [%da now.bowl] /[validator]] [%pass wire %arvo %c %warp our.bowl [%home `rave]]~ == :: diff --git a/pkg/arvo/app/hark-chat-hook.hoon b/pkg/arvo/app/hark-chat-hook.hoon new file mode 100644 index 000000000..dcd1449ab --- /dev/null +++ b/pkg/arvo/app/hark-chat-hook.hoon @@ -0,0 +1,214 @@ +:: hark-chat-hook: notifications for chat-store [landscape] +:: +/- store=hark-store, post, group-store, metadata-store, hook=hark-chat-hook +/+ resource, metadata, default-agent, dbug, chat-store, grpl=group +:: +~% %hark-chat-hook-top ..is ~ +|% ++$ card card:agent:gall ++$ versioned-state + $% state-0 + == +:: ++$ state-0 + $: %0 + watching=(set path) + mentions=_& + == +:: +-- +:: +=| state-0 +=* state - +:: +=> + |_ =bowl:gall + :: + ++ give + |= [paths=(list path) =update:hook] + ^- (list card) + [%give %fact paths hark-chat-hook-update+!>(update)]~ + :: + ++ watch-chat + ^- card + [%pass /chat %agent [our.bowl %chat-store] %watch /all] + -- +%- agent:dbug +^- agent:gall +~% %hark-chat-hook-agent ..card ~ +|_ =bowl:gall ++* this . + ha ~(. +> bowl) + def ~(. (default-agent this %|) bowl) + met ~(. metadata bowl) + grp ~(. grpl bowl) +:: +++ on-init + :_ this + ~[watch-chat:ha] +:: +++ on-save !>(state) +++ on-load + |= old=vase + ^- (quip card _this) + :_ this(state !<(state-0 old)) + ?: (~(has by wex.bowl) [/chat our.bowl %chat-store]) + ~ + ~[watch-chat:ha] +:: +++ on-watch + |= =path + ^- (quip card _this) + =^ cards state + ?+ path (on-watch:def path) + :: + [%updates ~] + :_ state + %+ give:ha ~ + :* %initial + watching + == + == + [cards this] +:: +++ on-poke + ~/ %hark-chat-hook-poke + |= [=mark =vase] + ^- (quip card _this) + |^ + ?> (team:title our.bowl src.bowl) + =^ cards state + ?+ mark (on-poke:def mark vase) + %hark-chat-hook-action + (hark-chat-hook-action !<(action:hook vase)) + == + [cards this] + :: + ++ hark-chat-hook-action + |= =action:hook + ^- (quip card _state) + |^ + :- (give:ha ~[/updates] action) + ?- -.action + %listen (listen +.action) + %ignore (ignore +.action) + %set-mentions (set-mentions +.action) + == + ++ listen + |= chat=path + ^+ state + state(watching (~(put in watching) chat)) + :: + ++ ignore + |= chat=path + ^+ state + state(watching (~(del in watching) chat)) + :: + ++ set-mentions + |= ment=? + ^+ state + state(mentions ment) + -- + -- +:: +++ on-agent + ~/ %hark-chat-hook-agent + |= [=wire =sign:agent:gall] + ^- (quip card _this) + |^ + ?+ -.sign (on-agent:def wire sign) + %kick + :_ this + ?. ?=([%chat ~] wire) + ~ + ~[watch-chat:ha] + :: + %fact + ?. 
?=(%chat-update p.cage.sign) + (on-agent:def wire sign) + =^ cards state + (chat-update !<(update:chat-store q.cage.sign)) + [cards this] + == + :: + ++ chat-update + |= =update:chat-store + ^- (quip card _state) + ?+ -.update `state + %initial (process-initial +.update) + %create (process-new +.update) + :: + %message + :_ state + (process-envelope path.update envelope.update) + :: + %messages + :_ state + %- zing + (turn envelopes.update (cury process-envelope path.update)) + == + ++ process-initial + |= =inbox:chat-store + ^- (quip card _state) + =/ keys=(list path) + ~(tap in ~(key by inbox)) + =| cards=(list card) + |- + ?~ keys + [cards state] + =* path i.keys + =^ cs state + (process-new path) + $(cards (weld cards cs), keys t.keys) + :: + ++ process-new + |= chat=path + ^- (quip card _state) + =/ groups=(list path) + (groups-from-resource:met %chat chat) + ?~ groups + `state + ?: (is-managed-path:grp i.groups) + `state + `state(watching (~(put in watching) chat)) + :: + ++ is-mention + |= =envelope:chat-store + ?. ?=(%text -.letter.envelope) %.n + ?& mentions + ?= ^ + (find (scow %p our.bowl) (trip text.letter.envelope)) + == + :: + ++ is-notification + |= [=path =envelope:chat-store] + ?& (~(has in watching) path) + !=(author.envelope our.bowl) + == + :: + ++ process-envelope + |= [=path =envelope:chat-store] + ^- (list card) + =/ mention=? + (is-mention envelope) + ?. ?|(mention (is-notification path envelope)) + ~ + =/ =index:store + [%chat path mention] + =/ =contents:store + [%chat ~[envelope]] + ~[(poke-store %add index when.envelope %.n contents)] + :: + ++ poke-store + |= =action:store + ^- card + =- [%pass /store %agent [our.bowl %hark-store] %poke -] + hark-action+!>(action) + -- +:: +++ on-peek on-peek:def +:: +++ on-leave on-leave:def +++ on-arvo on-arvo:def +++ on-fail on-fail:def +-- diff --git a/pkg/arvo/app/hark-graph-hook.hoon b/pkg/arvo/app/hark-graph-hook.hoon new file mode 100644 index 000000000..edf87a6f8 --- /dev/null +++ b/pkg/arvo/app/hark-graph-hook.hoon @@ -0,0 +1,267 @@ +:: hark-graph-hook: notifications for graph-store [landscape] +:: +/- store=hark-store, post, group-store, metadata-store, hook=hark-graph-hook +/+ resource, metadata, default-agent, dbug, graph-store +:: +~% %hark-graph-hook-top ..is ~ +|% ++$ card card:agent:gall ++$ versioned-state + $% state-0 + == +:: ++$ state-0 + $: %0 + watching=(set [resource index:post]) + mentions=_& + watch-on-self=_& + == +:: +-- +:: +=| state-0 +=* state - +:: +=> + |_ =bowl:gall + :: + ++ scry + |* [=mold p=path] + ?> ?=(^ p) + ?> ?=(^ t.p) + .^(mold i.p (scot %p our.bowl) i.t.p (scot %da now.bowl) t.t.p) + :: + ++ give + |= [paths=(list path) =update:hook] + ^- (list card) + [%give %fact paths hark-graph-hook-update+!>(update)]~ + :: + ++ watch-graph + ^- card + [%pass /graph %agent [our.bowl %graph-store] %watch /updates] + -- +%- agent:dbug +^- agent:gall +~% %hark-graph-hook-agent ..card ~ +|_ =bowl:gall ++* this . + ha ~(. +> bowl) + def ~(. (default-agent this %|) bowl) + met ~(. 
metadata bowl) +:: +++ on-init + :_ this + ~[watch-graph:ha] +:: +++ on-save !>(state) +++ on-load + |= old=vase + ^- (quip card _this) + `this(state !<(state-0 old)) +:: +++ on-watch + |= =path + ^- (quip card _this) + =^ cards state + ?+ path (on-watch:def path) + :: + [%updates ~] + :_ state + %+ give:ha ~ + :* %initial + watching + mentions + watch-on-self + == + == + [cards this] +:: +++ on-poke + ~/ %hark-graph-hook-poke + |= [=mark =vase] + ^- (quip card _this) + |^ + ?> (team:title our.bowl src.bowl) + =^ cards state + ?+ mark (on-poke:def mark vase) + %hark-graph-hook-action + (hark-graph-hook-action !<(action:hook vase)) + == + [cards this] + :: + ++ hark-graph-hook-action + |= =action:hook + ^- (quip card _state) + |^ + :- (give:ha ~[/updates] action) + ?- -.action + %listen (listen +.action) + %ignore (ignore +.action) + %set-mentions (set-mentions +.action) + %set-watch-on-self (set-watch-on-self +.action) + == + ++ listen + |= [graph=resource =index:post] + ^+ state + state(watching (~(put in watching) [graph index])) + :: + ++ ignore + |= [graph=resource =index:post] + ^+ state + state(watching (~(del in watching) [graph index])) + :: + ++ set-mentions + |= ment=? + ^+ state + state(mentions ment) + :: + ++ set-watch-on-self + |= self=? + ^+ state + state(watch-on-self self) + -- + -- +:: +++ on-agent + ~/ %hark-graph-hook-agent + |= [=wire =sign:agent:gall] + ^- (quip card _this) + |^ + ?+ -.sign (on-agent:def wire sign) + %kick + :_ this + ?. ?=([%graph ~] wire) + ~ + ~[watch-graph:ha] + :: + %fact + ?. ?=(%graph-update p.cage.sign) + (on-agent:def wire sign) + =^ cards state + (graph-update !<(update:graph-store q.cage.sign)) + [cards this] + == + ++ add-graph + |= rid=resource + ^- (quip card _state) + ?. &(watch-on-self =(our.bowl entity.rid)) + [~ state] + `state(watching (~(put in watching) [rid ~])) + :: + ++ graph-update + |= =update:graph-store + ^- (quip card _state) + ?: ?=(%add-graph -.q.update) + (add-graph resource.q.update) + ?. 
?=(%add-nodes -.q.update) + [~ state] + =/ group=resource + (need (group-from-app-resource:met %graph resource.q.update)) + =/ =metadata:metadata-store + (need (peek-metadata:met %graph group resource.q.update)) + =* rid resource.q.update + =+ %+ scry:ha + ,mark=(unit mark) + /gx/graph-store/graph-mark/(scot %p entity.rid)/[name.rid]/noun + =+ %+ scry:ha + ,=tube:clay + /cc/[q.byk.bowl]/[(fall mark %graph-validator-link)]/notification-kind + =/ nodes=(list [p=index:graph-store q=node:graph-store]) + ~(tap by nodes.q.update) + =| cards=(list card) + |^ + ?~ nodes + [cards state] + =* index p.i.nodes + =* node q.i.nodes + =^ node-cards state + (check-node node tube) + %_ $ + nodes t.nodes + cards (weld node-cards cards) + == + :: + ++ check-node-children + |= [=node:graph-store =tube:clay] + ^- (quip card _state) + ?: ?=(%empty -.children.node) + [~ state] + =/ children=(list [=atom =node:graph-store]) + (tap:orm:graph-store p.children.node) + =| cards=(list card) + |- ^- (quip card _state) + ?~ children + [cards state] + =^ new-cards state + (check-node node.i.children tube) + %_ $ + cards (weld cards new-cards) + children t.children + == + :: + ++ check-node + |= [=node:graph-store =tube:clay] + ^- (quip card _state) + =^ child-cards state + (check-node-children node tube) + ?: =(our.bowl author.post.node) + =^ self-cards state + (self-post node) + :_ state + (weld child-cards self-cards) + =+ !< notif-kind=(unit [name=@t parent-lent=@ud]) + (tube !>([0 post.node])) + ?~ notif-kind + [child-cards state] + =/ desc=@t + ?: (is-mention contents.post.node) + %mention + name.u.notif-kind + =/ parent=index:post + (scag parent-lent.u.notif-kind index.post.node) + ?. ?| =(desc %mention) + (~(has in watching) [rid parent]) + == + [child-cards state] + =/ notif-index=index:store + [%graph group rid module.metadata desc] + =/ =contents:store + [%graph (limo post.node ~)] + :_ state + %+ snoc child-cards + (add-unread notif-index [time-sent.post.node %.n contents]) + :: + ++ is-mention + |= contents=(list content:post) + ^- ? + ?. mentions %.n + ?~ contents %.n + ?. ?=(%mention -.i.contents) + $(contents t.contents) + ?: =(our.bowl ship.i.contents) + %.y + $(contents t.contents) + :: + ++ self-post + |= =node:graph-store + ^- (quip card _state) + ?. ?=(%.y watch-on-self) + [~ state] + `state(watching (~(put in watching) [rid index.post.node])) + :: + ++ add-unread + |= [=index:store =notification:store] + ^- card + =- [%pass / %agent [our.bowl %hark-store] %poke -] + hark-action+!>([%add index notification]) + :: + -- + -- +:: +++ on-peek on-peek:def +:: +++ on-leave on-leave:def +++ on-arvo on-arvo:def +++ on-fail on-fail:def +-- + diff --git a/pkg/arvo/app/hark-group-hook.hoon b/pkg/arvo/app/hark-group-hook.hoon new file mode 100644 index 000000000..4b685c938 --- /dev/null +++ b/pkg/arvo/app/hark-group-hook.hoon @@ -0,0 +1,169 @@ +:: hark-group-hook: notifications for groups [landscape] +:: +/- store=hark-store, post, group-store, metadata-store, hook=hark-group-hook +/+ resource, metadata, default-agent, dbug, graph-store +:: +~% %hark-group-hook-top ..is ~ +|% ++$ card card:agent:gall ++$ versioned-state + $% state-0 + == +:: ++$ state-0 + $: %0 + watching=(set resource) + == +:: +-- +:: +=| state-0 +=* state - +:: +=< +%- agent:dbug +^- agent:gall +~% %hark-group-hook-agent ..card ~ +|_ =bowl:gall ++* this . + ha ~(. +> bowl) + def ~(. (default-agent this %|) bowl) + met ~(. 
metadata bowl) +:: +++ on-init + :_ this + :~ watch-metadata:ha + watch-groups:ha + == +:: +++ on-save !>(state) +++ on-load + |= old=vase + ^- (quip card _this) + `this(state !<(state-0 old)) +:: +++ on-watch + |= =path + ?. ?=([%updates ~] path) + (on-watch:def path) + :_ this + =; =cage + [%give %fact ~ cage]~ + :- %hark-group-hook-update + !> ^- update:hook + [%initial watching] +:: +++ on-poke + ~/ %hark-group-hook-poke + |= [=mark =vase] + ^- (quip card _this) + |^ + ?> (team:title our.bowl src.bowl) + =^ cards state + ?+ mark (on-poke:def mark vase) + %hark-group-hook-action + (hark-group-hook-action !<(action:hook vase)) + == + [cards this] + :: + ++ hark-group-hook-action + |= =action:hook + ^- (quip card _state) + |^ + ?- -.action + %listen (listen +.action) + %ignore (ignore +.action) + == + ++ listen + |= group=resource + ^- (quip card _state) + :- (give %listen group) + state(watching (~(put in watching) group)) + :: + ++ ignore + |= group=resource + ^- (quip card _state) + :- (give %ignore group) + state(watching (~(del in watching) group)) + :: + ++ give + |= =update:hook + ^- (list card) + [%give %fact ~[/updates] %hark-group-hook-update !>(update)]~ + -- + -- +:: +++ on-agent + ~/ %hark-group-hook-agent + |= [=wire =sign:agent:gall] + ^- (quip card _this) + |^ + ?+ -.sign (on-agent:def wire sign) + %kick + :_ this + ?+ wire ~ + [%group ~] ~[watch-groups:ha] + [%metadata ~] ~[watch-metadata:ha] + == + :: + %fact + ?+ p.cage.sign (on-agent:def wire sign) + %group-update + =^ cards state + (group-update !<(update:group-store q.cage.sign)) + [cards this] + :: + %metadata-update + =^ cards state + (metadata-update !<(metadata-update:metadata-store q.cage.sign)) + [cards this] + == + == + :: + ++ group-update + |= =update:group-store + ^- (quip card _state) + ?. ?=(?(%add-members %remove-members) -.update) + [~ state] + ?. (~(has in watching) resource.update) + [~ state] + =/ =contents:store + [%group ~[update]] + =/ =notification:store [now.bowl %.n contents] + =/ =index:store + [%group resource.update -.update] + :_ state + ~[(add-unread index notification)] + :: +metadata-update is stubbed for now, for the following reasons + :: - There's no semantic difference in metadata-store between + :: adding and editing a channel + :: - We have no way of retrieving old metadata to e.g. get a + :: channel's old name when it is renamed + ++ metadata-update + |= update=metadata-update:metadata-store + ^- (quip card _state) + [~ state] + :: + ++ add-unread + |= [=index:store =notification:store] + ^- card + =- [%pass / %agent [our.bowl %hark-store] %poke -] + hark-action+!>([%add index notification]) + -- +:: +++ on-peek on-peek:def +++ on-leave on-leave:def +++ on-arvo on-arvo:def +++ on-fail on-fail:def +-- +|_ =bowl:gall ++* met ~(. 
metadata bowl) +:: +++ watch-groups + ^- card + [%pass /group %agent [our.bowl %group-store] %watch /groups] +:: +++ watch-metadata + ^- card + [%pass /metadata %agent [our.bowl %metadata-store] %watch /updates] +-- diff --git a/pkg/arvo/app/hark-store.hoon b/pkg/arvo/app/hark-store.hoon new file mode 100644 index 000000000..476ea8d92 --- /dev/null +++ b/pkg/arvo/app/hark-store.hoon @@ -0,0 +1,363 @@ +:: hark-store: notifications [landscape] +:: +/- store=hark-store, post, group-store, metadata-store +/+ resource, metadata, default-agent, dbug, graph-store +:: +~% %hark-store-top ..is ~ +|% ++$ card card:agent:gall ++$ versioned-state + $% state-0 + == +:: ++$ state-0 + $: %0 + =notifications:store + archive=notifications:store + last-seen=@da + dnd=_| + == ++$ inflated-state + $: state-0 + cache + == +:: $cache: useful to have precalculated, but can be derived from state +:: albeit expensively ++$ cache + $: unread-count=@ud + by-index=(jug index:store @da) + ~ + == +:: +++ orm ((ordered-map @da timebox:store) gth) +-- +:: +=| inflated-state +=* state - +:: +=< +%- agent:dbug +^- agent:gall +~% %hark-store-agent ..card ~ +|_ =bowl:gall ++* this . + ha ~(. +> bowl) + def ~(. (default-agent this %|) bowl) + met ~(. metadata bowl) +:: +++ on-init + :_ this + ~[autoseen-timer] +:: +++ on-save !>(-.state) +++ on-load + |= =old=vase + ^- (quip card _this) + =/ old + !<(state-0 old-vase) + =. notifications.old + (gas:orm *notifications:store (tap:orm notifications.old)) + =. archive.old + (gas:orm *notifications:store (tap:orm archive.old)) + `this(-.state old, +.state (inflate-cache old)) +:: +++ on-watch + |= =path + ^- (quip card _this) + |^ + ?+ path (on-watch:def path) + :: + [%updates ~] + :_ this + [%give %fact ~ hark-update+!>(initial-updates)]~ + == + :: + ++ initial-updates + ^- update:store + :- %more + ^- (list update:store) + :- unreads + :+ [%set-dnd dnd] + [%count unread-count] + %+ weld + %+ turn + %+ scag 3 + (tap-nonempty:ha archive) + (timebox-update &) + %+ turn + %+ scag 3 + (tap-nonempty:ha notifications) + (timebox-update |) + :: + ++ unreads + ^- update:store + :- %unreads + ^- (list [index:store @ud]) + %+ turn + ~(tap by by-index) + |=([=index:store =(set @da)] [index ~(wyt in set)]) + :: + ++ timebox-update + |= archived=? 
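+    ::  curried: the outer gate fixes the archived flag, the inner gate
+    ::  turns one [time timebox] pair into a %timebox update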
+ |= [time=@da =timebox:store] + ^- update:store + [%timebox time archived ~(tap by timebox)] + -- +:: +++ on-peek + |= =path + ^- (unit (unit cage)) + ?+ path (on-peek:def path) + :: + [%x %recent ?(%archive %inbox) @ @ ~] + =/ is-archive + =(%archive i.t.t.path) + =/ offset=@ud + (slav %ud i.t.t.t.path) + =/ length=@ud + (slav %ud i.t.t.t.t.path) + :^ ~ ~ %hark-update + !> ^- update:store + :- %more + %+ turn + %+ scag length + %+ slag offset + %- tap-nonempty:ha + ?:(is-archive archive notifications) + |= [time=@da =timebox:store] + ^- update:store + :^ %timebox time is-archive + ~(tap by timebox) + == +:: +++ on-poke + ~/ %hark-store-poke + |= [=mark =vase] + ^- (quip card _this) + |^ + ?> (team:title our.bowl src.bowl) + =^ cards state + ?+ mark (on-poke:def mark vase) + %hark-action (hark-action !<(action:store vase)) + == + [cards this] + :: + ++ hark-action + |= =action:store + ^- (quip card _state) + |^ + ?- -.action + %add (add +.action) + %archive (do-archive +.action) + %seen seen + %read (read +.action) + %read-index (read-index +.action) + %unread (unread +.action) + %set-dnd (set-dnd +.action) + == + ++ add + |= [=index:store =notification:store] + ^- (quip card _state) + =/ =timebox:store + (gut-orm:ha notifications last-seen) + =/ existing-notif + (~(get by timebox) index) + =/ new=notification:store + ?~ existing-notif + notification + (merge-notification:ha u.existing-notif notification) + =/ new-timebox=timebox:store + (~(put by timebox) index new) + :- (give:ha [/updates]~ %added last-seen index new) + %_ state + + ?~(existing-notif (upd-unreads:ha index last-seen %.n) +.state) + notifications (put:orm notifications last-seen new-timebox) + == + ++ read-index + |= =index:store + ^- (quip card _state) + =/ times=(list @da) + ~(tap in (~(gut by by-index) index ~)) + =| cards=(list card) + |- + ?~ times + [cards state] + =* time i.times + =^ crds state + (read time index) + $(cards (weld cards crds), times t.times) + :: + ++ do-archive + |= [time=@da =index:store] + ^- (quip card _state) + =/ =timebox:store + (gut-orm:ha notifications time) + =/ =notification:store + (~(got by timebox) index) + =/ new-timebox=timebox:store + (~(del by timebox) index) + :- (give:ha [/updates]~ %archive time index) + %_ state + + ?.(read.notification (upd-unreads:ha index time %.y) +.state) + :: + notifications + (put:orm notifications time new-timebox) + :: + archive + %^ jub-orm:ha archive time + |= archive-box=timebox:store + ^- timebox:store + (~(put by archive-box) index notification(read %.y)) + == + :: + ++ read + |= [time=@da =index:store] + ^- (quip card _state) + :- (give:ha [/updates]~ %read time index) + %_ state + + (upd-unreads:ha index time %.y) + unread-count (dec unread-count) + notifications (change-read-status:ha time index %.y) + == + :: + ++ unread + |= [time=@da =index:store] + ^- (quip card _state) + :- (give:ha [/updates]~ %unread time index) + %_ state + + (upd-unreads:ha index time %.n) + unread-count +(unread-count) + notifications (change-read-status:ha time index %.n) + == + :: + ++ seen + ^- (quip card _state) + :_ state(last-seen now.bowl) + :~ cancel-autoseen:ha + autoseen-timer:ha + == + :: + ++ set-dnd + |= d=? + ^- (quip card _state) + :_ state(dnd d) + (give:ha [/updates]~ %set-dnd d) + -- + -- +:: +++ on-agent on-agent:def +:: +++ on-leave on-leave:def +++ on-arvo + |= [=wire =sign-arvo] + ^- (quip card _this) + ?. 
?=([%autoseen ~] wire) + (on-arvo:def wire sign-arvo) + ?> ?=([%b %wake *] sign-arvo) + :_ this(last-seen now.bowl) + ~[autoseen-timer:ha] +:: +++ on-fail on-fail:def +-- +|_ =bowl:gall ++* met ~(. metadata bowl) +:: +++ tap-nonempty + |= =notifications:store + ^- (list [@da timebox:store]) + %+ skip (tap:orm notifications) + |=([@da =timebox:store] =(0 ~(wyt by timebox))) +:: +++ merge-notification + |= [existing=notification:store new=notification:store] + ^- notification:store + ?- -.contents.existing + :: + %chat + ?> ?=(%chat -.contents.new) + existing(list.contents (weld list.contents.existing list.contents.new)) + :: + %graph + ?> ?=(%graph -.contents.new) + existing(list.contents (weld list.contents.existing list.contents.new)) + :: + %group + ?> ?=(%group -.contents.new) + existing(list.contents (weld list.contents.existing list.contents.new)) + == +:: +++ change-read-status + |= [time=@da =index:store read=?] + ^+ notifications + %^ jub-orm notifications time + |= =timebox:store + %+ ~(jab by timebox) index + |= =notification:store + ?> !=(read read.notification) + notification(read read) +:: +key-orm: +key:by for ordered maps +++ key-orm + |= =notifications:store + ^- (list @da) + (turn (tap:orm notifications) |=([key=@da =timebox:store] key)) +:: +jub-orm: combo +jab/+gut for ordered maps +:: TODO: move to zuse.hoon +++ jub-orm + |= [=notifications:store time=@da fun=$-(timebox:store timebox:store)] + ^- notifications:store + =/ =timebox:store + (fun (gut-orm notifications time)) + (put:orm notifications time timebox) +:: +gut-orm: +gut:by for ordered maps +:: TODO: move to zuse.hoon +++ gut-orm + |= [=notifications:store time=@da] + ^- timebox:store + (fall (get:orm notifications time) ~) +:: +++ autoseen-interval ~h3 +++ cancel-autoseen + ^- card + [%pass /autoseen %arvo %b %rest (add last-seen autoseen-interval)] +:: +++ autoseen-timer + ^- card + [%pass /autoseen %arvo %b %wait (add now.bowl autoseen-interval)] +:: +++ give + |= [paths=(list path) update=update:store] + ^- (list card) + [%give %fact paths [%hark-update !>(update)]]~ +:: +++ upd-unreads + |= [=index:store time=@da read=?] + ^+ +.state + %_ +.state + :: + by-index + %. [index time] + ?: read + ~(del ju by-index) + ~(put ju by-index) + == +:: +++ inflate-cache + |= state-0 + ^+ +.state + =/ nots=(list [p=@da =timebox:store]) + (tap:orm notifications) + |- =* outer $ + ?~ nots + +.state + =/ unreads ~(tap by timebox.i.nots) + |- =* inner $ + ?~ unreads + outer(nots t.nots) + =* notification q.i.unreads + =* index p.i.unreads + ?: read.notification + inner(unreads t.unreads) + =. +.state + (upd-unreads index p.i.nots %.n) + inner(unreads t.unreads) +-- diff --git a/pkg/arvo/app/herm.hoon b/pkg/arvo/app/herm.hoon new file mode 100644 index 000000000..4434685c3 --- /dev/null +++ b/pkg/arvo/app/herm.hoon @@ -0,0 +1,101 @@ +:: herm: stand-in for term.c with http interface +:: +/+ default-agent, dbug, verb +=, able:jael +|% ++$ state-0 [%0 ~] +-- +:: +=| state-0 +=* state - +%+ verb | +%- agent:dbug +^- agent:gall +=> |% + ++ request-tube + |= [bowl:gall from=mark to=mark next=?] + ^- card:agent:gall + :* %pass /tube/[from]/[to] + %arvo %c %warp + our q.byk ~ + :: + ?: next + [%next %c da+now /[from]/[to]] + [%sing %c da+now /[from]/[to]] + == + -- +|_ =bowl:gall ++* this . + def ~(. 
(default-agent this %|) bowl) +:: +++ on-init + ^- (quip card:agent:gall _this) + :_ this + :: set up dill session subscription, + :: and ensure the tubes we use are in cache + :: + :~ [%pass [%view %$ ~] %arvo %d %view ~] + (request-tube bowl %blit %json |) + (request-tube bowl %json %belt |) + == +:: +++ on-save !>([%0 ~]) +++ on-load + |= old=vase + ^- (quip card:agent:gall _this) + [~ this(state [%0 ~])] +:: +++ on-watch + |= =path + ^- (quip card:agent:gall _this) + ?> ?=([%session @ ~] path) + :_ this + :: scry prompt and cursor position out of dill for initial response + :: + =/ base=^path + /dx/(scot %p our.bowl)//(scot %da now.bowl)/sessions + :~ [%give %fact ~ %blit !>(.^(blit:dill (weld base //line)))] + [%give %fact ~ %blit !>(`blit:dill`hop+.^(@ud (weld base //cursor)))] + == +:: +++ on-arvo + |= [=wire =sign-arvo] + ^- (quip card:agent:gall _this) + ?+ wire !! + :: pass on dill blits for the session + :: + [%view %$ ~] + ?. ?=([%d %blit *] sign-arvo) + ~| [%unexpected-sign [- +<]:sign-arvo] + !! + :_ this + %+ turn p.sign-arvo + |= =blit:dill + [%give %fact [%session %$ ~]~ %blit !>(blit)] + :: + :: ensure the tubes we need remain in cache + :: + [%tube @ @ ~] + =* from i.t.wire + =* to i.t.t.wire + ?. ?=([%c %writ *] sign-arvo) + ~| [%unexpected-sign [- +<]:sign-arvo] + !! + :_ this + [(request-tube bowl from to &)]~ + == +:: +++ on-poke + |= [=mark =vase] + ^- (quip card:agent:gall _this) + ?. ?=(%belt mark) + ~| [%unexpected-mark mark] + !! + :_ this + [%pass [%belt %$ ~] %arvo %d %belt !<(belt:dill vase)]~ +:: +++ on-leave on-leave:def +++ on-peek on-peek:def +++ on-agent on-agent:def +++ on-fail on-fail:def +-- diff --git a/pkg/arvo/app/hood.hoon b/pkg/arvo/app/hood.hoon index 433ccb2e4..7ea33fe6b 100644 --- a/pkg/arvo/app/hood.hoon +++ b/pkg/arvo/app/hood.hoon @@ -2,7 +2,7 @@ /+ drum=hood-drum, helm=hood-helm, kiln=hood-kiln |% +$ state - $: %10 + $: %11 drum=state:drum helm=state:helm kiln=state:kiln @@ -13,6 +13,7 @@ [%7 drum=state:drum helm=state:helm kiln=state:kiln] [%8 drum=state:drum helm=state:helm kiln=state:kiln] [%9 drum=state:drum helm=state:helm kiln=state:kiln] + [%10 drum=state:drum helm=state:helm kiln=state:kiln] == +$ any-state-tuple $: drum=any-state:drum diff --git a/pkg/arvo/app/invite-hook.hoon b/pkg/arvo/app/invite-hook.hoon index 0e7b93d97..8cde26eb6 100644 --- a/pkg/arvo/app/invite-hook.hoon +++ b/pkg/arvo/app/invite-hook.hoon @@ -1,123 +1,121 @@ -:: invite-hook [landscape]: +:: invite-hook [landscape]: receive invites from any source :: -:: receive invites from any source +:: only handles %invite actions: +:: - can be poked by the host team to send an invite out to someone. +:: - can be poked by foreign ships to send an invite to us. :: -:: only handles %invite actions. accepts json, but only from the host team. -:: can be poked by the host team to send an invite out to someone. -:: can be poked by foreign ships to send an invite to us. -:: -/+ *invite-json, default-agent, verb, dbug +/- *invite-store +/+ default-agent, dbug :: |% +$ state-0 [%0 ~] -:: +$ card card:agent:gall -- :: =| state-0 =* state - -:: -%+ verb | %- agent:dbug ^- agent:gall -=< - |_ =bowl:gall - +* this . - do ~(. +> bowl) - def ~(. (default-agent this %|) bowl) - :: - ++ on-init - ^- (quip card _this) - [~ this] - :: - ++ on-save !>(state) - ++ on-load - |= old=vase - ^- (quip card _this) - [~ this(state !<(state-0 old))] - :: - ++ on-poke - |= [=mark =vase] - ^- (quip card _this) - :_ this - ?+ mark (on-poke:def mark vase) - %json - :: only accept json from ourselves. 
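+::  note: this rewrite keys invites by app term instead of by path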
+:: +|_ =bowl:gall ++* this . + def ~(. (default-agent this %|) bowl) +:: +++ on-init [~ this] +++ on-save !>(state) +++ on-load + |= old=vase + ^- (quip card _this) + [~ this(state !<(state-0 old))] +:: +++ on-poke + |= [=mark =vase] + ^- (quip card _this) + |^ + :_ this + ?+ mark (on-poke:def mark vase) + %invite-action + =/ act=action !<(action vase) + ?+ -.act ~ + %invites + ?. (team:title [our src]:bowl) ~ + :: outgoing. we must be inviting other ships. send them each an invite :: - ?> (team:title our.bowl src.bowl) - =/ act (json-to-action !<(json vase)) - ?> ?=(%invite -.act) - [(invite-hook-poke:do recipient.invite.act act)]~ + %+ turn ~(tap in recipients.invites.act) + |= recipient=ship + ^- card + ?< (team:title our.bowl recipient) + %+ invite-hook-poke recipient + :^ %invite term.act uid.act + ^- invite + :* ship.invites.act + app.invites.act + resource.invites.act + recipient + text.invites.act + == :: - %invite-action - =/ act=invite-action !<(invite-action vase) - ?. ?=(%invite -.act) ~ - ?: (team:title our.bowl src.bowl) + %invite + ?: (team:title [our src]:bowl) :: outgoing. we must be inviting another ship. send them the invite. :: ?< (team:title our.bowl recipient.invite.act) - [(invite-hook-poke:do recipient.invite.act act)]~ + [(invite-hook-poke recipient.invite.act act)]~ :: else incoming. ensure invitatory exists and invite is not a duplicate. :: - ?> ?=(^ (invitatory-scry:do path.act)) - ?> ?=(~ (invite-scry:do path.act uid.act)) - [(invite-poke:do path.act act)]~ + ?> ?=(^ (invitatory-scry term.act)) + ?> ?=(~ (invite-scry term.act uid.act)) + [(invite-poke term.act act)]~ + == + == + :: + ++ invite-hook-poke + |= [=ship =action] + ^- card + :* %pass + /invite-hook + %agent + [ship %invite-hook] + %poke + %invite-action + !>(action) == :: - ++ on-peek on-peek:def - ++ on-watch on-watch:def - ++ on-leave on-leave:def - ++ on-agent on-agent:def - ++ on-arvo on-arvo:def - ++ on-fail on-fail:def + ++ invite-poke + |= [=term =action] + ^- card + :* %pass + /[term] + %agent + [our.bowl %invite-store] + %poke + %invite-action + !>(action) + == + :: + ++ invitatory-scry + |= =term + .^ (unit invitatory) + %gx + %+ weld + /(scot %p our.bowl)/invite-store/(scot %da now.bowl)/invitatory + /[term]/noun + == + :: + ++ invite-scry + |= [=term uid=serial] + .^ (unit invite) + %gx + %+ weld + /(scot %p our.bowl)/invite-store/(scot %da now.bowl)/invite + /[term]/(scot %uv uid)/noun + == -- :: -|_ =bowl:gall -:: -++ invite-hook-poke - |= [=ship action=invite-action] - ^- card - :* %pass - /invite-hook - %agent - [ship %invite-hook] - %poke - %invite-action - !>(action) - == -:: -++ invite-poke - |= [=path action=invite-action] - ^- card - :* %pass - path - %agent - [our.bowl %invite-store] - %poke - %invite-action - !>(action) - == -:: -++ invitatory-scry - |= pax=path - ^- (unit invitatory) - =. pax - ;: weld - /(scot %p our.bowl)/invite-store/(scot %da now.bowl)/invitatory - pax - /noun - == - .^((unit invitatory) %gx pax) -:: -++ invite-scry - |= [pax=path uid=serial] - ^- (unit invite) - =. 
pax - ;: weld - /(scot %p our.bowl)/invite-store/(scot %da now.bowl)/invite - pax - /(scot %uv uid)/noun - == - .^((unit invite) %gx pax) +++ on-peek on-peek:def +++ on-watch on-watch:def +++ on-leave on-leave:def +++ on-agent on-agent:def +++ on-arvo on-arvo:def +++ on-fail on-fail:def -- - diff --git a/pkg/arvo/app/invite-store.hoon b/pkg/arvo/app/invite-store.hoon index c09bd4cba..258f61a4c 100644 --- a/pkg/arvo/app/invite-store.hoon +++ b/pkg/arvo/app/invite-store.hoon @@ -1,184 +1,209 @@ :: invite-store [landscape] -/+ *invite-json, default-agent, dbug +/- store=invite-store +/+ res=resource, default-agent, dbug |% +$ card card:agent:gall -:: +$ versioned-state - $% state-zero + $% state-0 + state-1 == :: -+$ state-zero - $: %0 - =invites ++$ invitatory-0 (map serial:store invite-0) ++$ invite-0 + $: =ship :: ship to subscribe to upon accepting invite + app=@tas :: app to subscribe to upon accepting invite + =path :: path to subscribe to upon accepting invite + recipient=ship :: recipient to receive invite + text=cord :: text to describe the invite == +:: ++$ state-0 [%0 invites=(map path invitatory-0)] ++$ state-1 [%1 =invites:store] -- :: -=| state-zero +=| state-1 =* state - %- agent:dbug ^- agent:gall -=< - |_ bol=bowl:gall - +* this . - inv-core +> - ic ~(. inv-core bol) - def ~(. (default-agent this %|) bol) - ++ on-init on-init:def - ++ on-save !>(state) - ++ on-load - |= old=vase - `this(state !<(state-zero old)) +:: +|_ =bowl:gall ++* this . + def ~(. (default-agent this %|) bowl) +:: +++ on-init + ^- (quip card _this) + :- ~ + %_ this + invites.state + %- ~(gas by *invites:store) + [%graph *invitatory:store]~ + == +:: +++ on-save !>(state) +++ on-load + |= old-vase=vase + =/ old !<(versioned-state old-vase) + ?: ?=(%1 -.old) + `this(state old) + :- =- [%pass / %agent [our.bowl %invite-store] %poke %invite-action -]~ + !> ^- action:store + [%create %graph] + %= this + state + :- %1 + %- ~(gas by *invites:store) + %+ murn ~(tap by invites.old) + |= [=path =invitatory-0] + ^- (unit [term invitatory:store]) + ?. 
?=([@ ~] path) ~ + :- ~ + :- i.path + %- ~(gas by *invitatory:store) + %+ murn ~(tap by invitatory-0) + |= [=serial:store =invite-0] + ^- (unit [serial:store invite:store]) + =/ resource=(unit resource:res) (de-path-soft:res path.invite-0) + ?~ resource ~ + :- ~ + :- serial + ^- invite:store + :* ship.invite-0 + app.invite-0 + u.resource + recipient.invite-0 + text.invite-0 + == + == +:: +++ on-agent on-agent:def +++ on-arvo on-arvo:def +++ on-leave on-leave:def +++ on-fail on-fail:def +:: +++ on-watch + |= =path + ^- (quip card _this) + ?> (team:title our.bowl src.bowl) + =/ cards=(list card) + ?+ path (on-watch:def path) + [%all ~] [%give %fact ~ %invite-update !>([%initial invites])]~ + [%updates ~] ~ + [%invitatory @ ~] + =/ inv=invitatory:store (~(got by invites) i.t.path) + [%give %fact ~ %invite-update !>([%invitatory inv])]~ + == + [cards this] +:: +++ on-poke + |= [=mark =vase] + ^- (quip card _this) + |^ + ?> (team:title our.bowl src.bowl) + =^ cards state + ?+ mark (on-poke:def mark vase) + %invite-action (poke-invite-action !<(action:store vase)) + == + [cards this] :: - ++ on-poke - |= [=mark =vase] - ^- (quip card _this) - ?> (team:title our.bol src.bol) - =^ cards state - ?+ mark (on-poke:def mark vase) - %json (poke-invite-action:ic (json-to-action !<(json vase))) - %invite-action (poke-invite-action:ic !<(invite-action vase)) - == - [cards this] - :: - ++ on-watch - |= =path - ^- (quip card _this) - =/ cards=(list card) - ?+ path (on-watch:def path) - [%all ~] [%give %fact ~ %invite-update !>([%initial invites])]~ - [%updates ~] ~ - [%invitatory *] - =/ inv=invitatory (~(got by invites) t.path) - [%give %fact ~ %invite-update !>([%invitatory inv])]~ - == - [cards this] - :: - ++ on-leave on-leave:def - ++ on-peek - |= =path - ^- (unit (unit cage)) - ?+ path (on-peek:def path) - [%x %all ~] (peek-x-all:ic t.t.path) - [%x %invitatory *] (peek-x-invitatory:ic t.t.path) - [%x %invite *] (peek-x-invite:ic t.t.path) + ++ poke-invite-action + |= =action:store + ^- (quip card _state) + ?- -.action + %create (handle-create +.action) + %delete (handle-delete +.action) + %invite (handle-invite +.action) + %accept (handle-accept +.action) + %decline (handle-decline +.action) + %invites ~|('only send this to %invite-hook' !!) == :: - ++ on-agent on-agent:def - ++ on-arvo on-arvo:def - ++ on-fail on-fail:def + ++ handle-create + |= =term + ^- (quip card _state) + ?: (~(has by invites) term) + [~ state] + :- (send-diff term [%create term]) + state(invites (~(put by invites) term *invitatory:store)) + :: + ++ handle-delete + |= =term + ^- (quip card _state) + ?. (~(has by invites) term) + [~ state] + :- (send-diff term [%delete term]) + state(invites (~(del by invites) term)) + :: + ++ handle-invite + |= [=term =serial:store =invite:store] + ^- (quip card _state) + ?. (~(has by invites) term) + [~ state] + =/ container (~(got by invites) term) + =. serial (sham eny.bowl) + =. container (~(put by container) serial invite) + :- (send-diff term [%invite term serial invite]) + state(invites (~(put by invites) term container)) + :: + ++ handle-accept + |= [=term =serial:store] + ^- (quip card _state) + ?. (~(has by invites) term) + [~ state] + =/ container (~(got by invites) term) + =/ invite (~(get by container) serial) + ?~ invite + [~ state] + =. container (~(del by container) serial) + :- (send-diff term [%accepted term serial u.invite]) + state(invites (~(put by invites) term container)) + :: + ++ handle-decline + |= [=term =serial:store] + ^- (quip card _state) + ?. 
(~(has by invites) term) + [~ state] + =/ container (~(got by invites) term) + =/ invite (~(get by container) serial) + ?~ invite + [~ state] + =. container (~(del by container) serial) + :- (send-diff term [%decline term serial]) + state(invites (~(put by invites) term container)) + :: + ++ update-subscribers + |= [=path =update:store] + ^- card + [%give %fact ~[path] %invite-update !>(update)] + :: + ++ send-diff + |= [=term =update:store] + ^- (list card) + :~ (update-subscribers /all update) + (update-subscribers /updates update) + (update-subscribers /invitatory/[term] update) + == -- :: -|_ bol=bowl:gall -:: -++ peek-x-all - |= pax=path +++ on-peek + |= =path ^- (unit (unit cage)) - [~ ~ %noun !>(invites)] -:: -++ peek-x-invitatory - |= pax=path - ^- (unit (unit cage)) - ?~ pax - ~ - =/ invitatory=(unit invitatory) (~(get by invites) pax) - [~ ~ %noun !>(invitatory)] -:: -++ peek-x-invite - |= pax=path - ^- (unit (unit cage)) - :: /:path/:uid - =/ pas (flop pax) - ?~ pas - ~ - =/ uid=serial (slav %uv i.pas) - =. pax (scag (dec (lent pax)) `(list @ta)`pax) - =/ invitatory=(unit invitatory) (~(get by invites) pax) - ?~ invitatory - ~ - =/ invite=(unit invite) (~(get by u.invitatory) uid) - [~ ~ %noun !>(invite)] -:: -++ poke-invite-action - |= action=invite-action - ^- (quip card _state) - ?> (team:title our.bol src.bol) - ?- -.action - %create (handle-create action) - %delete (handle-delete action) - %invite (handle-invite action) - %accept (handle-accept action) - %decline (handle-decline action) + ?+ path (on-peek:def path) + [%x %all ~] + ``noun+!>(invites) + :: + [%x %invitatory @ ~] + :^ ~ ~ %noun + !> ^- (unit invitatory:store) + (~(get by invites) i.t.t.path) + :: + [%x %invite @ @ ~] + =* term i.t.t.path + =/ =serial:store (slav %uv i.t.t.t.path) + ?. (~(has by invites) term) + ~ + =/ =invitatory:store (~(got by invites) term) + :^ ~ ~ %noun + !> ^- (unit invite:store) + (~(get by invitatory) serial) == -:: -++ handle-create - |= act=invite-action - ^- (quip card _state) - ?> ?=(%create -.act) - ?: (~(has by invites) path.act) - [~ state] - :- (send-diff path.act act) - state(invites (~(put by invites) path.act *invitatory)) -:: -++ handle-delete - |= act=invite-action - ^- (quip card _state) - ?> ?=(%delete -.act) - ?. (~(has by invites) path.act) - [~ state] - :- (send-diff path.act act) - state(invites (~(del by invites) path.act)) -:: -++ handle-invite - |= act=invite-action - ^- (quip card _state) - ?> ?=(%invite -.act) - ?. (~(has by invites) path.act) - [~ state] - =/ container (~(got by invites) path.act) - =. uid.act (sham eny.bol) - =. container (~(put by container) uid.act invite.act) - :- (send-diff path.act act) - state(invites (~(put by invites) path.act container)) -:: -++ handle-accept - |= act=invite-action - ^- (quip card _state) - ?> ?=(%accept -.act) - ?. (~(has by invites) path.act) - [~ state] - =/ container (~(got by invites) path.act) - =/ invite (~(get by container) uid.act) - ?~ invite - [~ state] - =. container (~(del by container) uid.act) - :- (send-diff path.act [%accepted path.act uid.act u.invite]) - state(invites (~(put by invites) path.act container)) -:: -++ handle-decline - |= act=invite-action - ^- (quip card _state) - ?> ?=(%decline -.act) - ?. (~(has by invites) path.act) - [~ state] - =/ container (~(got by invites) path.act) - =/ invite (~(get by container) uid.act) - ?~ invite - [~ state] - =. 
container (~(del by container) uid.act) - :- (send-diff path.act act) - state(invites (~(put by invites) path.act container)) -:: -++ update-subscribers - |= [pax=path upd=invite-update] - ^- card - [%give %fact ~[pax] %invite-update !>(upd)] -:: -++ send-diff - |= [pax=path upd=invite-update] - ^- (list card) - :~ (update-subscribers /all upd) - (update-subscribers /updates upd) - (update-subscribers [%invitatory pax] upd) - == -:: -- diff --git a/pkg/arvo/app/landscape/img/chat.png b/pkg/arvo/app/landscape/img/chat.png deleted file mode 100644 index 58223a5d6..000000000 Binary files a/pkg/arvo/app/landscape/img/chat.png and /dev/null differ diff --git a/pkg/arvo/app/landscape/img/chatswitcherclosed.png b/pkg/arvo/app/landscape/img/chatswitcherclosed.png deleted file mode 100644 index 2009b319b..000000000 Binary files a/pkg/arvo/app/landscape/img/chatswitcherclosed.png and /dev/null differ diff --git a/pkg/arvo/app/landscape/img/chatswitcherlink.png b/pkg/arvo/app/landscape/img/chatswitcherlink.png deleted file mode 100644 index 9b47164dd..000000000 Binary files a/pkg/arvo/app/landscape/img/chatswitcherlink.png and /dev/null differ diff --git a/pkg/arvo/app/landscape/img/chevron.png b/pkg/arvo/app/landscape/img/chevron.png deleted file mode 100644 index 79eada831..000000000 Binary files a/pkg/arvo/app/landscape/img/chevron.png and /dev/null differ diff --git a/pkg/arvo/app/landscape/img/codeeval.png b/pkg/arvo/app/landscape/img/codeeval.png deleted file mode 100644 index 8bcb81ca6..000000000 Binary files a/pkg/arvo/app/landscape/img/codeeval.png and /dev/null differ diff --git a/pkg/arvo/app/landscape/img/dojo.png b/pkg/arvo/app/landscape/img/dojo.png deleted file mode 100644 index d6b651bc5..000000000 Binary files a/pkg/arvo/app/landscape/img/dojo.png and /dev/null differ diff --git a/pkg/arvo/app/landscape/img/link.png b/pkg/arvo/app/landscape/img/link.png deleted file mode 100644 index d83bb877c..000000000 Binary files a/pkg/arvo/app/landscape/img/link.png and /dev/null differ diff --git a/pkg/arvo/app/landscape/img/links.png b/pkg/arvo/app/landscape/img/links.png deleted file mode 100644 index d83bb877c..000000000 Binary files a/pkg/arvo/app/landscape/img/links.png and /dev/null differ diff --git a/pkg/arvo/app/landscape/img/popout.png b/pkg/arvo/app/landscape/img/popout.png deleted file mode 100644 index 712c2ab2f..000000000 Binary files a/pkg/arvo/app/landscape/img/popout.png and /dev/null differ diff --git a/pkg/arvo/app/landscape/img/publish.png b/pkg/arvo/app/landscape/img/publish.png deleted file mode 100644 index df16b2647..000000000 Binary files a/pkg/arvo/app/landscape/img/publish.png and /dev/null differ diff --git a/pkg/arvo/app/landscape/img/search.png b/pkg/arvo/app/landscape/img/search.png deleted file mode 100644 index 7ac218304..000000000 Binary files a/pkg/arvo/app/landscape/img/search.png and /dev/null differ diff --git a/pkg/arvo/app/landscape/img/send.png b/pkg/arvo/app/landscape/img/send.png deleted file mode 100644 index 6c13772b9..000000000 Binary files a/pkg/arvo/app/landscape/img/send.png and /dev/null differ diff --git a/pkg/arvo/app/landscape/img/spinner.png b/pkg/arvo/app/landscape/img/spinner.png deleted file mode 100644 index 8fddc7895..000000000 Binary files a/pkg/arvo/app/landscape/img/spinner.png and /dev/null differ diff --git a/pkg/arvo/app/landscape/index.html b/pkg/arvo/app/landscape/index.html index cab16a450..a2a06dea1 100644 --- a/pkg/arvo/app/landscape/index.html +++ b/pkg/arvo/app/landscape/index.html @@ -24,6 +24,6 @@
- + diff --git a/pkg/arvo/app/launch.hoon b/pkg/arvo/app/launch.hoon index eddf976c5..16997bfc0 100644 --- a/pkg/arvo/app/launch.hoon +++ b/pkg/arvo/app/launch.hoon @@ -12,6 +12,7 @@ [%3 *] [%4 state-zero] [%5 state-zero] + [%6 state-zero] == :: +$ state-zero @@ -21,7 +22,7 @@ == -- :: -=| [%5 state-zero] +=| [%6 state-zero] =* state - %- agent:dbug ^- agent:gall @@ -36,27 +37,42 @@ %_ new-state tiles %- ~(gas by *tiles:store) - %+ turn `(list term)`[%weather %clock %dojo ~] + %+ turn `(list term)`[%weather %clock %term ~] |= =term :- term ^- tile:store - ?+ term [[%custom ~] %.y] - %dojo [[%basic 'Dojo' '/~landscape/img/Dojo.png' '/~dojo'] %.y] + ?+ term [[%custom ~] %.y] + %term [[%basic 'Terminal' '/~landscape/img/term.png' '/~term'] %.y] == - tile-ordering [%weather %clock %dojo ~] + tile-ordering [%weather %clock %term ~] == - [~ this(state [%5 new-state])] + [~ this(state [%6 new-state])] :: ++ on-save !>(state) ++ on-load |= old=vase ^- (quip card _this) =/ old-state !<(versioned-state old) - |- + =| cards=(list card) + |- ^- (quip card _this) + ?: ?=(%6 -.old-state) + [cards this(state old-state)] ?: ?=(%5 -.old-state) - `this(state old-state) + :: replace %dojo with %term + :: + =. tiles.old-state + %+ ~(put by (~(del by tiles.old-state) %dojo)) + %term + :_ is-shown:(~(gut by tiles.old-state) %dojo *tile:store) + [%basic 'Terminal' '/~landscape/img/term.png' '/~term'] + =. tile-ordering.old-state + %+ turn tile-ordering.old-state + |=(t=term ?:(=(%dojo t) %term t)) + $(old-state [%6 +.old-state]) ?: ?=(%4 -.old-state) - :- [%pass / %arvo %e %disconnect [~ /]]~ + =. cards + %+ snoc cards + [%pass / %arvo %e %disconnect [~ /]] =. tiles.old-state (~(del by tiles.old-state) %chat) =. tiles.old-state @@ -65,7 +81,7 @@ (~(del by tiles.old-state) %links) =. tile-ordering.old-state (skip tile-ordering.old-state |=(=term ?=(?(%links %chat %publish) term))) - this(state [%5 +.old-state]) + $(old-state [%5 +.old-state]) =/ new-state *state-zero =. 
new-state %_ new-state @@ -80,18 +96,22 @@ == tile-ordering [%weather %clock %dojo ~] == - :_ this(state [%5 new-state]) - %+ welp - :~ [%pass / %arvo %e %disconnect [~ /]] - :* %pass /srv %agent [our.bowl %file-server] - %poke %file-server-action - !>([%serve-dir / /app/landscape %.n %.y]) - == - == - %+ turn ~(tap by wex.bowl) - |= [[=wire =ship =term] *] - ^- card - [%pass wire %agent [ship term] %leave ~] + %_ $ + old-state [%5 new-state] + :: + cards + %+ welp + :~ [%pass / %arvo %e %disconnect [~ /]] + :* %pass /srv %agent [our.bowl %file-server] + %poke %file-server-action + !>([%serve-dir / /app/landscape %.n %.y]) + == + == + %+ turn ~(tap by wex.bowl) + |= [[=wire =ship =term] *] + ^- card + [%pass wire %agent [ship term] %leave ~] + == :: ++ on-poke |= [=mark =vase] diff --git a/pkg/arvo/app/link-store.hoon b/pkg/arvo/app/link-store.hoon index 0d8f472cf..d653e7748 100644 --- a/pkg/arvo/app/link-store.hoon +++ b/pkg/arvo/app/link-store.hoon @@ -116,7 +116,7 @@ |= [=resource =graph:gra] ^- card %- poke-graph-store - [%0 now.bowl %add-graph resource graph `%graph-validator-link] + [%0 now.bowl %add-graph resource graph `%graph-validator-link %.y] :: ++ archive-graph |= =resource diff --git a/pkg/arvo/app/metadata-store.hoon b/pkg/arvo/app/metadata-store.hoon index 78f5dd04a..ec32cf818 100644 --- a/pkg/arvo/app/metadata-store.hoon +++ b/pkg/arvo/app/metadata-store.hoon @@ -10,7 +10,7 @@ :: encode group-path and app-path using (scot %t (spat group-path)) :: :: +watch paths: -:: /all assocations + updates +:: /all associations + updates :: /updates just updates :: /app-name/%app-name specific app's associations + updates :: @@ -57,6 +57,7 @@ +$ state-3 [%3 base-state-1] +$ state-4 [%4 base-state-1] +$ state-5 [%5 base-state-1] ++$ state-6 [%6 base-state-1] +$ versioned-state $% state-0 state-1 @@ -64,10 +65,11 @@ state-3 state-4 state-5 + state-6 == -- :: -=| state-5 +=| state-6 =* state - %+ verb | %- agent:dbug @@ -86,29 +88,37 @@ =/ old !<(versioned-state vase) =| cards=(list card) |^ - ?: ?=(%5 -.old) + ?: ?=(%6 -.old) [cards this(state old)] - ?: ?=(%4 -.old) - %_ $ - -.old %5 - :: - group-indices.old - %- ~(gas ju *(jug group-path md-resource)) - ~(tap in ~(key by associations.old)) - :: - app-indices.old - %- ~(gas ju *(jug app-name [group-path app-path])) - %+ turn ~(tap in ~(key by associations.old)) - |= [g=group-path r=md-resource] - ^- [app-name [group-path app-path]] - [app-name.r [g app-path.r]] + ?: ?=(%5 -.old) + =/ =^associations + (migrate-app-to-graph-store %publish associations.old) + %_ $ + -.old %6 + associations.old associations :: resource-indices.old - %- ~(gas ju *(jug md-resource group-path)) - %+ turn ~(tap in ~(key by associations.old)) - |= [g=group-path r=md-resource] - ^- [md-resource group-path] - [r g] + (rebuild-resource-indices associations) + :: + app-indices.old + (rebuild-app-indices associations) + :: + group-indices.old + (rebuild-group-indices associations) + == + + ?: ?=(%4 -.old) + %_ $ + -.old %5 + :: + resource-indices.old + (rebuild-resource-indices associations.old) + :: + app-indices.old + (rebuild-app-indices associations.old) + :: + group-indices.old + (rebuild-group-indices associations.old) == ?: ?=(%3 -.old) $(old [%4 +.old]) @@ -147,6 +157,43 @@ == $(old new-state-1) :: + ++ rebuild-resource-indices + |= =^associations + %- ~(gas ju *(jug md-resource group-path)) + %+ turn ~(tap in ~(key by associations)) + |= [g=group-path r=md-resource] + ^- [md-resource group-path] + [r g] + :: + ++ rebuild-group-indices + |= =^associations + 
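+    ::  index the association keys as a jug from group-path to md-resource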
%- ~(gas ju *(jug group-path md-resource)) + ~(tap in ~(key by associations)) + :: + ++ rebuild-app-indices + |= =^associations + %- ~(gas ju *(jug app-name [group-path app-path])) + %+ turn ~(tap in ~(key by associations)) + |= [g=group-path r=md-resource] + ^- [app-name [group-path app-path]] + [app-name.r [g app-path.r]] + + :: + ++ migrate-app-to-graph-store + |= [app=@tas =^associations] + ^+ associations + %- malt + %+ turn ~(tap by associations) + |= [[=group-path =md-resource] m=metadata] + ^- [[^group-path ^md-resource] metadata] + ?. =(app-name.md-resource app) + [[group-path md-resource] m] + =/ new-app-path=path + ?. ?=([@ @ ~] app-path.md-resource) + app-path.md-resource + ship+app-path.md-resource + [[group-path [%graph new-app-path]] m(module app)] + :: ++ poke-md-hook |= act=metadata-hook-action ^- card diff --git a/pkg/arvo/app/observe-hook.hoon b/pkg/arvo/app/observe-hook.hoon new file mode 100644 index 000000000..174b90b13 --- /dev/null +++ b/pkg/arvo/app/observe-hook.hoon @@ -0,0 +1,222 @@ +:: observe-hook: +:: +:: helper that observes an app at a particular path and forwards all facts +:: to a particular thread. kills the subscription if the thread crashes +:: +/- sur=observe-hook +/+ default-agent, dbug +:: +|% ++$ card card:agent:gall ++$ versioned-state + $% state-0 + == +:: ++$ serial @uv ++$ state-0 [%0 observers=(map serial observer:sur)] +++ got-by-val + |= [a=(map serial observer:sur) b=observer:sur] + ^- serial + %- need + %+ roll ~(tap by a) + |= [[key=serial val=observer:sur] output=(unit serial)] + ?:(=(val b) `key output) +-- +:: +%- agent:dbug +=| state-0 +=* state - +:: +^- agent:gall +|_ =bowl:gall ++* this . + def ~(. (default-agent this %|) bowl) +:: +++ on-init + |^ ^- (quip card _this) + :_ this + :_ ~ + (act /inv-gra [%watch %invite-store /invitatory/graph %invite-accepted-graph]) + :: + ++ act + |= [=wire =action:sur] + ^- card + :* %pass + wire + %agent + [our.bowl %observe-hook] + %poke + %observe-action + !> ^- action:sur + action + == + -- +:: +++ on-save !>(state) +++ on-load + |= old-vase=vase + ^- (quip card _this) + `this(state !<(state-0 old-vase)) +:: +++ on-poke + |= [=mark =vase] + ^- (quip card _this) + ?> (team:title our.bowl src.bowl) + ?. ?=(%observe-action mark) + (on-poke:def mark vase) + =/ =action:sur !<(action:sur vase) + =* observer observer.action + =/ vals (silt ~(val by observers)) + ?- -.action + %watch + ?: ?|(=(app.observer %spider) =(app.observer %observe-hook)) + ~|('we avoid infinite loops' !!) + ?: (~(has in vals) observer) + ~|('duplicate observer' !!) + :_ this(observers (~(put by observers) (sham eny.bowl) observer)) + :_ ~ + :* %pass + /observer/(scot %uv (sham eny.bowl)) + %agent + [our.bowl app.observer] + %watch + path.observer + == + :: + %ignore + ?. (~(has in vals) observer) + ~|('cannot remove nonexistent observer' !!) 
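+    ::  reverse lookup: recover the serial under which this observer was stored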
+ =/ key (got-by-val observers observer) + :_ this(observers (~(del by observers) key)) + :_ ~ + :* %pass + /observer/(scot %uv key) + %agent + [our.bowl app.observer] + %leave + ~ + == + == +:: +++ on-agent + |= [=wire =sign:agent:gall] + ^- (quip card _this) + |^ + ?+ wire (on-agent:def wire sign) + [%observer @ ~] on-observer + [%thread-result @ ~] on-thread-result + [%thread-start @ @ ~] on-thread-start + == + :: + ++ on-observer + ?> ?=([%observer @ ~] wire) + ?+ -.sign (on-agent:def wire sign) + %watch-ack + ?~ p.sign [~ this] + =/ =serial (slav %uv i.t.wire) + ~& watch-ack-deleting-observer+(~(got by observers) serial) + [~ this(observers (~(del by observers) serial))] + :: + %kick + =/ =serial (slav %uv i.t.wire) + =/ =observer:sur (~(got by observers) serial) + :_ this + :_ ~ + :* %pass + wire + %agent + [our.bowl app.observer] + %watch + path.observer + == + :: + %fact + =/ =serial (slav %uv i.t.wire) + =/ =observer:sur (~(got by observers) serial) + =/ tid (scot %uv (sham eny.bowl)) + :_ this + :~ :* %pass + [%thread-result i.t.wire ~] + %agent + [our.bowl %spider] + %watch + [%thread-result tid ~] + == + :* %pass + [%thread-start i.t.wire tid ~] + %agent + [our.bowl %spider] + %poke + %spider-start + !>([~ `tid thread.observer (slop q.cage.sign !>(~))]) + == == + == + :: + ++ on-thread-result + ?> ?=([%thread-result @ ~] wire) + ?+ -.sign (on-agent:def wire sign) + %kick [~ this] + %watch-ack [~ this] + :: + %fact + ?. =(p.cage.sign %thread-fail) + :_ this + :_ ~ + :* %pass + wire + %agent + [our.bowl %spider] + %leave + ~ + == + =/ =serial (slav %uv i.t.wire) + =/ =observer:sur (~(got by observers) serial) + ~& observer-failed+observer + :_ this(observers (~(del by observers) serial)) + :~ :* %pass + [%observer i.t.wire ~] + %agent + [our.bowl app.observer] + %leave + ~ + == + :* %pass + wire + %agent + [our.bowl %spider] + %leave + ~ + == + == + == + :: + ++ on-thread-start + ?> ?=([%thread-start @ @ ~] wire) + ?. 
?=(%poke-ack -.sign) (on-agent:def wire sign) + ?~ p.sign [~ this] + =/ =serial (slav %uv i.t.wire) + =/ =observer:sur (~(got by observers) serial) + ~& added-invalid-observer+observer + :_ this(observers (~(del by observers) serial)) + :~ :* %pass + [%observer i.t.wire ~] + %agent + [our.bowl app.observer] + %leave + ~ + == + :* %pass + wire + %agent + [our.bowl app.observer] + %leave + ~ + == == + -- +:: +++ on-watch on-watch:def +++ on-leave on-leave:def +++ on-peek on-peek:def +++ on-arvo on-arvo:def +++ on-fail on-fail:def +-- diff --git a/pkg/arvo/app/publish.hoon b/pkg/arvo/app/publish.hoon index cf0654df8..2588be433 100644 --- a/pkg/arvo/app/publish.hoon +++ b/pkg/arvo/app/publish.hoon @@ -8,7 +8,7 @@ /- *permission-hook /- *permission-group-hook /- *permission-store -/- *invite-store +/- inv=invite-store /- *metadata-store /- *metadata-hook /- contact-view @@ -22,6 +22,7 @@ /+ verb /+ grpl=group /+ group-store +/+ graph-store /+ resource :: ~% %publish ..is ~ @@ -52,6 +53,15 @@ == == :: ++$ state-four + [state-three migrate=migration-state] +:: +:: $migration-state: resources that are unavailable because their host +:: has not processed the ota, and number of times we've tried to reach +:: the host ++$ migration-state + (map resource @ud) +:: +$ versioned-state $% [%1 state-two] [%2 state-two] @@ -59,6 +69,7 @@ [%4 state-three] [%5 state-three] [%6 state-three] + [%7 state-four] == :: +$ metadata-delta @@ -74,7 +85,7 @@ == -- :: -=| [%6 state-three] +=| [%7 state-four] =* state - %- agent:dbug %+ verb | @@ -87,23 +98,7 @@ :: ++ on-init ^- (quip card _this) - =/ rav [%sing %t [%da now.bol] /app/publish/notebooks] - :_ this - :~ [%pass /view-bind %arvo %e %connect [~ /'publish-view'] %publish] - [%pass /read/paths %arvo %c %warp our.bol q.byk.bol `rav] - (invite-poke:main [%create /publish]) - :* %pass /invites %agent [our.bol %invite-store] %watch - /invitatory/publish - == - :* %pass / %agent [our.bol %invite-store] %poke %invite-action - !>([%create /publish]) - == - :* %pass /srv %agent [our.bol %file-server] - %poke %file-server-action - !>([%serve-dir /'~publish' /app/landscape %.n %.y]) - == - [%pass /groups %agent [our.bol %group-store] %watch /groups] - == + `this :: ++ on-save !>(state) :: @@ -122,7 +117,7 @@ :* %pass /permissions %agent [our.bol %permission-store] %watch /updates == - (invite-poke:main [%create /publish]) + (invite-poke:main [%create %publish]) :* %pass /invites %agent [our.bol %invite-store] %watch /invitatory/publish == @@ -136,10 +131,10 @@ =+ ^- [kick-cards=(list card) old-subs=(jug @tas @p)] kick-subs =/ inv-scry-pax /(scot %p our.bol)/invite-store/(scot %da now.bol)/invitatory/publish/noun - =/ inv=(unit invitatory) .^((unit invitatory) %gx inv-scry-pax) + =/ invi=(unit invitatory:inv) .^((unit invitatory:inv) %gx inv-scry-pax) =| new-state=state-two - =? tile-num.new-state ?=(^ inv) - ~(wyt by u.inv) + =? tile-num.new-state ?=(^ invi) + ~(wyt by u.invi) %= $ old-state [%& %2 new-state] :: @@ -242,8 +237,152 @@ == :: %6 + =/ [ours=(set [rid=resource nb=notebook]) theirs=(set resource)] + %+ roll ~(tap by books.p.old-state) + |= [[[who=@p book=@tas] nb=notebook] [ours=(set [resource notebook]) theirs=(set resource)]] + ^- [(set [resource notebook]) (set resource)] + =/ =resource + [who book] + ?. 
=(who our.bol) + ours^(~(put in theirs) resource) + :_ theirs + (~(put in ours) [resource nb]) + :: + %_ $ + p.old-state + :+ %7 +.p.old-state + %- ~(gas by *(map resource @ud)) + (turn ~(tap in theirs) (late 0)) + :: + cards + ;: weld + cards + :: move our books to graph-store + ^- (list card) + %- zing + %+ turn ~(tap in ours) + |= [rid=resource nb=notebook] + ^- (list card) + =/ =graph:graph-store + (notebook-to-graph nb) + :~ + %- poke-graph-store + :* %0 date-created.nb %add-graph + rid + graph + `%graph-validator-publish + %.y + == + (poke-graph-push %add rid) + == + :: for their books, subscribe to graph-pull-hook, to see if host has migrated + ^- (list card) + (turn ~(tap in theirs) check-host-migrate:main) + :: leave all subscriptions + ^- (list card) + %+ turn ~(tap in ~(key by wex.bol)) + |= [=wire =ship app=term] + ^- card + [%pass wire %agent [ship app] %leave ~] + == + == + :: + %7 [cards this(state p.old-state)] == + ++ blank-note-node + |= =note + %* . *node:graph-store + author.post author.note + time-sent.post date-created.note + == + :: + ++ notebook-to-graph + |= =notebook + ^- graph:graph-store + %+ gas:orm:graph-store *graph:graph-store + %+ turn ~(tap by notes.notebook) + |= [@ta =note] + ^- [atom node:graph-store] + :- date-created.note + %* . (blank-note-node note) + index.post ~[date-created.note] + :: + children + :- %graph + (note-to-revision-container notebook note) + == + :: + ++ note-to-revision-container + |= [=notebook =note] + ^- graph:graph-store + %+ gas:orm:graph-store *graph:graph-store + :~ + :- %1 + %* . (blank-note-node note) + index.post ~[date-created.note %1] + children graph+(note-to-revisions note) + == + :: + :- %2 + %* . (blank-note-node note) + index.post ~[date-created.note %2] + children (comments-to-internal-graph note) + == + == + :: + ++ note-to-revisions + |= =note + ^- graph:graph-store + %^ put:orm:graph-store + *graph:graph-store %1 + =/ body=@t + =/ file + (trip file.note) + =/ idx + (find ";>" file) + ?~ idx + file.note + %- crip + (slag (add 2 u.idx) (trip file.note)) + %* . (blank-note-node note) + index.post ~[date-created.note %1 %1] + contents.post ~[text+title.note text+body] + == + :: + ++ comments-to-internal-graph + |= =note + ^- internal-graph:graph-store + ?: =(~ comments.note) + [%empty ~] + :- %graph + %+ gas:orm:graph-store *graph:graph-store + %+ turn ~(tap by comments.note) + |= [when=@da =comment] + ^- [atom node:graph-store] + :- when + %* . 
*node:graph-store + author.post author.comment + index.post ~[date-created.note %2 when] + time-sent.post when + contents.post [%text content.comment]~ + == + :: + ++ poke-our + |= [app=term =cage] + [%pass / %agent [our.bol app] %poke cage] + :: + ++ poke-graph-pull + |= =action:pull-hook + (poke-our %graph-pull-hook pull-hook-action+!>(action)) + :: + ++ poke-graph-store + |= =update:graph-store + (poke-our %graph-store graph-update+!>(update)) + :: + ++ poke-graph-push + |= =action:push-hook + (poke-our %graph-push-hook push-hook-action+!>(action)) ++ convert-notebook-3-4 |= prev=notebook-3 ^- notebook-3 @@ -312,12 +451,12 @@ |= who=@p ^- card =/ uid (sham %publish who book eny.bol) - =/ inv=invite - :* our.bol %publish /notebook/[book] who + =/ =invite:inv + :* our.bol %publish [our.bol book] who (crip "invite for notebook {}/{(trip book)}") == - =/ act=invite-action [%invite /publish uid inv] - [%pass /invite %agent [who %invite-hook] %poke %invite-action !>(act)] + =/ =action:inv [%invite %publish uid invite] + [%pass /invite %agent [who %invite-hook] %poke %invite-action !>(action)] :: ++ move-files |= old-subs=(jug @tas @p) @@ -373,227 +512,49 @@ [[%pass /move-files %arvo %c %info q.byk.bol %& sob] cards] -- :: - ++ on-poke - |= [mar=mark vas=vase] - ^- (quip card _this) - ?+ mar (on-poke:def mar vas) - :: - %noun - ?+ q.vas - [~ this] - :: - %flush-limbo [~ this(limbo [~ ~])] - :: - %reset-warp - =/ rav [%sing %t [%da now.bol] /app/publish/notebooks] - :_ this - [%pass /read/paths %arvo %c %warp our.bol q.byk.bol `rav]~ - == - :: - %handle-http-request - =+ !<([id=@ta req=inbound-request:eyre] vas) - :_ this - %+ give-simple-payload:app id - %+ require-authorization:app req - handle-http-request:main - :: - %publish-action - =^ cards state - (poke-publish-action:main !<(action vas)) - [cards this] - == - :: - ++ on-watch - |= pax=path - ^- (quip card _this) - ?+ pax (on-watch:def pax) - [%http-response *] [~ this] - [%primary ~] [~ this] - [%notebook @ ~] - =^ cards state - (watch-notebook:main pax) - [cards this] - == - :: + ++ on-poke on-poke:def + ++ on-watch on-watch:def ++ on-leave on-leave:def - ++ on-peek - |= pax=path - ^- (unit (unit cage)) - ?+ pax (on-peek:def pax) - [%t %limbo ~] - :^ ~ ~ %noun - !> ^- (list path) - %+ weld - %+ turn ~(tap by notes.limbo) - |= [[who=@p book=@tas note=@tas] *] - ^- path - /(scot %p who)/[book]/[note] - %+ turn ~(tap by comments.limbo) - |= [[who=@p book=@tas note=@tas comment=@da] *] - ^- path - /(scot %p who)/[book]/[note]/(scot %ds comment) - :: - [%x %limbo @ @ @ ~] - =/ host=(unit @p) (slaw %p i.t.t.pax) - ?~ host [~ ~] - =/ book-name i.t.t.t.pax - =/ note-name i.t.t.t.t.pax - =/ note (~(get by notes.limbo) u.host book-name note-name) - ?~ note ~ - ``noun+!>(u.note) - :: - [%x %limbo @ @ @ @ ~] - =/ host=(unit @p) (slaw %p i.t.t.pax) - =/ comment-date=(unit @da) (slaw %da i.t.t.t.t.t.pax) - ?~ host [~ ~] - ?~ comment-date [~ ~] - =/ book-name i.t.t.t.pax - =/ note-name i.t.t.t.t.pax - =/ comment - (~(get by comments.limbo) u.host book-name note-name u.comment-date) - ?~ comment ~ - ``noun+!>(u.comment) - :: - [%x %book @ @ ~] - =/ host=(unit @p) (slaw %p i.t.t.pax) - =/ book-name i.t.t.t.pax - ?~ host [~ ~] - =/ book (~(get by books) u.host book-name) - ?~ book ~ - ``noun+!>(u.book) - == - :: - ++ on-agent - |= [wir=wire sin=sign:agent:gall] + ++ on-peek on-peek:def + ++ on-agent + |= [=wire =sign:agent:gall] ^- (quip card _this) - ?- -.sin - %poke-ack - ?: ?=([%join-group @ @ ~] wir) - ?^ p.sin - (on-agent:def wir sin) - =/ =ship - 
(slav %p i.t.wir) - =^ cards state - (subscribe-notebook ship i.t.t.wir) - [cards this] - ?~ p.sin - [~ this] - =^ cards state - (handle-poke-fail:main wir) - [cards this] - :: If our subscribe failed, delete notebook associated with subscription if - :: it exists - :: - %watch-ack - ?. ?=([%subscribe @ @ ~] wir) - (on-agent:def wir sin) - ?~ p.sin - [~ this] - =/ who=@p (slav %p i.t.wir) - =/ book=@tas i.t.t.wir - =/ del [%del-book who book] - :_ this(books (~(del by books) who book)) - [%give %fact [/primary]~ %publish-primary-delta !>(del)]~ - :: Resubscribe to any subscription we get kicked from. The case of actually - :: getting banned from a notebook is handled by %watch-ack - :: - %kick - ?+ wir - [~ this] - :: - [%subscribe @ @ ~] - =/ who=@p (slav %p i.t.wir) - =/ book=@tas i.t.t.wir - =/ wen=(unit @da) (get-last-update:main who book) - =/ pax=path - ?~ wen - /notebook/[book] - /notebook/[book]/(scot %da u.wen) - :_ this - [%pass wir %agent [who %publish] %watch pax]~ - :: - [%permissions ~] - :_ this - [%pass /permissions %agent [our.bol %permission-store] %watch /updates]~ - :: - [%groups ~] - :_ this - [%pass /groups %agent [our.bol %group-store] %watch /groups]~ - :: - [%invites ~] - :_ this - :_ ~ - :* %pass /invites %agent [our.bol %invite-store] %watch - /invitatory/publish - == - == - :: - %fact - ?+ wir (on-agent:def wir sin) - [%subscribe @ @ ~] - =/ who=@p (slav %p i.t.wir) - =/ book-name i.t.t.wir - ?> ?=(%publish-notebook-delta p.cage.sin) - =^ cards state - (handle-notebook-delta:main !<(notebook-delta q.cage.sin) state) - [cards this] - :: - [%groups ~] - =^ cards state - (handle-group-update:main !<(update:group-store q.cage.sin)) - [cards this] - :: - [%invites ~] - =^ cards state - (handle-invite-update:main !<(invite-update q.cage.sin)) - [cards this] - :: - [%collection *] - [~ this] - == - == - :: + ?. ?=([%graph-migrate *] wire) + (on-agent:def wire sign) + =/ rid=resource + (de-path:resource t.wire) + ?. ?=(%watch-ack -.sign) + ~| "Expected error, please ignore" + (on-agent:def wire sign) + ?~ p.sign + :: if watch acked successfully, then host has completed OTA, and + :: we are safe to add it to the pull-hook + :_ this(migrate (~(del by migrate) rid)) + ~[(poke-graph-pull:main %add entity.rid rid)] + :: if nacked, then set a exponential backoff and retry + =/ nack-count=@ud + +((~(gut by migrate) rid 0)) + ?: (gte nack-count 24) + ~& >>> "failed to migrate notebook {} to graph-store" + [~ this] + :_ this(migrate (~(put by migrate) rid nack-count)) + :: (bex 19) is roughly 6 days + =/ wakeup=@da + (add now.bol (mul ~s1 (bex (min 19 nack-count)))) + [%pass wire %arvo %b %wait wakeup]~ + :: ++ on-arvo - |= [wir=wire sin=sign-arvo] + |= [=wire =sign-arvo] ^- (quip card _this) - ?+ wir - (on-arvo:def wir sin) - :: - [%read %paths ~] - ?> ?=([?(%b %c) %writ *] sin) - =/ rot=riot:clay +>.sin - ?> ?=(^ rot) - =^ cards state - (read-paths:main u.rot) - [cards this] - :: - [%read %info *] - ?> ?=([?(%b %c) %writ *] sin) - =/ rot=riot:clay +>.sin - =^ cards state - (read-info:main t.t.wir rot) - [cards this] - :: - [%read %note *] - ?> ?=([?(%b %c) %writ *] sin) - =/ rot=riot:clay +>.sin - =^ cards state - (read-note:main t.t.wir rot) - [cards this] - :: - [%read %comment *] - ?> ?=([?(%b %c) %writ *] sin) - =/ rot=riot:clay +>.sin - =^ cards state - (read-comment:main t.t.wir rot) - [cards this] - :: - [%bind ~] - [~ this] - :: - [%view-bind ~] - [~ this] - == + ?. 
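::
::  the retry schedule above doubles the wait after every nack, starting at
::  one second and capping the exponent at nineteen; a sketch of the same
::  arithmetic (the gate name is illustrative, the agent computes it inline):
::
::    =/  wakeup
::      |=  [nack-count=@ud now=@da]
::      ^-  @da
::      (add now (mul ~s1 (bex (min 19 nack-count))))
::
::    nack-count 0    ->  now + ~s1
::    nack-count 10   ->  now + ~m17.s4         ::  1.024 seconds
::    nack-count 19+  ->  now + ~d6.h1.m38.s8   ::  524.288 seconds, ~6 days
::
::  after 24 consecutive nacks no further timer is set and the migration
::  for that notebook is abandoned.
::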
?=([%graph-migrate *] wire) + (on-arvo:def wire sign-arvo) + =/ rid=resource + (de-path:resource t.wire) + ?> ?=([%b %wake *] sign-arvo) + ~? ?=(^ error.sign-arvo) + "behn errored in backoff timers, continuing anyway" + :_ this + ~[(check-host-migrate:main rid)] :: ++ on-fail on-fail:def -- @@ -601,624 +562,8 @@ |_ bol=bowl:gall ++ grup ~(. grpl bol) :: -++ metadata-store-poke - |= act=metadata-action - ^- card - [%pass / %agent [our.bol %metadata-store] %poke %metadata-action !>(act)] - :: -:: -++ get-last-update - |= [host=@p book-name=@tas] - ^- (unit @da) - =/ book (~(get by books) host book-name) - ?~ book ~ - =/ wen date-created.u.book - %- some - %- ~(rep by notes.u.book) - |= [[@tas =note] out=_wen] - ^- @da - %+ max out - %+ max last-edit.note - %- ~(rep by comments.note) - |= [[@da =comment] out=_out] - (max date-created.comment out) -:: -++ get-notebook-from-date - |= [host=@p book-name=@tas wen=@da] - ^- notebook - =/ book (~(got by books) host book-name) - %= book - notes - %- ~(rep by notes.book) - |= [[nom=@tas not=note] out=(map @tas note)] - ^- (map @tas note) - ?: (gth last-edit.not wen) - (~(put by out) nom not) - =. comments.not - %- ~(rep by comments.not) - |= [[nam=@da com=comment] out=(map @da comment)] - ?: (gth date-created.com wen) - (~(put by out) nam com) - out - ?~ comments.not - out - (~(put by out) nom not) - == -:: -++ merge-notebooks - |= [base=notebook diff=notebook] - ^- notebook - %= diff - notes - %- ~(rep by notes.diff) - |= [[nom=@tas not=note] out=_notes.base] - =/ base-note=(unit note) (~(get by out) nom) - ?~ base-note - (~(put by out) nom not) - =. comments.u.base-note - (~(uni by comments.u.base-note) comments.not) - (~(put by out) nom u.base-note) - == -:: -++ read-paths - |= ran=rant:clay - ^- (quip card _state) - =/ rav [%next %t [%da now.bol] /app/publish/notebooks] - =/ new (filter-and-sort-paths !<((list path) q.r.ran)) - =/ dif (diff-paths our-paths new) - =^ del-moves state (del-paths del.dif) - =^ add-moves state (add-paths add.dif) - :: - =/ cards=(list card) - ;: weld - [%pass /read/paths %arvo %c %warp our.bol q.byk.bol `rav]~ - del-moves - add-moves - == - [cards state(our-paths new)] -:: -++ read-info - |= [pax=path rot=riot:clay] - ^- (quip card _state) - ?> ?=([%app %publish %notebooks @ %publish-info ~] pax) - =/ book-name i.t.t.t.pax - ?~ rot - [~ state] - =/ info=notebook-info !<(notebook-info q.r.u.rot) - =/ new-book=notebook - :* title.info - description.info - comments.info - writers.info - subscribers.info - now.bol - ~ ~ ~ - == - =/ rif=riff:clay [q.byk.bol `[%next %x [%da now.bol] pax]] - =/ delta=notebook-delta - [%edit-book our.bol book-name new-book] - =^ cards state - (handle-notebook-delta delta state) - :_ state - :* [%pass (welp /read/info pax) %arvo %c %warp our.bol rif] - cards - == -:: -++ read-note - |= [pax=path rot=riot:clay] - ^- (quip card _state) - ?> ?=([%app %publish %notebooks @ @ %udon ~] pax) - =/ book-name i.t.t.t.pax - =/ note-name i.t.t.t.t.pax - =/ book (~(get by books) our.bol book-name) - ?~ book - [~ state] - =/ old-note (~(get by notes.u.book) note-name) - ?~ old-note - [~ state] - ?~ rot - [~ state] - =/ udon !<(@t q.r.u.rot) - =/ new-note=note (form-note note-name udon) - =/ rif=riff:clay [q.byk.bol `[%next %x [%da now.bol] pax]] - =/ delta=notebook-delta - [%edit-note our.bol book-name note-name new-note] - =^ cards state - (handle-notebook-delta delta state) - :_ state - :* [%pass (welp /read/note pax) %arvo %c %warp our.bol rif] - cards - == -:: -++ read-comment - |= [pax=path 
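::
::  every wire in the migration flow is keyed on the resource being
::  migrated: +check-host-migrate (further down) passes
::  graph-migrate+(en-path:resource rid), and +on-agent / +on-arvo recover
::  it with (de-path:resource t.wire), so one notebook's timer or watch
::  never collides with another's.  assuming lib/resource's
::  /ship/<entity>/<name> path encoding, the round trip looks like:
::
::    rid                        [~zod %my-notebook]
::    (en-path:resource rid)     /ship/~zod/my-notebook
::    wire                       /graph-migrate/ship/~zod/my-notebook
::    (de-path:resource t.wire)  [~zod %my-notebook]
::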
rot=riot:clay] - ^- (quip card _state) - ?> ?=([%app %publish %notebooks @ @ @ %publish-comment ~] pax) - ?~ rot - [~ state] - =/ comment-date (slaw %da i.t.t.t.t.t.pax) - ?~ comment-date - [~ state] - =/ book-name i.t.t.t.pax - =/ note-name i.t.t.t.t.pax - =/ com-2-3 !<(?(comment-2 comment-3) q.r.u.rot) - =/ new-comment=comment-3 - ?: ?=(comment-2 com-2-3) - [author.com-2-3 date-created.com-2-3 content.com-2-3 %.n] - com-2-3 - =/ rif=riff:clay [q.byk.bol `[%next %x [%da now.bol] pax]] - =/ delta=notebook-delta - [%edit-comment our.bol book-name note-name u.comment-date new-comment] - =^ cards state - (handle-notebook-delta delta state) - :_ state - :* [%pass (welp /read/comment pax) %arvo %c %warp our.bol rif] - cards - == -:: -++ filter-and-sort-paths - |= paths=(list path) - ^- (list path) - %+ sort - %+ skim paths - |= pax=path - ?| ?=([%app %publish %notebooks @ %publish-info ~] pax) - ?=([%app %publish %notebooks @ @ %udon ~] pax) - ?=([%app %publish %notebooks @ @ @ %publish-comment ~] pax) - == - |= [a=path b=path] - ^- ? - (lte (lent a) (lent b)) -:: -++ diff-paths - |= [old=(list path) new=(list path)] - ^- [del=(list path) add=(list path)] - =/ del=(list path) (skim old |=(p=path ?=(~ (find [p]~ new)))) - =/ add=(list path) (skim new |=(p=path ?=(~ (find [p]~ old)))) - [del add] -:: -++ del-paths - |= paths=(list path) - ^- (quip card _state) - %+ roll paths - |= [pax=path cad=(list card) sty=_state] - ?+ pax !! - [%app %publish %notebooks @ %publish-info ~] - =/ book-name i.t.t.t.pax - =/ delta=notebook-delta [%del-book our.bol book-name] - =^ cards sty (handle-notebook-delta delta sty) - [(weld cards cad) sty] - :: - [%app %publish %notebooks @ @ %udon ~] - =/ book-name i.t.t.t.pax - =/ note-name i.t.t.t.t.pax - =/ book (~(get by books.sty) our.bol book-name) - ?~ book - [cad sty] - =. notes.u.book (~(del by notes.u.book) note-name) - =/ delta=notebook-delta [%del-note our.bol book-name note-name] - =^ cards sty (handle-notebook-delta delta sty) - [(weld cards cad) sty] - :: - [%app %publish %notebooks @ @ @ %publish-comment ~] - =/ book-name i.t.t.t.pax - =/ note-name i.t.t.t.t.pax - =/ comment-date (slaw %da i.t.t.t.t.t.pax) - ?~ comment-date - [cad sty] - =/ delta=notebook-delta - [%del-comment our.bol book-name note-name u.comment-date] - =^ cards sty (handle-notebook-delta delta sty) - [(weld cards cad) sty] - == -:: -++ add-paths - |= paths=(list path) - ^- (quip card _state) - %+ roll paths - |= [pax=path cad=(list card) sty=_state] - ^- (quip card _state) - ?+ pax !! - [%app %publish %notebooks @ %publish-info ~] - =/ book-name i.t.t.t.pax - =/ info=notebook-info .^(notebook-info %cx (welp our-beak pax)) - =* title title.info - =* description description.info - =/ new-book=notebook - :* title - description - comments.info - writers.info - subscribers.info - now.bol - ~ ~ ~ - == - =+ ^- [grp-car=(list card) write-pax=path read-pax=path] - ?: =(writers.new-book /) - =/ group-path /~/(scot %p our.bol)/[book-name] - (make-groups book-name [group-path ~ %.n %.n] title description) - [~ writers.info subscribers.info] - =. writers.new-book write-pax - =. subscribers.new-book read-pax - =+ ^- [read-cards=(list card) notes=(map @tas note)] - (watch-notes /app/publish/notebooks/[book-name]) - =. 
notes.new-book notes - =/ delta=notebook-delta [%add-book our.bol book-name new-book] - =/ rif=riff:clay [q.byk.bol `[%next %x [%da now.bol] pax]] - =^ update-cards sty (handle-notebook-delta delta sty) - :_ sty - ;: weld - grp-car - [%pass (welp /read/info pax) %arvo %c %warp our.bol rif]~ - read-cards - update-cards - cad - == - :: - [%app %publish %notebooks @ @ %udon ~] - =/ book-name i.t.t.t.pax - =/ note-name i.t.t.t.t.pax - =/ new-note=note (scry-note pax) - =+ ^- [read-cards=(list card) comments=(map @da comment)] - (watch-comments /app/publish/notebooks/[book-name]/[note-name]) - =. comments.new-note comments - =/ rif=riff:clay [q.byk.bol `[%next %x [%da now.bol] pax]] - =/ delta=notebook-delta - [%add-note our.bol book-name note-name new-note] - =^ update-cards sty (handle-notebook-delta delta sty) - :_ sty - ;: weld - [%pass (welp /read/note pax) %arvo %c %warp our.bol rif]~ - read-cards - update-cards - cad - == - :: - [%app %publish %notebooks @ @ @ %publish-comment ~] - =/ book-name i.t.t.t.pax - =/ note-name i.t.t.t.t.pax - =/ comment-name (slaw %da i.t.t.t.t.t.pax) - ?~ comment-name - [~ sty] - =/ com-2-3 .^(?(comment-2 comment-3) %cx (welp our-beak pax)) - =/ new-com=comment-3 - ?: ?=(comment-2 com-2-3) - [author.com-2-3 date-created.com-2-3 content.com-2-3 %.n] - com-2-3 - =/ rif=riff:clay [q.byk.bol `[%next %x [%da now.bol] pax]] - :: - =/ delta=notebook-delta - [%add-comment our.bol book-name note-name u.comment-name new-com] - =^ update-cards sty (handle-notebook-delta delta sty) - :_ sty - ;: weld - [%pass (welp /read/comment pax) %arvo %c %warp our.bol rif]~ - update-cards - cad - == - == -:: -++ watch-notes - |= pax=path - ^- [(list card) (map @tas note)] - =/ paths .^((list path) %ct (weld our-beak pax)) - %+ roll paths - |= [pax=path cards=(list card) notes=(map @tas note)] - ?. ?=([%app %publish %notebooks @ @ %udon ~] pax) - [cards notes] - =/ book-name i.t.t.t.pax - =/ note-name i.t.t.t.t.pax - =/ new-note (scry-note pax) - =^ comment-cards comments.new-note - (watch-comments /app/publish/notebooks/[book-name]/[note-name]) - =/ rif=riff:clay [q.byk.bol `[%next %x [%da now.bol] pax]] - :_ (~(put by notes) note-name new-note) - ;: weld - [%pass (welp /read/note pax) %arvo %c %warp our.bol rif]~ - comment-cards - cards - == -:: -++ watch-comments - |= pax=path - ^- [(list card) (map @da comment)] - =/ paths .^((list path) %ct (weld our-beak pax)) - %+ roll paths - |= [pax=path cards=(list card) comments=(map @da comment)] - ?. 
?=([%app %publish %notebooks @ @ @ %publish-comment ~] pax) - [cards comments] - =/ comment-name (slaw %da i.t.t.t.t.t.pax) - ?~ comment-name - [cards comments] - =/ new-com .^(comment %cx (welp our-beak pax)) - =/ rif=riff:clay [q.byk.bol `[%next %x [%da now.bol] pax]] - :_ (~(put by comments) u.comment-name new-com) - [[%pass (welp /read/comment pax) %arvo %c %warp our.bol rif] cards] -:: -++ scry-note - |= pax=path - ^- note - ?> ?=([%app %publish %notebooks @ @ %udon ~] pax) - =/ note-name i.t.t.t.t.pax - =/ udon=@t .^(@t %cx (welp our-beak pax)) - (form-note note-name udon) -:: -++ form-snippet - |= file=@t - ^- @t - =/ front-idx (add 3 (need (find ";>" (trip file)))) - =/ front-matter (cat 3 (end 3 front-idx file) 'dummy text\0a') - =/ body (cut 3 [front-idx (met 3 file)] file) - (of-wain:format (scag 1 (to-wain:format body))) -:: -++ form-note - |= [note-name=@tas file=@t] - ^- note - =/ snippet=@t (form-snippet file) - =/ front-idx (add 3 (need (find ";>" (trip file)))) - =/ front-matter (cat 3 (end 3 front-idx file) 'dummy text\0a') - =/ meta=(each (map term knot) tang) - %- mule |. - %- ~(run by inf:(static:cram (ream front-matter))) - |= a=dime ^- cord - ?+ (end 3 1 p.a) (scot a) - %t q.a - == - :: - =/ author=@p our.bol - =? author ?=(%.y -.meta) - %+ fall - (biff (~(get by p.meta) %author) (slat %p)) - our.bol - :: - =/ title=@t note-name - =? title ?=(%.y -.meta) - (fall (~(get by p.meta) %title) note-name) - :: - =/ date-created=@da now.bol - =? date-created ?=(%.y -.meta) - %+ fall - (biff (~(get by p.meta) %date-created) (slat %da)) - now.bol - :: - =/ last-modified=@da now.bol - =? last-modified ?=(%.y -.meta) - %+ fall - (biff (~(get by p.meta) %last-modified) (slat %da)) - now.bol - :: - :* author - title - note-name - date-created - last-modified - %.y - file - snippet - ~ - %.n - == -:: -++ handle-group-update - |= =update:group-store - ^- (quip card _state) - ?. ?=(?(%remove-members %add-members) -.update) - [~ state] - =* ships ships.update - =/ =path - (en-path:resource resource.update) - =/ book=(unit @tas) - %+ roll ~(tap by books) - |= [[[who=@p nom=@tas] book=notebook] out=(unit @tas)] - ?. =(who our.bol) - out - ?. =(path subscribers.book) - out - `nom - ?~ book - [~ state] - :_ state - %- zing - :- ^- (list card) - %+ roll ~(tap by books) - |= [[[who=@p book=@tas] nb=notebook] out=(list card)] - ^- (list card) - ?. =(who our.bol) - out - ?. =(writers.nb path) - out - =/ rid (de-path:resource writers.nb) - =/ grp=(unit group) (scry-group:grup rid) - ?~ grp out - ?: hidden.u.grp - out - =/ =tag [%publish (cat 3 'writers-' book)] - :_ out - (group-proxy-poke entity.rid %add-tag rid tag members.u.grp) - %+ turn ~(tap in ships) - |= who=@p - ?. 
(allowed who %read u.book) - [%give %kick [/notebook/[u.book]]~ `who]~ - ?: ?|(?=(%remove-members -.update) (is-managed-path:grup path)) - ~ - =/ uid (sham %publish who u.book eny.bol) - =/ inv=invite - :* our.bol %publish /notebook/[u.book] who - (crip "invite for notebook {}/{(trip u.book)}") - == - =/ act=invite-action [%invite /publish uid inv] - [%pass / %agent [our.bol %invite-hook] %poke %invite-action !>(act)]~ -:: -++ handle-invite-update - |= upd=invite-update - ^- (quip card _state) - ?+ -.upd - [~ state] - :: - %delete - [~ state] - :: - %invite - [~ state] - :: - %decline - [~ state] - :: - %accepted - ?> ?=([@ @ *] path.invite.upd) - =/ book i.t.path.invite.upd - =/ group - (group-from-book notebook+book^~) - ?^ group - (subscribe-notebook ship.invite.upd book) - =/ rid=resource - (de-path:resource ship+path.invite.upd) - =/ join-wire=wire - /join-group/[(scot %p ship.invite.upd)]/[book] - =/ =cage - :- %group-update - !> ^- action:group-store - [%add-members rid (sy our.bol ~)] - :_ state - [%pass join-wire %agent [entity.rid %group-push-hook] %poke cage]~ - == -:: -++ subscribe-notebook - |= [=ship book=@tas] - ^- (quip card _state) - =/ pax=path /notebook/[book] - =/ wir=wire /subscribe/[(scot %p ship)]/[book] - =? tile-num (gth tile-num 0) - (dec tile-num) - =/ jon=json (frond:enjs:format %notifications (numb:enjs:format tile-num)) - :_ state - :~ [%pass wir %agent [ship %publish] %watch pax] - [%give %fact [/publishtile]~ %json !>(jon)] - == -:: -++ watch-notebook - |= pax=path - ?> ?=([%notebook @ *] pax) - =/ book-name i.t.pax - ?. (allowed src.bol %read book-name) - ~|("not permitted" !!) - =/ book - ?: ?=([%notebook @ @ ~] pax) - =/ wen=@da (slav %da i.t.t.pax) - (get-notebook-from-date our.bol book-name wen) - (~(got by books) our.bol book-name) - =/ delta=notebook-delta - [%add-book our.bol book-name book] - :_ state - [%give %fact ~ %publish-notebook-delta !>(delta)]~ -:: ++ our-beak /(scot %p our.bol)/[q.byk.bol]/(scot %da now.bol) :: -++ book-writers - |= [host=@p book=@tas] - ^- (set ship) - =/ =notebook (~(got by books) host book) - =/ rid=resource - (de-path:resource writers.notebook) - %- ~(uni in (fall (scry-tag:grup rid %admin) ~)) - %+ fall - (scry-tag:grup rid `tag`[%publish (cat 3 %writers- book)]) - ~ -:: -++ allowed - |= [who=@p mod=?(%read %write) book=@tas] - ^- ? - =/ =notebook (~(got by books) our.bol book) - =/ rid=resource - (de-path:resource writers.notebook) - ?: ?=(%read mod) - (~(has in (members:grup rid)) who) - (~(has in (book-writers our.bol book)) who) -:: -++ write-file - |= [pax=path cay=cage] - ^- card - =. pax (weld our-beak pax) - [%pass (weld /write pax) %arvo %c %info (foal:space:userlib pax cay)] -:: -++ delete-file - |= pax=path - ^- card - =. 
pax (weld our-beak pax) - [%pass (weld /delete pax) %arvo %c %info (fray:space:userlib pax)] -:: -++ delete-dir - |= pax=path - ^- card - =/ nor=nori:clay - :- %& - %+ turn .^((list path) %ct (weld our-beak pax)) - |= pax=path - ^- [path miso:clay] - [pax %del ~] - [%pass (weld /delete pax) %arvo %c %info q.byk.bol nor] -:: -++ add-front-matter - |= [fro=(map knot cord) udon=@t] - ^- @t - %- of-wain:format - =/ tum (trip udon) - =/ id (find ";>" tum) - ?~ id - %+ weld (front-to-wain fro) - (to-wain:format (crip :(weld ";>\0a" tum))) - %+ weld (front-to-wain fro) - (to-wain:format (crip (slag u.id tum))) -:: -++ front-to-wain - |= a=(map knot cord) - ^- wain - =/ entries=wain - %+ turn ~(tap by a) - |= b=[knot cord] - =/ c=[term cord] (,[term cord] b) - (crip " [{<-.c>} {<+.c>}]") - :: - ?~ entries ~ - ;: weld - [':- :~' ~] - entries - [' ==' ~] - == -:: -++ give-primary-delta - |= del=primary-delta - ^- card - [%give %fact [/primary]~ %publish-primary-delta !>(del)] -:: -++ group-poke - |= act=action:group-store - ^- card - [%pass / %agent [our.bol %group-store] %poke %group-action !>(act)] -:: -++ group-proxy-poke - |= [who=ship act=action:group-store] - ^- card - [%pass / %agent [who %group-push-hook] %poke %group-update !>(act)] -:: -++ group-pull-hook-poke - |= act=action:pull-hook - ^- card - [%pass / %agent [our.bol %group-pull-hook] %poke %pull-hook-action !>(act)] -:: -++ contact-view-poke - |= act=contact-view-action:contact-view - ^- card - [%pass / %agent [our.bol %contact-view] %poke %contact-view-action !>(act)] -:: -++ contact-view-create - |= [=path ships=(set ship) =policy title=@t description=@t] - =/ rid=resource - (de-path:resource path) - =/ act=contact-view-action:contact-view - [%create name.rid policy title description] - (contact-view-poke act) -:: ++ perm-hook-poke |= act=permission-hook-action ^- card @@ -1232,7 +577,7 @@ == :: ++ invite-poke - |= act=invite-action + |= act=action:inv ^- card [%pass / %agent [our.bol %invite-store] %poke %invite-action !>(act)] :: @@ -1254,11 +599,11 @@ %+ turn ~(tap in invitees) |= who=ship =/ uid (sham %publish who book eny.bol) - =/ inv=invite - :* our.bol %publish /(scot %p our.bol)/[book] who + =/ =invite:inv + :* our.bol %publish [our.bol book] who (crip "invite for notebook {}/{(trip book)}") == - =/ act=invite-action [%invite /publish uid inv] + =/ act=action:inv [%invite %publish uid invite] [%pass / %agent [our.bol %invite-hook] %poke %invite-action !>(act)] :: ++ make-groups @@ -1297,1135 +642,50 @@ :- (group-poke %add-group rid policy %.y) (generate-invites book (~(del in invitees.group) our.bol)) :: -++ handle-poke-fail - |= wir=wire - ^- (quip card _state) - ?+ wir - [~ state] - :: new note failed, stash it in limbo - :: - [%forward %new-note @ @ @ ~] - =/ host=@p (slav %p i.t.t.wir) - =/ book-name i.t.t.t.wir - =/ note-name i.t.t.t.t.wir - =/ book (~(get by books) [host book-name]) - ?~ book - [~ state] - =/ note (~(get by notes.u.book) note-name) - ?~ note - [~ state] - =. notes.limbo (~(put by notes.limbo) [host book-name note-name] u.note) - =. 
notes.u.book (~(del by notes.u.book) note-name) - =/ del [%del-note host book-name note-name] - :- [(give-primary-delta del)]~ - state(books (~(put by books) [host book-name] u.book)) - :: new comment failed, stash it in limbo - :: - [%forward %new-comment @ @ @ @ ~] - =/ host=@p (slav %p i.t.t.wir) - =/ book-name i.t.t.t.wir - =/ note-name i.t.t.t.t.wir - =/ comment-date=@da (slav %da i.t.t.t.t.t.wir) - =/ book (~(get by books) [host book-name]) - ?~ book - [~ state] - =/ note (~(get by notes.u.book) note-name) - ?~ note - [~ state] - =/ comment (~(get by comments.u.note) comment-date) - ?~ comment - [~ state] - =. comments.limbo - %+ ~(put by comments.limbo) - [host book-name note-name comment-date] - u.comment - =. comments.u.note (~(del by comments.u.note) comment-date) - =. notes.u.book (~(put by notes.u.book) note-name u.note) - =/ del [%del-comment host book-name note-name comment-date] - :- [(give-primary-delta del)]~ - state(books (~(put by books) [host book-name] u.book)) - :: edit note failed, restore old version - :: - [%forward %edit-note @ @ @ ~] - =/ host=@p (slav %p i.t.t.wir) - =/ book-name i.t.t.t.wir - =/ note-name i.t.t.t.t.wir - =/ book (~(get by books) [host book-name]) - ?~ book - [~ state] - =/ note (~(get by notes.limbo) host book-name note-name) - ?~ note - [~ state] - =. notes.u.book (~(put by notes.u.book) note-name u.note) - =/ del [%edit-note host book-name note-name u.note] - :- [(give-primary-delta del)]~ - %= state - books (~(put by books) [host book-name] u.book) - notes.limbo (~(del by notes.limbo) host book-name note-name) - == - :: edit comment failed, restore old version - :: - [%forward %new-comment @ @ @ @ ~] - =/ host=@p (slav %p i.t.t.wir) - =/ book-name i.t.t.t.wir - =/ note-name i.t.t.t.t.wir - =/ comment-date=@da (slav %da i.t.t.t.t.t.wir) - =/ book (~(get by books) [host book-name]) - ?~ book - [~ state] - =/ note (~(get by notes.u.book) note-name) - ?~ note - [~ state] - =/ comment - (~(get by comments.limbo) host book-name note-name comment-date) - ?~ comment - [~ state] - =. comments.u.note (~(put by comments.u.note) comment-date u.comment) - =. notes.u.book (~(put by notes.u.book) note-name u.note) - =/ del [%edit-comment host book-name note-name comment-date u.comment] - :- [(give-primary-delta del)]~ - %= state - books (~(put by books) [host book-name] u.book) - :: - comments.limbo - %+ ~(del by comments.limbo) - [host book-name note-name comment-date] - u.comment - == - :: delete note failed, restore old version - :: - [%forward %del-note @ @ @ ~] - =/ host=@p (slav %p i.t.t.wir) - =/ book-name i.t.t.t.wir - =/ note-name i.t.t.t.t.wir - =/ book (~(get by books) [host book-name]) - ?~ book - [~ state] - =/ note (~(get by notes.limbo) host book-name note-name) - ?~ note - [~ state] - =. notes.u.book (~(put by notes.u.book) note-name u.note) - =/ del [%add-note host book-name note-name u.note] - :- [(give-primary-delta del)]~ - %= state - books (~(put by books) [host book-name] u.book) - notes.limbo (~(del by notes.limbo) host book-name note-name) - == - :: delete comment failed, restore old version - :: - [%forward %del-comment @ @ @ @ ~] - =/ host=@p (slav %p i.t.t.wir) - =/ book-name i.t.t.t.wir - =/ note-name i.t.t.t.t.wir - =/ comment-date=@da (slav %da i.t.t.t.t.t.wir) - =/ book (~(get by books) [host book-name]) - ?~ book - [~ state] - =/ note (~(get by notes.u.book) note-name) - ?~ note - [~ state] - =/ comment - (~(get by comments.limbo) host book-name note-name comment-date) - ?~ comment - [~ state] - =. 
comments.u.note (~(put by comments.u.note) comment-date u.comment) - =. notes.u.book (~(put by notes.u.book) note-name u.note) - =/ del [%add-comment host book-name note-name comment-date u.comment] - :- [(give-primary-delta del)]~ - %= state - books (~(put by books) [host book-name] u.book) - :: - comments.limbo - %+ ~(del by comments.limbo) - [host book-name note-name comment-date] - u.comment - == - == -:: -++ poke-publish-action - |= act=action - ^- (quip card _state) - ?- -.act - :: %new-book: Make groups and save publish info file. - :: - %new-book - ?. (team:title our.bol src.bol) - ~|("action not permitted" !!) - ?: (~(has by books) our.bol book.act) - ~|("notebook already exists: {}" !!) - =+ ^- [cards=(list card) write-pax=path read-pax=path] - (make-groups book.act group.act title.act about.act) - =/ new-book=notebook-info - :* title.act - about.act - coms.act - write-pax - read-pax - == - =/ pax=path /app/publish/notebooks/[book.act]/publish-info - :_ state - [(write-file pax %publish-info !>(new-book)) cards] - :: %new-note: - :: If poke is from us, eagerly store new note in books. If poke is to us, - :: save file, otherwise forward the poke. If forwarded poke fails, note is - :: removed from books and stored in limbo. - :: - %new-note - =/ book=(unit notebook) (~(get by books) who.act book.act) - ?~ book - ~|("nonexistent notebook {}" !!) - ?: (~(has by notes.u.book) note.act) - ~|("note already exists: {}" !!) - =/ front=(map knot cord) - %- my - :~ title+title.act - author+(scot %p src.bol) - date-created+(scot %da now.bol) - last-modified+(scot %da now.bol) - == - =/ file=@t (add-front-matter front body.act) - :: - =^ cards books - ?. =(src.bol our.bol) - [~ books] - =/ new-note=note - :* src.bol - title.act - note.act - now.bol - now.bol - %.y - file - (form-snippet file) - ~ - %.y - == - =/ del=primary-delta [%add-note who.act book.act note.act new-note] - :- [(give-primary-delta del)]~ - %+ ~(put by books) - [who.act book.act] - u.book(notes (~(put by notes.u.book) note.act new-note)) - :: - :_ state - ?. =(who.act our.bol) - =/ poke-wir=wire - /forward/new-note/(scot %p who.act)/[book.act]/[note.act] - :_ cards - [%pass poke-wir %agent [who.act %publish] %poke %publish-action !>(act)] - ?. ?| (team:title our.bol src.bol) - (allowed src.bol %write book.act) - == - ~|("action not permitted" !!) - =/ pax=path /app/publish/notebooks/[book.act]/[note.act]/udon - :_ cards - [(write-file pax %udon !>(file))] - :: %new-comment - :: If poke is from us, eagerly store new comment in books. If poke is to - :: us, save file, otherwise forward the poke. If forwarded poke fails, - :: comment is removed from books and stored in limbo. - :: - %new-comment - =/ book=(unit notebook) (~(get by books) who.act book.act) - ?~ book - ~|("nonexistent notebook {}" !!) - =/ note=(unit note) (~(get by notes.u.book) note.act) - ?~ note - ~|("nonexistent note {}" !!) - =/ new-comment=comment - :* author=src.bol - date-created=now.bol - content=body.act - %.y - == - :: - =^ cards books - ?. =(src.bol our.bol) - [~ books] - =/ new-note - %= u.note - comments (~(put by comments.u.note) now.bol new-comment) - == - =/ del=primary-delta - [%add-comment who.act book.act note.act now.bol new-comment] - :- [(give-primary-delta del)]~ - %+ ~(put by books) - [who.act book.act] - u.book(notes (~(put by notes.u.book) note.act new-note)) - :_ state - ?. 
=(who.act our.bol) - =/ poke-wir=wire - :~ %forward - %new-comment - (scot %p who.act) - book.act - note.act - (scot %da now.bol) - == - :_ cards - [%pass poke-wir %agent [who.act %publish] %poke %publish-action !>(act)] - ?. ?& ?| (team:title our.bol src.bol) - (allowed src.bol %read book.act) - == - comments.u.book - == - ~|("action not permitted" !!) - =/ pax=path - %+ weld /app/publish/notebooks - /[book.act]/[note.act]/(scot %da now.bol)/publish-comment - [(write-file pax %publish-comment !>(new-comment(pending %.n)))]~ - :: %edit-book: Make groups and save publish-info file - :: - %edit-book - ?. (team:title our.bol src.bol) - ~|("action not permitted" !!) - =/ book (~(get by books) our.bol book.act) - ?~ book - ~|("nonexistent notebook" !!) - =+ ^- [cards=(list card) write-pax=path read-pax=path] - ?~ group.act - [~ writers.u.book subscribers.u.book] - (make-groups book.act u.group.act title.act about.act) - =/ new-info=notebook-info - :* title.act - about.act - coms.act - write-pax - read-pax - == - =/ pax=path /app/publish/notebooks/[book.act]/publish-info - :_ state - [(write-file pax %publish-info !>(new-info)) cards] - :: %edit-note: - :: If poke is from us, eagerly store new note in books, and place the old - :: note in limbo. If poke is to us, save file, otherwise forward the poke. - :: If forwarded poke fails, old note is restored from limbo. - :: - %edit-note - =/ book=(unit notebook) (~(get by books) who.act book.act) - ?~ book - ~|("nonexistent notebook {}" !!) - =/ note=(unit note) (~(get by notes.u.book) note.act) - ?~ note - ~|("nonexistent note: {}" !!) - =/ front=(map knot cord) - %- my - :~ title+title.act - author+(scot %p src.bol) - date-created+(scot %da date-created.u.note) - last-modified+(scot %da now.bol) - == - =/ file=@t (add-front-matter front body.act) - :: - =^ cards state - ?. =(src.bol our.bol) - [~ state] - =/ new-note - %= u.note - author src.bol - title title.act - last-edit now.bol - file file - snippet (form-snippet file) - pending %.y - == - =/ del=primary-delta [%edit-note who.act book.act note.act new-note] - :- [(give-primary-delta del)]~ - %= state - notes.limbo - (~(put by notes.limbo) [who.act book.act note.act] u.note) - :: - books - %+ ~(put by books) - [who.act book.act] - u.book(notes (~(put by notes.u.book) note.act new-note)) - == - :: - :_ state - ?. =(who.act our.bol) - =/ poke-wir=wire - /forward/edit-note/(scot %p who.act)/[book.act]/[note.act] - :_ cards - [%pass poke-wir %agent [who.act %publish] %poke %publish-action !>(act)] - ?. ?| (team:title our.bol src.bol) - ?& =(author.u.note src.bol) - (allowed src.bol %write book.act) - == - == - ~|("action not permitted" !!) - =/ pax=path /app/publish/notebooks/[book.act]/[note.act]/udon - [(write-file pax %udon !>(file))]~ - :: %edit-comment: - :: If poke is from us, eagerly store new comment in books, and place the - :: old note in limbo. If poke is to us, save file, otherwise forward the - :: poke. If forwarded poke fails, old comment is restored from limbo. - :: - %edit-comment - =/ book=(unit notebook) (~(get by books) who.act book.act) - ?~ book - ~|("nonexistent notebook {}" !!) - =/ note=(unit note) (~(get by notes.u.book) note.act) - ?~ note - ~|("nonexistent note {}" !!) - =/ comment-date (slav %da comment.act) - =/ comment=(unit comment) (~(get by comments.u.note) comment-date) - ?~ comment - ~|("nonexistent comment {}" !!) - =/ new-comment - u.comment(content body.act, pending %.y) - :: - =^ cards state - ?. 
=(src.bol our.bol) - [~ state] - =/ new-note - %= u.note - comments - (~(put by comments.u.note) comment-date new-comment) - == - =/ del=primary-delta - [%edit-comment who.act book.act note.act comment-date new-comment] - :- [(give-primary-delta del)]~ - %= state - books - %+ ~(put by books) - [who.act book.act] - u.book(notes (~(put by notes.u.book) note.act new-note)) - :: - comments.limbo - %+ ~(put by comments.limbo) - [who.act book.act note.act comment-date] - u.comment - == - :: - :_ state - ?. =(who.act our.bol) - =/ poke-wir - :~ %forward - %edit-comment - (scot %p who.act) - book.act - note.act - comment.act - == - :_ cards - [%pass poke-wir %agent [who.act %publish] %poke %publish-action !>(act)] - ?. ?| (team:title our.bol src.bol) - ?& =(author.u.comment src.bol) - (allowed src.bol %read book.act) - == - == - ~|("action not permitted" !!) - =/ pax=path - %+ weld /app/publish/notebooks - /[book.act]/[note.act]/[comment.act]/publish-comment - [(write-file pax %publish-comment !>(new-comment(pending %.n)))]~ - :: %del-book: Delete whole notebook directory, delete groups and permissions - :: - %del-book - ?. (team:title our.bol src.bol) - ~|("action not permitted" !!) - =/ book=(unit notebook) (~(get by books) our.bol book.act) - ?~ book - ~|("nonexistent notebook {}" !!) - =/ pax=path /app/publish/notebooks/[book.act] - ?> ?=(^ writers.u.book) - ?> ?=(^ subscribers.u.book) - =/ cards=(list card) - ~[(delete-dir pax)] - =/ rid=resource - (de-path:resource writers.u.book) - =? cards !(is-managed:grup rid) - [(group-poke %remove-group rid ~) cards] - [cards state] - :: %del-note: - :: If poke is from us, eagerly remove note from books, and place the - :: old note in limbo. If poke is to us, save file, otherwise forward the - :: poke. If forwarded poke fails, old note is restored from limbo. - :: - %del-note - =/ book=(unit notebook) (~(get by books) who.act book.act) - ?~ book - ~|("nonexistent notebook {}" !!) - =/ note=(unit note) (~(get by notes.u.book) note.act) - ?~ note - ~|("nonexistent note: {}" !!) - :: - =^ cards state - ?. =(src.bol our.bol) - [~ state] - =/ del=primary-delta [%del-note who.act book.act note.act] - =. notes.u.book (~(del by notes.u.book) note.act) - :- [(give-primary-delta del)]~ - %= state - books (~(put by books) [who.act book.act] u.book) - notes.limbo (~(put by notes.limbo) [who.act book.act note.act] u.note) - == - :: - :_ state - ?. =(who.act our.bol) - =/ poke-wir=wire - /forward/del-note/(scot %p who.act)/[book.act]/[note.act] - :_ cards - [%pass poke-wir %agent [who.act %publish] %poke %publish-action !>(act)] - ?. ?| (team:title our.bol src.bol) - ?& =(author.u.note src.bol) - (allowed src.bol %write book.act) - == - == - ~|("action not permitted" !!) - =/ pax=path /app/publish/notebooks/[book.act]/[note.act]/udon - [(delete-file pax)]~ - :: %del-comment: - :: If poke is from us, eagerly remove comment from books, and place the - :: old note in limbo. If poke is to us, save file, otherwise forward the - :: poke. If forwarded poke fails, old comment is restored from limbo. - :: - %del-comment - =/ book=(unit notebook) (~(get by books) who.act book.act) - ?~ book - ~|("nonexistent notebook {}" !!) - =/ note=(unit note) (~(get by notes.u.book) note.act) - ?~ note - ~|("nonexistent note {}" !!) - =/ comment-date (slav %da comment.act) - =/ comment=(unit comment) (~(get by comments.u.note) comment-date) - ?~ comment - ~|("nonexistent comment {}" !!) - :: - =^ cards state - ?. 
=(src.bol our.bol) - [~ state] - =/ del=primary-delta - [%del-comment who.act book.act note.act comment-date] - =. comments.u.note (~(del by comments.u.note) comment-date) - =. notes.u.book (~(put by notes.u.book) note.act u.note) - :- [(give-primary-delta del)]~ - %= state - books - (~(put by books) [who.act book.act] u.book) - :: - comments.limbo - %+ ~(put by comments.limbo) - [who.act book.act note.act comment-date] - u.comment - == - :: - :_ state - ?. =(who.act our.bol) - =/ poke-wir=wire - :~ %forward - %del-comment - (scot %p who.act) - book.act - note.act - comment.act - == - :_ cards - [%pass poke-wir %agent [who.act %publish] %poke %publish-action !>(act)] - ?. ?| (team:title our.bol src.bol) - ?& =(author.u.comment src.bol) - (allowed src.bol %read book.act) - == - == - ~|("action not permitted" !!) - =/ pax=path - %+ weld /app/publish/notebooks - /[book.act]/[note.act]/[comment.act]/publish-comment - [(delete-file pax)]~ - :: %subscribe - :: - %subscribe - ?> (team:title our.bol src.bol) - ?: =(our.bol who.act) - [~ state] - =/ join-wire=wire - /join-group/[(scot %p who.act)]/[book.act] - =/ meta=(unit (set path)) - (metadata-resource-scry %publish /(scot %p who.act)/[book.act]) - ?^ meta - (subscribe-notebook who.act book.act) - =/ rid=resource - [who.act book.act] - =/ =cage - :- %group-update - !> ^- action:group-store - [%add-members rid (sy our.bol ~)] - :_ state - [%pass join-wire %agent [who.act %group-push-hook] %poke cage]~ - :: %unsubscribe - :: - %unsubscribe - ?> (team:title our.bol src.bol) - =/ wir=wire /subscribe/(scot %p who.act)/[book.act] - =/ del=primary-delta [%del-book who.act book.act] - =/ book=notebook - (~(got by books) who.act book.act) - =/ rid=resource - (de-path:resource writers.book) - =/ =group - (need (scry-group:grup rid)) - =/ cards=(list card) - :~ [%pass wir %agent [who.act %publish] %leave ~] - [%give %fact [/primary]~ %publish-primary-delta !>(del)] - == - =? cards hidden.group - %+ weld cards - :~ (group-proxy-poke who.act %remove-members rid (sy our.bol ~)) - (group-poke %remove-group rid ~) - == - [cards state(books (~(del by books) who.act book.act))] - :: %read - :: - %read - ?> (team:title our.bol src.bol) - =/ book=(unit notebook) - (~(get by books) who.act book.act) - ?~ book - ~|("nonexistent notebook: {}" !!) - =/ not=(unit note) (~(get by notes.u.book) note.act) - ?~ not - ~|("nonexistent note: {}" !!) - =? tile-num &(!read.u.not (gth tile-num 0)) - (dec tile-num) - =. read.u.not %.y - =. notes.u.book (~(put by notes.u.book) note.act u.not) - =. books (~(put by books) [who.act book.act] u.book) - :_ state - [%give %fact [/primary]~ %publish-primary-delta !>(act)]~ - :: %groupify - :: - %groupify - ?. (team:title our.bol src.bol) - ~|("action not permitted" !!) - =/ book (~(get by books) our.bol book.act) - ?~ book - ~|("nonexistent notebook: {}" !!) - :: - =* old-group-path writers.u.book - =/ app-path /[(scot %p our.bol)]/[book.act] - =/ =metadata - (need (metadata-scry old-group-path app-path)) - =/ old-rid=resource - (de-path:resource old-group-path) - ?< (is-managed:grup old-rid) - ?~ target.act - :: just create contacts object for group - :_ state - ~[(contact-view-poke %groupify old-rid title.metadata description.metadata)] - :: change associations - =* group-path u.target.act - =/ rid=resource - (de-path:resource group-path) - =/ old-group=group - (need (scry-group:grup old-rid)) - =/ =group - (need (scry-group:grup rid)) - =/ ships=(set ship) - (~(dif in members.old-group) members.group) - =. 
subscribers.u.book - group-path - =. writers.u.book - group-path - =. books - (~(put by books) [our.bol book.act] u.book) - =/ del - [%edit-book our.bol book.act u.book] - :_ state - :* [%give %fact [/primary]~ %publish-primary-delta !>(del)] - [%give %fact [/notebook/[book.act]]~ %publish-notebook-delta !>(del)] - (metadata-store-poke %remove app-path %publish app-path) - (metadata-store-poke %add group-path [%publish app-path] metadata) - (group-poke %remove-group old-rid ~) - ?. inclusive.act - ~ - :- (group-poke %add-members rid ships) - %+ turn - ~(tap in ships) - |= =ship - =/ =invite - :* our.bol - %contact-hook - group-path - ship '' - == - =/ act=invite-action [%invite /contacts (shaf %msg-uid eny.bol) invite] - [%pass / %agent [our.bol %invite-hook] %poke %invite-action !>(act)] - == - == -:: -++ get-subscribers - |= book=@tas - ^- (set @p) - %+ roll ~(val by sup.bol) - |= [[who=@p pax=path] out=(set @p)] - ^- (set @p) - ?. ?=([%notebook @ ~] pax) out - ?. =(book i.t.pax) out - (~(put in out) who) -:: -++ get-notebook - |= [host=@p book-name=@tas sty=_state] - ^- (unit notebook) - (~(get by books.sty) host book-name) -:: -++ get-unread - |= book=notebook - ^- @ud - %+ roll ~(tap by notes.book) - |= [[nom=@tas not=note] out=@ud] - ?: read.not - out - +(out) -:: -++ emit-updates-and-state - |= [host=@p book-name=@tas book=notebook del=notebook-delta sty=_state] - ^- (quip card _state) - :_ sty(books (~(put by books.sty) [host book-name] book)) - ?: =(our.bol host) - :~ [%give %fact [/notebook/[book-name]]~ %publish-notebook-delta !>(del)] - [%give %fact [/primary]~ %publish-primary-delta !>(del)] - == - [%give %fact [/primary]~ %publish-primary-delta !>(del)]~ -:: -++ metadata-poke - |= act=metadata-action - ^- card - [%pass / %agent [our.bol %metadata-hook] %poke %metadata-action !>(act)] -:: -:: -++ metadata-scry - |= [group-path=path app-path=path] - ^- (unit metadata) - ?. .^(? %gu (scot %p our.bol) %metadata-store (scot %da now.bol) ~) ~ - .^ (unit metadata) - %gx - (scot %p our.bol) - %metadata-store - (scot %da now.bol) - %metadata - (scot %t (spat group-path)) - %publish - (scot %t (spat app-path)) - /noun - == -:: -++ metadata-resource-scry - |= [app=@tas app-path=path] - ^- (unit (set path)) - ?. .^(? %gu (scot %p our.bol) %metadata-store (scot %da now.bol) ~) ~ - .^ (unit (set path)) - %gx - ;: weld - /(scot %p our.bol)/metadata-store/(scot %da now.bol)/resource/[app] - app-path - /noun - == - == -:: -++ emit-metadata - |= del=metadata-delta - ^- (list card) - |^ - ?- -.del - %add - =/ preexisting (metadata-scry group-path.del app-path.del) - =/ meta=metadata - %* . *metadata - title title.del - description desc.del - date-created created.del - creator author.del - == - ?~ preexisting - (add group-path.del app-path.del meta) - =. color.meta color.u.preexisting - (add group-path.del app-path.del meta) - :: - %remove - =/ app-path [(scot %p author.del) /[book.del]] - =/ group-path=(unit path) (group-from-book app-path) - ?~ group-path ~ - [(metadata-poke [%remove u.group-path [%publish app-path]])]~ - == - :: - ++ add - |= [group-path=path app-path=path =metadata] - ^- (list card) - [(metadata-poke [%add group-path [%publish app-path] metadata])]~ - -- -:: -++ group-from-book - |= app-path=path - ^- (unit path) - ?. .^(? 
%gu (scot %p our.bol) %metadata-store (scot %da now.bol) ~) - ?: ?=([@ ^] app-path) - ~& [%assuming-ported-legacy-publish app-path] - `[%'~' app-path] - ~&([%weird-publish app-path] ~) - =/ resource-indices - .^ (jug md-resource group-path) - %gy - (scot %p our.bol) - %metadata-store - (scot %da now.bol) - /resource-indices - == - =/ groups=(unit (set path)) - (~(get by resource-indices) [%publish app-path]) - ?~ groups ~ - =/ group-paths ~(tap in u.groups) - ?~ group-paths ~ - `i.group-paths -:: -++ metadata-hook-poke - |= act=metadata-hook-action - ^- card - :* %pass / %agent - [our.bol %metadata-hook] - %poke %metadata-hook-action - !>(act) - == -:: -++ handle-notebook-delta - |= [del=notebook-delta sty=_state] - ^- (quip card _state) - ?- -.del - %add-book - ?: =(our.bol host.del) - =^ cards state - (emit-updates-and-state host.del book.del data.del del sty) - :_ state - %- zing - :~ cards - [(metadata-hook-poke [%add-owned writers.data.del])]~ - %- emit-metadata - :* %add - writers.data.del - [(scot %p host.del) /[book.del]] - title.data.del - description.data.del - host.del - date-created.data.del - == - == - =? data.del (~(has by books) host.del book.del) - (merge-notebooks (~(got by books) host.del book.del) data.del) - =^ cards state - (emit-updates-and-state host.del book.del data.del del sty) - =/ rid=resource - (de-path:resource writers.data.del) - =? cards !=(our.bol entity.rid) - :_ cards - (group-pull-hook-poke [%add host.del rid]) - :_ state - :* (metadata-hook-poke [%add-synced host.del writers.data.del]) - cards - == - :: - %add-note - =/ book=(unit notebook) - (get-notebook host.del book.del sty) - ?~ book - [~ sty] - =. read.data.del =(our.bol author.data.del) - =. notes.u.book (~(put by notes.u.book) note.del data.del) - (emit-updates-and-state host.del book.del u.book del sty) - :: - %add-comment - =/ book=(unit notebook) - (get-notebook host.del book.del sty) - ?~ book - [~ sty] - =/ note (~(get by notes.u.book) note.del) - ?~ note - [~ sty] - =/ limbo-comment=(unit @da) - %- ~(rep by comments.u.note) - |= [[date=@da com=comment] out=(unit @da)] - ?: ?& =(author.com author.data.del) - =(content.com content.data.del) - =(%.y pending.com) - == - `date - out - =? comments.u.note ?=(^ limbo-comment) - (~(del by comments.u.note) u.limbo-comment) - =. comments.u.note (~(put by comments.u.note) comment-date.del data.del) - =. notes.u.book (~(put by notes.u.book) note.del u.note) - (emit-updates-and-state host.del book.del u.book del sty) - :: - %edit-book - =/ old-book=(unit notebook) - (get-notebook host.del book.del sty) - ?~ old-book - [~ sty] - =/ new-book=notebook - %= data.del - date-created date-created.u.old-book - notes notes.u.old-book - order order.u.old-book - == - =^ cards state - (emit-updates-and-state host.del book.del new-book del sty) - :_ state - %+ weld cards - %- emit-metadata - :* %add - writers.new-book - [(scot %p host.del) /[book.del]] - title.new-book - description.new-book - host.del - date-created.new-book - == - :: - %edit-note - =. notes.limbo.sty (~(del by notes.limbo.sty) host.del book.del note.del) - =/ book=(unit notebook) - (get-notebook host.del book.del sty) - ?~ book - [~ sty] - =/ old-note (~(get by notes.u.book) note.del) - ?~ old-note - [~ sty] - ?: =(our.bol author.u.old-note) - [~ sty] - =/ new-note=note - %= data.del - date-created date-created.u.old-note - comments comments.u.old-note - read read.u.old-note - == - =. 
notes.u.book (~(put by notes.u.book) note.del new-note) - (emit-updates-and-state host.del book.del u.book del sty) - :: - %edit-comment - =. comments.limbo.sty - %- ~(del by comments.limbo.sty) - [host.del book.del note.del comment-date.del] - =/ book=(unit notebook) - (get-notebook host.del book.del sty) - ?~ book - [~ sty] - =/ note (~(get by notes.u.book) note.del) - ?~ note - [~ sty] - =/ old-comment (~(get by comments.u.note) comment-date.del) - ?~ old-comment - [~ sty] - =. comments.u.note (~(put by comments.u.note) comment-date.del data.del) - =. notes.u.book (~(put by notes.u.book) note.del u.note) - (emit-updates-and-state host.del book.del u.book del sty) - :: - %del-book - =/ book=(unit notebook) - (get-notebook host.del book.del sty) - ?~ book [~ sty] - :_ sty(books (~(del by books.sty) host.del book.del)) - ?. =(our.bol host.del) - %+ welp - [%give %fact [/primary]~ %publish-primary-delta !>(del)]~ - ?: (is-managed writers.u.book) ~ - [(metadata-hook-poke [%remove writers.u.book])]~ - %- zing - :~ [%give %fact [/notebook/[book.del]]~ %publish-notebook-delta !>(del)]~ - [%give %fact [/primary]~ %publish-primary-delta !>(del)]~ - (emit-metadata %remove host.del book.del) - :: - ?: (is-managed writers.u.book) ~ - [(metadata-hook-poke [%remove writers.u.book])]~ - == - :: - %del-note - =. notes.limbo.sty (~(del by notes.limbo.sty) host.del book.del note.del) - =/ book=(unit notebook) - (get-notebook host.del book.del sty) - ?~ book - [~ sty] - =/ not=(unit note) (~(get by notes.u.book) note.del) - ?~ not - [~ sty] - =. notes.u.book (~(del by notes.u.book) note.del) - (emit-updates-and-state host.del book.del u.book del sty) - :: - %del-comment - =. comments.limbo.sty - %- ~(del by comments.limbo.sty) - [host.del book.del note.del comment.del] - =/ book=(unit notebook) - (get-notebook host.del book.del sty) - ?~ book - [~ sty] - =/ note (~(get by notes.u.book) note.del) - ?~ note - [~ sty] - =. comments.u.note (~(del by comments.u.note) comment.del) - =. notes.u.book (~(put by notes.u.book) note.del u.note) - (emit-updates-and-state host.del book.del u.book del sty) - == -:: -++ get-subscribers-json - |= book=@tas - ^- json - :- %a - %+ roll ~(val by sup.bol) - |= [[who=@p pax=path] out=(list json)] - ^- (list json) - ?. ?=([%notebook @ ~] pax) out - ?. =(book i.t.pax) out - [[%s (scot %p who)] out] -:: -++ get-writers-json - |= [host=@p book=@tas] - =/ =tag - [%publish (cat 3 %writers- book)] - ^- json - =/ writers=(list ship) - ~(tap in (book-writers host book)) - :- %a - %+ turn writers - |= who=@p - ^- json - [%s (scot %p who)] -:: -++ get-notebook-json - |= [host=@p book-name=@tas] - ^- (unit json) - =, enjs:format - =/ book=(unit notebook) (~(get by books) host book-name) - ?~ book - ~ - =/ notebook-json (notebook-full:enjs host book-name u.book) - ?> ?=(%o -.notebook-json) - =. p.notebook-json - (~(uni by p.notebook-json) (notes-page:enjs notes.u.book 0 50)) - =. p.notebook-json - (~(put by p.notebook-json) %subscribers (get-subscribers-json book-name)) - =/ notebooks-json (notebooks-map:enjs our.bol books) - =. p.notebook-json - (~(put by p.notebook-json) %writers (get-writers-json host book-name)) - ?> ?=(%o -.notebooks-json) - =/ host-books-json (~(got by p.notebooks-json) (scot %p host)) - ?> ?=(%o -.host-books-json) - =. p.host-books-json (~(put by p.host-books-json) book-name notebook-json) - =. 
p.notebooks-json - (~(put by p.notebooks-json) (scot %p host) host-books-json) - `(pairs notebooks+notebooks-json ~) -:: -++ get-note-json - |= [host=@p book-name=@tas note-name=@tas] - ^- (unit json) - =, enjs:format - =/ book=(unit notebook) (~(get by books) host book-name) - ?~ book - ~ - =/ note=(unit note) (~(get by notes.u.book) note-name) - ?~ note - ~ - =/ notebook-json (notebook-full:enjs host book-name u.book) - ?> ?=(%o -.notebook-json) - =/ note-json (note-presentation:enjs u.book note-name u.note) - =. p.notebook-json (~(uni by p.notebook-json) note-json) - =/ notebooks-json (notebooks-map:enjs our.bol books) - ?> ?=(%o -.notebooks-json) - =/ host-books-json (~(got by p.notebooks-json) (scot %p host)) - ?> ?=(%o -.host-books-json) - =. p.host-books-json (~(put by p.host-books-json) book-name notebook-json) - =. p.notebooks-json - (~(put by p.notebooks-json) (scot %p host) host-books-json) - `(pairs notebooks+notebooks-json ~) -:: ++ is-managed |= =path ^- ? ?> ?=(^ path) !=(i.path '~') :: -++ handle-http-request - |= req=inbound-request:eyre - ^- simple-payload:http - =/ url (parse-request-line url.request.req) - ?+ url not-found:gen - :: - :: pagination endpoints - :: - :: all notebooks, short form - [[[~ %json] [%'publish-view' %notebooks ~]] ~] - %- json-response:gen - (notebooks-map:enjs our.bol books) - :: - :: notes pagination - [[[~ %json] [%'publish-view' %notes @ @ @ @ ~]] ~] - =/ host=(unit @p) (slaw %p i.t.t.site.url) - ?~ host - not-found:gen - =/ book-name i.t.t.t.site.url - =/ book=(unit notebook) (~(get by books) u.host book-name) - ?~ book - not-found:gen - =/ start (rush i.t.t.t.t.site.url dem) - ?~ start - not-found:gen - =/ length (rush i.t.t.t.t.t.site.url dem) - ?~ length - not-found:gen - %- json-response:gen - :- %o - (notes-page:enjs notes.u.book u.start u.length) - :: - :: comments pagination - [[[~ %json] [%'publish-view' %comments @ @ @ @ @ ~]] ~] - =/ host=(unit @p) (slaw %p i.t.t.site.url) - ?~ host - not-found:gen - =/ book-name i.t.t.t.site.url - =/ book=(unit notebook) (~(get by books) u.host book-name) - ?~ book - not-found:gen - =/ note-name i.t.t.t.t.site.url - =/ note=(unit note) (~(get by notes.u.book) note-name) - ?~ note - not-found:gen - =/ start (rush i.t.t.t.t.t.site.url dem) - ?~ start - not-found:gen - =/ length (rush i.t.t.t.t.t.t.site.url dem) - ?~ length - not-found:gen - %- json-response:gen - (comments-page:enjs comments.u.note u.start u.length) - :: - :: single notebook with initial 50 notes in short form, as json - [[[~ %json] [%'publish-view' @ @ ~]] ~] - =, enjs:format - =/ host=(unit @p) (slaw %p i.t.site.url) - ?~ host not-found:gen - =/ book-name i.t.t.site.url - =/ book=(unit notebook) (~(get by books) u.host book-name) - ?~ book not-found:gen - =/ notebook-json (notebook-full:enjs u.host book-name u.book) - ?> ?=(%o -.notebook-json) - =. p.notebook-json - (~(uni by p.notebook-json) (notes-page:enjs notes.u.book 0 50)) - =. p.notebook-json - (~(put by p.notebook-json) %subscribers (get-subscribers-json book-name)) - =. 
p.notebook-json - (~(put by p.notebook-json) %writers (get-writers-json u.host book-name)) - (json-response:gen (pairs notebook+notebook-json ~)) - :: - :: single note, with initial 50 comments, as json - [[[~ %json] [%'publish-view' @ @ @ ~]] ~] - =, enjs:format - =/ host=(unit @p) (slaw %p i.t.site.url) - ?~ host not-found:gen - =/ book-name i.t.t.site.url - =/ book=(unit notebook) (~(get by books) u.host book-name) - ?~ book not-found:gen - =/ note-name i.t.t.t.site.url - =/ note=(unit note) (~(get by notes.u.book) note-name) - ?~ note not-found:gen - =/ jon=json - o+(note-presentation:enjs u.book note-name u.note) - (json-response:gen jon) - == +++ group-poke + |= =update:group-store + ^- card + [%pass / %agent [our.bol %group-store] %poke %group-update !>(update)] +:: +++ group-proxy-poke + |= [=ship =update:group-store] + ^- card + [%pass / %agent [ship %group-push-hook] %poke %group-update !>(update)] +++ contact-view-poke + |= act=contact-view-action:contact-view + ^- card + [%pass / %agent [our.bol %contact-view] %poke %contact-view-action !>(act)] +:: +++ contact-view-create + |= [=path ships=(set ship) =policy title=@t description=@t] + =/ rid=resource + (de-path:resource path) + =/ act=contact-view-action:contact-view + [%create name.rid policy title description] + (contact-view-poke act) +:: +++ check-host-migrate + |= rid=resource + ^- card + =/ res-path + (en-path:resource rid) + =- [%pass graph-migrate+res-path %agent -] + [[entity.rid %graph-push-hook] %watch resource+res-path] +:: + +:: +++ poke-our + |= [app=term =cage] + [%pass / %agent [our.bol app] %poke cage] +:: +++ poke-graph-pull + |= =action:pull-hook + (poke-our %graph-pull-hook pull-hook-action+!>(action)) :: -- diff --git a/pkg/arvo/app/soto.hoon b/pkg/arvo/app/soto.hoon index 2ca0c2620..29d9a7a94 100644 --- a/pkg/arvo/app/soto.hoon +++ b/pkg/arvo/app/soto.hoon @@ -1,74 +1,4 @@ +:: soto [tombstone]: former dojo relay for urbit's landscape interface :: -:: soto [landscape]: A Dojo relay for Urbit's Landscape interface -:: -:: Relays sole-effects to subscribers and forwards sole-action pokes -:: -/- sole -/+ *soto, default-agent -|% -+$ card card:agent:gall -:: -+$ versioned-state - $@ state-null - state-zero -:: -+$ state-null ~ -:: -+$ state-zero [%0 ~] --- -=| state-zero -=* state - -^- agent:gall -|_ bol=bowl:gall -+* this . - soto-core +> - sc ~(. soto-core bol) - def ~(. 
(default-agent this %|) bol) -:: -++ on-init - :_ this - :_ ~ - :* %pass /srv %agent [our.bol %file-server] - %poke %file-server-action - !>([%serve-dir /'~dojo' /app/landscape %.n %.y]) - == -++ on-save !>(state) -:: -++ on-load - |= old-vase=vase - =/ old - !<(versioned-state old-vase) - ?^ old - [~ this(state old)] - :_ this(state [%0 ~]) - :~ [%pass /bind/soto %arvo %e %disconnect [~ /'~dojo']] - :* %pass /srv %agent [our.bol %file-server] - %poke %file-server-action - !>([%serve-dir /'~dojo' /app/landscape %.n %.y]) - == - == -:: -++ on-poke on-poke:def -++ on-watch - |= pax=path - ^- (quip card _this) - ?+ pax (on-watch:def pax) - [%sototile ~] - :_ this - [%give %fact ~ %json !>(~)]~ - == -:: -++ on-agent on-agent:def -:: -++ on-arvo - |= [wir=wire sin=sign-arvo] - ^- (quip card _this) - ?: ?=(%bound +<.sin) - [~ this] - (on-arvo:def wir sin) -:: -++ on-fail on-fail:def -++ on-leave on-leave:def -++ on-peek on-peek:def -:: --- +/+ default-agent +(default-agent *agent:gall %|) diff --git a/pkg/arvo/app/spider.hoon b/pkg/arvo/app/spider.hoon index 1d4230c68..a328e0d47 100644 --- a/pkg/arvo/app/spider.hoon +++ b/pkg/arvo/app/spider.hoon @@ -1,5 +1,5 @@ /- spider -/+ libstrand=strand, default-agent, verb, server +/+ libstrand=strand, default-agent, verb, server =, strand=strand:libstrand |% +$ card card:agent:gall diff --git a/pkg/arvo/gen/configure-hosting.hoon b/pkg/arvo/gen/configure-hosting.hoon new file mode 100644 index 000000000..ad9c4a771 --- /dev/null +++ b/pkg/arvo/gen/configure-hosting.hoon @@ -0,0 +1,12 @@ +:: .configuration/pill +configure-hosting +:: +:: boot king haskell with `--inject-event-list /path/to/configuration.pill` +:: +:- %say +|= [[now=@da @ our=@p ^] *] +:- %noun +:~ [//term/1 %belt %txt "|unlink %chat-cli"] + [//term/1 %belt %ret 0] + [//term/1 %belt %txt "|cors-approve 'https://horizon.tlon.network'"] + [//term/1 %belt %ret 0] +== diff --git a/pkg/arvo/gen/graph-store/add-graph.hoon b/pkg/arvo/gen/graph-store/add-graph.hoon index a95468928..0192ede89 100644 --- a/pkg/arvo/gen/graph-store/add-graph.hoon +++ b/pkg/arvo/gen/graph-store/add-graph.hoon @@ -3,8 +3,8 @@ /+ *graph-store :- %say |= $: [now=@da eny=@uvJ =beak] - [[=resource mark=(unit mark) ~] ~] + [[=resource mark=(unit mark) overwrite=? 
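::
::  usage sketch for +configure-hosting above, assuming the usual dojo
::  `.file/ext` output location (verify against your own pier layout):
::
::    dojo>  .configuration/pill +configure-hosting
::    ::  the pill is written under <pier>/.urb/put/ and is then handed
::    ::  to king haskell at boot, per the generator's header comment:
::    ::    --inject-event-list /path/to/configuration.pill
::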
~] ~] == :- %graph-update ^- update -[%0 now [%add-graph resource (gas:orm ~ ~) mark]] +[%0 now [%add-graph resource (gas:orm ~ ~) mark overwrite]] diff --git a/pkg/arvo/lib/base64.hoon b/pkg/arvo/lib/base64.hoon index 6843f8be8..aa5887419 100644 --- a/pkg/arvo/lib/base64.hoon +++ b/pkg/arvo/lib/base64.hoon @@ -3,6 +3,7 @@ :: pad: include padding when encoding, require when decoding :: url: use url-safe characters '-' for '+' and '_' for '/' :: +:: =+ [pad=& url=|] |% :: diff --git a/pkg/arvo/lib/graph-store.hoon b/pkg/arvo/lib/graph-store.hoon index 4014a624a..a6055bb10 100644 --- a/pkg/arvo/lib/graph-store.hoon +++ b/pkg/arvo/lib/graph-store.hoon @@ -34,6 +34,79 @@ ++ enjs =, enjs:format |% + :: + ++ signatures + |= s=^signatures + ^- json + [%a (turn ~(tap in s) signature)] + :: + ++ signature + |= s=^signature + ^- json + %- pairs + :~ [%signature s+(scot %ux p.s)] + [%ship (ship q.s)] + [%life (numb r.s)] + == + :: + ++ index + |= i=^index + ^- json + ?: =(~ i) s+'/' + =/ j=^tape "" + |- + ?~ i [%s (crip j)] + =/ k=json (numb i.i) + ?> ?=(%n -.k) + %_ $ + i t.i + j (weld j (weld "/" (trip +.k))) + == + :: + ++ uid + |= u=^uid + ^- json + %- pairs + :~ [%resource (enjs:res resource.u)] + [%index (index index.u)] + == + :: + ++ content + |= c=^content + ^- json + ?- -.c + %mention (frond %mention (ship ship.c)) + %text (frond %text s+text.c) + %url (frond %url s+url.c) + %reference (frond %reference (uid uid.c)) + %code + %+ frond %code + %- pairs + :- [%expression s+expression.c] + :_ ~ + :- %output + :: virtualize output rendering, +tank:enjs:format might crash + :: + =/ result=(each (list json) tang) + (mule |.((turn output.c tank))) + ?- -.result + %& a+p.result + %| a+[a+[%s '[[output rendering error]]']~]~ + == + == + :: + ++ post + |= p=^post + ^- json + %- pairs + :~ [%author (ship author.p)] + [%index (index index.p)] + [%time-sent (time time-sent.p)] + [%contents [%a (turn contents.p content)]] + [%hash ?~(hash.p ~ s+(scot %ux u.hash.p))] + [%signatures (signatures signatures.p)] + == + :: ++ update |= upd=^update ^- json @@ -50,6 +123,7 @@ :~ [%resource (enjs:res resource.upd)] [%graph (graph graph.upd)] [%mark ?~(mark.upd ~ s+u.mark.upd)] + [%overwrite b+overwrite.upd] == :: %remove-graph @@ -132,20 +206,6 @@ :~ (index [a]~) (node n) == - :: - ++ index - |= i=^index - ^- json - =/ j=^tape "" - |- - ?~ i [%s (crip j)] - =/ k=json (numb i.i) - ?> ?=(%n -.k) - %_ $ - i t.i - j (weld j (weld "/" (trip +.k))) - == - :: ++ node |= n=^node ^- json @@ -158,41 +218,7 @@ == == :: - ++ post - |= p=^post - ^- json - %- pairs - :~ [%author (ship author.p)] - [%index (index index.p)] - [%time-sent (time time-sent.p)] - [%contents [%a (turn contents.p content)]] - [%hash ?~(hash.p ~ s+(scot %ux u.hash.p))] - [%signatures (signatures signatures.p)] - == - :: - ++ content - |= c=^content - ^- json - ?- -.c - %text (frond %text s+text.c) - %url (frond %url s+url.c) - %reference (frond %reference (uid uid.c)) - %code - %+ frond %code - %- pairs - :- [%expression s+expression.c] - :_ ~ - :- %output - :: virtualize output rendering, +tank:enjs:format might crash - :: - =/ result=(each (list json) tang) - (mule |.((turn output.c tank))) - ?- -.result - %& a+p.result - %| a+[a+[%s '[[output rendering error]]']~]~ - == - == - :: + :: ++ nodes |= m=(map ^index ^node) ^- json @@ -210,27 +236,6 @@ ^- json [%a (turn ~(tap in i) index)] :: - ++ uid - |= u=^uid - ^- json - %- pairs - :~ [%resource (enjs:res resource.u)] - [%index (index index.u)] - == - :: - ++ signatures - |= s=^signatures - ^- json - 
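::
::  worked examples of the JSON produced by the relocated +index and
::  +content encoders above (atom values are illustrative):
::
::    (index ~)                              ->  "/"
::    (index ~[1 2])                         ->  "/1/2"
::    (content [%text 'hi'])                 ->  {"text": "hi"}
::    (content [%url 'https://urbit.org'])   ->  {"url": "https://urbit.org"}
::
::  %add-graph updates now also carry the new "overwrite" boolean field.
::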
[%a (turn ~(tap in s) signature)] - :: - ++ signature - |= s=^signature - ^- json - %- pairs - :~ [%signature s+(scot %ux p.s)] - [%ship (ship q.s)] - [%life (numb r.s)] - == -- -- :: @@ -272,6 +277,7 @@ :~ [%resource dejs:res] [%graph graph] [%mark (mu so)] + [%overwrite bo] == :: ++ graph @@ -300,9 +306,14 @@ ++ node %- ot :~ [%post post] - :: TODO: support adding nodes with children by supporting the - :: graph key - [%children (of [%empty ul]~)] + [%children internal-graph] + == + :: + ++ internal-graph + ^- $-(json ^internal-graph) + %- of + :~ [%empty ul] + [%graph graph] == :: ++ post @@ -317,7 +328,8 @@ :: ++ content %- of - :~ [%text so] + :~ [%mention (su ;~(pfix sig fed:ag))] + [%text so] [%url so] [%reference uid] [%code eval] diff --git a/pkg/arvo/lib/graph.hoon b/pkg/arvo/lib/graph.hoon index 2e2bcdf1e..2d2046801 100644 --- a/pkg/arvo/lib/graph.hoon +++ b/pkg/arvo/lib/graph.hoon @@ -17,6 +17,18 @@ %+ scry-for update:store /graph/(scot %p entity.res)/[name.res] :: +++ got-node + |= [res=resource =index:store] + ^- node:store + =+ %+ scry-for ,=update:store + %+ weld + /node/(scot %p entity.res)/[name.res] + (turn index (cury scot %ud)) + ?> ?=(%0 -.update) + ?> ?=(%add-nodes -.q.update) + ?> ?=(^ nodes.q.update) + q.n.nodes.q.update +:: ++ get-update-log |= rid=resource ^- update-log:store @@ -33,4 +45,12 @@ ^- update-log:store %+ scry-for update-log:store /update-log-subset/(scot %p entity.res)/[name.res]/(scot %da start)/'~' +:: +++ get-keys + ^- resources + =+ %+ scry-for ,=update:store + /keys + ?> ?=(%0 -.update) + ?> ?=(%keys -.q.update) + resources.q.update -- diff --git a/pkg/arvo/lib/hark/chat-hook.hoon b/pkg/arvo/lib/hark/chat-hook.hoon new file mode 100644 index 000000000..1bdd167f5 --- /dev/null +++ b/pkg/arvo/lib/hark/chat-hook.hoon @@ -0,0 +1,30 @@ +/- sur=hark-chat-hook +^? +=< [. sur] +=, sur +|% +++ dejs + =, dejs:format + |% + ++ action + %- of + :~ listen+pa + ignore+pa + set-mentions+bo + == + -- +:: +++ enjs + =, enjs:format + |% + ++ update + |= upd=^update + %+ frond -.upd + ?- -.upd + ?(%listen %ignore) (path chat.upd) + %set-mentions b+mentions.upd + %initial a+(turn ~(tap in watching.upd) path) + == + -- +-- + diff --git a/pkg/arvo/lib/hark/graph-hook.hoon b/pkg/arvo/lib/hark/graph-hook.hoon new file mode 100644 index 000000000..d379236ff --- /dev/null +++ b/pkg/arvo/lib/hark/graph-hook.hoon @@ -0,0 +1,66 @@ +/- sur=hark-graph-hook, post +/+ graph-store, resource +^? +=< [. sur] +=, sur +|% +++ dejs + =, dejs:format + |% + :: + ++ index + ^- $-(json index:graph-store) + (su ;~(pfix net (more net dem))) + :: + ++ graph-index + %- ot + :~ graph+dejs-path:resource + index+index + == + :: + ++ action + %- of + :~ listen+graph-index + ignore+graph-index + set-mentions+bo + set-watch-on-self+bo + == + -- +:: +++ enjs + =, enjs:format + |% + :: + ++ graph-index + |= [graph=resource =index:post] + %- pairs + :~ graph+s+(enjs-path:resource graph) + index+(index:enjs:graph-store index) + == + :: + ++ action + |= act=^action + ^- json + %+ frond -.act + ?- -.act + %set-watch-on-self b+watch-on-self.act + %set-mentions b+mentions.act + ?(%listen %ignore) (graph-index graph.act index.act) + == + :: + :: + :: + ++ update + |= upd=^update + ^- json + ?. 
?=(%initial -.upd) + (action upd) + %+ frond -.upd + %- pairs + :~ 'watchOnSelf'^b+watch-on-self.upd + 'mentions'^b+mentions.upd + :+ %watching %a + (turn ~(tap in watching.upd) graph-index) + == + -- +-- diff --git a/pkg/arvo/lib/hark/group-hook.hoon b/pkg/arvo/lib/hark/group-hook.hoon new file mode 100644 index 000000000..e32ee8bd7 --- /dev/null +++ b/pkg/arvo/lib/hark/group-hook.hoon @@ -0,0 +1,34 @@ +/- sur=hark-group-hook +/+ resource +^? +=< [. sur] +=, sur +|% +++ dejs + =, dejs:format + |% + ++ action + %- of + :~ listen+dejs-path:resource + ignore+dejs-path:resource + == + -- +:: +++ enjs + =, enjs:format + |% + ++ res + (cork enjs-path:resource (lead %s)) + :: + ++ update + |= upd=^update + %+ frond -.upd + ?- -.upd + ?(%listen %ignore) (res group.upd) + :: + %initial + :- %a + (turn ~(tap in watching.upd) res) + == + -- +-- diff --git a/pkg/arvo/lib/hark/store.hoon b/pkg/arvo/lib/hark/store.hoon new file mode 100644 index 000000000..f9448f1e8 --- /dev/null +++ b/pkg/arvo/lib/hark/store.hoon @@ -0,0 +1,226 @@ +/- sur=hark-store, post +/+ resource, graph-store, group-store, chat-store +^? +=< [. sur] +=, sur +|% +++ dejs + =, dejs:format + |% + ++ index + %- of + :~ graph+graph-index + group+group-index + chat+chat-index + == + :: + ++ chat-index + %- ot + :~ chat+pa + mention+bo + == + :: + ++ group-index + %- ot + :~ group+dejs-path:resource + description+so + == + :: + ++ graph-index + %- ot + :~ group+dejs-path:resource + graph+dejs-path:resource + module+so + description+so + == + :: parse date as @ud + :: TODO: move to zuse + ++ sd + |= jon=json + ^- @da + ?> ?=(%s -.jon) + `@da`(rash p.jon dem:ag) + + :: + ++ notif-ref + ^- $-(json [@da ^index]) + %- ot + :~ time+sd + index+index + == + :: + ++ add + |= jon=json + [*^index *notification] + :: + ++ action + ^- $-(json ^action) + %- of + :~ seen+ul + archive+notif-ref + unread+notif-ref + read+notif-ref + add+add + set-dnd+bo + read-index+index + == + -- +:: +++ enjs + =, enjs:format + |% + ++ update + |= upd=^update + ^- json + |^ + %+ frond -.upd + ?+ -.upd a+~ + %added (added +.upd) + %timebox (timebox +.upd) + %set-dnd b+dnd.upd + %count (numb count.upd) + %unreads (unreads unreads.upd) + %more (more +.upd) + :: + ?(%archive %read %unread) + (notif-ref +.upd) + == + :: + ++ unreads + |= l=(list [^index @ud]) + ^- json + :- %a + ^- (list json) + %+ turn l + |= [idx=^index unread=@ud] + %- pairs + :~ unread+(numb unread) + index+(index idx) + == + :: + ++ added + |= [tim=@da idx=^index not=^notification] + ^- json + %- pairs + :~ time+s+(scot %ud tim) + index+(index idx) + notification+(notification not) + == + :: + ++ notif-ref + |= [tim=@da idx=^index] + ^- json + %- pairs + :~ time+s+(scot %ud tim) + index+(index idx) + == + :: + ++ more + |= upds=(list ^update) + ^- json + a+(turn upds update) + :: + ++ index + |= =^index + %+ frond -.index + |^ + ?- -.index + %graph (graph-index +.index) + %group (group-index +.index) + %chat (chat-index +.index) + == + :: + ++ chat-index + |= [chat=^path mention=?] 
+ ^- json + %- pairs + :~ chat+(path chat) + mention+b+mention + == + :: + ++ graph-index + |= [group=resource graph=resource module=@t description=@t] + ^- json + %- pairs + :~ group+s+(enjs-path:resource group) + graph+s+(enjs-path:resource graph) + module+s+module + description+s+description + == + :: + ++ group-index + |= [group=resource description=@t] + ^- json + %- pairs + :~ group+s+(enjs-path:resource group) + description+s+description + == + -- + :: + ++ notification + |= ^notification + ^- json + %- pairs + :~ time+(time date) + read+b+read + contents+(^contents contents) + == + :: + ++ contents + |= =^contents + ^- json + %+ frond -.contents + |^ + ?- -.contents + %graph (graph-contents +.contents) + %group (group-contents +.contents) + %chat (chat-contents +.contents) + == + :: + ++ chat-contents + |= =(list envelope:chat-store) + ^- json + :- %a + (turn list envelope:enjs:chat-store) + :: + ++ graph-contents + |= =(list post:post) + ^- json + :- %a + (turn list post:enjs:graph-store) + :: + ++ group-contents + |= =(list ^group-contents) + ^- json + :- %a + %+ murn list + |= =^group-contents + ?. ?=(?(%add-members %remove-members) -.group-contents) + ~ + `(update:enjs:group-store group-contents) + -- + :: + ++ indexed-notification + |= [=^index =^notification] + %- pairs + :~ index+(^index index) + notification+(^notification notification) + == + :: + ++ timebox + |= [tim=@da arch=? l=(list [^index ^notification])] + ^- json + %- pairs + :~ time+s+(scot %ud tim) + archive+b+arch + :- %notifications + ^- json + :- %a + %+ turn l + |= [=^index =^notification] + ^- json + (indexed-notification index notification) + == + -- + -- +-- diff --git a/pkg/arvo/lib/hood/drum.hoon b/pkg/arvo/lib/hood/drum.hoon index a41c917a7..84ddfa0f7 100644 --- a/pkg/arvo/lib/hood/drum.hoon +++ b/pkg/arvo/lib/hood/drum.hoon @@ -69,11 +69,11 @@ %azimuth-tracker %ping %goad + %lens == ?: lit ~ :~ %acme - %lens %clock %dojo %launch @@ -91,7 +91,7 @@ %chat-hook %chat-view %chat-cli - %soto + %herm %contact-store %contact-hook %contact-view @@ -107,6 +107,11 @@ %graph-store %graph-pull-hook %graph-push-hook + %hark-store + %hark-graph-hook + %hark-group-hook + %hark-chat-hook + %observe-hook == :: ++ deft-fish :: default connects @@ -209,7 +214,7 @@ == :: ++ on-load - |= [hood-version=?(%1 %2 %3 %4 %5 %6 %7 %8 %9 %10) old=any-state] + |= [hood-version=@ud old=any-state] =< se-abet =< se-view =. sat old =. dev (~(gut by bin) ost *source) @@ -237,10 +242,17 @@ => (se-born | %home %group-push-hook) (se-born | %home %group-pull-hook) =? ..on-load (lte hood-version %9) - (se-born | %home %graph-store) + (se-born | %home %graph-store) =? ..on-load (lte hood-version %10) => (se-born | %home %graph-push-hook) (se-born | %home %graph-pull-hook) + =? ..on-load (lte hood-version %11) + => (se-born | %home %hark-graph-hook) + => (se-born | %home %hark-group-hook) + => (se-born | %home %hark-chat-hook) + => (se-born | %home %hark-store) + => (se-born | %home %observe-hook) + (se-born | %home %herm) ..on-load :: ++ reap-phat :: ack connect @@ -556,7 +568,6 @@ ++ se-show :: show buffer, raw |= lin/(pair @ud stub) ^+ +> - =. p.lin (add p.lin (lent-stye:klr q.lin)) ?: =(mir lin) +> =. +> ?:(=(p.mir p.lin) +> (se-blit %hop p.lin)) =. 
+> ?:(=(q.mir q.lin) +> (se-blit %pom q.lin)) @@ -1120,25 +1131,10 @@ (fall p.q.a p.q.b) (fall q.q.a q.q.b) :: - ++ lent-stye - |= a/stub ^- @ - (roll (lnts-stye a) add) - :: ++ lent-char |= a/stub ^- @ (roll (lnts-char a) add) :: - ++ lnts-stye :: stub pair head lengths - |= a/stub ^- (list @) - %+ turn a - |= a/(pair stye (list @c)) - ;: add :: presumes impl of cvrt:ansi in %dill - (mul 5 2) :: bg - (mul 5 2) :: fg - =+ b=~(wyt in p.p.a) :: effect - ?:(=(0 b) 0 (mul 4 +(b))) - == - :: ++ lnts-char :: stub pair tail lengths |= a/stub ^- (list @) %+ turn a diff --git a/pkg/arvo/lib/invite-json.hoon b/pkg/arvo/lib/invite-json.hoon index c28cd16c8..7faac97d0 100644 --- a/pkg/arvo/lib/invite-json.hoon +++ b/pkg/arvo/lib/invite-json.hoon @@ -1,4 +1,5 @@ /- *invite-store +/+ resource |% ++ slan |=(mod/@tas |=(txt/@ta (need (slaw mod txt)))) :: @@ -12,9 +13,9 @@ ^- json %- pairs:enjs:format %+ turn ~(tap by inv) - |= [=path =invitatory] + |= [=term =invitatory] ^- [cord json] - [(spat path) (invitatory-to-json invitatory)] + [term (invitatory-to-json invitatory)] :: ++ invitatory-to-json |= =invitatory @@ -33,13 +34,13 @@ %- pairs :~ [%ship (ship ship.invite)] [%app [%s app.invite]] - [%path (path path.invite)] + [%resource (enjs:resource resource.invite)] [%recipient (ship recipient.invite)] [%text [%s text.invite]] == :: ++ update-to-json - |= upd=invite-update + |= upd=update =, enjs:format ^- json %+ frond %invite-update @@ -50,15 +51,15 @@ [%initial (invites-to-json invites.upd)] ?: =(%create -.upd) ?> ?=(%create -.upd) - [%create (pairs [%path (path path.upd)]~)] + [%create (pairs [%term s+term.upd]~)] ?: =(%delete -.upd) ?> ?=(%delete -.upd) - [%delete (pairs [%path (path path.upd)]~)] + [%delete (pairs [%term s+term.upd]~)] ?: =(%accepted -.upd) ?> ?=(%accepted -.upd) :- %accepted %- pairs - :~ [%path (path path.upd)] + :~ [%term s+term.upd] [%uid s+(scot %uv uid.upd)] [%invite (invite-to-json invite.upd)] == @@ -66,14 +67,14 @@ ?> ?=(%decline -.upd) :- %decline %- pairs - :~ [%path (path path.upd)] + :~ [%term s+term.upd] [%uid s+(scot %uv uid.upd)] == ?: =(%invite -.upd) ?> ?=(%invite -.upd) :- %invite %- pairs - :~ [%path (path path.upd)] + :~ [%term s+term.upd] [%uid s+(scot %uv uid.upd)] [%invite (invite-to-json invite.upd)] == @@ -88,53 +89,45 @@ :: ++ json-to-action |= jon=json - ^- invite-action + ^- action =, dejs:format =< (parse-json jon) |% ++ parse-json %- of - :~ [%create create] - [%delete delete] + :~ [%create so] + [%delete so] [%invite invite] [%accept accept] [%decline decline] == :: - ++ create - (ot [%path pa]~) - :: - ++ delete - (ot [%path pa]~) - :: - ++ invite %- ot - :~ [%path pa] + :~ [%term so] [%uid seri] [%invite invi] == :: ++ accept %- ot - :~ [%path pa] + :~ [%term so] [%uid seri] == :: ++ decline %- ot - :~ [%path pa] + :~ [%term so] [%uid seri] == :: ++ invi %- ot :~ [%ship (su ;~(pfix sig fed:ag))] - [%app (se %tas)] - [%path pa] + [%app so] + [%resource dejs:resource] [%recipient (su ;~(pfix sig fed:ag))] [%text so] == -- -- - diff --git a/pkg/arvo/lib/metadata.hoon b/pkg/arvo/lib/metadata.hoon index 32d5bf4a6..71c894b20 100644 --- a/pkg/arvo/lib/metadata.hoon +++ b/pkg/arvo/lib/metadata.hoon @@ -1,6 +1,7 @@ :: metadata: helpers for getting data from the metadata-store :: /- *metadata-store +/+ res=resource :: |_ =bowl:gall ++ app-paths-from-group @@ -21,6 +22,27 @@ ?. 
=(app-name.md-resource app-name) ~ `app-path.md-resource :: +++ peek-metadata + |= [app-name=term =group=resource:res =app=resource:res] + ^- (unit metadata) + =/ group-cord=cord (scot %t (spat (en-path:res group-resource))) + =/ app-cord=cord (scot %t (spat (en-path:res app-resource))) + =/ our=cord (scot %p our.bowl) + =/ now=cord (scot %da now.bowl) + .^ (unit metadata) + %gx (scot %p our.bowl) %metadata-store (scot %da now.bowl) + %metadata group-cord app-name app-cord /noun + == +:: +++ group-from-app-resource + |= [app=term =app=resource:res] + ^- (unit resource:res) + =/ app-path (en-path:res app-resource) + =/ group-paths (groups-from-resource app app-path) + ?~ group-paths + ~ + `(de-path:res i.group-paths) +:: ++ groups-from-resource |= =md-resource ^- (list group-path) diff --git a/pkg/arvo/lib/publish.hoon b/pkg/arvo/lib/publish.hoon index 2a035aad5..2db1066ad 100644 --- a/pkg/arvo/lib/publish.hoon +++ b/pkg/arvo/lib/publish.hoon @@ -1,240 +1,4 @@ /- sur=publish /+ elem-to-react-json ^? -=< [. sur] -=, sur -|% -:: -++ enjs - =, enjs:format - |% - :: - ++ tang - |= tan=^tang - %- wall - %- zing - %+ turn tan - |= a=^tank - (wash [0 80] a) - :: - ++ note-build - |= build=(each manx ^tang) - ^- json - ?: ?=(%.y -.build) - %- pairs - :~ success+b+%.y - result+(elem-to-react-json p.build) - == - %- pairs - :~ success+b+%.n - result+(tang p.build) - == - :: - ++ notebooks-list - |= [our=@p books=(map @tas notebook) subs=(map [@p @tas] notebook)] - ^- json - :- %a - %+ weld - %+ turn ~(tap by books) - |= [name=@tas book=notebook] - (notebook-short book) - %+ turn ~(tap by subs) - |= [[host=@p name=@tas] book=notebook] - (notebook-short book) - :: - ++ notebooks-map - |= [our=@p books=(map [@p @tas] notebook)] - ^- json - =/ notebooks-map=json - %- ~(rep by books) - |= [[[host=@p book-name=@tas] book=notebook] out=json] - ^- json - =/ host-ta (scot %p host) - ?~ out - (frond host-ta (frond book-name (notebook-short book))) - ?> ?=(%o -.out) - =/ books (~(get by p.out) host-ta) - ?~ books - :- %o - (~(put by p.out) host-ta (frond book-name (notebook-short book))) - ?> ?=(%o -.u.books) - =. p.u.books (~(put by p.u.books) book-name (notebook-short book)) - :- %o - (~(put by p.out) host-ta u.books) - =? 
notebooks-map ?=(~ notebooks-map) - [%o ~] - notebooks-map - :: - ++ notebook-short - |= book=notebook - ^- json - %- pairs - :~ title+s+title.book - date-created+(time date-created.book) - about+s+description.book - num-notes+(numb ~(wyt by notes.book)) - num-unread+(numb (count-unread notes.book)) - comments+b+comments.book - writers-group-path+s+(spat writers.book) - subscribers-group-path+s+(spat subscribers.book) - == - :: - ++ notebook-full - |= [host=@p book-name=@tas book=notebook] - ^- json - %- pairs - :~ title+s+title.book - about+s+description.book - date-created+(time date-created.book) - num-notes+(numb ~(wyt by notes.book)) - num-unread+(numb (count-unread notes.book)) - notes-by-date+(notes-by-date notes.book) - comments+b+comments.book - writers-group-path+s+(spat writers.book) - subscribers-group-path+s+(spat subscribers.book) - == - :: - ++ note-presentation - |= [book=notebook note-name=@tas not=note] - ^- (map @t json) - =/ notes-list=(list [@tas note]) - %+ sort ~(tap by notes.book) - |= [[@tas n1=note] [@tas n2=note]] - (gte date-created.n1 date-created.n2) - =/ idx=@ (need (find [note-name not]~ notes-list)) - =/ next=(unit [name=@tas not=note]) - ?: =(idx 0) ~ - `(snag (dec idx) notes-list) - =/ prev=(unit [name=@tas not=note]) - ?: =(+(idx) (lent notes-list)) ~ - `(snag +(idx) notes-list) - =/ current=json (note-full note-name not) - ?> ?=(%o -.current) - =. p.current (~(put by p.current) %prev-note ?~(prev ~ s+name.u.prev)) - =. p.current (~(put by p.current) %next-note ?~(next ~ s+name.u.next)) - =/ notes=(list [@t json]) [note-name current]~ - =? notes ?=(^ prev) - [[name.u.prev (note-short name.u.prev not.u.prev)] notes] - =? notes ?=(^ next) - [[name.u.next (note-short name.u.next not.u.next)] notes] - %- my - :~ notes+(pairs notes) - notes-by-date+a+(turn notes-list |=([name=@tas *] s+name)) - == - :: - ++ note-full - |= [note-name=@tas =note] - ^- json - %- pairs - :~ note-id+s+note-name - author+s+(scot %p author.note) - title+s+title.note - date-created+(time date-created.note) - snippet+s+snippet.note - file+s+file.note - num-comments+(numb ~(wyt by comments.note)) - comments+(comments-page:enjs comments.note 0 50) - read+b+read.note - pending+b+pending.note - == - :: - ++ notes-by-date - |= notes=(map @tas note) - ^- json - =/ notes-list=(list [@tas note]) - %+ sort ~(tap by notes) - |= [[@tas n1=note] [@tas n2=note]] - (gte date-created.n1 date-created.n2) - :- %a - %+ turn notes-list - |= [name=@tas note] - ^- json - [%s name] - :: - ++ note-short - |= [note-name=@tas =note] - ^- json - %- pairs - :~ note-id+s+note-name - author+s+(scot %p author.note) - title+s+title.note - date-created+(time date-created.note) - num-comments+(numb ~(wyt by comments.note)) - read+b+read.note - snippet+s+snippet.note - pending+b+pending.note - == - :: - ++ notes-page - |= [notes=(map @tas note) start=@ud length=@ud] - ^- (map @t json) - =/ notes-list=(list [@tas note]) - %+ sort ~(tap by notes) - |= [[@tas n1=note] [@tas n2=note]] - (gte date-created.n1 date-created.n2) - %- my - :~ notes-by-date+a+(turn notes-list |=([name=@tas *] s+name)) - notes+o+(^notes-list (scag length (slag start notes-list))) - == - :: - ++ notes-list - |= notes=(list [@tas note]) - ^- (map @t json) - %+ roll notes - |= [[name=@tas not=note] out-map=(map @t json)] - ^- (map @t json) - (~(put by out-map) name (note-short name not)) - :: - ++ comments-page - |= [comments=(map @da ^comment) start=@ud end=@ud] - ^- json - =/ coms=(list [@da ^comment]) - %+ sort ~(tap by comments) - |= [[d1=@da 
^comment] [d2=@da ^comment]] - (gte d1 d2) - %- comments-list - (scag end (slag start coms)) - :: - ++ comments-list - |= comments=(list [@da ^comment]) - ^- json - :- %a - (turn comments comment) - :: - ++ comment - |= [date=@da com=^comment] - ^- json - %+ frond - (scot %da date) - %- pairs - :~ author+s+(scot %p author.com) - date-created+(time date-created.com) - content+s+content.com - pending+b+pending.com - == - -- -:: -++ string-to-symbol - |= tap=tape - ^- @tas - %- crip - %+ turn tap - |= a=@ - ?: ?| &((gte a 'a') (lte a 'z')) - &((gte a '0') (lte a '9')) - == - a - ?: &((gte a 'A') (lte a 'Z')) - (add 32 a) - '-' -:: -++ count-unread - |= notes=(map @tas note) - ^- @ud - %- ~(rep by notes) - |= [[key=@tas val=note] count=@ud] - ?: read.val - count - +(count) -:: --- +sur diff --git a/pkg/arvo/lib/pull-hook.hoon b/pkg/arvo/lib/pull-hook.hoon index ce4954bb6..cb3b5e6f7 100644 --- a/pkg/arvo/lib/pull-hook.hoon +++ b/pkg/arvo/lib/pull-hook.hoon @@ -38,16 +38,24 @@ push-hook-name=term == :: -:: $state-0: state for the pull hook +:: $base-state-0: state for the pull hook :: :: .tracking: a map of resources we are pulling, and the ships that :: we are pulling them from. :: .inner-state: state given to internal door :: -+$ state-0 - $: %0 - tracking=(map resource ship) - inner-state=vase ++$ base-state-0 + $: tracking=(map resource ship) + inner-state=vase + == +:: ++$ state-0 [%0 base-state-0] +:: ++$ state-1 [%1 base-state-0] +:: ++$ versioned-state + $% state-0 + state-1 == :: ++ default @@ -133,7 +141,7 @@ ++ agent |* =config |= =(pull-hook config) - =| state-0 + =| state-1 =* state - ^- agent:gall =< @@ -149,12 +157,40 @@ [cards this] ++ on-load |= =old=vase - ^- [(list card:agent:gall) agent:gall] =/ old - !<(state-0 old-vase) - =^ cards pull-hook - (on-load:og inner-state.old) - [cards this(state old)] + !<(versioned-state old-vase) + =| cards=(list card:agent:gall) + |^ + ?- -.old + %1 + =^ og-cards pull-hook + (on-load:og inner-state.old) + [(weld cards og-cards) this(state old)] + :: + %0 + %_ $ + -.old %1 + :: + cards + (weld cards (missing-subscriptions tracking.old)) + == + == + ++ missing-subscriptions + |= tracking=(map resource ship) + ^- (list card:agent:gall) + %+ murn + ~(tap by tracking) + |= [rid=resource =ship] + ^- (unit card:agent:gall) + =/ =path + resource+(en-path:resource rid) + =/ =wire + (make-wire pull+path) + ?: (~(has by wex.bowl) [wire ship push-hook-name.config]) + ~ + `[%pass wire %agent [ship push-hook-name.config] %watch path] + -- + :: ++ on-save ^- vase =. 
inner-state diff --git a/pkg/arvo/lib/push-hook.hoon b/pkg/arvo/lib/push-hook.hoon index 9f9e2af3a..9553745eb 100644 --- a/pkg/arvo/lib/push-hook.hoon +++ b/pkg/arvo/lib/push-hook.hoon @@ -45,17 +45,24 @@ pull-hook-name=term == :: -:: $state-0: state for the push hook +:: $base-state-0: state for the push hook :: :: .sharing: resources that the push hook is proxying :: .inner-state: state given to internal door :: -+$ state-0 - $: %0 - sharing=(set resource) - inner-state=vase ++$ base-state-0 + $: sharing=(set resource) + inner-state=vase == :: ++$ state-0 [%0 base-state-0] +:: ++$ state-1 [%1 base-state-0] +:: ++$ versioned-state + $% state-0 + state-1 + == ++ push-hook |* =config $_ ^| @@ -144,7 +151,7 @@ ++ agent |* =config |= =(push-hook config) - =| state-0 + =| state-1 =* state - ^- agent:gall =< @@ -163,10 +170,39 @@ ++ on-load |= =old=vase =/ old - !<(state-0 old-vase) - =^ cards push-hook - (on-load:og inner-state.old) - `this(state old) + !<(versioned-state old-vase) + =| cards=(list card:agent:gall) + |^ + ?- -.old + %1 + =^ og-cards push-hook + (on-load:og inner-state.old) + [(weld cards og-cards) this(state old)] + :: + %0 + %_ $ + -.old %1 + :: + cards + =/ paths=(list path) + kicked-watches + ?~ paths cards + :_ cards + [%give %kick paths ~] + == + == + :: + ++ kicked-watches + ^- (list path) + %~ tap in + %+ roll + ~(val by sup.bowl) + |= [[=ship =path] out=(set path)] + ?~ path out + ?. (lth 4 (lent path)) + out + (~(put in out) path) + -- :: ++ on-save =. inner-state @@ -282,14 +318,15 @@ |= rid=resource =/ pax=path [%resource (en-path:resource rid)] - =/ paths=(list path) + =/ paths=(set path) + %- sy %+ turn (incoming-subscriptions pax) - |=([ship pox=path] pax) + |=([ship pox=path] pox) =. sharing (~(del in sharing) rid) :_ state - [%give %kick ~[pax] ~]~ + [%give %kick ~(tap in paths) ~]~ :: ++ revoke |= [ships=(set ship) rid=resource] @@ -334,9 +371,14 @@ =/ rid=(unit resource) (resource-for-update:og vase) ?~ rid ~ - =/ =path + =/ prefix=path resource+(en-path:resource u.rid) - [%give %fact ~[path] update-mark.config vase]~ + =/ paths=(list path) + %+ turn + (incoming-subscriptions prefix) + |=([ship pax=path] pax) + ?~ paths ~ + [%give %fact paths update-mark.config vase]~ :: ++ forward-update |= =vase diff --git a/pkg/arvo/lib/resource.hoon b/pkg/arvo/lib/resource.hoon index 07931f5a4..f84acb0b8 100644 --- a/pkg/arvo/lib/resource.hoon +++ b/pkg/arvo/lib/resource.hoon @@ -37,6 +37,13 @@ %- spat (en-path resource) :: +++ dejs-path + %- su:dejs:format + ;~ pfix + (jest '/ship/') + ;~((glue fas) ;~(pfix sig fed:ag) urs:ab) + == +:: ++ dejs =, dejs:format ^- $-(json resource) diff --git a/pkg/arvo/lib/verb.hoon b/pkg/arvo/lib/verb.hoon index ead541ef9..2737addfc 100644 --- a/pkg/arvo/lib/verb.hoon +++ b/pkg/arvo/lib/verb.hoon @@ -12,26 +12,26 @@ :: ++ on-init ^- (quip card:agent:gall agent:gall) - %- (print bowl "{}: on-init") + %- (print bowl |.("{}: on-init")) =^ cards agent on-init:ag [[(emit-event %on-init ~) cards] this] :: ++ on-save ^- vase - %- (print bowl "{}: on-save") + %- (print bowl |.("{}: on-save")) on-save:ag :: ++ on-load |= old-state=vase ^- (quip card:agent:gall agent:gall) - %- (print bowl "{}: on-load") + %- (print bowl |.("{}: on-load")) =^ cards agent (on-load:ag old-state) [[(emit-event %on-load ~) cards] this] :: ++ on-poke |= [=mark =vase] ^- (quip card:agent:gall agent:gall) - %- (print bowl "{}: on-poke with mark {}") + %- (print bowl |.("{}: on-poke with mark {}")) ?: ?=(%verb mark) ?- !<(?(%loud %bowl) vase) %loud `this(loud !loud) @@ 
-43,7 +43,7 @@ ++ on-watch |= =path ^- (quip card:agent:gall agent:gall) - %- (print bowl "{}: on-watch on path {}") + %- (print bowl |.("{}: on-watch on path {}")) =^ cards agent ?: ?=([%verb %events ~] path) [~ agent] @@ -53,7 +53,7 @@ ++ on-leave |= =path ^- (quip card:agent:gall agent:gall) - %- (print bowl "{}: on-leave on path {}") + %- (print bowl |.("{}: on-leave on path {}")) ?: ?=([%verb %event ~] path) [~ this] =^ cards agent (on-leave:ag path) @@ -62,39 +62,40 @@ ++ on-peek |= =path ^- (unit (unit cage)) - %- (print bowl "{}: on-peek on path {}") + %- (print bowl |.("{}: on-peek on path {}")) (on-peek:ag path) :: ++ on-agent |= [=wire =sign:agent:gall] ^- (quip card:agent:gall agent:gall) - %- (print bowl "{}: on-agent on wire {}, {<-.sign>}") + %- (print bowl |.("{}: on-agent on wire {}, {<-.sign>}")) =^ cards agent (on-agent:ag wire sign) [[(emit-event %on-agent wire -.sign) cards] this] :: ++ on-arvo |= [=wire =sign-arvo] ^- (quip card:agent:gall agent:gall) - %- (print bowl "{}: on-arvo on wire {}, {<[- +<]:sign-arvo>}") + %- %+ print bowl |. + "{}: on-arvo on wire {}, {<[- +<]:sign-arvo>}" =^ cards agent (on-arvo:ag wire sign-arvo) [[(emit-event %on-arvo wire [- +<]:sign-arvo) cards] this] :: ++ on-fail |= [=term =tang] ^- (quip card:agent:gall agent:gall) - %- (print bowl "{}: on-fail with term {}") + %- (print bowl |.("{}: on-fail with term {}")) =^ cards agent (on-fail:ag term tang) [[(emit-event %on-fail term) cards] this] -- :: ++ print - |= [=bowl:gall =tape] + |= [=bowl:gall render=(trap tape)] ^+ same =? . bowl-print %- (slog >bowl< ~) . ?. loud same - %- (slog leaf+tape ~) + %- (slog [%leaf $:render] ~) same :: ++ emit-event diff --git a/pkg/arvo/mar/belt.hoon b/pkg/arvo/mar/belt.hoon new file mode 100644 index 000000000..1c5a6e1ea --- /dev/null +++ b/pkg/arvo/mar/belt.hoon @@ -0,0 +1,29 @@ +:: belt: runtime belt structure +:: +|_ =belt:dill +++ grad %noun +:: +grab: convert from +:: +++ grab + |% + ++ noun belt:dill + ++ json + ^- $-(^json belt:dill) + =, dejs:format + %- of + :~ aro+(su (perk %d %l %r %u ~)) + bac+ul + ctl+(cu taft so) + del+ul + met+(cu taft so) + ret+ul + txt+(ar (cu taft so)) + == + -- +:: +grow: convert to +:: +++ grow + |% + ++ noun belt + -- +-- diff --git a/pkg/arvo/mar/blit.hoon b/pkg/arvo/mar/blit.hoon new file mode 100644 index 000000000..242678ea3 --- /dev/null +++ b/pkg/arvo/mar/blit.hoon @@ -0,0 +1,58 @@ +:: blit: runtime blit structure +:: +/+ base64 +:: +|_ =blit:dill +++ grad %noun +:: +grab: convert from +:: +++ grab + |% + ++ noun blit:dill + -- +:: +grow: convert to +:: +++ grow + |% + ++ noun blit + ++ json + ^- ^json + =, enjs:format + %+ frond -.blit + ?- -.blit + %bel b+& + %clr b+& + %hop (numb p.blit) + %lin a+(turn p.blit |=(c=@c s+(tuft c))) + %mor b+& + %url s+p.blit + :: + %sag + %- pairs + :~ 'path'^(path p.blit) + 'file'^s+(en:base64 (as-octs:mimes:html (jam q.blit))) + == + :: + %sav + %- pairs + :~ 'path'^(path p.blit) + 'file'^s+(en:base64 (as-octs:mimes:html q.blit)) + == + :: + %klr + :- %a + %+ turn p.blit + |= [=stye text=(list @c)] + %- pairs + :~ 'text'^a+(turn text |=(c=@c s+(tuft c))) + :: + :- 'stye' + %- pairs + :~ 'back'^[?~(. ~ s+.)]:p.q.stye + 'fore'^[?~(. 
~ s+.)]:q.q.stye + 'deco'^a+(turn ~(tap in p.stye) |=(d=deco ?~(d ~ s+d))) + == + == + == + -- +-- diff --git a/pkg/arvo/mar/graph/validator/link.hoon b/pkg/arvo/mar/graph/validator/link.hoon index 02de528df..a860b2015 100644 --- a/pkg/arvo/mar/graph/validator/link.hoon +++ b/pkg/arvo/mar/graph/validator/link.hoon @@ -3,6 +3,11 @@ ++ grow |% ++ noun i + ++ notification-kind + ?+ index.p.i ~ + [@ ~] `[%link 0] + [@ @ @ ~] `[%comment 1] + == -- ++ grab |% @@ -16,10 +21,16 @@ ?> ?=([[%text @] [%url @] ~] contents.p.ip) ip :: - :: comment on link post; comment text + :: comment on link post; container structure :: [@ @ ~] - ?> ?=([[%text @] ~] contents.p.ip) + ?> ?=(~ contents.p.ip) + ip + :: + :: comment on link post; comment text + :: + [@ @ @ ~] + ?> ?=(^ contents.p.ip) ip == -- diff --git a/pkg/arvo/mar/graph/validator/publish.hoon b/pkg/arvo/mar/graph/validator/publish.hoon new file mode 100644 index 000000000..cddf69073 --- /dev/null +++ b/pkg/arvo/mar/graph/validator/publish.hoon @@ -0,0 +1,59 @@ +/- *post +|_ i=indexed-post +++ grow + |% + ++ noun i + :: +notification-kind + :: Ignore all containers, only notify on content + :: + ++ notification-kind + ?+ index.p.i ~ + [@ %1 @ ~] `[%note 0] + [@ %2 @ @ ~] `[%comment 1] + == + -- +++ grab + |% + :: +noun: Validate publish post + :: + ++ noun + |= p=* + =/ ip ;;(indexed-post p) + ?+ index.p.ip !! + :: top level post must have no content + [@ ~] + ?> ?=(~ contents.p.ip) + ip + :: container for revisions + :: + [@ %1 ~] + ?> ?=(~ contents.p.ip) + ip + :: specific revision + :: first content is the title + :: revisions are numbered by the revision count + :: starting at one + [@ %1 @ ~] + ?> ?=([* * *] contents.p.ip) + ?> ?=(%text -.i.contents.p.ip) + ip + :: container for comments + :: + [@ %2 ~] + ?> ?=(~ contents.p.ip) + ip + :: container for comment revisions + :: + [@ %2 @ ~] + ?> ?=(~ contents.p.ip) + ip + :: specific comment revision + :: + [@ %2 @ @ ~] + ?> ?=(^ contents.p.ip) + ip + == + -- +:: +++ grad %noun +-- diff --git a/pkg/arvo/mar/hark/action.hoon b/pkg/arvo/mar/hark/action.hoon new file mode 100644 index 000000000..608f7f318 --- /dev/null +++ b/pkg/arvo/mar/hark/action.hoon @@ -0,0 +1,13 @@ +/+ *hark-store +|_ act=action +++ grad %noun +++ grow + |% + ++ noun act + -- +++ grab + |% + ++ noun action + ++ json action:dejs + -- +-- diff --git a/pkg/arvo/mar/hark/chat-hook-action.hoon b/pkg/arvo/mar/hark/chat-hook-action.hoon new file mode 100644 index 000000000..37987c956 --- /dev/null +++ b/pkg/arvo/mar/hark/chat-hook-action.hoon @@ -0,0 +1,13 @@ +/+ *hark-chat-hook +|_ act=action +++ grad %noun +++ grow + |% + ++ noun act + -- +++ grab + |% + ++ noun action + ++ json action:dejs + -- +-- diff --git a/pkg/arvo/mar/hark/chat-hook-update.hoon b/pkg/arvo/mar/hark/chat-hook-update.hoon new file mode 100644 index 000000000..b76de0bb5 --- /dev/null +++ b/pkg/arvo/mar/hark/chat-hook-update.hoon @@ -0,0 +1,16 @@ +/+ *hark-chat-hook +|_ upd=update +++ grad %noun +++ grow + |% + ++ noun upd + ++ json + %+ frond:enjs:format + %hark-chat-hook-update + (update:enjs upd) + -- +++ grab + |% + ++ noun update + -- +-- diff --git a/pkg/arvo/mar/hark/graph-hook-action.hoon b/pkg/arvo/mar/hark/graph-hook-action.hoon new file mode 100644 index 000000000..1dbc05a09 --- /dev/null +++ b/pkg/arvo/mar/hark/graph-hook-action.hoon @@ -0,0 +1,13 @@ +/+ *hark-graph-hook +|_ act=action +++ grad %noun +++ grow + |% + ++ noun act + -- +++ grab + |% + ++ noun action + ++ json action:dejs + -- +-- diff --git a/pkg/arvo/mar/hark/graph-hook-update.hoon 
b/pkg/arvo/mar/hark/graph-hook-update.hoon new file mode 100644 index 000000000..61c77dcd6 --- /dev/null +++ b/pkg/arvo/mar/hark/graph-hook-update.hoon @@ -0,0 +1,17 @@ +/+ *hark-graph-hook +|_ upd=update +++ grad %noun +++ grow + |% + ++ noun upd + ++ json + %+ frond:enjs:format + %hark-graph-hook-update + (update:enjs upd) + -- +++ grab + |% + ++ noun update + ++ json update:dejs + -- +-- diff --git a/pkg/arvo/mar/hark/group-hook-action.hoon b/pkg/arvo/mar/hark/group-hook-action.hoon new file mode 100644 index 000000000..05b9d15eb --- /dev/null +++ b/pkg/arvo/mar/hark/group-hook-action.hoon @@ -0,0 +1,13 @@ +/+ *hark-group-hook +|_ act=action +++ grad %noun +++ grow + |% + ++ noun act + -- +++ grab + |% + ++ noun action + ++ json action:dejs + -- +-- diff --git a/pkg/arvo/mar/hark/group-hook-update.hoon b/pkg/arvo/mar/hark/group-hook-update.hoon new file mode 100644 index 000000000..95063a5a0 --- /dev/null +++ b/pkg/arvo/mar/hark/group-hook-update.hoon @@ -0,0 +1,16 @@ +/+ *hark-group-hook +|_ upd=update +++ grad %noun +++ grow + |% + ++ noun upd + ++ json + %+ frond:enjs:format + %hark-group-hook-update + (update:enjs upd) + -- +++ grab + |% + ++ noun update + -- +-- diff --git a/pkg/arvo/mar/hark/update.hoon b/pkg/arvo/mar/hark/update.hoon new file mode 100644 index 000000000..8aeff8f5a --- /dev/null +++ b/pkg/arvo/mar/hark/update.hoon @@ -0,0 +1,15 @@ +/+ *hark-store +|_ upd=update +++ grad %noun +++ grow + |% + ++ noun upd + ++ json + %+ frond:enjs:format 'harkUpdate' + (update:enjs upd) + -- +++ grab + |% + ++ noun update + -- +-- diff --git a/pkg/arvo/mar/invite/action.hoon b/pkg/arvo/mar/invite/action.hoon index a0baf8b6d..90a6d0c5c 100644 --- a/pkg/arvo/mar/invite/action.hoon +++ b/pkg/arvo/mar/invite/action.hoon @@ -1,6 +1,6 @@ /+ *invite-json =, dejs:format -|_ act=invite-action +|_ act=action ++ grad %noun ++ grow |% @@ -8,7 +8,7 @@ -- ++ grab |% - ++ noun invite-action + ++ noun action ++ json |= jon=^json (json-to-action jon) diff --git a/pkg/arvo/mar/invite/update.hoon b/pkg/arvo/mar/invite/update.hoon index 534e304d8..af0abca82 100644 --- a/pkg/arvo/mar/invite/update.hoon +++ b/pkg/arvo/mar/invite/update.hoon @@ -1,15 +1,15 @@ +/- store=invite-store /+ *invite-json -|_ upd=invite-update +|_ =update:store ++ grad %noun ++ grow |% - ++ noun upd - ++ json (update-to-json upd) + ++ noun update + ++ json (update-to-json update) -- :: ++ grab |% - ++ noun invite-update + ++ noun update:store -- -:: -- diff --git a/pkg/arvo/mar/observe/action.hoon b/pkg/arvo/mar/observe/action.hoon new file mode 100644 index 000000000..466debd69 --- /dev/null +++ b/pkg/arvo/mar/observe/action.hoon @@ -0,0 +1,13 @@ +/- sur=observe-hook +|_ =action:sur +++ grad %noun +++ grow + |% + ++ noun action + -- +:: +++ grab + |% + ++ noun action:sur + -- +-- diff --git a/pkg/arvo/mar/publish/action.hoon b/pkg/arvo/mar/publish/action.hoon index 2e5c1ce3c..f346ed816 100644 --- a/pkg/arvo/mar/publish/action.hoon +++ b/pkg/arvo/mar/publish/action.hoon @@ -1,6 +1,6 @@ :: :::: /hoon/action/publish/mar - :: + :: tombstoned, now unused /- *publish =, format :: @@ -16,121 +16,5 @@ ++ grab |% ++ noun action - ++ json - |= jon=^json - =, dejs:format - ;; action - |^ %. 
jon - %- of - :~ new-book+new-book - new-note+new-note - new-comment+new-comment - edit-book+edit-book - edit-note+edit-note - edit-comment+edit-comment - del-book+del-book - del-note+del-note - del-comment+del-comment - subscribe+subscribe - unsubscribe+unsubscribe - read+read - groupify+groupify - == - :: - ++ new-book - %- ot - :~ book+so - title+so - about+so - coms+bo - group+group-info - == - :: - ++ new-note - %- ot - :~ who+(su fed:ag) - book+so - note+so - title+so - body+so - == - :: - ++ new-comment - %- ot - :~ who+(su fed:ag) - book+so - note+so - body+so - == - :: - ++ edit-book - %- ot - :~ book+so - title+so - about+so - coms+bo - group+(mu group-info) - == - :: - ++ edit-note - %- ot - :~ who+(su fed:ag) - book+so - note+so - title+so - body+so - == - :: - ++ edit-comment - %- ot - :~ who+(su fed:ag) - book+so - note+so - comment+so - body+so - == - :: - ++ del-book (ot book+so ~) - :: - ++ del-note (ot who+(su fed:ag) book+so note+so ~) - :: - ++ del-comment - %- ot - :~ who+(su fed:ag) - book+so - note+so - comment+so - == - ++ subscribe - %- ot - :~ who+(su fed:ag) - book+so - == - ++ unsubscribe - %- ot - :~ who+(su fed:ag) - book+so - == - ++ read - %- ot - :~ who+(su fed:ag) - book+so - note+so - == - ++ groupify - %- ot - :~ book+so - target+(mu pa) - inclusive+bo - == - ++ group-info - %- ot - :~ group-path+pa - invitees+set-ship - use-preexisting+bo - make-managed+bo - == - ++ set-ship (as (su fed:ag)) - -- -- -- diff --git a/pkg/arvo/mar/publish/info.hoon b/pkg/arvo/mar/publish/info.hoon index 82ce342ef..288c3609d 100644 --- a/pkg/arvo/mar/publish/info.hoon +++ b/pkg/arvo/mar/publish/info.hoon @@ -1,25 +1,12 @@ :: :::: /hoon/info/publish/mar + :: tombstoned, now unused :: /- *publish !: |_ info=notebook-info :: :: -++ grow - |% - ++ mime - :- /text/x-publish-info - (as-octs:mimes:html (of-wain:format txt)) - ++ txt - ^- wain - :~ (cat 3 'title: ' title.info) - (cat 3 'description: ' description.info) - (cat 3 'comments: ' ?:(comments.info 'on' 'off')) - (cat 3 'writers: ' (spat writers.info)) - (cat 3 'subscribers: ' (spat subscribers.info)) - == - -- ++ grab |% ++ mime diff --git a/pkg/arvo/mar/publish/primary-delta.hoon b/pkg/arvo/mar/publish/primary-delta.hoon index d2b01052d..41b331c08 100644 --- a/pkg/arvo/mar/publish/primary-delta.hoon +++ b/pkg/arvo/mar/publish/primary-delta.hoon @@ -13,73 +13,5 @@ ++ grow |% ++ noun del - ++ json - %+ frond:enjs:format %publish-update - %+ frond:enjs:format -.del - ?- -.del - %add-book - %+ frond:enjs:format (scot %p host.del) - %+ frond:enjs:format book.del - (notebook-short:enjs data.del) - :: - %add-note - %+ frond:enjs:format (scot %p host.del) - %+ frond:enjs:format book.del - (note-full:enjs note.del data.del) - :: - %add-comment - %- pairs:enjs:format - :~ host+s+(scot %p host.del) - book+s+book.del - note+s+note.del - comment+(comment:enjs comment-date.del data.del) - == - :: - %edit-book - %+ frond:enjs:format (scot %p host.del) - %+ frond:enjs:format book.del - (notebook-short:enjs data.del) - :: - %edit-note - %+ frond:enjs:format (scot %p host.del) - %+ frond:enjs:format book.del - (note-full:enjs note.del data.del) - :: - %edit-comment - %- pairs:enjs:format - :~ host+s+(scot %p host.del) - book+s+book.del - note+s+note.del - comment+(comment:enjs comment-date.del data.del) - == - :: - %del-book - %- pairs:enjs:format - :~ host+s+(scot %p host.del) - book+s+book.del - == - :: - %del-note - %- pairs:enjs:format - :~ host+s+(scot %p host.del) - book+s+book.del - note+s+note.del - == - :: - %del-comment - %- 
pairs:enjs:format - :~ host+s+(scot %p host.del) - book+s+book.del - note+s+note.del - comment+s+(scot %da comment.del) - == - :: - %read - %- pairs:enjs:format - :~ host+s+(scot %p who.del) - book+s+book.del - note+s+note.del - == - == -- -- diff --git a/pkg/arvo/ren/publish/comments.hoon b/pkg/arvo/ren/publish/comments.hoon deleted file mode 100644 index a793df78c..000000000 --- a/pkg/arvo/ren/publish/comments.hoon +++ /dev/null @@ -1,14 +0,0 @@ -/- publish -/+ publish -/= result - /^ (list comment:publish) - /; - |= comments=(map knot comment:publish) - ^- (list [comment-info:publish @t]) - %+ sort ~(val by comments) - |= [a=comment:publish b=comment:publish] - ^- ? - (gte date-created.info.a date-created.info.b) -:: - /_ /publish-comment/ -result diff --git a/pkg/arvo/ren/publish/post.hoon b/pkg/arvo/ren/publish/post.hoon deleted file mode 100644 index 717facddc..000000000 --- a/pkg/arvo/ren/publish/post.hoon +++ /dev/null @@ -1,20 +0,0 @@ -/- publish -/+ publish, cram, elem-to-react-json -/= args /$ ,[beam *] -/= result - /^ [post-info:publish manx @t] - /; - |= $: post-front=(map knot cord) - post-content=manx - post-raw=wain - ~ - == - :+ (front-to-post-info:publish post-front) - post-content - (of-wain:format (slag 11 post-raw)) -:: - /. /&front&/udon/ - /&elem&/udon/ - /&txt&/udon/ - == -result diff --git a/pkg/arvo/ren/run.hoon b/pkg/arvo/ren/run.hoon deleted file mode 100644 index 104669204..000000000 --- a/pkg/arvo/ren/run.hoon +++ /dev/null @@ -1,10 +0,0 @@ -:: For testing purposes -:: -:::: /hoon/run/ren - :: -/? 310 -/, /ren/run /~ ~|(%loop !!) - / /!noun/ -== -~& run+-.- -~ diff --git a/pkg/arvo/ren/test-gen.hoon b/pkg/arvo/ren/test-gen.hoon deleted file mode 100644 index ebe10ffa6..000000000 --- a/pkg/arvo/ren/test-gen.hoon +++ /dev/null @@ -1,4 +0,0 @@ -/+ test-runner -/= test-core /!noun/ -:: -(get-test-arms:test-runner !>(test-core)) diff --git a/pkg/arvo/sur/graph-store.hoon b/pkg/arvo/sur/graph-store.hoon index edcfb4135..e2570fed8 100644 --- a/pkg/arvo/sur/graph-store.hoon +++ b/pkg/arvo/sur/graph-store.hoon @@ -42,7 +42,7 @@ :: +$ update-0 $% logged-update-0 - [%add-graph =resource =graph mark=(unit mark)] + [%add-graph =resource =graph mark=(unit mark) overwrite=?] [%remove-graph =resource] :: [%add-tag =term =resource] diff --git a/pkg/arvo/sur/hark-chat-hook.hoon b/pkg/arvo/sur/hark-chat-hook.hoon new file mode 100644 index 000000000..a5e9bf875 --- /dev/null +++ b/pkg/arvo/sur/hark-chat-hook.hoon @@ -0,0 +1,15 @@ +^? +|% ++$ action + $% [?(%listen %ignore) chat=path] + [%set-mentions mentions=?] + == +:: ++$ update + $% + action + $: %initial + watching=(set path) + == + == +-- diff --git a/pkg/arvo/sur/hark-graph-hook.hoon b/pkg/arvo/sur/hark-graph-hook.hoon new file mode 100644 index 000000000..58f42b3ac --- /dev/null +++ b/pkg/arvo/sur/hark-graph-hook.hoon @@ -0,0 +1,20 @@ +/- *resource, graph-store, post +^? +|% ++$ action + $% + [?(%listen %ignore) graph=resource =index:post] + [%set-mentions mentions=?] + [%set-watch-on-self watch-on-self=?] + == +:: ++$ update + $% + action + $: %initial + watching=(set [resource index:post]) + mentions=_& + watch-on-self=_& + == + == +-- diff --git a/pkg/arvo/sur/hark-group-hook.hoon b/pkg/arvo/sur/hark-group-hook.hoon new file mode 100644 index 000000000..f44ed63bc --- /dev/null +++ b/pkg/arvo/sur/hark-group-hook.hoon @@ -0,0 +1,11 @@ +/- *resource +^? 
+|% ++$ action + [?(%listen %ignore) group=resource] +:: ++$ update + $% action + [%initial watching=(set resource)] + == +-- diff --git a/pkg/arvo/sur/hark-store.hoon b/pkg/arvo/sur/hark-store.hoon new file mode 100644 index 000000000..6d2783709 --- /dev/null +++ b/pkg/arvo/sur/hark-store.hoon @@ -0,0 +1,52 @@ +/- *resource, graph-store, post, group-store, metadata-store, chat-store +^? +|% ++$ index + $% [%graph group=resource graph=resource module=@t description=@t] + [%group group=resource description=@t] + [%chat chat=path mention=?] + == +:: ++$ group-contents + $~ [%add-members *resource ~] + $% $>(?(%add-members %remove-members) update:group-store) + metadata-action:metadata-store + == +:: ++$ notification + [date=@da read=? =contents] +:: ++$ contents + $% [%graph =(list post:post)] + [%group =(list group-contents)] + [%chat =(list envelope:chat-store)] + == +:: ++$ timebox + (map index notification) +:: ++$ notifications + ((mop @da timebox) gth) +:: ++$ action + $% [%add =index =notification] + [%archive time=@da index] + [%read time=@da index] + [%read-index index] + [%unread time=@da index] + [%set-dnd dnd=?] + [%seen ~] + == +:: +++ indexed-notification + [index notification] +:: ++$ update + $% action + [%more more=(list update)] + [%added time=@da =index =notification] + [%timebox time=@da archived=? =(list [index notification])] + [%count count=@ud] + [%unreads unreads=(list [index @ud])] + == +-- diff --git a/pkg/arvo/sur/invite-store.hoon b/pkg/arvo/sur/invite-store.hoon index ff8aa46d6..2d2d1e8e9 100644 --- a/pkg/arvo/sur/invite-store.hoon +++ b/pkg/arvo/sur/invite-store.hoon @@ -1,45 +1,49 @@ +/- *resource |% ++ serial @uvH :: +$ invite $: =ship :: ship to subscribe to upon accepting invite app=@tas :: app to subscribe to upon accepting invite - =path :: path to subscribe to upon accepting invite + =resource :: resource to subscribe to upon accepting invite recipient=ship :: recipient to receive invite text=cord :: text to describe the invite == :: -:: +invites: each application using invites creates its own path that ++$ multi-invite + $: =ship :: ship to subscribe to upon accepting invite + app=@tas :: app to subscribe to upon accepting invite + =resource :: resource to subscribe to upon accepting invite + recipients=(set ship) :: recipient to receive invite + text=cord :: text to describe the invite + == +:: +:: +invites: each application using invites creates its own resource that :: contains a map of serial to invite. 
this allows it to only receive :: invites that it is concerned with :: -+$ invites (map path invitatory) :: main data structure ++$ invites (map term invitatory) :: main data structure :: +$ invitatory (map serial invite) :: containing or conveying an invitation :: -:: +$ invite-base - $% [%create =path] :: create a path - [%delete =path] :: delete a path - [%invite =path uid=serial =invite] :: receive an invite at path/uid - [%decline =path uid=serial] :: decline an invite at path/uid + $% [%create =term] :: create a resource + [%delete =term] :: delete a resource + [%invite =term uid=serial =invite] :: receive an invite at term/uid + [%decline =term uid=serial] :: decline an invite at term/uid == :: -+$ invite-action ++$ action $% invite-base - [%accept =path uid=serial] :: accept an invite at path/uid + [%accept =term uid=serial] :: accept an invite at term/uid + [%invites =term uid=serial invites=multi-invite] == :: -+$ invite-update ++$ update $% invite-base [%initial =invites] - [%invitatory =invitatory] :: receive invitatory - [%accepted =path uid=serial =invite] :: an invite has been accepted - == -:: -+$ invite-diff - $% [%invite-initial invites] - [%invite-update invite-update] + [%invitatory =invitatory] :: receive invitatory + [%accepted =term uid=serial =invite] :: an invite has been accepted == -- diff --git a/pkg/arvo/sur/observe-hook.hoon b/pkg/arvo/sur/observe-hook.hoon new file mode 100644 index 000000000..a424c9622 --- /dev/null +++ b/pkg/arvo/sur/observe-hook.hoon @@ -0,0 +1,7 @@ +|% ++$ observer [app=term =path thread=term] ++$ action + $% [%watch =observer] + [%ignore =observer] + == +-- diff --git a/pkg/arvo/sur/post.hoon b/pkg/arvo/sur/post.hoon index c5c354615..4855a3a68 100644 --- a/pkg/arvo/sur/post.hoon +++ b/pkg/arvo/sur/post.hoon @@ -28,6 +28,7 @@ :: +$ content $% [%text text=cord] + [%mention =ship] [%url url=cord] [%code expression=cord output=(list tank)] [%reference =uid] diff --git a/pkg/arvo/sys/hoon.hoon b/pkg/arvo/sys/hoon.hoon index a1c02abfb..c3b73520f 100644 --- a/pkg/arvo/sys/hoon.hoon +++ b/pkg/arvo/sys/hoon.hoon @@ -2150,7 +2150,12 @@ :: 3f: scrambling :: :: 3g: molds and mold builders :: :: :: -~% %tri + ~ +~% %tri + + == + %year year + %yore yore + %ob ob + == |% :: :::: 3a: signed and modular ints :: @@ -3827,13 +3832,20 @@ -- :: ++ ob + ~% %ob ..ob + == + %fein fein + %fynd fynd + == |% + :: :: +fein: conceal structure, v3. :: :: +fein conceals planet-sized atoms. The idea is that it should not be :: trivial to tell which planet a star has spawned under. :: ++ fein + ~/ %fein |= pyn/@ ^- @ ?: &((gte pyn 0x1.0000) (lte pyn 0xffff.ffff)) (add 0x1.0000 (feis (sub pyn 0x1.0000))) @@ -3849,6 +3861,7 @@ :: Restores obfuscated values that have been enciphered with +fein. :: ++ fynd + ~/ %fynd |= cry/@ ^- @ ?: &((gte cry 0x1.0000) (lte cry 0xffff.ffff)) (add 0x1.0000 (tail (sub cry 0x1.0000))) @@ -3858,7 +3871,6 @@ %+ con hi $(cry lo) cry - :: :: +feis: a four-round generalised Feistel cipher over the domain :: [0, 2^32 - 2^16 - 1]. 
:: @@ -4039,12 +4051,12 @@ ++ pint {p/{p/@ q/@} q/{p/@ q/@}} :: line+column range ++ rule _|:($:nail $:edge) :: parsing rule ++ spot {p/path q/pint} :: range in file -++ tone $% {$0 p/*} :: success - {$1 p/(list)} :: blocks - {$2 p/(list {@ta *})} :: error report +++ tone $% {$0 product/*} :: success + {$1 block/*} :: single block + {$2 trace/(list {@ta *})} :: error report == :: ++ toon $% {$0 p/*} :: success - {$1 p/(list)} :: blocks + {$1 p/*} :: block {$2 p/(list tank)} :: stack trace == :: ++ wonk =+ veq=$:edge :: product from edge @@ -4073,6 +4085,7 @@ ~% %qua + == + %mure mure %mute mute %show show == @@ -6373,196 +6386,294 @@ :: :::: 4n: virtualization :: +:: +mack: untyped, scry-less, unitary virtualization +:: ++ mack - |= {sub/* fol/*} + |= [sub=* fol=*] ^- (unit) - =+ ton=(mink [sub fol] |=({* *} ~)) - ?.(?=({$0 *} ton) ~ [~ p.ton]) + =/ ton (mink [sub fol] |~(^ ~)) + ?.(?=(%0 -.ton) ~ `product.ton) +:: +mink: raw virtual nock :: ++ mink !. ~/ %mink - |= {{sub/* fol/*} gul/$-({* *} (unit (unit)))} - =+ tax=*(list {@ta *}) - |- ^- tone - ?@ fol - [%2 tax] - ?: ?=(^ -.fol) - =+ hed=$(fol -.fol) - ?: ?=($2 -.hed) - hed - =+ tal=$(fol +.fol) - ?- -.tal - $0 ?-(-.hed $0 [%0 p.hed p.tal], $1 hed) - $1 ?-(-.hed $0 tal, $1 [%1 (weld p.hed p.tal)]) - $2 tal + |= $: [subject=* formula=*] + scry=$-(^ (unit (unit))) + == + =| trace=(list [@ta *]) + |^ ^- tone + ?+ formula [%2 trace] + [^ *] + =/ head $(formula -.formula) + ?. ?=(%0 -.head) head + =/ tail $(formula +.formula) + ?. ?=(%0 -.tail) tail + [%0 product.head product.tail] + :: + [%0 axis=@] + =/ part (frag axis.formula subject) + ?~ part [%2 trace] + [%0 u.part] + :: + [%1 constant=*] + [%0 constant.formula] + :: + [%2 subject=* formula=*] + =/ subject $(formula subject.formula) + ?. ?=(%0 -.subject) subject + =/ formula $(formula formula.formula) + ?. ?=(%0 -.formula) formula + %= $ + subject product.subject + formula product.formula + == + :: + [%3 argument=*] + =/ argument $(formula argument.formula) + ?. ?=(%0 -.argument) argument + [%0 .?(product.argument)] + :: + [%4 argument=*] + =/ argument $(formula argument.formula) + ?. ?=(%0 -.argument) argument + ?^ product.argument [%2 trace] + [%0 .+(product.argument)] + :: + [%5 a=* b=*] + =/ a $(formula a.formula) + ?. ?=(%0 -.a) a + =/ b $(formula b.formula) + ?. ?=(%0 -.b) b + [%0 =(product.a product.b)] + :: + [%6 test=* yes=* no=*] + =/ result $(formula test.formula) + ?. ?=(%0 -.result) result + ?+ product.result + [%2 trace] + %& $(formula yes.formula) + %| $(formula no.formula) + == + :: + [%7 subject=* next=*] + =/ subject $(formula subject.formula) + ?. ?=(%0 -.subject) subject + %= $ + subject product.subject + formula next.formula + == + :: + [%8 head=* next=*] + =/ head $(formula head.formula) + ?. ?=(%0 -.head) head + %= $ + subject [product.head subject] + formula next.formula + == + :: + [%9 axis=@ core=*] + =/ core $(formula core.formula) + ?. ?=(%0 -.core) core + =/ arm (frag axis.formula product.core) + ?~ arm [%2 trace] + %= $ + subject product.core + formula u.arm + == + :: + [%10 [axis=@ value=*] target=*] + ?: =(0 axis.formula) [%2 trace] + =/ target $(formula target.formula) + ?. ?=(%0 -.target) target + =/ value $(formula value.formula) + ?. ?=(%0 -.value) value + =/ mutant=(unit *) + (edit axis.formula product.target product.value) + ?~ mutant [%2 trace] + [%0 u.mutant] + :: + [%11 tag=@ next=*] + =/ next $(formula next.formula) + ?. 
?=(%0 -.next) next + :- %0 + .* subject + [11 tag.formula 1 product.next] + :: + [%11 [tag=@ clue=*] next=*] + =/ clue $(formula clue.formula) + ?. ?=(%0 -.clue) clue + =/ next + =? trace + ?=(?(%hunk %hand %lose %mean %spot) tag.formula) + [[tag.formula product.clue] trace] + $(formula next.formula) + ?. ?=(%0 -.next) next + :- %0 + .* subject + [11 [tag.formula 1 product.clue] 1 product.next] + :: + [%12 ref=* path=*] + =/ ref $(formula ref.formula) + ?. ?=(%0 -.ref) ref + =/ path $(formula path.formula) + ?. ?=(%0 -.path) path + =/ result (scry product.ref product.path) + ?~ result + [%1 product.path] + ?~ u.result + [%2 [%hunk product.ref product.path] trace] + [%0 u.u.result] + == + :: + ++ frag + |= [axis=@ noun=*] + ^- (unit) + ?: =(0 axis) ~ + |- ^- (unit) + ?: =(1 axis) `noun + ?@ noun ~ + =/ pick (cap axis) + %= $ + axis (mas axis) + noun ?-(pick %2 -.noun, %3 +.noun) == - ?+ fol - [%2 tax] :: - {$0 b/@} - ?: =(0 b.fol) [%2 tax] - ?: =(1 b.fol) [%0 sub] - ?: ?=(@ sub) [%2 tax] - =+ [now=(cap b.fol) lat=(mas b.fol)] - $(b.fol lat, sub ?:(=(2 now) -.sub +.sub)) - :: - {$1 b/*} - [%0 b.fol] - :: - {$2 b/{^ *}} - =+ ben=$(fol b.fol) - ?. ?=($0 -.ben) ben - ?>(?=(^ p.ben) $(sub -.p.ben, fol +.p.ben)) - ::?>(?=(^ p.ben) $([sub fol] p.ben) - :: - {$3 b/*} - =+ ben=$(fol b.fol) - ?. ?=($0 -.ben) ben - [%0 .?(p.ben)] - :: - {$4 b/*} - =+ ben=$(fol b.fol) - ?. ?=($0 -.ben) ben - ?. ?=(@ p.ben) [%2 tax] - [%0 .+(p.ben)] - :: - {$5 b/* c/*} - =+ hed=$(fol b.fol) - ?. ?=($0 -.hed) hed - =+ tal=$(fol c.fol) - ?. ?=($0 -.tal) tal - [%0 =(p.hed p.tal)] - :: - {$6 b/* c/* d/*} - =+ ben=$(fol b.fol) - ?. ?=($0 -.ben) ben - ?: =(& p.ben) $(fol c.fol) - ?: =(| p.ben) $(fol d.fol) - [%2 tax] - :: - {$7 b/* c/*} - =+ ben=$(fol b.fol) - ?. ?=($0 -.ben) ben - $(sub p.ben, fol c.fol) - :: - {$8 b/* c/*} - =+ ben=$(fol b.fol) - ?. ?=($0 -.ben) ben - $(sub [p.ben sub], fol c.fol) - :: - {$9 b/* c/*} - =+ ben=$(fol c.fol) - ?. ?=($0 -.ben) ben - =. sub p.ben - =+ lof=$(fol [0 b.fol]) - ?. ?=($0 -.lof) lof - $(fol p.lof) - :: - {$10 {b/@ c/*} d/*} - =+ bog=$(fol d.fol) - ?. ?=({$0 *} bog) bog - =+ lot=$(fol c.fol) - ?. ?=({$0 *} lot) lot - =+ [axe=b.fol big=p.bog lit=p.lot] - ^- tone - :- %0 - |- ^- p/* - ?: =(2 axe) [lit +.big] - ?: =(3 axe) [-.big lit] - =+ mor=(mas axe) - ?: =(2 (cap axe)) - [$(big -.big, axe mor) +.big] - [-.big $(big +.big, axe mor)] - :: - {$11 @ c/*} $(fol c.fol) - {$11 {b/* c/*} d/*} - =+ ben=$(fol c.fol) - ?. ?=($0 -.ben) ben - ?: ?=(?($hunk $hand $lose $mean $spot) b.fol) - $(fol d.fol, tax [[b.fol p.ben] tax]) - $(fol d.fol) - :: - {$12 b/* c/*} - =+ ref=$(fol b.fol) - =+ ben=$(fol c.fol) - ?. ?=($0 -.ref) ref - ?. ?=($0 -.ben) ben - =+ val=(gul p.ref p.ben) - ?~(val [%1 p.ben ~] ?~(u.val [%2 [[%hunk (mush p.ben)] tax]] [%0 u.u.val])) - == + ++ edit + |= [axis=@ target=* value=*] + ^- (unit) + ?: =(1 axis) `value + ?@ target ~ + =/ pick (cap axis) + =/ mutant + %= $ + axis (mas axis) + target ?-(pick %2 -.target, %3 +.target) + == + ?~ mutant ~ + ?- pick + %2 `[u.mutant +.target] + %3 `[-.target u.mutant] + == + -- +:: +mock: virtual nock :: ++ mock - |= {{sub/* fol/*} gul/$-({* *} (unit (unit)))} + |= [[sub=* fol=*] gul=$-(^ (unit (unit)))] (mook (mink [sub fol] gul)) +:: +mook: convert $tone to $toon, rendering stack frames :: ++ mook - |= ton/tone + |= ton=tone ^- toon - ?. ?=({$2 *} ton) ton - :- %2 - :: =. p.ton (moop p.ton) - =+ yel=(lent p.ton) - =. p.ton - ?. (gth yel 1.024) p.ton + ?. 
?=([%2 *] ton) + ton + |^ [%2 (turn skip rend)] + :: + ++ skip + ^+ trace.ton + =/ yel (lent trace.ton) + ?. (gth yel 1.024) trace.ton %+ weld - (scag 512 p.ton) - ^- (list {@ta *}) - :_ (slag (sub yel 512) p.ton) + (scag 512 trace.ton) + ^+ trace.ton + :_ (slag (sub yel 512) trace.ton) :- %lose - %+ rap 3 - "[skipped {(scow %ud (sub yel 1.024))} frames]" - |- ^- (list tank) - ?~ p.ton ~ - =+ rep=$(p.ton t.p.ton) - ?+ -.i.p.ton rep - $hunk [(tank +.i.p.ton) rep] - $lose [[%leaf (rip 3 (@ +.i.p.ton))] rep] - $hand [[%leaf (scow %p (mug +.i.p.ton))] rep] - $mean :_ rep - ?@ +.i.p.ton [%leaf (rip 3 (@ +.i.p.ton))] - =+ mac=(mack +.i.p.ton +<.i.p.ton) - ?~(mac [%leaf "####"] (tank u.mac)) - $spot :_ rep - =+ sot=(spot +.i.p.ton) - :+ %rose [":" ~ ~] - :~ (smyt p.sot) - => [ud=|=(a/@u (scow %ud a)) q.sot] - leaf+"<[{(ud p.p)} {(ud q.p)}].[{(ud p.q)} {(ud q.q)}]>" - == == + (crip "[skipped {(scow %ud (sub yel 1.024))} frames]") + :: + :: +rend: raw stack frame to tank + :: + :: $% [%hunk ref=* path] :: failed scry ([~ ~]) + :: [%lose cord] :: skipped frames + :: [%hand *] :: mug any + :: [%mean $@(cord (trap tank))] :: ~_ et al + :: [%spot spot] :: source location + :: == + :: + ++ rend + |= [tag=@ta dat=*] + ^- tank + ?+ tag + :: + leaf+"mook.{(rip 3 tag)}" + :: + %hunk + ?@ dat leaf+"mook.hunk" + =/ sof=(unit path) ((soft path) +.dat) + ?~ sof leaf+"mook.hunk" + (smyt u.sof) + :: + %lose + ?^ dat leaf+"mook.lose" + leaf+(rip 3 dat) + :: + %hand + leaf+(scow %p (mug dat)) + :: + %mean + ?@ dat leaf+(rip 3 dat) + =/ mac (mack dat -.dat) + ?~ mac leaf+"####" + =/ sof ((soft tank) u.mac) + ?~ sof leaf+"mook.mean" + u.sof + :: + %spot + =/ sof=(unit spot) ((soft spot) dat) + ?~ sof leaf+"mook.spot" + :+ %rose [":" ~ ~] + :~ (smyt p.u.sof) + =* l p.q.u.sof + =* r q.q.u.sof + =/ ud |=(a=@u (scow %ud a)) + leaf+"<[{(ud p.l)} {(ud q.l)}].[{(ud p.r)} {(ud q.r)}]>" + == + == + -- +:: +mole: typed unitary virtual :: -++ mush :: sane name to leaf - |= val/* - ^- tank - :+ %rose - [['/' ~] ['/' ~] ~] - (turn ((list @ta) val) |=(a/@ta [%leaf (trip a)])) +++ mole + ~/ %mole + |* tap=(trap) + ^- (unit _$:tap) + =/ mur (mure tap) + ?~(mur ~ `$:tap) +:: +mong: virtual slam :: ++ mong - |= {{gat/* sam/*} gul/$-({* *} (unit (unit)))} + |= [[gat=* sam=*] gul=$-(^ (unit (unit)))] ^- toon - ?. &(?=(^ gat) ?=(^ +.gat)) - [%2 ~] + ?. 
?=([* ^] gat) [%2 ~] (mock [gat(+< sam) %9 2 %0 1] gul) +:: +mule: typed virtual :: -++ mule :: typed virtual +++ mule ~/ %mule - =+ taq=|.(**) - |@ ++ $ - =+ mud=(mute taq) - ?- -.mud - %& [%& p=$:taq] - %| [%| p=p.mud] - == - -- + |* tap=(trap) + =/ mud (mute tap) + ?- -.mud + %& [%& p=$:tap] + %| [%| p=p.mud] + == +:: +mure: untyped unitary virtual :: -++ mute :: untyped virtual - |= taq/_=>(~ ^?(|.(**))) +++ mure + |= tap=(trap) + ^- (unit) + =/ ton (mink [tap %9 2 %0 1] |=((pair) ``.*(~ [%12 1+p 1+q]))) + ?.(?=(%0 -.ton) ~ `product.ton) +:: +mute: untyped virtual +:: +++ mute + |= tap=(trap) ^- (each * (list tank)) - =/ ton (mock [taq %9 2 %0 1] |=((pair) ``.*(~ [%12 1+p 1+q]))) + =/ ton (mock [tap %9 2 %0 1] |=((pair) ``.*(~ [%12 1+p 1+q]))) ?- -.ton - $0 [%& p.ton] - $1 [%| (turn p.ton |=(a/* (smyt (path a))))] - $2 [%| p.ton] + %0 [%& p.ton] + :: + %1 =/ sof=(unit path) ((soft path) p.ton) + [%| ?~(sof leaf+"mute.hunk" (smyt u.sof)) ~] + :: + %2 [%| p.ton] == :: +slum: slam a gate on a sample using raw nock, untyped :: @@ -6571,15 +6682,11 @@ |= [gat=* sam=*] ^- * .*(gat [%9 2 %10 [6 %1 sam] %0 1]) +:: +soft: virtual clam :: -++ soft :: maybe remold - |* han/$-(* *) - |= fud/* ^- (unit han) - =+ result=(mule |.((han fud))) - ?- -.result - %| ~ - %& [~ p.result] - == +++ soft + |* han=$-(* *) + |=(fud=* (mole |.((han fud)))) :: :::: 4o: molds and mold builders :: @@ -9766,18 +9873,7 @@ ^- (pair type type) (~(mull et hyp rig) gol dox) :: - ++ felt - ~/ %felt - |= lap/opal - ^- type - ?- -.lap - %& p.lap - %| %- fork - %+ turn ~(tap in q.lap) - |= [a=type *] - ?> ?=([%core *] a) - [%core q.q.a q.a] - == + ++ felt !! :: :: ++ feel :: detect existence |= rot/(list wing) @@ -9826,7 +9922,12 @@ == :: %& - =. sut (felt q.p.mor) + =. sut + =* lap q.p.mor + ?- -.lap + %& p.lap + %| (fork (turn ~(tap in q.lap) head)) + == => :_ + :* axe=`axis`1 lon=p.p.mor diff --git a/pkg/arvo/sys/vane/ames.hoon b/pkg/arvo/sys/vane/ames.hoon index 328c10852..0ebe2d09c 100644 --- a/pkg/arvo/sys/vane/ames.hoon +++ b/pkg/arvo/sys/vane/ames.hoon @@ -1046,6 +1046,9 @@ ++ on-hear-open |= [=lane =packet ok=?] ^+ event-core + :: assert the comet can't pretend to be a moon or other address + :: + ?> ?=(%pawn (clan:title sndr.packet)) :: if we already know .sndr, ignore duplicate attestation :: =/ ship-state (~(get by peers.ames-state) sndr.packet) diff --git a/pkg/arvo/sys/vane/dill.hoon b/pkg/arvo/sys/vane/dill.hoon index 04f4d0353..58b2b5004 100644 --- a/pkg/arvo/sys/vane/dill.hoon +++ b/pkg/arvo/sys/vane/dill.hoon @@ -8,9 +8,10 @@ -- :: => |% :: console protocol ++ axle :: - $: %3 :: + $: %4 ::TODO replace ducts with session ids :: hey/(unit duct) :: default duct dug/(map duct axon) :: conversations + eye=(jug duct duct) :: outside listeners lit/? 
:: boot in lite mode $= veb :: vane verbosities $~ (~(put by *(map @tas log-level)) %hole %soft) :: quiet packet crashes @@ -21,7 +22,7 @@ tem/(unit (list dill-belt)) :: pending, reverse wid/_80 :: terminal width pos/@ud :: cursor position - see/(list @c) :: current line + see=$%([%lin (list @c)] [%klr stub]) :: current line == :: +$ log-level ?(%hush %soft %loud) :: none, line, full -- => :: @@ -151,7 +152,11 @@ :: ++ done :: return gift |= git/gift:able - +>(moz :_(moz [hen %give git])) + =- +>.$(moz (weld - moz)) + %+ turn + :- hen + ~(tap in (~(get ju eye.all) hen)) + |=(=duct [duct %give git]) :: ++ deal :: pass to %gall |= [=wire =deal:gall] @@ -161,7 +166,7 @@ |= [=wire =note] +>(moz :_(moz [hen %pass wire note])) :: - ++ from :: receive belt + ++ from :: receive blit |= bit/dill-blit ^+ +> ?: ?=($mor -.bit) @@ -172,86 +177,33 @@ %+ done %blit :~ [%lin p.bit] [%mor ~] - [%lin see] + see [%hop pos] == ?: ?=($klr -.bit) %+ done %blit - :~ [%lin (cvrt:ansi p.bit)] + :~ [%klr p.bit] [%mor ~] - [%lin see] + see [%hop pos] == ?: ?=($pro -.bit) - (done(see p.bit) %blit [[%lin p.bit] [%hop pos] ~]) + =. see [%lin p.bit] + (done %blit [see [%hop pos] ~]) ?: ?=($pom -.bit) - =. see (cvrt:ansi p.bit) - (done %blit [[%lin see] [%hop pos] ~]) + ::NOTE treat "styled prompt" without style as plain prompt, + :: to allow rendering by older runtimes + ::TODO remove me once v0.10.9+ has high/guaranteed adoption + :: + ?: (levy p.bit (cork head |*(s=stye =(*stye s)))) + $(bit [%pro (zing (turn p.bit tail))]) + =. see [%klr p.bit] + (done %blit [see [%hop pos] ~]) ?: ?=($hop -.bit) (done(pos p.bit) %blit [bit ~]) ?: ?=($qit -.bit) (dump %logo ~) (done %blit [bit ~]) - :: - ++ ansi - |% - ++ cvrt :: stub to (list @c) - |= a/stub :: with ANSI codes - ^- (list @c) - %- zing %+ turn a - |= a/(pair stye (list @c)) - ^- (list @c) - ;: weld - ?: =(0 ~(wyt in p.p.a)) ~ - `(list @c)`(zing (turn ~(tap in p.p.a) ef)) - (bg p.q.p.a) - (fg q.q.p.a) - q.a - ?~(p.p.a ~ (ef ~)) - (bg ~) - (fg ~) - == - :: - ++ ef |=(a/^deco (scap (deco a))) :: ANSI effect - :: - ++ fg |=(a/^tint (scap (tint a))) :: ANSI foreground - :: - ++ bg :: ANSI background - |= a/^tint - %- scap - =>((tint a) [+(p) q]) :: (add 10 fg) - :: - ++ scap :: ANSI escape seq - |= a/$@(@ (pair @ @)) - %- (list @c) - :+ 27 '[' :: "\033[{a}m" - ?@(a :~(a 'm') :~(p.a q.a 'm')) - :: - ++ deco :: ANSI effects - |= a/^deco ^- @ - ?- a - ~ '0' - $br '1' - $un '4' - $bl '5' - == - :: - ++ tint :: ANSI colors (fg) - |= a/^tint - ^- (pair @ @) - :- '3' - ?- a - $k '0' - $r '1' - $g '2' - $y '3' - $b '4' - $m '5' - $c '6' - $w '7' - ~ '9' - == - -- :: XX move :: ++ sein @@ -398,7 +350,7 @@ =* duc (need hey.all) =/ app %hood =/ see (tuba "") - =/ zon=axon [app input=[~ ~] width=80 cursor=(lent see) see] + =/ zon=axon [app input=[~ ~] width=80 cursor=(lent see) lin+see] :: =^ moz all abet:(~(into as duc zon) ~) [moz ..^$] @@ -422,7 +374,29 @@ =. veb.all (~(put by veb.all) tag.task level.task) [~ ..^$] :: + ?: ?=(%view -.task) + :: crash on viewing non-existent session + :: + ~| [%no-session session.task] + ?> =(~ session.task) + =/ session (need hey.all) + =/ =axon (~(got by dug.all) session) + :: register the viewer and send them the prompt line + :: + :- [hen %give %blit [see.axon]~]~ + ..^$(eye.all (~(put ju eye.all) session hen)) + :: + ?: ?=(%flee -.task) + :- ~ + ~| [%no-session session.task] + ?> =(~ session.task) + =/ session (need hey.all) + ..^$(eye.all (~(del ju eye.all) session hen)) + :: =/ nus (ax hen) + =? 
nus &(?=(~ nus) ?=(^ hey.all)) + ::TODO allow specifying target session in task + (ax u.hey.all) ?~ nus :: :hen is an unrecognized duct :: could be before %boot (or %boot failed) @@ -441,7 +415,7 @@ ++ axle-1 $: $1 hey/(unit duct) - dug/(map duct axon) + dug/(map duct axon-3) lit/? $= hef $: a/(unit mass) @@ -457,10 +431,11 @@ $~ (~(put by *(map @tas log-level)) %hole %soft) (map @tas log-level) == + :: ++ axle-2 $: %2 hey/(unit duct) - dug/(map duct axon) + dug/(map duct axon-3) lit/? dog/_| $= hef @@ -478,29 +453,68 @@ (map @tas log-level) == :: - ++ axle-any - $%(axle-1 axle-2 axle) + +$ axle-3 + $: %3 + hey=(unit duct) + dug=(map duct axon-3) + lit=? + $= veb + $~ (~(put by *(map @tas log-level)) %hole %soft) + (map @tas log-level) + == + +$ axon-3 + $: ram=term + tem=(unit (list dill-belt)) + wid=_80 + pos=@ud + see=(list @c) + == + :: + +$ axle-any + $%(axle-1 axle-2 axle-3 axle) -- :: |= old=axle-any ?- -.old %1 $(old [%2 [hey dug lit dog=& hef veb]:old]) %2 $(old [%3 [hey dug lit veb]:old]) - %3 ..^$(all old) + %3 =- $(old [%4 hey.old - ~ lit.old veb.old]) + (~(run by dug.old) |=(a=axon-3 a(see lin+see.a))) + %4 ..^$(all old) == :: ++ scry |= {fur/(unit (set monk)) ren/@tas why/shop syd/desk lot/coin tyl/path} ^- (unit (unit cage)) - ?. ?=(%& -.why) ~ - =* his p.why + ::TODO don't special-case whey scry + :: ?: &(=(ren %$) =(tyl /whey)) =/ maz=(list mass) :~ hey+&+hey.all dug+&+dug.all == ``mass+!>(maz) - [~ ~] + :: only respond for the local identity, %$ desk, current timestamp + :: + ?. ?& =(&+our why) + =([%$ %da now] lot) + =(%$ syd) + == + ~ + :: /dx/sessions//line blit current line (prompt) of default session + :: /dx/sessions//cursor @ud current cursor position of default session + ::TODO support asking for specific sessions once session ids are real + :: + ?. ?=(%x ren) ~ + ?+ tyl ~ + [%sessions %$ *] + ?~ hey.all [~ ~] + ?~ session=(~(get by dug.all) u.hey.all) [~ ~] + ?+ t.t.tyl ~ + [%line ~] ``blit+!>(`blit`see.u.session) + [%cursor ~] ``atom+!>(pos.u.session) + == + == :: ++ stay all :: diff --git a/pkg/arvo/sys/vane/gall.hoon b/pkg/arvo/sys/vane/gall.hoon index efd678317..aecfdcc5c 100644 --- a/pkg/arvo/sys/vane/gall.hoon +++ b/pkg/arvo/sys/vane/gall.hoon @@ -1553,7 +1553,7 @@ =/ res (mock [run %9 2 %0 1] (sloy ski)) ?- -.res %0 [%& !<(step:agent [-:!>(*step:agent) p.res])] - %1 [%| (turn p.res |=(a=* (smyt (path a))))] + %1 [%| (smyt ;;(path p.res)) ~] %2 [%| p.res] == :: +ap-mule-peek: same as +ap-mule but for (unit (unit cage)) @@ -1564,7 +1564,7 @@ =/ res (mock [run %9 2 %0 1] (sloy ski)) ?- -.res %0 [%& !<((unit (unit cage)) [-:!>(*(unit (unit cage))) p.res])] - %1 [%| (turn p.res |=(a=* (smyt (path a))))] + %1 [%| (smyt ;;(path p.res)) ~] %2 [%| p.res] == :: +ap-ingest: call agent arm diff --git a/pkg/arvo/sys/vane/jael.hoon b/pkg/arvo/sys/vane/jael.hoon index a4bd366f9..401eb41ab 100644 --- a/pkg/arvo/sys/vane/jael.hoon +++ b/pkg/arvo/sys/vane/jael.hoon @@ -1083,11 +1083,7 @@ =/ who (slaw %p i.tyl) ?~ who [~ ~] =/ sec (~(got by jaw.own.pki.lex) lyf.own.pki.lex) - =/ cub (nol:nu:crub:crypto sec) - :: XX use pac:ex:cub? - :: - =/ sal (add %pass step.own.pki.lex) - ``[%noun !>((end 6 1 (shaf sal (shax sec:ex:cub))))] + ``[%noun !>((end 6 1 (shaf %pass (shax sec))))] :: %life ?. ?=([@ ~] tyl) [~ ~] diff --git a/pkg/arvo/sys/zuse.hoon b/pkg/arvo/sys/zuse.hoon index 095fc5abf..9662e1c93 100644 --- a/pkg/arvo/sys/zuse.hoon +++ b/pkg/arvo/sys/zuse.hoon @@ -1120,6 +1120,7 @@ {$boot lit/? 
p/*} :: weird %dill boot {$crop p/@ud} :: trim kernel state $>(%crud vane-task) :: error with trace + [%flee session=~] :: unwatch session {$flog p/flog} :: wrapped error {$flow p/@tas q/(list gill:gall)} :: terminal config {$hail ~} :: terminal refresh @@ -1134,6 +1135,7 @@ {$talk p/tank} :: {$text p/tape} :: {$veer p/@ta q/path r/@t} :: install vane + [%view session=~] :: watch session blits $>(%trim vane-task) :: trim state $>(%vega vane-task) :: report upgrade {$verb ~} :: verbose mode @@ -1157,6 +1159,7 @@ $% {$bel ~} :: make a noise {$clr ~} :: clear the screen {$hop p/@ud} :: set cursor position + [%klr p=stub] :: set styled line {$lin p/(list @c)} :: set current line {$mor ~} :: newline {$sag p/path q/*} :: save to jamfile @@ -5362,28 +5365,30 @@ :: :::: ++ format ^? |% - :: :: ++to-wain:format - ++ to-wain :: atom to line list - ~% %lore ..is ~ - |= lub/@ - =| tez/(list @t) - |- ^+ tez - =+ ^= wor - =+ [meg=0 i=0] - |- ^- {meg/@ i/@ end/@f} - =+ gam=(cut 3 [i 1] lub) - ?: =(0 gam) - [meg i %.y] - ?: =(10 gam) - [meg i %.n] - $(meg (cat 3 meg gam), i +(i)) - ?: end.wor - (flop ^+(tez [meg.wor tez])) - ?: =(0 lub) (flop tez) - $(lub (rsh 3 +(i.wor) lub), tez [meg.wor tez]) + :: 0 ending a line (invalid @t) is not preserved :: ++to-wain:format + ++ to-wain :: cord to line list + ~% %leer ..is ~ + |= txt=cord + ^- wain + =/ len=@ (met 3 txt) + =/ cut =+(cut -(a 3, c 1, d txt)) + =/ sub sub + =| [i=@ out=wain] + |- ^+ out + =+ |- ^- j=@ + ?: ?| =(i len) + =(10 (cut(b i))) + == + i + $(i +(i)) + =. out :_ out + (cut(b i, c (sub j i))) + ?: =(j len) + (flop out) + $(i +(j)) :: :: ++of-wain:format - ++ of-wain :: line list to atom - |= tez/(list @t) + ++ of-wain :: line list to cord + |= tez=wain ^- cord (rap 3 (join '\0a' tez)) :: :: ++of-wall:format ++ of-wall :: line list to tape @@ -9179,6 +9184,10 @@ |- ^- seed:able:jael =/ cub=acru:ames (pit:nu:crub:crypto 512 eny) =/ who=ship `@`fig:ex:cub + :: disallow 64-bit or smaller addresses + :: + ?. ?=(%pawn (clan:title who)) + $(eny +(eny)) ?: (~(has in stars) (^sein:title who)) [who 1 sec:ex:cub ~] $(eny +(eny)) diff --git a/pkg/arvo/ted/graph/create.hoon b/pkg/arvo/ted/graph/create.hoon index e0938fbc6..e0c544818 100644 --- a/pkg/arvo/ted/graph/create.hoon +++ b/pkg/arvo/ted/graph/create.hoon @@ -1,4 +1,9 @@ -/- spider, graph=graph-store, *metadata-store, *group, group-store +/- spider, + graph=graph-store, + *metadata-store, + *group, + group-store, + inv=invite-store /+ strandio, resource, graph-view => |% @@ -27,22 +32,25 @@ =+ !<([=action:graph-view ~] arg) ?> ?=(%create -.action) ;< =bowl:spider bind:m get-bowl:strandio +:: :: Add graph to graph-store :: ?. 
=(our.bowl entity.rid.action) (strand-fail:strandio %bad-request ~) =/ =update:graph - [%0 now.bowl %add-graph rid.action *graph:graph mark.action] + [%0 now.bowl %add-graph rid.action *graph:graph mark.action %.n] ;< ~ bind:m (poke-our %graph-store graph-update+!>(update)) ;< ~ bind:m (poke-our %graph-push-hook %push-hook-action !>([%add rid.action])) +:: :: Add group, if graph is unmanaged :: ;< group=resource bind:m (handle-group rid.action associated.action) =/ group-path=path (en-path:resource group) +:: :: Setup metadata :: =/ =metadata @@ -53,9 +61,30 @@ creator our.bowl module module.action == -=/ act=metadata-action +=/ =metadata-action [%add group-path graph+(en-path:resource rid.action) metadata] -;< ~ bind:m (poke-our %metadata-hook %metadata-action !>(act)) +;< ~ bind:m + (poke-our %metadata-hook %metadata-action !>(metadata-action)) ;< ~ bind:m (poke-our %metadata-hook %metadata-hook-action !>([%add-owned group-path])) -(pure:m !>(~)) +:: +:: Send invites +:: +?: ?=(%group -.associated.action) + (pure:m !>(~)) +?- -.policy.associated.action + %open (pure:m !>(~)) + %invite + =/ inv-action=action:inv + :^ %invites %graph (shaf %graph-uid eny.bowl) + ^- multi-invite:inv + :* our.bowl + %graph-push-hook + rid.action + pending.policy.associated.action + description.action + == + ;< ~ bind:m + (poke-our %invite-hook %invite-action !>(inv-action)) + (pure:m !>(~)) +== diff --git a/pkg/arvo/ted/graph/delete.hoon b/pkg/arvo/ted/graph/delete.hoon index 7e95d492b..1ffcdd97d 100644 --- a/pkg/arvo/ted/graph/delete.hoon +++ b/pkg/arvo/ted/graph/delete.hoon @@ -41,6 +41,15 @@ (poke-our %graph-store %graph-update !>([%0 now.bowl %remove-graph rid])) ;< ~ bind:m (poke-our %graph-push-hook %push-hook-action !>([%remove rid])) + ;< ~ bind:m + %+ poke-our %metadata-hook + metadata-hook-action+!>([%remove (en-path:resource rid)]) + ;< ~ bind:m + %+ poke-our %metadata-store + :- %metadata-action + !> :+ %remove + (en-path:resource rid) + [%graph (en-path:resource rid)] (pure:m ~) -- :: diff --git a/pkg/arvo/ted/graph/groupify.hoon b/pkg/arvo/ted/graph/groupify.hoon index 5a3f618fd..58a9ec4a9 100644 --- a/pkg/arvo/ted/graph/groupify.hoon +++ b/pkg/arvo/ted/graph/groupify.hoon @@ -67,5 +67,5 @@ %+ poke-our %metadata-store metadata-action+!>([%remove app-path graph+app-path]) ;< ~ bind:m - (poke-our %group-store %group-update !>([%remove-group rid.action])) + (poke-our %group-store %group-update !>([%remove-group rid.action ~])) (pure:m !>(~)) diff --git a/pkg/arvo/ted/invite/accepted-graph.hoon b/pkg/arvo/ted/invite/accepted-graph.hoon new file mode 100644 index 000000000..bb1227082 --- /dev/null +++ b/pkg/arvo/ted/invite/accepted-graph.hoon @@ -0,0 +1,28 @@ +/- spider, inv=invite-store, graph-view +/+ strandio +:: +=* strand strand:spider +=* fail strand-fail:strand +=* poke-our poke-our:strandio +=* flog-text flog-text:strandio +:: +^- thread:spider +|= arg=vase +=/ m (strand ,vase) +^- form:m +=+ !<([=update:inv ~] arg) +?. 
?=(%accepted -.update) + (pure:m !>(~)) +;< =bowl:spider bind:m get-bowl:strandio +=* invite invite.update +?: =(our.bowl entity.resource.invite) + :: do not crash because that will kill the invitatory subscription + (pure:m !>(~)) +;< ~ bind:m + %+ poke-our %spider + =- spider-start+!>([`tid.bowl ~ %graph-join -]) + %+ slop + !> ^- action:graph-view + [%join resource.invite ship.invite] + !>(~) +(pure:m !>(~)) diff --git a/pkg/arvo/ted/ph/lib-hooks.hoon b/pkg/arvo/ted/ph/lib-hooks.hoon new file mode 100644 index 000000000..1f2aa5966 --- /dev/null +++ b/pkg/arvo/ted/ph/lib-hooks.hoon @@ -0,0 +1,65 @@ +/- spider +/+ *ph-io, *strandio +=> +|% +++ strand strand:spider +++ start-agents + |= =ship + =/ m (strand ,~) + ;< ~ bind:m (dojo ship "|start %graph-store") + ;< ~ bind:m (dojo ship "|start %graph-push-hook") + ;< ~ bind:m (dojo ship "|start %graph-pull-hook") + ;< ~ bind:m (dojo ship "|start %group-store") + ;< ~ bind:m (dojo ship "|start %group-push-hook") + ;< ~ bind:m (dojo ship "|start %group-pull-hook") + ;< ~ bind:m (dojo ship "|start %metadata-store") + ;< ~ bind:m (dojo ship "|start %metadata-hook") + ;< ~ bind:m (sleep `@dr`300) + (pure:m ~) +:: +++ make-link + |= [title=@t url=@t] + =/ m (strand ,~) + ;< ~ bind:m (dojo ~bud ":graph-store|add-post [~bud %test] ~[[%text '{(trip title)}'] [%url '{(trip url)}']]") + (pure:m ~) +-- + +^- thread:spider +|= vase +=/ m (strand ,vase) +;< az=tid:spider + bind:m start-azimuth +;< ~ bind:m (spawn az ~bud) +;< ~ bind:m (spawn az ~dev) +;< ~ bind:m (real-ship az ~bud) +;< ~ bind:m (real-ship az ~dev) +;< ~ bind:m (start-agents ~bud) +;< ~ bind:m (start-agents ~dev) +;< ~ bind:m (send-hi ~bud ~dev) +;< ~ bind:m (dojo ~bud "-graph-create [%create [~bud %test] 'test' '' `%graph-validator-link [%policy [%open ~ ~]] 'link']") +;< ~ bind:m (sleep ~s5) +;< ~ bind:m (dojo ~dev "-graph-join [%join [~bud %test] ~bud]") +;< ~ bind:m (sleep ~s5) +;< ~ bind:m (send-hi ~bud ~dev) +;< ~ bind:m (poke-our %aqua noun+!>([%pause-events ~[~dev]])) +;< ~ bind:m (make-link 'one' 'one') +;< ~ bind:m (make-link 'two' 'one') +;< ~ bind:m (make-link 'thre' 'one') +;< ~ bind:m (make-link 'four' 'one') +;< ~ bind:m (make-link 'five' 'one') +;< ~ bind:m (make-link 'six' 'one') +;< ~ bind:m (make-link 'seven' 'one') +;< ~ bind:m (sleep ~s40) +:: five unacked events is sufficent to cause a clog, and by extension a +:: %kick +;< ~ bind:m (poke-our %aqua noun+!>([%unpause-events ~[~dev]])) +;< ~ bind:m (sleep ~s10) +;< ~ bind:m (make-link 'eight' 'one') +;< ~ bind:m (make-link 'nine' 'one') +;< ~ bind:m (sleep ~s10) +;< ~ bind:m (dojo ~dev ":graph-pull-hook +dbug %bowl") +;< ~ bind:m (dojo ~dev ":graph-store +dbug") +;< ~ bind:m (dojo ~bud ":graph-push-hook +dbug %bowl") +;< ~ bind:m (dojo ~bud ":graph-store +dbug") +;< ~ bind:m end-azimuth +(pure:m *vase) diff --git a/pkg/arvo/tests/sys/hoon/list.hoon b/pkg/arvo/tests/sys/hoon/list.hoon index cdff49daf..720914171 100644 --- a/pkg/arvo/tests/sys/hoon/list.hoon +++ b/pkg/arvo/tests/sys/hoon/list.hoon @@ -55,6 +55,11 @@ %+ expect-eq !> ~ !> (find `(list @)`~[6] l) + :: Checks for incomplete match + :: + %+ expect-eq + !> ~ + !> (find "bcd" "abc") == :: ++ test-list-flop diff --git a/pkg/arvo/tests/sys/hoon/map.hoon b/pkg/arvo/tests/sys/hoon/map.hoon index 9f9e15c3d..3ea30f6cc 100644 --- a/pkg/arvo/tests/sys/hoon/map.hoon +++ b/pkg/arvo/tests/sys/hoon/map.hoon @@ -123,7 +123,7 @@ :: :: and the ordering 1 < 2 < 3. :: - :: 1 should be in the left brach and 3 in the right one. 
+ :: 1 should be in the left branch and 3 in the right one. :: =/ balanced-a=(map @ @) [[2 2] [[1 1] ~ ~] [[3 3] ~ ~]] :: doesn't follow vertical ordering diff --git a/pkg/arvo/tests/sys/hoon/mock.hoon b/pkg/arvo/tests/sys/hoon/mock.hoon new file mode 100644 index 000000000..411ffa592 --- /dev/null +++ b/pkg/arvo/tests/sys/hoon/mock.hoon @@ -0,0 +1,32 @@ +/+ *test +|% +:: nock 6 should allow invalid formulas in unevaluated branches +:: +++ test-conditional-skips + ;: weld + %+ expect-eq + !> 43 + !> .*(~ [%6 [%1 1] 0 [%1 43]]) + :: + %- expect-fail + |. .*(~ [%6 [%1 0] 0 [%1 43]]) + :: + %+ expect-eq + !> 42 + !> .*(~ [%6 [%1 0] [%1 42] 0]) + :: + %- expect-fail + |. .*(~ [%6 [%1 1] [%1 42] 0]) + :: + %+ expect-eq + !> 42 + !> .*(~ [%6 [%1 0] [%1 42] %1 43]) + :: + %+ expect-eq + !> 43 + !> .*(~ [%6 [%1 1] [%1 42] %1 43]) + :: + %- expect-fail + |. .*(~ [%6 [%1 2] [%1 42] %1 43]) + == +-- diff --git a/pkg/arvo/tests/sys/hoon/set.hoon b/pkg/arvo/tests/sys/hoon/set.hoon index f0bfafc9b..77ce543d2 100644 --- a/pkg/arvo/tests/sys/hoon/set.hoon +++ b/pkg/arvo/tests/sys/hoon/set.hoon @@ -92,7 +92,7 @@ :: 1.923.673.882 :: :: and the ordering 1 < 2 < 3. - :: 1 should be in the left brach and 3 in the right one. + :: 1 should be in the left branch and 3 in the right one. :: =/ balanced-a=(set @) [2 [1 ~ ~] [3 ~ ~]] :: Doesn't follow vertical ordering diff --git a/pkg/ent/shell.nix b/pkg/ent/shell.nix new file mode 100644 index 000000000..0866cb96f --- /dev/null +++ b/pkg/ent/shell.nix @@ -0,0 +1,8 @@ +let + + pkgs = import ../../default.nix { }; + +in pkgs.shellFor { + name = "ent"; + packages = ps: [ ps.ent ]; +} diff --git a/pkg/ge-additions/shell.nix b/pkg/ge-additions/shell.nix new file mode 100644 index 000000000..4e07f72b5 --- /dev/null +++ b/pkg/ge-additions/shell.nix @@ -0,0 +1,8 @@ +let + + pkgs = import ../../default.nix { }; + +in pkgs.shellFor { + name = "ge-additions"; + packages = ps: [ ps.ge-additions ]; +} diff --git a/pkg/herb/.gitattributes b/pkg/herb/.gitattributes deleted file mode 100644 index fe423d56a..000000000 --- a/pkg/herb/.gitattributes +++ /dev/null @@ -1,2 +0,0 @@ -.gitattributes export-ignore -default.nix export-ignore diff --git a/pkg/herb/default.nix b/pkg/herb/default.nix deleted file mode 100644 index b1dd819a6..000000000 --- a/pkg/herb/default.nix +++ /dev/null @@ -1,39 +0,0 @@ -let - - rev = "61c3169a0e17d789c566d5b241bfe309ce4a6275"; - hash = "0qbycg7wkb71v20rchlkafrjfpbk2fnlvvbh3ai9pyfisci5wxvq"; - - nixpkgs = builtins.fetchTarball { - name = "nixpkgs-2019-01-15"; - url = "https://github.com/nixos/nixpkgs/archive/${rev}.tar.gz"; - sha256 = hash; - }; - -in - -{ pkgs ? import nixpkgs {} }: - -let - - pyenv = pkgs.python2.withPackages (py: [ py.requests ]); - pyexe = "${pyenv}/bin/python"; - -in - -pkgs.stdenv.mkDerivation rec { - name = "herb"; - buildInputs = [ pyenv ]; - unpackPhase = "true"; - installPhase = '' - mkdir -p $out/bin - - cp ${./herb} $out/bin/herb.py - - cat > $out/bin/herb < +#include +#include + +#include +#include +#include "natpmp.h" + +// Additional binding code in C to make this more convenient to call from +// Haskell. libnatpmp expects that code which uses it to select() on an +// internal socket, which we don't want to expose to the Haskell bindings user. +// +// This is mostly an adaptation of the code in the demo natpmpc.c to use the +// select() loop. 
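+//
+// For orientation only: a minimal usage sketch (not compiled into the
+// binding) of the call sequence this helper supports, using only functions
+// declared in the vendored natpmp.h/binding.h below; error handling and the
+// <stdio.h>/<arpa/inet.h> includes are elided.
+//
+//   natpmp_t     nat;
+//   natpmpresp_t resp;
+//   initnatpmp(&nat, 0, 0);                /* 0, 0 = autodetect the gateway */
+//   sendpublicaddressrequest(&nat);        /* queue a public-address query  */
+//   if (readNatResponseSynchronously(&nat, &resp) == 0)     /* 0 means OK   */
+//     printf("public ip: %s\n", inet_ntoa(resp.pnu.publicaddress.addr));
+//   closenatpmp(&nat);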
+int readNatResponseSynchronously(natpmp_t* natpmp, natpmpresp_t * response) +{ + fd_set fds; + struct timeval timeout; + int r; + int sav_errno; + + do { + FD_ZERO(&fds); + FD_SET(natpmp->s, &fds); + getnatpmprequesttimeout(natpmp, &timeout); + r = select(FD_SETSIZE, &fds, NULL, NULL, &timeout); + sav_errno = errno; + if(r<0) { + /* fprintf(stderr, "select(): errno=%d '%s'\n", */ + /* sav_errno, strerror(sav_errno)); */ + return 1; + } + r = readnatpmpresponseorretry(natpmp, response); + sav_errno = errno; + /* printf("readnatpmpresponseorretry returned %d (%s)\n", */ + /* r, r==0?"OK":(r==NATPMP_TRYAGAIN?"TRY AGAIN":"FAILED")); */ +/* if(r<0 && r!=NATPMP_TRYAGAIN) { */ +/* #ifdef ENABLE_STRNATPMPERR */ +/* fprintf(stderr, "readnatpmpresponseorretry() failed : %s\n", */ +/* strnatpmperr(r)); */ +/* #endif */ +/* fprintf(stderr, " errno=%d '%s'\n", */ +/* sav_errno, strerror(sav_errno)); */ +/* } */ + } while(r==NATPMP_TRYAGAIN); + + return r; +} diff --git a/pkg/hs/natpmp-static/cbits/binding.h b/pkg/hs/natpmp-static/cbits/binding.h new file mode 100644 index 000000000..f781c8a20 --- /dev/null +++ b/pkg/hs/natpmp-static/cbits/binding.h @@ -0,0 +1,8 @@ +#ifndef __NATPMP_BINDING_H__ +#define __NATPMP_BINDING_H__ + +#include "natpmp.h" + +int readNatResponseSynchronously(natpmp_t* natpmp, natpmpresp_t * response); + +#endif diff --git a/pkg/hs/natpmp-static/cbits/getgateway.c b/pkg/hs/natpmp-static/cbits/getgateway.c new file mode 100644 index 000000000..dfb9f3e21 --- /dev/null +++ b/pkg/hs/natpmp-static/cbits/getgateway.c @@ -0,0 +1,573 @@ +/* $Id: getgateway.c,v 1.25 2014/04/22 10:28:57 nanard Exp $ */ +/* libnatpmp + +Copyright (c) 2007-2014, Thomas BERNARD +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + + * Redistributions of source code must retain the above copyright notice, + this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + * The name of the author may not be used to endorse or promote products + derived from this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE +ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE +LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR +CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF +SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS +INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN +CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) +ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +POSSIBILITY OF SUCH DAMAGE. +*/ +#include +#include +#ifndef WIN32 +#include +#endif +#if !defined(_MSC_VER) +#include +#endif +/* There is no portable method to get the default route gateway. + * So below are four (or five ?) differents functions implementing this. + * Parsing /proc/net/route is for linux. + * sysctl is the way to access such informations on BSD systems. + * Many systems should provide route information through raw PF_ROUTE + * sockets. 
+ * In MS Windows, default gateway is found by looking into the registry + * or by using GetBestRoute(). */ +#ifdef __linux__ +#define USE_PROC_NET_ROUTE +#undef USE_SOCKET_ROUTE +#undef USE_SYSCTL_NET_ROUTE +#endif + +#if defined(BSD) || defined(__FreeBSD_kernel__) +#undef USE_PROC_NET_ROUTE +#define USE_SOCKET_ROUTE +#undef USE_SYSCTL_NET_ROUTE +#endif + +#ifdef __APPLE__ +#undef USE_PROC_NET_ROUTE +#undef USE_SOCKET_ROUTE +#define USE_SYSCTL_NET_ROUTE +#endif + +#if (defined(sun) && defined(__SVR4)) +#undef USE_PROC_NET_ROUTE +#define USE_SOCKET_ROUTE +#undef USE_SYSCTL_NET_ROUTE +#endif + +#ifdef WIN32 +#undef USE_PROC_NET_ROUTE +#undef USE_SOCKET_ROUTE +#undef USE_SYSCTL_NET_ROUTE +//#define USE_WIN32_CODE +#define USE_WIN32_CODE_2 +#endif + +#ifdef __CYGWIN__ +#undef USE_PROC_NET_ROUTE +#undef USE_SOCKET_ROUTE +#undef USE_SYSCTL_NET_ROUTE +#define USE_WIN32_CODE +#include +#include +#include +#include +#endif + +#ifdef __HAIKU__ +#include +#include +#include +#include +#define USE_HAIKU_CODE +#endif + +#ifdef USE_SYSCTL_NET_ROUTE +#include +#include +#include +#include +#endif +#ifdef USE_SOCKET_ROUTE +#include +#include +#include +#include +#include +#endif + +#ifdef USE_WIN32_CODE +#include +#include +#define MAX_KEY_LENGTH 255 +#define MAX_VALUE_LENGTH 16383 +#endif + +#ifdef USE_WIN32_CODE_2 +#include +#include +#endif + +#include "getgateway.h" + +#ifndef WIN32 +#define SUCCESS (0) +#define FAILED (-1) +#endif + +#ifdef USE_PROC_NET_ROUTE +/* + parse /proc/net/route which is as follow : + +Iface Destination Gateway Flags RefCnt Use Metric Mask MTU Window IRTT +wlan0 0001A8C0 00000000 0001 0 0 0 00FFFFFF 0 0 0 +eth0 0000FEA9 00000000 0001 0 0 0 0000FFFF 0 0 0 +wlan0 00000000 0101A8C0 0003 0 0 0 00000000 0 0 0 +eth0 00000000 00000000 0001 0 0 1000 00000000 0 0 0 + + One header line, and then one line by route by route table entry. +*/ +int getdefaultgateway(in_addr_t * addr) +{ + unsigned long d, g; + char buf[256]; + int line = 0; + FILE * f; + char * p; + f = fopen("/proc/net/route", "r"); + if(!f) + return FAILED; + while(fgets(buf, sizeof(buf), f)) { + if(line > 0) { /* skip the first line */ + p = buf; + /* skip the interface name */ + while(*p && !isspace(*p)) + p++; + while(*p && isspace(*p)) + p++; + if(sscanf(p, "%lx%lx", &d, &g)==2) { + if(d == 0 && g != 0) { /* default */ + *addr = g; + fclose(f); + return SUCCESS; + } + } + } + line++; + } + /* default route not found ! */ + if(f) + fclose(f); + return FAILED; +} +#endif /* #ifdef USE_PROC_NET_ROUTE */ + + +#ifdef USE_SYSCTL_NET_ROUTE + +#define ROUNDUP(a) \ + ((a) > 0 ? (1 + (((a) - 1) | (sizeof(long) - 1))) : sizeof(long)) + +int getdefaultgateway(in_addr_t * addr) +{ +#if 0 + /* net.route.0.inet.dump.0.0 ? 
*/ + int mib[] = {CTL_NET, PF_ROUTE, 0, AF_INET, + NET_RT_DUMP, 0, 0/*tableid*/}; +#endif + /* net.route.0.inet.flags.gateway */ + int mib[] = {CTL_NET, PF_ROUTE, 0, AF_INET, + NET_RT_FLAGS, RTF_GATEWAY}; + size_t l; + char * buf, * p; + struct rt_msghdr * rt; + struct sockaddr * sa; + struct sockaddr * sa_tab[RTAX_MAX]; + int i; + int r = FAILED; + if(sysctl(mib, sizeof(mib)/sizeof(int), 0, &l, 0, 0) < 0) { + return FAILED; + } + if(l>0) { + buf = malloc(l); + if(sysctl(mib, sizeof(mib)/sizeof(int), buf, &l, 0, 0) < 0) { + free(buf); + return FAILED; + } + for(p=buf; prtm_msglen) { + rt = (struct rt_msghdr *)p; + sa = (struct sockaddr *)(rt + 1); + for(i=0; irtm_addrs & (1 << i)) { + sa_tab[i] = sa; + sa = (struct sockaddr *)((char *)sa + ROUNDUP(sa->sa_len)); + } else { + sa_tab[i] = NULL; + } + } + if( ((rt->rtm_addrs & (RTA_DST|RTA_GATEWAY)) == (RTA_DST|RTA_GATEWAY)) + && sa_tab[RTAX_DST]->sa_family == AF_INET + && sa_tab[RTAX_GATEWAY]->sa_family == AF_INET) { + if(((struct sockaddr_in *)sa_tab[RTAX_DST])->sin_addr.s_addr == 0) { + *addr = ((struct sockaddr_in *)(sa_tab[RTAX_GATEWAY]))->sin_addr.s_addr; + r = SUCCESS; + } + } + } + free(buf); + } + return r; +} +#endif /* #ifdef USE_SYSCTL_NET_ROUTE */ + + +#ifdef USE_SOCKET_ROUTE +/* Thanks to Darren Kenny for this code */ +#define NEXTADDR(w, u) \ + if (rtm_addrs & (w)) {\ + l = sizeof(struct sockaddr); memmove(cp, &(u), l); cp += l;\ + } + +#define rtm m_rtmsg.m_rtm + +struct { + struct rt_msghdr m_rtm; + char m_space[512]; +} m_rtmsg; + +int getdefaultgateway(in_addr_t *addr) +{ + int s, seq, l, rtm_addrs, i; + pid_t pid; + struct sockaddr so_dst, so_mask; + char *cp = m_rtmsg.m_space; + struct sockaddr *gate = NULL, *sa; + struct rt_msghdr *msg_hdr; + + pid = getpid(); + seq = 0; + rtm_addrs = RTA_DST | RTA_NETMASK; + + memset(&so_dst, 0, sizeof(so_dst)); + memset(&so_mask, 0, sizeof(so_mask)); + memset(&rtm, 0, sizeof(struct rt_msghdr)); + + rtm.rtm_type = RTM_GET; + rtm.rtm_flags = RTF_UP | RTF_GATEWAY; + rtm.rtm_version = RTM_VERSION; + rtm.rtm_seq = ++seq; + rtm.rtm_addrs = rtm_addrs; + + so_dst.sa_family = AF_INET; + so_mask.sa_family = AF_INET; + + NEXTADDR(RTA_DST, so_dst); + NEXTADDR(RTA_NETMASK, so_mask); + + rtm.rtm_msglen = l = cp - (char *)&m_rtmsg; + + s = socket(PF_ROUTE, SOCK_RAW, 0); + + if (write(s, (char *)&m_rtmsg, l) < 0) { + close(s); + return FAILED; + } + + do { + l = read(s, (char *)&m_rtmsg, sizeof(m_rtmsg)); + } while (l > 0 && (rtm.rtm_seq != seq || rtm.rtm_pid != pid)); + + close(s); + + msg_hdr = &rtm; + + cp = ((char *)(msg_hdr + 1)); + if (msg_hdr->rtm_addrs) { + for (i = 1; i; i <<= 1) + if (i & msg_hdr->rtm_addrs) { + sa = (struct sockaddr *)cp; + if (i == RTA_GATEWAY ) + gate = sa; + + cp += sizeof(struct sockaddr); + } + } else { + return FAILED; + } + + + if (gate != NULL ) { + *addr = ((struct sockaddr_in *)gate)->sin_addr.s_addr; + return SUCCESS; + } else { + return FAILED; + } +} +#endif /* #ifdef USE_SOCKET_ROUTE */ + +#ifdef USE_WIN32_CODE +LIBSPEC int getdefaultgateway(in_addr_t * addr) +{ + HKEY networkCardsKey; + HKEY networkCardKey; + HKEY interfacesKey; + HKEY interfaceKey; + DWORD i = 0; + DWORD numSubKeys = 0; + TCHAR keyName[MAX_KEY_LENGTH]; + DWORD keyNameLength = MAX_KEY_LENGTH; + TCHAR keyValue[MAX_VALUE_LENGTH]; + DWORD keyValueLength = MAX_VALUE_LENGTH; + DWORD keyValueType = REG_SZ; + TCHAR gatewayValue[MAX_VALUE_LENGTH]; + DWORD gatewayValueLength = MAX_VALUE_LENGTH; + DWORD gatewayValueType = REG_MULTI_SZ; + int done = 0; + + //const char * networkCardsPath = 
"SOFTWARE\\Microsoft\\Windows NT\\CurrentVersion\\NetworkCards"; + //const char * interfacesPath = "SYSTEM\\CurrentControlSet\\Services\\Tcpip\\Parameters\\Interfaces"; +#ifdef UNICODE + LPCTSTR networkCardsPath = L"SOFTWARE\\Microsoft\\Windows NT\\CurrentVersion\\NetworkCards"; + LPCTSTR interfacesPath = L"SYSTEM\\CurrentControlSet\\Services\\Tcpip\\Parameters\\Interfaces"; +#define STR_SERVICENAME L"ServiceName" +#define STR_DHCPDEFAULTGATEWAY L"DhcpDefaultGateway" +#define STR_DEFAULTGATEWAY L"DefaultGateway" +#else + LPCTSTR networkCardsPath = "SOFTWARE\\Microsoft\\Windows NT\\CurrentVersion\\NetworkCards"; + LPCTSTR interfacesPath = "SYSTEM\\CurrentControlSet\\Services\\Tcpip\\Parameters\\Interfaces"; +#define STR_SERVICENAME "ServiceName" +#define STR_DHCPDEFAULTGATEWAY "DhcpDefaultGateway" +#define STR_DEFAULTGATEWAY "DefaultGateway" +#endif + // The windows registry lists its primary network devices in the following location: + // HKEY_LOCAL_MACHINE\SOFTWARE\Microsoft\Windows NT\CurrentVersion\NetworkCards + // + // Each network device has its own subfolder, named with an index, with various properties: + // -NetworkCards + // -5 + // -Description = Broadcom 802.11n Network Adapter + // -ServiceName = {E35A72F8-5065-4097-8DFE-C7790774EE4D} + // -8 + // -Description = Marvell Yukon 88E8058 PCI-E Gigabit Ethernet Controller + // -ServiceName = {86226414-5545-4335-A9D1-5BD7120119AD} + // + // The above service name is the name of a subfolder within: + // HKEY_LOCAL_MACHINE\SYSTEM\CurrentControlSet\Services\Tcpip\Parameters\Interfaces + // + // There may be more subfolders in this interfaces path than listed in the network cards path above: + // -Interfaces + // -{3a539854-6a70-11db-887c-806e6f6e6963} + // -DhcpIPAddress = 0.0.0.0 + // -[more] + // -{E35A72F8-5065-4097-8DFE-C7790774EE4D} + // -DhcpIPAddress = 10.0.1.4 + // -DhcpDefaultGateway = 10.0.1.1 + // -[more] + // -{86226414-5545-4335-A9D1-5BD7120119AD} + // -DhcpIpAddress = 10.0.1.5 + // -DhcpDefaultGateay = 10.0.1.1 + // -[more] + // + // In order to extract this information, we enumerate each network card, and extract the ServiceName value. + // This is then used to open the interface subfolder, and attempt to extract a DhcpDefaultGateway value. + // Once one is found, we're done. + // + // It may be possible to simply enumerate the interface folders until we find one with a DhcpDefaultGateway value. + // However, the technique used is the technique most cited on the web, and we assume it to be more correct. 
+ + if(ERROR_SUCCESS != RegOpenKeyEx(HKEY_LOCAL_MACHINE, // Open registry key or predifined key + networkCardsPath, // Name of registry subkey to open + 0, // Reserved - must be zero + KEY_READ, // Mask - desired access rights + &networkCardsKey)) // Pointer to output key + { + // Unable to open network cards keys + return -1; + } + + if(ERROR_SUCCESS != RegOpenKeyEx(HKEY_LOCAL_MACHINE, // Open registry key or predefined key + interfacesPath, // Name of registry subkey to open + 0, // Reserved - must be zero + KEY_READ, // Mask - desired access rights + &interfacesKey)) // Pointer to output key + { + // Unable to open interfaces key + RegCloseKey(networkCardsKey); + return -1; + } + + // Figure out how many subfolders are within the NetworkCards folder + RegQueryInfoKey(networkCardsKey, NULL, NULL, NULL, &numSubKeys, NULL, NULL, NULL, NULL, NULL, NULL, NULL); + + //printf( "Number of subkeys: %u\n", (unsigned int)numSubKeys); + + // Enumrate through each subfolder within the NetworkCards folder + for(i = 0; i < numSubKeys && !done; i++) + { + keyNameLength = MAX_KEY_LENGTH; + if(ERROR_SUCCESS == RegEnumKeyEx(networkCardsKey, // Open registry key + i, // Index of subkey to retrieve + keyName, // Buffer that receives the name of the subkey + &keyNameLength, // Variable that receives the size of the above buffer + NULL, // Reserved - must be NULL + NULL, // Buffer that receives the class string + NULL, // Variable that receives the size of the above buffer + NULL)) // Variable that receives the last write time of subkey + { + if(RegOpenKeyEx(networkCardsKey, keyName, 0, KEY_READ, &networkCardKey) == ERROR_SUCCESS) + { + keyValueLength = MAX_VALUE_LENGTH; + if(ERROR_SUCCESS == RegQueryValueEx(networkCardKey, // Open registry key + STR_SERVICENAME, // Name of key to query + NULL, // Reserved - must be NULL + &keyValueType, // Receives value type + (LPBYTE)keyValue, // Receives value + &keyValueLength)) // Receives value length in bytes + { +// printf("keyValue: %s\n", keyValue); + if(RegOpenKeyEx(interfacesKey, keyValue, 0, KEY_READ, &interfaceKey) == ERROR_SUCCESS) + { + gatewayValueLength = MAX_VALUE_LENGTH; + if(ERROR_SUCCESS == RegQueryValueEx(interfaceKey, // Open registry key + STR_DHCPDEFAULTGATEWAY, // Name of key to query + NULL, // Reserved - must be NULL + &gatewayValueType, // Receives value type + (LPBYTE)gatewayValue, // Receives value + &gatewayValueLength)) // Receives value length in bytes + { + // Check to make sure it's a string + if((gatewayValueType == REG_MULTI_SZ || gatewayValueType == REG_SZ) && (gatewayValueLength > 1)) + { + //printf("gatewayValue: %s\n", gatewayValue); + done = 1; + } + } + else if(ERROR_SUCCESS == RegQueryValueEx(interfaceKey, // Open registry key + STR_DEFAULTGATEWAY, // Name of key to query + NULL, // Reserved - must be NULL + &gatewayValueType, // Receives value type + (LPBYTE)gatewayValue,// Receives value + &gatewayValueLength)) // Receives value length in bytes + { + // Check to make sure it's a string + if((gatewayValueType == REG_MULTI_SZ || gatewayValueType == REG_SZ) && (gatewayValueLength > 1)) + { + //printf("gatewayValue: %s\n", gatewayValue); + done = 1; + } + } + RegCloseKey(interfaceKey); + } + } + RegCloseKey(networkCardKey); + } + } + } + + RegCloseKey(interfacesKey); + RegCloseKey(networkCardsKey); + + if(done) + { +#if UNICODE + char tmp[32]; + for(i = 0; i < 32; i++) { + tmp[i] = (char)gatewayValue[i]; + if(!tmp[i]) + break; + } + tmp[31] = '\0'; + *addr = inet_addr(tmp); +#else + *addr = inet_addr(gatewayValue); +#endif + return 
0; + } + + return -1; +} +#endif /* #ifdef USE_WIN32_CODE */ + +#ifdef USE_WIN32_CODE_2 +int getdefaultgateway(in_addr_t *addr) +{ + MIB_IPFORWARDROW ip_forward; + memset(&ip_forward, 0, sizeof(ip_forward)); + if(GetBestRoute(inet_addr("0.0.0.0"), 0, &ip_forward) != NO_ERROR) + return -1; + *addr = ip_forward.dwForwardNextHop; + return 0; +} +#endif /* #ifdef USE_WIN32_CODE_2 */ + +#ifdef USE_HAIKU_CODE +int getdefaultgateway(in_addr_t *addr) +{ + int fd, ret = -1; + struct ifconf config; + void *buffer = NULL; + struct ifreq *interface; + + if ((fd = socket(AF_INET, SOCK_DGRAM, 0)) < 0) { + return -1; + } + if (ioctl(fd, SIOCGRTSIZE, &config, sizeof(config)) != 0) { + goto fail; + } + if (config.ifc_value < 1) { + goto fail; /* No routes */ + } + if ((buffer = malloc(config.ifc_value)) == NULL) { + goto fail; + } + config.ifc_len = config.ifc_value; + config.ifc_buf = buffer; + if (ioctl(fd, SIOCGRTTABLE, &config, sizeof(config)) != 0) { + goto fail; + } + for (interface = buffer; + (uint8_t *)interface < (uint8_t *)buffer + config.ifc_len; ) { + struct route_entry route = interface->ifr_route; + int intfSize; + if (route.flags & (RTF_GATEWAY | RTF_DEFAULT)) { + *addr = ((struct sockaddr_in *)route.gateway)->sin_addr.s_addr; + ret = 0; + break; + } + intfSize = sizeof(route) + IF_NAMESIZE; + if (route.destination != NULL) { + intfSize += route.destination->sa_len; + } + if (route.mask != NULL) { + intfSize += route.mask->sa_len; + } + if (route.gateway != NULL) { + intfSize += route.gateway->sa_len; + } + interface = (struct ifreq *)((uint8_t *)interface + intfSize); + } +fail: + free(buffer); + close(fd); + return ret; +} +#endif /* #ifdef USE_HAIKU_CODE */ + +#if !defined(USE_PROC_NET_ROUTE) && !defined(USE_SOCKET_ROUTE) && !defined(USE_SYSCTL_NET_ROUTE) && !defined(USE_WIN32_CODE) && !defined(USE_WIN32_CODE_2) && !defined(USE_HAIKU_CODE) +int getdefaultgateway(in_addr_t * addr) +{ + return -1; +} +#endif diff --git a/pkg/hs/natpmp-static/cbits/getgateway.h b/pkg/hs/natpmp-static/cbits/getgateway.h new file mode 100644 index 000000000..5d3df7312 --- /dev/null +++ b/pkg/hs/natpmp-static/cbits/getgateway.h @@ -0,0 +1,49 @@ +/* $Id: getgateway.h,v 1.8 2014/04/22 09:15:40 nanard Exp $ */ +/* libnatpmp +Copyright (c) 2007-2014, Thomas BERNARD +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + + * Redistributions of source code must retain the above copyright notice, + this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + * The name of the author may not be used to endorse or promote products + derived from this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE +ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE +LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR +CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF +SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS +INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN +CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) +ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +POSSIBILITY OF SUCH DAMAGE. +*/ +#ifndef __GETGATEWAY_H__ +#define __GETGATEWAY_H__ + +#ifdef WIN32 +#if !defined(_MSC_VER) || _MSC_VER >= 1600 +#include +#else +typedef unsigned long uint32_t; +typedef unsigned short uint16_t; +#endif +#define in_addr_t uint32_t +#endif +/* #include "declspec.h" */ + +/* getdefaultgateway() : + * return value : + * 0 : success + * -1 : failure */ +/* LIBSPEC */int getdefaultgateway(in_addr_t * addr); + +#endif diff --git a/pkg/hs/natpmp-static/cbits/natpmp.c b/pkg/hs/natpmp-static/cbits/natpmp.c new file mode 100644 index 000000000..289c36445 --- /dev/null +++ b/pkg/hs/natpmp-static/cbits/natpmp.c @@ -0,0 +1,387 @@ +/* $Id: natpmp.c,v 1.20 2015/05/27 12:43:15 nanard Exp $ */ +/* libnatpmp +Copyright (c) 2007-2015, Thomas BERNARD +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + + * Redistributions of source code must retain the above copyright notice, + this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + * The name of the author may not be used to endorse or promote products + derived from this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE +ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE +LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR +CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF +SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS +INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN +CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) +ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +POSSIBILITY OF SUCH DAMAGE. 
+*/ +#ifdef __linux__ +#define _BSD_SOURCE 1 +#endif +#include +#include +#if !defined(_MSC_VER) +#include +#endif +#ifdef WIN32 +#include +#include +#include +#include +#define EWOULDBLOCK WSAEWOULDBLOCK +#define ECONNREFUSED WSAECONNREFUSED +#include "wingettimeofday.h" +#define gettimeofday natpmp_gettimeofday +#else +#include +#include +#include +#include +#include +#define closesocket close +#endif +#include "natpmp.h" +#include "getgateway.h" +#include + +LIBSPEC int initnatpmp(natpmp_t * p, int forcegw, in_addr_t forcedgw) +{ +#ifdef WIN32 + u_long ioctlArg = 1; +#else + int flags; +#endif + struct sockaddr_in addr; + if(!p) + return NATPMP_ERR_INVALIDARGS; + memset(p, 0, sizeof(natpmp_t)); + p->s = socket(PF_INET, SOCK_DGRAM, 0); + if(p->s < 0) + return NATPMP_ERR_SOCKETERROR; +#ifdef WIN32 + if(ioctlsocket(p->s, FIONBIO, &ioctlArg) == SOCKET_ERROR) + return NATPMP_ERR_FCNTLERROR; +#else + if((flags = fcntl(p->s, F_GETFL, 0)) < 0) + return NATPMP_ERR_FCNTLERROR; + if(fcntl(p->s, F_SETFL, flags | O_NONBLOCK) < 0) + return NATPMP_ERR_FCNTLERROR; +#endif + + if(forcegw) { + p->gateway = forcedgw; + } else { + if(getdefaultgateway(&(p->gateway)) < 0) + return NATPMP_ERR_CANNOTGETGATEWAY; + } + + memset(&addr, 0, sizeof(addr)); + addr.sin_family = AF_INET; + addr.sin_port = htons(NATPMP_PORT); + addr.sin_addr.s_addr = p->gateway; + if(connect(p->s, (struct sockaddr *)&addr, sizeof(addr)) < 0) + return NATPMP_ERR_CONNECTERR; + return 0; +} + +LIBSPEC int closenatpmp(natpmp_t * p) +{ + if(!p) + return NATPMP_ERR_INVALIDARGS; + if(closesocket(p->s) < 0) + return NATPMP_ERR_CLOSEERR; + return 0; +} + +int sendpendingrequest(natpmp_t * p) +{ + int r; +/* struct sockaddr_in addr;*/ + if(!p) + return NATPMP_ERR_INVALIDARGS; +/* memset(&addr, 0, sizeof(addr)); + addr.sin_family = AF_INET; + addr.sin_port = htons(NATPMP_PORT); + addr.sin_addr.s_addr = p->gateway; + r = (int)sendto(p->s, p->pending_request, p->pending_request_len, 0, + (struct sockaddr *)&addr, sizeof(addr));*/ + r = (int)send(p->s, (const char *)p->pending_request, p->pending_request_len, 0); + return (r<0) ? NATPMP_ERR_SENDERR : r; +} + +int sendnatpmprequest(natpmp_t * p) +{ + int n; + if(!p) + return NATPMP_ERR_INVALIDARGS; + /* TODO : check if no request is already pending */ + p->has_pending_request = 1; + p->try_number = 1; + n = sendpendingrequest(p); + gettimeofday(&p->retry_time, NULL); // check errors ! + p->retry_time.tv_usec += 250000; /* add 250ms */ + if(p->retry_time.tv_usec >= 1000000) { + p->retry_time.tv_usec -= 1000000; + p->retry_time.tv_sec++; + } + return n; +} + +LIBSPEC int getnatpmprequesttimeout(natpmp_t * p, struct timeval * timeout) +{ + struct timeval now; + if(!p || !timeout) + return NATPMP_ERR_INVALIDARGS; + if(!p->has_pending_request) + return NATPMP_ERR_NOPENDINGREQ; + if(gettimeofday(&now, NULL) < 0) + return NATPMP_ERR_GETTIMEOFDAYERR; + timeout->tv_sec = p->retry_time.tv_sec - now.tv_sec; + timeout->tv_usec = p->retry_time.tv_usec - now.tv_usec; + if(timeout->tv_usec < 0) { + timeout->tv_usec += 1000000; + timeout->tv_sec--; + } + return 0; +} + +LIBSPEC int sendpublicaddressrequest(natpmp_t * p) +{ + if(!p) + return NATPMP_ERR_INVALIDARGS; + //static const unsigned char request[] = { 0, 0 }; + p->pending_request[0] = 0; + p->pending_request[1] = 0; + p->pending_request_len = 2; + // TODO: return 0 instead of sizeof(request) ?? 
+ return sendnatpmprequest(p); +} + +LIBSPEC int sendnewportmappingrequest(natpmp_t * p, int protocol, + uint16_t privateport, uint16_t publicport, + uint32_t lifetime) +{ + if(!p || (protocol!=NATPMP_PROTOCOL_TCP && protocol!=NATPMP_PROTOCOL_UDP)) + return NATPMP_ERR_INVALIDARGS; + p->pending_request[0] = 0; + p->pending_request[1] = protocol; + p->pending_request[2] = 0; + p->pending_request[3] = 0; + /* break strict-aliasing rules : + *((uint16_t *)(p->pending_request + 4)) = htons(privateport); */ + p->pending_request[4] = (privateport >> 8) & 0xff; + p->pending_request[5] = privateport & 0xff; + /* break stric-aliasing rules : + *((uint16_t *)(p->pending_request + 6)) = htons(publicport); */ + p->pending_request[6] = (publicport >> 8) & 0xff; + p->pending_request[7] = publicport & 0xff; + /* break stric-aliasing rules : + *((uint32_t *)(p->pending_request + 8)) = htonl(lifetime); */ + p->pending_request[8] = (lifetime >> 24) & 0xff; + p->pending_request[9] = (lifetime >> 16) & 0xff; + p->pending_request[10] = (lifetime >> 8) & 0xff; + p->pending_request[11] = lifetime & 0xff; + p->pending_request_len = 12; + return sendnatpmprequest(p); +} + +LIBSPEC int readnatpmpresponse(natpmp_t * p, natpmpresp_t * response) +{ + unsigned char buf[16]; + struct sockaddr_in addr; + socklen_t addrlen = sizeof(addr); + int n; + if(!p) + return NATPMP_ERR_INVALIDARGS; + n = recvfrom(p->s, (char *)buf, sizeof(buf), 0, + (struct sockaddr *)&addr, &addrlen); + if(n<0) +#ifdef WIN32 + switch(WSAGetLastError()) { +#else + switch(errno) { +#endif + /*case EAGAIN:*/ + case EWOULDBLOCK: + n = NATPMP_TRYAGAIN; + break; + case ECONNREFUSED: + n = NATPMP_ERR_NOGATEWAYSUPPORT; + break; + default: + n = NATPMP_ERR_RECVFROM; + } + /* check that addr is correct (= gateway) */ + else if(addr.sin_addr.s_addr != p->gateway) + n = NATPMP_ERR_WRONGPACKETSOURCE; + else { + response->resultcode = ntohs(*((uint16_t *)(buf + 2))); + response->epoch = ntohl(*((uint32_t *)(buf + 4))); + if(buf[0] != 0) + n = NATPMP_ERR_UNSUPPORTEDVERSION; + else if(buf[1] < 128 || buf[1] > 130) + n = NATPMP_ERR_UNSUPPORTEDOPCODE; + else if(response->resultcode != 0) { + switch(response->resultcode) { + case 1: + n = NATPMP_ERR_UNSUPPORTEDVERSION; + break; + case 2: + n = NATPMP_ERR_NOTAUTHORIZED; + break; + case 3: + n = NATPMP_ERR_NETWORKFAILURE; + break; + case 4: + n = NATPMP_ERR_OUTOFRESOURCES; + break; + case 5: + n = NATPMP_ERR_UNSUPPORTEDOPCODE; + break; + default: + n = NATPMP_ERR_UNDEFINEDERROR; + } + } else { + response->type = buf[1] & 0x7f; + if(buf[1] == 128) + //response->publicaddress.addr = *((uint32_t *)(buf + 8)); + response->pnu.publicaddress.addr.s_addr = *((uint32_t *)(buf + 8)); + else { + response->pnu.newportmapping.privateport = ntohs(*((uint16_t *)(buf + 8))); + response->pnu.newportmapping.mappedpublicport = ntohs(*((uint16_t *)(buf + 10))); + response->pnu.newportmapping.lifetime = ntohl(*((uint32_t *)(buf + 12))); + } + n = 0; + } + } + return n; +} + +int readnatpmpresponseorretry(natpmp_t * p, natpmpresp_t * response) +{ + int n; + if(!p || !response) + return NATPMP_ERR_INVALIDARGS; + if(!p->has_pending_request) + return NATPMP_ERR_NOPENDINGREQ; + n = readnatpmpresponse(p, response); + if(n<0) { + if(n==NATPMP_TRYAGAIN) { + struct timeval now; + gettimeofday(&now, NULL); // check errors ! + if(timercmp(&now, &p->retry_time, >=)) { + int delay, r; + // NOTE: This used to be 9, and was changed for the haskell + // bindings to be 5. 
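+                // Rough worked total under the modified schedule, assuming
+                // the 250ms initial timeout set by sendnatpmprequest():
+                // waits of 250 + 250 + 500 + 750 + 1000 ms, i.e. about 2.75s
+                // in all before giving up with NATPMP_ERR_NOGATEWAYSUPPORT.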
+ if(p->try_number >= 5) { + return NATPMP_ERR_NOGATEWAYSUPPORT; + } + /*printf("retry! %d\n", p->try_number);*/ + + // NOTE: Changed how delays are calculated. Waiting up to four + // minutes for a packet that might never get a response is not + // a good user experience. Instead, retry up to 2 seconds. + // + // delay = 250 * (1<try_number); // ms + delay = 250 * p->try_number; // ms + /*for(i=0; itry_number; i++) + delay += delay;*/ + p->retry_time.tv_sec += (delay / 1000); + p->retry_time.tv_usec += (delay % 1000) * 1000; + if(p->retry_time.tv_usec >= 1000000) { + p->retry_time.tv_usec -= 1000000; + p->retry_time.tv_sec++; + } + p->try_number++; + r = sendpendingrequest(p); + if(r<0) + return r; + } + } + } else { + p->has_pending_request = 0; + } + return n; +} + +#ifdef ENABLE_STRNATPMPERR +LIBSPEC const char * strnatpmperr(int r) +{ + const char * s; + switch(r) { + case NATPMP_ERR_INVALIDARGS: + s = "invalid arguments"; + break; + case NATPMP_ERR_SOCKETERROR: + s = "socket() failed"; + break; + case NATPMP_ERR_CANNOTGETGATEWAY: + s = "cannot get default gateway ip address"; + break; + case NATPMP_ERR_CLOSEERR: +#ifdef WIN32 + s = "closesocket() failed"; +#else + s = "close() failed"; +#endif + break; + case NATPMP_ERR_RECVFROM: + s = "recvfrom() failed"; + break; + case NATPMP_ERR_NOPENDINGREQ: + s = "no pending request"; + break; + case NATPMP_ERR_NOGATEWAYSUPPORT: + s = "the gateway does not support nat-pmp"; + break; + case NATPMP_ERR_CONNECTERR: + s = "connect() failed"; + break; + case NATPMP_ERR_WRONGPACKETSOURCE: + s = "packet not received from the default gateway"; + break; + case NATPMP_ERR_SENDERR: + s = "send() failed"; + break; + case NATPMP_ERR_FCNTLERROR: + s = "fcntl() failed"; + break; + case NATPMP_ERR_GETTIMEOFDAYERR: + s = "gettimeofday() failed"; + break; + case NATPMP_ERR_UNSUPPORTEDVERSION: + s = "unsupported nat-pmp version error from server"; + break; + case NATPMP_ERR_UNSUPPORTEDOPCODE: + s = "unsupported nat-pmp opcode error from server"; + break; + case NATPMP_ERR_UNDEFINEDERROR: + s = "undefined nat-pmp server error"; + break; + case NATPMP_ERR_NOTAUTHORIZED: + s = "not authorized"; + break; + case NATPMP_ERR_NETWORKFAILURE: + s = "network failure"; + break; + case NATPMP_ERR_OUTOFRESOURCES: + s = "nat-pmp server out of resources"; + break; + default: + s = "Unknown libnatpmp error"; + } + return s; +} +#endif + diff --git a/pkg/hs/natpmp-static/cbits/natpmp.h b/pkg/hs/natpmp-static/cbits/natpmp.h new file mode 100644 index 000000000..3f721b7b9 --- /dev/null +++ b/pkg/hs/natpmp-static/cbits/natpmp.h @@ -0,0 +1,221 @@ +/* $Id: natpmp.h,v 1.20 2014/04/22 09:15:40 nanard Exp $ */ +/* libnatpmp +Copyright (c) 2007-2014, Thomas BERNARD +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + + * Redistributions of source code must retain the above copyright notice, + this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + * The name of the author may not be used to endorse or promote products + derived from this software without specific prior written permission. 
+ +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE +ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE +LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR +CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF +SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS +INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN +CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) +ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +POSSIBILITY OF SUCH DAMAGE. +*/ +#ifndef __NATPMP_H__ +#define __NATPMP_H__ + +/* NAT-PMP Port as defined by the NAT-PMP draft */ +#define NATPMP_PORT (5351) + +#define ENABLE_STRNATPMPERR + +#include +#if !defined(_MSC_VER) +#include +#endif /* !defined(_MSC_VER) */ + +#ifdef WIN32 +#include +#if !defined(_MSC_VER) || _MSC_VER >= 1600 +#include +#else /* !defined(_MSC_VER) || _MSC_VER >= 1600 */ +typedef unsigned long uint32_t; +typedef unsigned short uint16_t; +#endif /* !defined(_MSC_VER) || _MSC_VER >= 1600 */ +#define in_addr_t uint32_t +#include "declspec.h" +#else /* WIN32 */ +#define LIBSPEC +#include +#endif /* WIN32 */ + +/* causes problem when installing. Maybe should it be inlined ? */ +/* #include "declspec.h" */ + +typedef struct { + int s; /* socket */ + in_addr_t gateway; /* default gateway (IPv4) */ + int has_pending_request; + unsigned char pending_request[12]; + int pending_request_len; + int try_number; + struct timeval retry_time; +} natpmp_t; + +typedef struct { + uint16_t type; /* NATPMP_RESPTYPE_* */ + uint16_t resultcode; /* NAT-PMP response code */ + uint32_t epoch; /* Seconds since start of epoch */ + union { + struct { + //in_addr_t addr; + struct in_addr addr; + } publicaddress; + struct { + uint16_t privateport; + uint16_t mappedpublicport; + uint32_t lifetime; + } newportmapping; + } pnu; +} natpmpresp_t; + +/* possible values for type field of natpmpresp_t */ +#define NATPMP_RESPTYPE_PUBLICADDRESS (0) +#define NATPMP_RESPTYPE_UDPPORTMAPPING (1) +#define NATPMP_RESPTYPE_TCPPORTMAPPING (2) + +/* Values to pass to sendnewportmappingrequest() */ +#define NATPMP_PROTOCOL_UDP (1) +#define NATPMP_PROTOCOL_TCP (2) + +/* return values */ +/* NATPMP_ERR_INVALIDARGS : invalid arguments passed to the function */ +#define NATPMP_ERR_INVALIDARGS (-1) +/* NATPMP_ERR_SOCKETERROR : socket() failed. check errno for details */ +#define NATPMP_ERR_SOCKETERROR (-2) +/* NATPMP_ERR_CANNOTGETGATEWAY : can't get default gateway IP */ +#define NATPMP_ERR_CANNOTGETGATEWAY (-3) +/* NATPMP_ERR_CLOSEERR : close() failed. check errno for details */ +#define NATPMP_ERR_CLOSEERR (-4) +/* NATPMP_ERR_RECVFROM : recvfrom() failed. check errno for details */ +#define NATPMP_ERR_RECVFROM (-5) +/* NATPMP_ERR_NOPENDINGREQ : readnatpmpresponseorretry() called while + * no NAT-PMP request was pending */ +#define NATPMP_ERR_NOPENDINGREQ (-6) +/* NATPMP_ERR_NOGATEWAYSUPPORT : the gateway does not support NAT-PMP */ +#define NATPMP_ERR_NOGATEWAYSUPPORT (-7) +/* NATPMP_ERR_CONNECTERR : connect() failed. check errno for details */ +#define NATPMP_ERR_CONNECTERR (-8) +/* NATPMP_ERR_WRONGPACKETSOURCE : packet not received from the network gateway */ +#define NATPMP_ERR_WRONGPACKETSOURCE (-9) +/* NATPMP_ERR_SENDERR : send() failed. 
check errno for details */ +#define NATPMP_ERR_SENDERR (-10) +/* NATPMP_ERR_FCNTLERROR : fcntl() failed. check errno for details */ +#define NATPMP_ERR_FCNTLERROR (-11) +/* NATPMP_ERR_GETTIMEOFDAYERR : gettimeofday() failed. check errno for details */ +#define NATPMP_ERR_GETTIMEOFDAYERR (-12) + +/* */ +#define NATPMP_ERR_UNSUPPORTEDVERSION (-14) +#define NATPMP_ERR_UNSUPPORTEDOPCODE (-15) + +/* Errors from the server : */ +#define NATPMP_ERR_UNDEFINEDERROR (-49) +#define NATPMP_ERR_NOTAUTHORIZED (-51) +#define NATPMP_ERR_NETWORKFAILURE (-52) +#define NATPMP_ERR_OUTOFRESOURCES (-53) + +/* NATPMP_TRYAGAIN : no data available for the moment. try again later */ +#define NATPMP_TRYAGAIN (-100) + +#ifdef __cplusplus +extern "C" { +#endif + +/* initnatpmp() + * initialize a natpmp_t object + * With forcegw=1 the gateway is not detected automaticaly. + * Return values : + * 0 = OK + * NATPMP_ERR_INVALIDARGS + * NATPMP_ERR_SOCKETERROR + * NATPMP_ERR_FCNTLERROR + * NATPMP_ERR_CANNOTGETGATEWAY + * NATPMP_ERR_CONNECTERR */ +LIBSPEC int initnatpmp(natpmp_t * p, int forcegw, in_addr_t forcedgw); + +/* closenatpmp() + * close resources associated with a natpmp_t object + * Return values : + * 0 = OK + * NATPMP_ERR_INVALIDARGS + * NATPMP_ERR_CLOSEERR */ +LIBSPEC int closenatpmp(natpmp_t * p); + +/* sendpublicaddressrequest() + * send a public address NAT-PMP request to the network gateway + * Return values : + * 2 = OK (size of the request) + * NATPMP_ERR_INVALIDARGS + * NATPMP_ERR_SENDERR */ +LIBSPEC int sendpublicaddressrequest(natpmp_t * p); + +/* sendnewportmappingrequest() + * send a new port mapping NAT-PMP request to the network gateway + * Arguments : + * protocol is either NATPMP_PROTOCOL_TCP or NATPMP_PROTOCOL_UDP, + * lifetime is in seconds. + * To remove a port mapping, set lifetime to zero. + * To remove all port mappings to the host, set lifetime and both ports + * to zero. + * Return values : + * 12 = OK (size of the request) + * NATPMP_ERR_INVALIDARGS + * NATPMP_ERR_SENDERR */ +LIBSPEC int sendnewportmappingrequest(natpmp_t * p, int protocol, + uint16_t privateport, uint16_t publicport, + uint32_t lifetime); + +/* getnatpmprequesttimeout() + * fills the timeval structure with the timeout duration of the + * currently pending NAT-PMP request. 
+ * Return values : + * 0 = OK + * NATPMP_ERR_INVALIDARGS + * NATPMP_ERR_GETTIMEOFDAYERR + * NATPMP_ERR_NOPENDINGREQ */ +LIBSPEC int getnatpmprequesttimeout(natpmp_t * p, struct timeval * timeout); + +/* readnatpmpresponseorretry() + * fills the natpmpresp_t structure if possible + * Return values : + * 0 = OK + * NATPMP_TRYAGAIN + * NATPMP_ERR_INVALIDARGS + * NATPMP_ERR_NOPENDINGREQ + * NATPMP_ERR_NOGATEWAYSUPPORT + * NATPMP_ERR_RECVFROM + * NATPMP_ERR_WRONGPACKETSOURCE + * NATPMP_ERR_UNSUPPORTEDVERSION + * NATPMP_ERR_UNSUPPORTEDOPCODE + * NATPMP_ERR_NOTAUTHORIZED + * NATPMP_ERR_NETWORKFAILURE + * NATPMP_ERR_OUTOFRESOURCES + * NATPMP_ERR_UNSUPPORTEDOPCODE + * NATPMP_ERR_UNDEFINEDERROR */ +LIBSPEC int readnatpmpresponseorretry(natpmp_t * p, natpmpresp_t * response); + +#ifdef ENABLE_STRNATPMPERR +LIBSPEC const char * strnatpmperr(int t); +#endif + +#ifdef __cplusplus +} +#endif + +#endif diff --git a/pkg/hs/natpmp-static/hsrc_lib/Network/NatPmp.hsc b/pkg/hs/natpmp-static/hsrc_lib/Network/NatPmp.hsc new file mode 100644 index 000000000..f410625d6 --- /dev/null +++ b/pkg/hs/natpmp-static/hsrc_lib/Network/NatPmp.hsc @@ -0,0 +1,266 @@ +{-# LANGUAGE ForeignFunctionInterface, DeriveDataTypeable #-} + +-- | This module is a thin wrapper above libnatpmp.h and getgateway.h. + +module Network.NatPmp (Error(..), + NatPmpResponse(..), + ProtocolType(..), + NatPmpHandle, + Port, + LifetimeSeconds, + initNatPmp, + closeNatPmp, + getDefaultGateway, + getPublicAddress, + setPortMapping) where + +#include + +#include +#include +#include + +import Prelude +import Foreign +import Foreign.C +import Network.Socket + +import Control.Monad.IO.Unlift (MonadIO(..)) + +-- Opaque type for the internals of nat pmp +data NatPmpStruct +type NatPmpHandle = Ptr NatPmpStruct + +type Port = Word16 +type LifetimeSeconds = Word32 + +-- The response type, in its internal form. This struct is a C tagged union +-- with additional data, but we need to read and write from its C form. 
+data NatPmpResponse + = NatPmpResponsePublicAddress HostAddress + | NatPmpResponseUdpPortMapping Port Port LifetimeSeconds + | NatPmpResponseTcpPortMapping Port Port LifetimeSeconds + deriving (Show) + +instance Storable NatPmpResponse where + sizeOf _ = #{size natpmpresp_t} + alignment _ = alignment (undefined :: CString) + + peek p = do + t <- uintToEnum <$> (#{peek natpmpresp_t, type} p) + case t of + RTPublicAddress -> + NatPmpResponsePublicAddress <$> + (#{peek natpmpresp_t, pnu.publicaddress.addr} p) + RTUdpPortMapping -> + NatPmpResponseUdpPortMapping + <$> (#{peek natpmpresp_t, pnu.newportmapping.privateport} p) + <*> (#{peek natpmpresp_t, pnu.newportmapping.mappedpublicport} p) + <*> (#{peek natpmpresp_t, pnu.newportmapping.lifetime} p) + RTTcpPortMapping -> + NatPmpResponseTcpPortMapping + <$> (#{peek natpmpresp_t, pnu.newportmapping.privateport} p) + <*> (#{peek natpmpresp_t, pnu.newportmapping.mappedpublicport} p) + <*> (#{peek natpmpresp_t, pnu.newportmapping.lifetime} p) + + poke _ _ = error "Responses are an output data structure; poke makes no sense" + +type NatPmpResponseHandle = Ptr NatPmpResponse + +foreign import ccall unsafe "getgateway.h getdefaultgateway" _get_default_gateway :: Ptr CUInt -> IO CInt + +foreign import ccall unsafe "natpmp.h initnatpmp" _init_nat_pmp :: NatPmpHandle -> CInt -> CInt -> IO CInt +foreign import ccall unsafe "natpmp.h closenatpmp" _close_nat_pmp :: NatPmpHandle -> IO CInt +foreign import ccall unsafe "natpmp.h sendpublicaddressrequest" sendPublicAddressRequest :: NatPmpHandle -> IO CInt +foreign import ccall unsafe "natpmp.h sendnewportmappingrequest" sendNewPortMappingRequest :: NatPmpHandle -> CInt -> CUShort -> CUShort -> CUInt -> IO CInt + +foreign import ccall unsafe "binding.h readNatResponseSynchronously" readNatResponseSynchronously :: NatPmpHandle -> NatPmpResponseHandle -> IO CInt + +-- Give the type system some help +_peekCUInt :: Ptr CUInt -> IO CUInt +_peekCUInt = peek + +uintToEnum :: Enum e => CUInt -> e +uintToEnum = toEnum . fromIntegral + +intToEnum :: Enum e => CInt -> e +intToEnum = toEnum . fromIntegral + + +-- Fetches the default gateway as an ipv4 address +getDefaultGateway :: IO (Maybe HostAddress) +getDefaultGateway = + alloca $ \(pReturnAddr :: Ptr CUInt) -> do + _get_default_gateway pReturnAddr >>= \case + 0 -> (Just . 
fromIntegral) <$> _peekCUInt pReturnAddr + _ -> pure Nothing + + +data RespType + = RTPublicAddress + | RTUdpPortMapping + | RTTcpPortMapping + deriving (Eq, Show) + +instance Enum RespType where + fromEnum RTPublicAddress = 0 + fromEnum RTUdpPortMapping = 1 + fromEnum RTTcpPortMapping = 2 + + toEnum 0 = RTPublicAddress + toEnum 1 = RTUdpPortMapping + toEnum 2 = RTTcpPortMapping + toEnum unmatched = error ("RespType.toEnum: Cannot match " ++ show unmatched) + + +data ProtocolType + = PTUdp + | PTTcp + deriving (Eq, Show) + +instance Enum ProtocolType where + fromEnum PTUdp = 1 + fromEnum PTTcp = 2 + + toEnum 1 = PTUdp + toEnum 2 = PTTcp + toEnum x = error ("ProtocolType.toEnum: Cannot match " ++ show x) + + +data Error + = ErrInvalidArgs + | ErrSocketError + | ErrCannotGetGateway + | ErrCloseErr + | ErrRecvFrom + | ErrNoPendingReq + | ErrNoGatewaySupport + | ErrConnectErr + | ErrWrongPacketSource + | ErrSendErr + | ErrFcntlError + | ErrGetTimeOfDayError + -- + | ErrUnsuportedVersion + | ErrUnsupportedOpcode + -- + | ErrUndefinedError + | ErrNotAuthorized + | ErrNetworkFailure + | ErrOutOfResources + -- + | ErrTryAgain + | ErrHaskellBindings + deriving (Eq, Show) + +instance Enum Error where + fromEnum ErrInvalidArgs = -1 + fromEnum ErrSocketError = -2 + fromEnum ErrCannotGetGateway = -3 + fromEnum ErrCloseErr = -4 + fromEnum ErrRecvFrom = -5 + fromEnum ErrNoPendingReq = -6 + fromEnum ErrNoGatewaySupport = -7 + fromEnum ErrConnectErr = -8 + fromEnum ErrWrongPacketSource = -9 + fromEnum ErrSendErr = -10 + fromEnum ErrFcntlError = -11 + fromEnum ErrGetTimeOfDayError = -12 + -- + fromEnum ErrUnsuportedVersion = -14 + fromEnum ErrUnsupportedOpcode = -15 + -- + fromEnum ErrUndefinedError = -49 + fromEnum ErrNotAuthorized = -51 + fromEnum ErrNetworkFailure = -52 + fromEnum ErrOutOfResources = -53 + -- + fromEnum ErrTryAgain = -100 + fromEnum ErrHaskellBindings = -200 + + toEnum (-1) = ErrInvalidArgs + toEnum (-2) = ErrSocketError + toEnum (-3) = ErrCannotGetGateway + toEnum (-4) = ErrCloseErr + toEnum (-5) = ErrRecvFrom + toEnum (-6) = ErrNoPendingReq + toEnum (-7) = ErrNoGatewaySupport + toEnum (-8) = ErrConnectErr + toEnum (-9) = ErrWrongPacketSource + toEnum (-10) = ErrSendErr + toEnum (-11) = ErrFcntlError + toEnum (-12) = ErrGetTimeOfDayError + -- + toEnum (-14) = ErrUnsuportedVersion + toEnum (-15) = ErrUnsupportedOpcode + -- + toEnum (-49) = ErrUndefinedError + toEnum (-51) = ErrNotAuthorized + toEnum (-52) = ErrNetworkFailure + toEnum (-53) = ErrOutOfResources + -- + toEnum (-100) = ErrTryAgain + toEnum (-200) = ErrHaskellBindings + toEnum unmatched = error ("Error.toEnum: Cannot match " ++ show unmatched) + + +initNatPmp :: MonadIO m => m (Either Error NatPmpHandle) +initNatPmp = liftIO do + natpmp <- mallocBytes #{size natpmp_t} + ret <- _init_nat_pmp natpmp 0 0 + case ret of + 0 -> pure $ Right natpmp + _ -> do + free natpmp + pure $ Left $ intToEnum ret + + +closeNatPmp :: MonadIO m => NatPmpHandle -> m (Either Error ()) +closeNatPmp handle = liftIO do + ret <- _close_nat_pmp handle + free handle + case ret of + 0 -> pure $ Right () + _ -> pure $ Left $ intToEnum ret + + +-- | Public interface for getting the public IPv4 address +getPublicAddress :: MonadIO m => NatPmpHandle -> m (Either Error HostAddress) +getPublicAddress natpmp = liftIO do + sendRetcode <- sendPublicAddressRequest natpmp + case sendRetcode of + 2 -> alloca $ \(pResponse :: NatPmpResponseHandle) -> do + respRetcode <- readNatResponseSynchronously natpmp pResponse + case respRetcode of + 0 -> peek pResponse >>= \case 
+ NatPmpResponsePublicAddress addr -> pure $ Right addr + _ -> pure $ Left ErrHaskellBindings + _ -> pure $ Left $ intToEnum respRetcode + _ -> pure $ Left $ intToEnum sendRetcode + +-- | Requests that the router maps the privatePort on our local computer in our +-- private network to publicPort on the public internet. +setPortMapping :: MonadIO m + => NatPmpHandle + -> ProtocolType + -> Port + -> Port + -> LifetimeSeconds + -> m (Either Error ()) +setPortMapping natpmp protocol privatePort publicPort lifetime = liftIO do + let protocolNum = fromEnum protocol + sendResp <- + sendNewPortMappingRequest natpmp + (fromIntegral protocolNum) (CUShort privatePort) (CUShort publicPort) + (CUInt lifetime) + + case sendResp of + 12 -> alloca $ \(pResponse :: NatPmpResponseHandle) -> do + respRetcode <- readNatResponseSynchronously natpmp pResponse + case respRetcode of + 0 -> peek pResponse >>= \case + NatPmpResponseUdpPortMapping _ _ _ -> pure $ Right () + NatPmpResponseTcpPortMapping _ _ _ -> pure $ Right () + _ -> pure $ Left ErrHaskellBindings + _ -> pure $ Left $ intToEnum respRetcode + x -> pure $ Left $ intToEnum x diff --git a/pkg/hs/natpmp-static/natpmp-static.cabal b/pkg/hs/natpmp-static/natpmp-static.cabal new file mode 100644 index 000000000..9ee268ab7 --- /dev/null +++ b/pkg/hs/natpmp-static/natpmp-static.cabal @@ -0,0 +1,89 @@ +cabal-version: >=1.10 +-- Initial package description 'natpmp-static.cabal' generated by 'cabal +-- init'. For further documentation, see +-- http://haskell.org/cabal/users-guide/ + +name: natpmp-static +version: 0.1.0.0 +synopsis: Haskell bindings to libnatpmp +description: + libnatpmp is a C library to communicate with routers and request + that they port forward traffic from the outside internet to your + program. + . + natpmp-static has Haskell bindings to libnatpmp to allow Haskell + programs to punch NAT holes in routers, containing a vendored copy + of the libnatpmp code so that we build Urbit's "almost static" + builds which we distribute. + . + See for upstream source. + +-- bug-reports: +license: BSD3 +license-file: LICENSE +author: Elliot Glaysher +maintainer: elliot@tlon.io +copyright: (c) 2020 Tlon. 
+stability: experimental +build-type: Simple + +library + hs-Source-Dirs: hsrc_lib + default-language: Haskell2010 + build-depends: base + , network + , unliftio-core + build-tools: hsc2hs + + Include-dirs: cbits + Includes: natpmp.h getgateway.h + C-Sources: cbits/natpmp.c cbits/getgateway.c cbits/binding.c + cc-options: -Wall -Os -g -fPIC + ghc-options: -Wall -fprof-auto -fPIC + + exposed-modules: Network.NatPmp + -- other-modules: + -- other-extensions: + + default-extensions: ApplicativeDo + , BangPatterns + , BlockArguments + , DataKinds + , DefaultSignatures + , DeriveAnyClass + , DeriveDataTypeable + , DeriveFoldable + , DeriveGeneric + , DeriveTraversable + , DerivingStrategies + , EmptyCase + , EmptyDataDecls + , FlexibleContexts + , FlexibleInstances + , FunctionalDependencies + , GADTs + , GeneralizedNewtypeDeriving + , LambdaCase + , MagicHash + , MultiParamTypeClasses + , NamedFieldPuns + , NoImplicitPrelude + , NumericUnderscores + , OverloadedStrings + , PartialTypeSignatures + , PatternSynonyms + , QuasiQuotes + , Rank2Types + , RankNTypes + , RecordWildCards + , ScopedTypeVariables + , StandaloneDeriving + , TemplateHaskell + , TupleSections + , TypeApplications + , TypeFamilies + , TypeOperators + , UnboxedTuples + , UnicodeSyntax + , ViewPatterns + diff --git a/pkg/hs/proto/lib/Untyped/Parser.hs b/pkg/hs/proto/lib/Untyped/Parser.hs index 14eb4db6b..be32011cd 100644 --- a/pkg/hs/proto/lib/Untyped/Parser.hs +++ b/pkg/hs/proto/lib/Untyped/Parser.hs @@ -79,61 +79,61 @@ data Mode = Wide | Tall type Parser = StateT Mode (Parsec Void Text) -withLocalState ∷ Monad m => s → StateT s m a → StateT s m a +withLocalState :: Monad m => s -> StateT s m a -> StateT s m a withLocalState val x = do { old <- get; put val; x <* put old } -inWideMode ∷ Parser a → Parser a +inWideMode :: Parser a -> Parser a inWideMode = withLocalState Wide -ace, pal, par ∷ Parser () +ace, pal, par :: Parser () ace = void (char ' ') pal = void (char '(') par = void (char ')') -- Simple Lexers --------------------------------------------------------------- -gap ∷ Parser () +gap :: Parser () gap = choice [ char ' ' >> void (some spaceChar) , newline >> void (many spaceChar) ] -whitespace ∷ Parser () +whitespace :: Parser () whitespace = ace <|> void gap -- Literals -------------------------------------------------------------------- -alpha ∷ Parser Char +alpha :: Parser Char alpha = oneOf (['a'..'z'] ++ ['A'..'Z']) -sym ∷ Parser Sym +sym :: Parser Sym sym = bucSym <|> pack <$> some alpha where bucSym = char '$' *> pure "" -atom ∷ Parser Nat +atom :: Parser Nat atom = do - init ← some digitChar - rest ← many (char '.' *> some digitChar) + init <- some digitChar + rest <- many (char '.' *> some digitChar) guard True -- TODO Validate '.'s pure (Prelude.read $ concat $ init:rest) -nat ∷ Parser Nat +nat :: Parser Nat nat = Prelude.read <$> some digitChar -tape ∷ Parser Text +tape :: Parser Text tape = do between (char '"') (char '"') $ pack <$> many (label "tape char" (anySingleBut '"')) -cord ∷ Parser Text +cord :: Parser Text cord = do between (char '\'') (char '\'') $ pack <$> many (label "cord char" (anySingleBut '\'')) -tag ∷ Parser Text +tag :: Parser Text tag = try (char '%' >> sym) -literal ∷ Parser CST +literal :: Parser CST literal = choice [ Yes <$ string "%.y" , No <$ string "%.n" @@ -156,48 +156,48 @@ literal = choice - accept the `tall` form or: - swich to `Wide` mode and then accept the wide form. 
-} -parseRune ∷ Parser a → Parser a → Parser a +parseRune :: Parser a -> Parser a -> Parser a parseRune tall wide = get >>= \case - Wide → wide - Tall → tall <|> inWideMode wide + Wide -> wide + Tall -> tall <|> inWideMode wide -rune0 ∷ a → Parser a +rune0 :: a -> Parser a rune0 = pure -rune1 ∷ (a→b) → Parser a → Parser b +rune1 :: (a->b) -> Parser a -> Parser b rune1 node x = parseRune tall wide - where tall = do gap; p←x; pure (node p) - wide = do pal; p←x; par; pure (node p) + where tall = do gap; p<-x; pure (node p) + wide = do pal; p<-x; par; pure (node p) -rune2 ∷ (a→b→c) → Parser a → Parser b → Parser c +rune2 :: (a->b->c) -> Parser a -> Parser b -> Parser c rune2 node x y = parseRune tall wide - where tall = do gap; p←x; gap; q←y; pure (node p q) - wide = do pal; p←x; ace; q←y; par; pure (node p q) + where tall = do gap; p<-x; gap; q<-y; pure (node p q) + wide = do pal; p<-x; ace; q<-y; par; pure (node p q) -rune3 ∷ (a→b→c→d) → Parser a → Parser b → Parser c → Parser d +rune3 :: (a->b->c->d) -> Parser a -> Parser b -> Parser c -> Parser d rune3 node x y z = parseRune tall wide - where tall = do gap; p←x; gap; q←y; gap; r←z; pure (node p q r) - wide = do pal; p←x; ace; q←y; ace; r←z; par; pure (node p q r) + where tall = do gap; p<-x; gap; q<-y; gap; r<-z; pure (node p q r) + wide = do pal; p<-x; ace; q<-y; ace; r<-z; par; pure (node p q r) -rune4 ∷ (a→b→c→d→e) → Parser a → Parser b → Parser c → Parser d → Parser e +rune4 :: (a->b->c->d->e) -> Parser a -> Parser b -> Parser c -> Parser d -> Parser e rune4 node x y z g = parseRune tall wide - where tall = do gap; p←x; gap; q←y; gap; r←z; gap; s←g; pure (node p q r s) - wide = do pal; p←x; ace; q←y; ace; r←z; ace; s←g; pure (node p q r s) + where tall = do gap; p<-x; gap; q<-y; gap; r<-z; gap; s<-g; pure (node p q r s) + wide = do pal; p<-x; ace; q<-y; ace; r<-z; ace; s<-g; pure (node p q r s) -runeN ∷ ([a]→b) → Parser a → Parser b +runeN :: ([a]->b) -> Parser a -> Parser b runeN node elem = node <$> parseRune tall wide where tall = gap >> elems where elems = term <|> elemAnd - elemAnd = do x ← elem; gap; xs ← elems; pure (x:xs) + elemAnd = do x <- elem; gap; xs <- elems; pure (x:xs) term = string "==" *> pure [] wide = pal *> option [] elems <* par where elems = (:) <$> elem <*> many (ace >> elem) -runeNE ∷ (NonEmpty a → b) → Parser a → Parser b +runeNE :: (NonEmpty a -> b) -> Parser a -> Parser b runeNE node elem = node <$> parseRune tall wide where tall = do let elems = term <|> elemAnd - elemAnd = do x ← elem; gap; xs ← elems; pure (x:xs) + elemAnd = do x <- elem; gap; xs <- elems; pure (x:xs) term = string "==" *> pure [] fst <- gap *> elem rst <- gap *> elems @@ -206,36 +206,36 @@ runeNE node elem = node <$> parseRune tall wide -- Irregular Syntax ------------------------------------------------------------ -inc ∷ Parser CST -- +(3) +inc :: Parser CST -- +(3) inc = do string "+(" - h ← cst + h <- cst char ')' pure h -equals ∷ Parser (CST, CST) -- =(3 4) +equals :: Parser (CST, CST) -- =(3 4) equals = do string "=(" - x ← cst + x <- cst ace - y ← cst + y <- cst char ')' pure (x, y) -tuple ∷ ∀a. Parser a → Parser [a] +tuple :: forall a. 
Parser a -> Parser [a] tuple p = char '[' >> elems where - xs ∷ Parser [a] - xs = do { x ← p; (x:) <$> tail } + xs :: Parser [a] + xs = do { x <- p; (x:) <$> tail } - tail ∷ Parser [a] + tail :: Parser [a] tail = (pure [] <* char ']') <|> (ace >> elems) - elems ∷ Parser [a] + elems :: Parser [a] elems = (pure [] <* char ']') <|> xs -appIrr ∷ Parser CST +appIrr :: Parser CST appIrr = do char '(' x <- cst @@ -244,7 +244,7 @@ appIrr = do char ')' pure (AppIrr x y) -irregular ∷ Parser CST +irregular :: Parser CST irregular = inWideMode $ choice [ Tupl <$> tuple cst @@ -255,14 +255,14 @@ irregular = -- Runes ----------------------------------------------------------------------- -pat ∷ Parser Pat +pat :: Parser Pat pat = choice [ PatTag <$> tag , char '*' $> PatTar ] -cases ∷ Parser [(Pat, CST)] +cases :: Parser [(Pat, CST)] cases = do - mode ← get + mode <- get guard (mode == Tall) end <|> lop where @@ -270,9 +270,9 @@ cases = do end = string "==" $> [] lop = do { p <- pat; gap; b <- cst; gap; ((p,b):) <$> goo } -wutHep ∷ Parser CST +wutHep :: Parser CST wutHep = do - mode ← get + mode <- get guard (mode == Tall) gap ex <- cst @@ -280,15 +280,15 @@ wutHep = do cs <- cases pure (WutHep ex cs) -barCen ∷ Parser CST +barCen :: Parser CST barCen = do - mode ← get + mode <- get guard (mode == Tall) gap cs <- cases pure (BarCen cs) -rune ∷ Parser CST +rune :: Parser CST rune = runeSwitch [ ("|=", rune2 BarTis sym cst) , ("|-", rune4 BarHep sym sym cst cst) , (":-", rune2 ColHep cst cst) @@ -313,13 +313,13 @@ rune = runeSwitch [ ("|=", rune2 BarTis sym cst) , ("~/", rune2 SigFas cst cst) ] -runeSwitch ∷ [(Text, Parser a)] → Parser a -runeSwitch = choice . fmap (\(s, p) → string s *> p) +runeSwitch :: [(Text, Parser a)] -> Parser a +runeSwitch = choice . fmap (\(s, p) -> string s *> p) -- CST Parser ------------------------------------------------------------------ -cst ∷ Parser CST +cst :: Parser CST cst = irregular <|> rune <|> literal @@ -327,19 +327,19 @@ cst = irregular <|> rune <|> literal hoonFile = do option () whitespace - h ← cst + h <- cst option () whitespace eof pure h -parse ∷ Text → Either Text CST +parse :: Text -> Either Text CST parse txt = runParser (evalStateT hoonFile Tall) "stdin" txt & \case - Left e → Left (pack $ errorBundlePretty e) - Right x → pure x + Left e -> Left (pack $ errorBundlePretty e) + Right x -> pure x -parseHoonTest ∷ Text → IO () +parseHoonTest :: Text -> IO () parseHoonTest = parseTest (evalStateT hoonFile Tall) -main ∷ IO () +main :: IO () main = (head <$> getArgs) >>= parseHoonTest diff --git a/pkg/hs/racquire/package.yaml b/pkg/hs/racquire/package.yaml index 36b562d11..b882b2753 100644 --- a/pkg/hs/racquire/package.yaml +++ b/pkg/hs/racquire/package.yaml @@ -9,7 +9,6 @@ library: - -fwarn-incomplete-patterns - -fwarn-unused-binds - -fwarn-unused-imports - - -Werror - -O2 dependencies: diff --git a/pkg/hs/shell.nix b/pkg/hs/shell.nix new file mode 100644 index 000000000..b8d5d1fe7 --- /dev/null +++ b/pkg/hs/shell.nix @@ -0,0 +1 @@ +import ../../shell.nix diff --git a/pkg/hs/stack.yaml b/pkg/hs/stack.yaml index 74bad3fe5..efd4d81f9 100644 --- a/pkg/hs/stack.yaml +++ b/pkg/hs/stack.yaml @@ -1,7 +1,8 @@ -resolver: lts-14.21 +resolver: lts-16.15 packages: - lmdb-static + - natpmp-static - proto - racquire - terminal-progress-bar @@ -14,11 +15,11 @@ packages: - urbit-noun-core extra-deps: - - flat-0.3.4@sha256:002a0e0ae656ea8cc02a772d0bcb6ea7dbd7f2e79070959cc748ad1e7138eb38 - 
base58-bytestring-0.1.0@sha256:a1da72ee89d5450bac1c792d9fcbe95ed7154ab7246f2172b57bd4fd9b5eab79 - lock-file-0.7.0.0@sha256:3ad84b5e454145e1d928063b56abb96db24a99a21b493989520e58fa0ab37b00 - - urbit-hob-0.3.1@sha256:afbdc7ad071eefc6ca85f5b598b6c62ed49079d15d1840dac27438a3b3150303 - para-1.1@sha256:a90eebb063ad70271e6e2a7f00a93e8e8f8b77273f100f39852fbf8301926f81 + - web3-0.9.1.0@sha256:6b7faac0b63e7d0aae46588dd9a42e11f54ce0fdf4c2744bdf4cc6c5cbf39aa2 + - vinyl-0.12.3@sha256:66553fc71cabfa86837bf5f98558e3e6d1807c47af5f5f1cd758081d3fb023ea # This allows building on NixOS. nix: diff --git a/pkg/hs/terminal-progress-bar/shell.nix b/pkg/hs/terminal-progress-bar/shell.nix deleted file mode 100644 index 1f56dc42c..000000000 --- a/pkg/hs/terminal-progress-bar/shell.nix +++ /dev/null @@ -1,2 +0,0 @@ -(import ../.).haskellPackages.terminal-progress-bar.env -# (import ../.).haskell.packages.ghc844.terminal-progress-bar.env diff --git a/pkg/hs/urbit-eventlog-lmdb/lib/Urbit/EventLog/LMDB.hs b/pkg/hs/urbit-eventlog-lmdb/lib/Urbit/EventLog/LMDB.hs index ed53db19c..7444296e8 100644 --- a/pkg/hs/urbit-eventlog-lmdb/lib/Urbit/EventLog/LMDB.hs +++ b/pkg/hs/urbit-eventlog-lmdb/lib/Urbit/EventLog/LMDB.hs @@ -41,9 +41,9 @@ import qualified Data.Vector as V -- Public Types ---------------------------------------------------------------- data LogIdentity = LogIdentity - { who :: Ship - , isFake :: Bool - , lifecycleLen :: Word + { who :: !Ship + , isFake :: !Bool + , lifecycleLen :: !Word } deriving (Eq, Ord, Show) deriveNoun ''LogIdentity @@ -100,7 +100,7 @@ rawOpen :: MonadIO m => FilePath -> m Env rawOpen dir = io $ do env <- mdb_env_create mdb_env_set_maxdbs env 3 - mdb_env_set_mapsize env (100 * 1024 * 1024 * 1024) + mdb_env_set_mapsize env (1024 * 1024 * 1024 * 1024) mdb_env_open env dir [] pure env @@ -300,7 +300,7 @@ streamEvents log first = do for_ batch yield streamEvents log (first + word (length batch)) -streamEffectsRows :: ∀e. HasLogFunc e +streamEffectsRows :: forall e. HasLogFunc e => EventLog -> Word64 -> ConduitT () (Word64, ByteString) (RIO e) () streamEffectsRows log = go @@ -352,7 +352,7 @@ readBatch log first = start {-| Read 1000 rows from the database, starting from key `first`. -} -readRowsBatch :: ∀e. HasLogFunc e +readRowsBatch :: forall e. HasLogFunc e => Env -> Dbi -> Word64 -> RIO e (V.Vector (Word64, ByteString)) readRowsBatch env dbi first = readRows where diff --git a/pkg/hs/urbit-king/TODO.md b/pkg/hs/urbit-king/TODO.md index f1a292b85..2e3be7108 100644 --- a/pkg/hs/urbit-king/TODO.md +++ b/pkg/hs/urbit-king/TODO.md @@ -63,12 +63,6 @@ Polish: changed too quickly. -# Finding the Serf Executable - -- [ ] Right now, `urbit-worker` is found by looking it up in the PATH. This - is wrong, but what is right? - - # Take Advantage of New IPC Features - [ ] Hook up `scry` to drivers. @@ -91,11 +85,12 @@ Implement Pier-wide process start events - [x] Handle %trim effect - [x] Inject entropy event on pier start: ``[//arvo [%wack ENT]]` - [ ] Verbose flag: `-v` injects `[%verb ~]` -- [ ] CLI event injection: `-I file-path`. The `file-path` is a jammed - noun representing an event: `[wire card]`. - 1. Just parse it as an `Ev` for now. - 2. Make the serf IPC code not care about the shape of events and effects. - 3. Support invalid events throughout the system (use `Lenient`?) + +- CLI event injection: `-I file-path`. The `file-path` is a jammed noun + representing an event: `[wire card]`. + - [x] Just parse it as an `Ev` for now. 
+ - [ ] Make the serf IPC code not care about the shape of events and effects. + - [ ] Support invalid events throughout the system (use `Lenient`?) # Polish diff --git a/pkg/hs/urbit-king/lib/Urbit/Arvo/Common.hs b/pkg/hs/urbit-king/lib/Urbit/Arvo/Common.hs index 1b0b4ad4b..645b8a1f7 100644 --- a/pkg/hs/urbit-king/lib/Urbit/Arvo/Common.hs +++ b/pkg/hs/urbit-king/lib/Urbit/Arvo/Common.hs @@ -1,3 +1,9 @@ +{-# LANGUAGE StrictData #-} + +-- This is required due to the use of 'Void' in a constructor slot in +-- combination with 'deriveNoun' which generates an unreachable pattern. +{-# OPTIONS_GHC -Wno-overlapping-patterns #-} + {-| Types used in both Events and Effects. -} @@ -14,6 +20,8 @@ module Urbit.Arvo.Common import Urbit.Prelude hiding (Term) +import Control.Monad.Fail (fail) + import qualified Network.HTTP.Types.Method as H import qualified Urbit.Ob as Ob diff --git a/pkg/hs/urbit-king/lib/Urbit/Arvo/Effect.hs b/pkg/hs/urbit-king/lib/Urbit/Arvo/Effect.hs index 606539907..b6a2bd174 100644 --- a/pkg/hs/urbit-king/lib/Urbit/Arvo/Effect.hs +++ b/pkg/hs/urbit-king/lib/Urbit/Arvo/Effect.hs @@ -1,3 +1,9 @@ +{-# LANGUAGE StrictData #-} + +-- This is required due to the use of 'Void' in a constructor slot in +-- combination with 'deriveNoun' which generates an unreachable pattern. +{-# OPTIONS_GHC -Wno-overlapping-patterns #-} + {-| Effect Types and Their Noun Conversions -} @@ -6,6 +12,7 @@ module Urbit.Arvo.Effect where import Urbit.Noun.Time import Urbit.Prelude +import Control.Monad.Fail (fail) import Urbit.Arvo.Common (KingId(..), ServId(..)) import Urbit.Arvo.Common (Header, HttpEvent, HttpServerConf, Method, Mime) import Urbit.Arvo.Common (AmesDest, Turf) @@ -112,6 +119,7 @@ data Blit = Bel () | Clr () | Hop Word64 + | Klr Stub | Lin [Char] | Mor () | Sag Path Noun @@ -119,12 +127,84 @@ data Blit | Url Cord deriving (Eq, Ord) +data Deco + = DecoBl + | DecoBr + | DecoUn + | DecoNull + deriving (Eq, Ord, Show) + +data Tint + = TintR + | TintG + | TintB + | TintC + | TintM + | TintY + | TintK + | TintW + | TintNull + deriving (Eq, Ord, Show) + +data Stye = Stye + { deco :: (HoonSet Deco) + , back :: Tint + , fore :: Tint + } + deriving (Eq, Ord, Show) + +newtype Stub = Stub [(Stye, [Char])] + deriving (Eq, Ord, Show) + +instance ToNoun Deco where + toNoun = \case + DecoBl -> toNoun $ Cord "bl" + DecoBr -> toNoun $ Cord "br" + DecoUn -> toNoun $ Cord "un" + DecoNull -> Atom 0 + +instance FromNoun Deco where + parseNoun = named "Deco" . \case + Atom 0 -> pure DecoNull + n -> parseNoun @Cord n <&> unCord >>= \case + "bl" -> pure DecoBl + "br" -> pure DecoBr + "un" -> pure DecoUn + t -> fail ("invalid: " <> unpack t) + +instance ToNoun Tint where + toNoun = \case + TintR -> toNoun $ Cord "r" + TintG -> toNoun $ Cord "g" + TintB -> toNoun $ Cord "b" + TintC -> toNoun $ Cord "c" + TintM -> toNoun $ Cord "m" + TintY -> toNoun $ Cord "y" + TintK -> toNoun $ Cord "k" + TintW -> toNoun $ Cord "w" + TintNull -> Atom 0 + +instance FromNoun Tint where + parseNoun = named "Tint" . \case + Atom 0 -> pure TintNull + n -> parseNoun @Cord n <&> unCord >>= \case + "r" -> pure TintR + "g" -> pure TintG + "b" -> pure TintB + "c" -> pure TintC + "m" -> pure TintM + "y" -> pure TintY + "k" -> pure TintK + "w" -> pure TintW + t -> fail ("invalid: " <> unpack t) + -- Manual instance to not save the noun/atom in Sag/Sav, because these can be -- megabytes and makes king hang. 
instance Show Blit where show (Bel ()) = "Bel ()" show (Clr ()) = "Clr ()" show (Hop x) = "Hop " ++ (show x) + show (Klr s) = "Klr " ++ (show s) show (Lin c) = "Lin " ++ (show c) show (Mor ()) = "Mor ()" show (Sag path _) = "Sag " ++ (show path) @@ -144,6 +224,8 @@ data TermEf | TermEfMass Path Noun -- Irrelevant deriving (Eq, Ord, Show) +deriveNoun ''Stye +deriveNoun ''Stub deriveNoun ''Blit deriveNoun ''TermEf diff --git a/pkg/hs/urbit-king/lib/Urbit/Arvo/Event.hs b/pkg/hs/urbit-king/lib/Urbit/Arvo/Event.hs index 39df141e1..f95166a49 100644 --- a/pkg/hs/urbit-king/lib/Urbit/Arvo/Event.hs +++ b/pkg/hs/urbit-king/lib/Urbit/Arvo/Event.hs @@ -1,11 +1,17 @@ +{-# LANGUAGE StrictData #-} + +-- This is required due to the use of 'Void' in a constructor slot in +-- combination with 'deriveNoun' which generates an unreachable pattern. +{-# OPTIONS_GHC -Wno-overlapping-patterns #-} + {-| Event Types and Noun Conversion -} module Urbit.Arvo.Event where -import Urbit.Noun.Tree (HoonMap, HoonSet) -import Urbit.Prelude hiding (Term) +import Urbit.Prelude hiding (Term) +import Control.Monad.Fail (fail) import Urbit.Arvo.Common (KingId(..), ServId(..)) import Urbit.Arvo.Common (Desk, Mime) import Urbit.Arvo.Common (Header(..), HttpEvent) diff --git a/pkg/hs/urbit-king/lib/Urbit/King/API.hs b/pkg/hs/urbit-king/lib/Urbit/King/API.hs index adba47550..61bf31dfe 100644 --- a/pkg/hs/urbit-king/lib/Urbit/King/API.hs +++ b/pkg/hs/urbit-king/lib/Urbit/King/API.hs @@ -18,7 +18,6 @@ import Urbit.Prelude import Network.Socket (Socket) import Prelude (read) -import Urbit.Arvo (Belt) import Urbit.King.App (HasPierPath(..)) import qualified Network.HTTP.Types as H @@ -32,7 +31,7 @@ import qualified Urbit.Vere.Term.API as Term -- Types ----------------------------------------------------------------------- -type TermConn = NounServ.Conn Belt [Term.Ev] +type TermConn = NounServ.Conn Term.ClientTake [Term.Ev] type TermConnAPI = TVar (Maybe (TermConn -> STM ())) diff --git a/pkg/hs/urbit-king/lib/Urbit/King/App.hs b/pkg/hs/urbit-king/lib/Urbit/King/App.hs index bd8b6b1a5..6be577a65 100644 --- a/pkg/hs/urbit-king/lib/Urbit/King/App.hs +++ b/pkg/hs/urbit-king/lib/Urbit/King/App.hs @@ -9,6 +9,8 @@ module Urbit.King.App , kingEnvKillSignal , killKingActionL , onKillKingSigL + , HostEnv + , runHostEnv , PierEnv , runPierEnv , killPierActionL @@ -17,6 +19,8 @@ module Urbit.King.App , HasKingId(..) , HasProcId(..) , HasKingEnv(..) + , HasMultiEyreApi(..) + , HasHostEnv(..) , HasPierEnv(..) 
, module Urbit.King.Config ) @@ -25,11 +29,16 @@ where import Urbit.King.Config import Urbit.Prelude -import System.Directory (createDirectoryIfMissing, getHomeDirectory) +import System.Directory ( createDirectoryIfMissing + , getXdgDirectory + , XdgDirectory(XdgCache) + ) import System.Posix.Internals (c_getpid) import System.Posix.Types (CPid(..)) import System.Random (randomIO) import Urbit.King.App.Class (HasStderrLogFunc(..)) +import Urbit.Vere.Eyre.Multi (MultiEyreApi) +import Urbit.Vere.Ports (PortControlApi, HasPortControlApi(..)) -- KingEnv --------------------------------------------------------------------- @@ -70,39 +79,50 @@ instance HasProcId KingEnv where instance HasKingId KingEnv where kingIdL = kingEnvKingId - -- Running KingEnvs ------------------------------------------------------------ -runKingEnvStderr :: Bool -> RIO KingEnv a -> IO a -runKingEnvStderr verb inner = do +runKingEnvStderr :: Bool -> LogLevel -> RIO KingEnv a -> IO a +runKingEnvStderr verb lvl inner = do logOptions <- - logOptionsHandle stderr verb <&> setLogUseTime True <&> setLogUseLoc False - + logOptionsHandle stderr verb + <&> setLogUseTime True + <&> setLogUseLoc False + <&> setLogMinLevel lvl withLogFunc logOptions $ \logFunc -> runKingEnv logFunc logFunc inner -runKingEnvLogFile :: Bool -> RIO KingEnv a -> IO a -runKingEnvLogFile verb inner = withLogFileHandle $ \h -> do - logOptions <- - logOptionsHandle h verb <&> setLogUseTime True <&> setLogUseLoc False - stderrLogOptions <- - logOptionsHandle stderr verb <&> setLogUseTime False <&> setLogUseLoc False +runKingEnvLogFile :: Bool -> LogLevel -> Maybe FilePath -> RIO KingEnv a -> IO a +runKingEnvLogFile verb lvl fileM inner = do + logFile <- case fileM of + Just f -> pure f + Nothing -> defaultLogFile + withLogFileHandle logFile $ \h -> do + logOptions <- + logOptionsHandle h verb + <&> setLogUseTime True + <&> setLogUseLoc False + <&> setLogMinLevel lvl + stderrLogOptions <- + logOptionsHandle stderr verb + <&> setLogUseTime False + <&> setLogUseLoc False + <&> setLogMinLevel lvl + withLogFunc stderrLogOptions $ \stderrLogFunc -> withLogFunc logOptions + $ \logFunc -> runKingEnv logFunc stderrLogFunc inner - withLogFunc stderrLogOptions $ \stderrLogFunc -> withLogFunc logOptions - $ \logFunc -> runKingEnv logFunc stderrLogFunc inner - -withLogFileHandle :: (Handle -> IO a) -> IO a -withLogFileHandle act = do - home <- getHomeDirectory - let logDir = home ".urbit" - createDirectoryIfMissing True logDir - withFile (logDir "king.log") AppendMode $ \handle -> do +withLogFileHandle :: FilePath -> (Handle -> IO a) -> IO a +withLogFileHandle f act = + withFile f AppendMode $ \handle -> do hSetBuffering handle LineBuffering act handle +defaultLogFile :: IO FilePath +defaultLogFile = do + logDir <- getXdgDirectory XdgCache "urbit" + createDirectoryIfMissing True logDir + pure (logDir "king.log") + runKingEnvNoLog :: RIO KingEnv a -> IO a -runKingEnvNoLog act = withFile "/dev/null" AppendMode $ \handle -> do - logOptions <- logOptionsHandle handle True - withLogFunc logOptions $ \logFunc -> runKingEnv logFunc logFunc act +runKingEnvNoLog act = runKingEnv mempty mempty act runKingEnv :: LogFunc -> LogFunc -> RIO KingEnv a -> IO a runKingEnv logFunc stderr action = do @@ -121,14 +141,69 @@ killKingActionL :: HasKingEnv e => Getter e (STM ()) killKingActionL = kingEnvL . kingEnvKillSignal . 
to (\kil -> void (tryPutTMVar kil ())) +-- HostEnv ------------------------------------------------------------------ + +-- The host environment is everything in King, eyre configuration shared +-- across ships, and nat punching data. + +class HasMultiEyreApi a where + multiEyreApiL :: Lens' a MultiEyreApi + +class (HasKingEnv a, HasMultiEyreApi a, HasPortControlApi a) => + HasHostEnv a where + hostEnvL :: Lens' a HostEnv + +data HostEnv = HostEnv + { _hostEnvKingEnv :: !KingEnv + , _hostEnvMultiEyreApi :: !MultiEyreApi + , _hostEnvPortControlApi :: !PortControlApi + } + +makeLenses ''HostEnv + +instance HasKingEnv HostEnv where + kingEnvL = hostEnvKingEnv + +instance HasLogFunc HostEnv where + logFuncL = kingEnvL . logFuncL + +instance HasStderrLogFunc HostEnv where + stderrLogFuncL = kingEnvL . stderrLogFuncL + +instance HasProcId HostEnv where + procIdL = kingEnvL . procIdL + +instance HasKingId HostEnv where + kingIdL = kingEnvL . kingEnvKingId + +instance HasMultiEyreApi HostEnv where + multiEyreApiL = hostEnvMultiEyreApi + +instance HasPortControlApi HostEnv where + portControlApiL = hostEnvPortControlApi + +-- Running Running Envs -------------------------------------------------------- + +runHostEnv :: MultiEyreApi -> PortControlApi -> RIO HostEnv a + -> RIO KingEnv a +runHostEnv multi ports action = do + king <- ask + + let hostEnv = HostEnv { _hostEnvKingEnv = king + , _hostEnvMultiEyreApi = multi + , _hostEnvPortControlApi = ports + } + + io (runRIO hostEnv action) -- PierEnv --------------------------------------------------------------------- -class (HasKingEnv a, HasPierConfig a, HasNetworkConfig a) => HasPierEnv a where +class (HasKingEnv a, HasHostEnv a, HasPierConfig a, HasNetworkConfig a) => + HasPierEnv a where pierEnvL :: Lens' a PierEnv data PierEnv = PierEnv - { _pierEnvKingEnv :: !KingEnv + { _pierEnvHostEnv :: !HostEnv , _pierEnvPierConfig :: !PierConfig , _pierEnvNetworkConfig :: !NetworkConfig , _pierEnvKillSignal :: !(TMVar ()) @@ -137,7 +212,16 @@ data PierEnv = PierEnv makeLenses ''PierEnv instance HasKingEnv PierEnv where - kingEnvL = pierEnvKingEnv + kingEnvL = pierEnvHostEnv . kingEnvL + +instance HasHostEnv PierEnv where + hostEnvL = pierEnvHostEnv + +instance HasMultiEyreApi PierEnv where + multiEyreApiL = pierEnvHostEnv . multiEyreApiL + +instance HasPortControlApi PierEnv where + portControlApiL = pierEnvHostEnv . 
portControlApiL instance HasPierEnv PierEnv where pierEnvL = id @@ -180,11 +264,11 @@ killPierActionL = -- Running Pier Envs ----------------------------------------------------------- runPierEnv - :: PierConfig -> NetworkConfig -> TMVar () -> RIO PierEnv a -> RIO KingEnv a + :: PierConfig -> NetworkConfig -> TMVar () -> RIO PierEnv a -> RIO HostEnv a runPierEnv pierConfig networkConfig vKill action = do - app <- ask + host <- ask - let pierEnv = PierEnv { _pierEnvKingEnv = app + let pierEnv = PierEnv { _pierEnvHostEnv = host , _pierEnvPierConfig = pierConfig , _pierEnvNetworkConfig = networkConfig , _pierEnvKillSignal = vKill diff --git a/pkg/hs/urbit-king/lib/Urbit/King/CLI.hs b/pkg/hs/urbit-king/lib/Urbit/King/CLI.hs index b5edbcd2f..b9e7fadaa 100644 --- a/pkg/hs/urbit-king/lib/Urbit/King/CLI.hs +++ b/pkg/hs/urbit-king/lib/Urbit/King/CLI.hs @@ -6,21 +6,25 @@ -} module Urbit.King.CLI where -import ClassyPrelude +import ClassyPrelude hiding (log) import Options.Applicative import Options.Applicative.Help.Pretty import Data.Word (Word16) +import RIO (LogLevel(..)) import System.Environment (getProgName) -------------------------------------------------------------------------------- -data KingOpts = KingOpts - { koSharedHttpPort :: Maybe Word16 - , koSharedHttpsPort :: Maybe Word16 +data Host = Host + { hSharedHttpPort :: Maybe Word16 + , hSharedHttpsPort :: Maybe Word16 + , hUseNatPmp :: Nat + , hSerfExe :: Maybe Text } deriving (Show) +-- | Options for each running pier. data Opts = Opts { oQuiet :: Bool , oHashless :: Bool @@ -40,10 +44,23 @@ data Opts = Opts , oHttpPort :: Maybe Word16 , oHttpsPort :: Maybe Word16 , oLoopbackPort :: Maybe Word16 - , oSerfExe :: Maybe Text + , oInjectEvents :: [Injection] } deriving (Show) +-- | Options for the logging subsystem. +data Log = Log + { lTarget :: Maybe (LogTarget FilePath) + , lLevel :: LogLevel + } + deriving (Show) + +data LogTarget a + = LogOff + | LogStderr + | LogFile a + deriving (Show) + data BootType = BootComet | BootFake Text @@ -55,12 +72,25 @@ data PillSource | PillSourceURL String deriving (Show) +data Nat + = NatAlways + | NatWhenPrivateNetwork + | NatNever + deriving (Show) + +data Injection + = InjectOneEvent FilePath + | InjectManyEvents FilePath + deriving (Show) + data New = New { nPillSource :: PillSource , nPierPath :: Maybe FilePath -- Derived from ship name if not specified. 
, nArvoDir :: Maybe FilePath , nBootType :: BootType , nLite :: Bool + , nEthNode :: String + , nSerfExe :: Maybe Text } deriving (Show) @@ -96,14 +126,15 @@ data Bug , bFinalEvt :: Word64 } | CheckDawn - { bKeyfilePath :: FilePath + { bEthNode :: String + , bKeyfilePath :: FilePath } | CheckComet deriving (Show) data Cmd - = CmdNew New Opts - | CmdRun KingOpts [(Run, Opts, Bool)] + = CmdNew New Opts + | CmdRun Host [(Run, Opts, Bool)] | CmdBug Bug | CmdCon FilePath deriving (Show) @@ -135,7 +166,7 @@ footNote exe = string $ intercalate "\n" -------------------------------------------------------------------------------- -parseArgs :: IO Cmd +parseArgs :: IO (Cmd, Log) parseArgs = do nm <- getProgName @@ -198,6 +229,35 @@ pillFromURL = PillSourceURL <$> strOption pierPath :: Parser FilePath pierPath = strArgument (metavar "PIER" <> help "Path to pier") +injectEvents :: Parser [Injection] +injectEvents = many $ InjectOneEvent <$> strOption + ( short 'I' + <> long "inject-event" + <> metavar "JAM" + <> help "Path to a jammed event" + <> hidden) + <|> InjectManyEvents <$> strOption + ( long "inject-event-list" + <> metavar "JAM_LIST" + <> help "Path to a jammed list of events" + <> hidden) + +serfExe :: Parser (Maybe Text) +serfExe = optional + $ strOption + $ metavar "PATH" + <> long "serf" + <> help "Path to serf binary to run ships in" + <> hidden + +ethNode :: Parser String +ethNode = strOption + $ short 'e' + <> long "eth-node" + <> value "http://eth-mainnet.urbit.org:8545" + <> help "Ethereum gateway URL" + <> hidden + new :: Parser New new = do nPierPath <- optional pierPath @@ -218,6 +278,10 @@ new = do <> value Nothing <> help "Replace initial clay filesys with contents of PATH" + nEthNode <- ethNode + + nSerfExe <- serfExe + pure New{..} opts :: Parser Opts @@ -273,13 +337,7 @@ opts = do <> help "Localhost-only HTTP port" <> hidden - oSerfExe <- - optional - $ strOption - $ metavar "PATH" - <> long "serf" - <> help "Path to Serf" - <> hidden + oInjectEvents <- injectEvents oHashless <- switch $ short 'S' <> long "hashless" @@ -293,7 +351,7 @@ opts = do oVerbose <- switch $ short 'v' <> long "verbose" - <> help "Verbose" + <> help "Puts the serf and king into verbose mode" <> hidden oExit <- switch $ short 'x' @@ -332,22 +390,69 @@ opts = do oFullReplay <- switch $ long "full-log-replay" - <> help "Ignores the snapshot and recomputes state from log" + <> help "Ignores snapshot and recomputes state from event log" <> hidden pure (Opts{..}) -newShip :: Parser Cmd -newShip = CmdNew <$> new <*> opts +log :: Parser Log +log = do + lTarget <- + optional + $ ( flag' LogStderr + $ long "log-to-stderr" + <> long "stderr" + <> help "Display logs on stderr" + <> hidden + ) + <|> ( fmap LogFile . 
strOption + $ long "log-to" + <> metavar "LOG_FILE" + <> help "Append logs to the given file" + <> hidden + ) + <|> ( flag' LogOff + $ long "no-logging" + <> help "Disable logging entirely" + <> hidden + ) + + lLevel <- + ( flag' LevelDebug + $ long "log-debug" + <> help "Log errors, warnings, info, and debug messages" + <> hidden + ) + <|> ( flag' LevelInfo + $ long "log-info" + <> help "Log errors, warnings, and info" + <> hidden + ) + <|> ( flag' LevelWarn + $ long "log-warn" + <> help "Log errors and warnings (default)" + <> hidden + ) + <|> ( flag' LevelError + $ long "log-error" + <> help "Log errors only" + <> hidden + ) + <|> pure LevelWarn + + pure (Log{..}) + +newShip :: Parser (Cmd, Log) +newShip = (,) <$> (CmdNew <$> new <*> opts) <*> log runOneShip :: Parser (Run, Opts, Bool) runOneShip = (,,) <$> fmap Run pierPath <*> opts <*> df where df = switch (short 'd' <> long "daemon" <> help "Daemon mode" <> hidden) -kingOpts :: Parser KingOpts -kingOpts = do - koSharedHttpPort <- +host :: Parser Host +host = do + hSharedHttpPort <- optional $ option auto $ metavar "PORT" @@ -355,7 +460,7 @@ kingOpts = do <> help "HTTP port" <> hidden - koSharedHttpsPort <- + hSharedHttpsPort <- optional $ option auto $ metavar "PORT" @@ -363,10 +468,31 @@ kingOpts = do <> help "HTTPS port" <> hidden - pure (KingOpts{..}) + hUseNatPmp <- + ( flag' NatAlways + $ long "port-forwarding" + <> help "Always try to search for a router to forward ames ports" + <> hidden + ) <|> + ( flag' NatNever + $ long "no-port-forwarding" + <> help "Disable trying to ask the router to forward ames ports" + <> hidden + ) <|> + ( flag' NatWhenPrivateNetwork + $ long "port-forwarding-when-internal" + <> help ("Try asking the router to forward when ip is 192.168.0.0/16, " <> + "172.16.0.0/12 or 10.0.0.0/8 (default).") + <> hidden + ) <|> + (pure $ NatWhenPrivateNetwork) -runShip :: Parser Cmd -runShip = CmdRun <$> kingOpts <*> some runOneShip + hSerfExe <- serfExe + + pure (Host{..}) + +runShip :: Parser (Cmd, Log) +runShip = (,) <$> (CmdRun <$> host <*> some runOneShip) <*> log valPill :: Parser Bug valPill = do @@ -408,10 +534,10 @@ browseEvs :: Parser Bug browseEvs = EventBrowser <$> pierPath checkDawn :: Parser Bug -checkDawn = CheckDawn <$> keyfilePath +checkDawn = CheckDawn <$> ethNode <*> keyfilePath -bugCmd :: Parser Cmd -bugCmd = fmap CmdBug +bugCmd :: Parser (Cmd, Log) +bugCmd = (flip (,) <$> log <*>) $ fmap CmdBug $ subparser $ command "validate-pill" ( info (valPill <**> helper) @@ -446,15 +572,15 @@ bugCmd = fmap CmdBug $ progDesc "Shows the list of stars accepting comets" ) -conCmd :: Parser Cmd -conCmd = CmdCon <$> pierPath +conCmd :: Parser (Cmd, Log) +conCmd = (,) <$> (CmdCon <$> pierPath) <*> log allFx :: Parser Bug allFx = do bPierPath <- strArgument (metavar "PIER" <> help "Path to pier") pure CollectAllFX{..} -cmd :: Parser Cmd +cmd :: Parser (Cmd, Log) cmd = subparser $ command "new" ( info (newShip <**> helper) $ progDesc "Boot a new ship." 
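The `hUseNatPmp` parser above leans on optparse-applicative's alternative-with-fallback idiom: three mutually exclusive `flag'` parsers are tried in order, and `pure NatWhenPrivateNetwork` supplies the default when none of them is given on the command line. A minimal standalone sketch of that idiom follows; the module and helper names are illustrative only and are not part of this change.

-- Sketch of the flag-with-default idiom used by the Host parser above.
module NatFlagDemo where

import Options.Applicative

data Nat
  = NatAlways
  | NatWhenPrivateNetwork
  | NatNever
  deriving (Show)

-- Three mutually exclusive flags; if none is present, fall through to the
-- default via 'pure'.
natParser :: Parser Nat
natParser =
      flag' NatAlways             (long "port-forwarding")
  <|> flag' NatNever              (long "no-port-forwarding")
  <|> flag' NatWhenPrivateNetwork (long "port-forwarding-when-internal")
  <|> pure NatWhenPrivateNetwork

-- demo []                        == Just NatWhenPrivateNetwork
-- demo ["--no-port-forwarding"]  == Just NatNever
demo :: [String] -> Maybe Nat
demo args = getParseResult (execParserPure defaultPrefs opts args)
  where opts = info (natParser <**> helper) fullDesc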
diff --git a/pkg/hs/urbit-king/lib/Urbit/King/Config.hs b/pkg/hs/urbit-king/lib/Urbit/King/Config.hs index 7cb9ceb2c..4ccece736 100644 --- a/pkg/hs/urbit-king/lib/Urbit/King/Config.hs +++ b/pkg/hs/urbit-king/lib/Urbit/King/Config.hs @@ -14,7 +14,7 @@ import qualified Urbit.Vere.Serf as Serf data PierConfig = PierConfig { _pcPierPath :: FilePath , _pcDryRun :: Bool - , _pcSerfExe :: Text + , _pcSerfExe :: Maybe Text , _pcSerfFlags :: [Serf.Flag] } deriving (Show) diff --git a/pkg/hs/urbit-king/lib/Urbit/King/Main.hs b/pkg/hs/urbit-king/lib/Urbit/King/Main.hs index 2fb280d2d..596015428 100644 --- a/pkg/hs/urbit-king/lib/Urbit/King/Main.hs +++ b/pkg/hs/urbit-king/lib/Urbit/King/Main.hs @@ -82,17 +82,16 @@ import Urbit.Arvo import Urbit.King.Config import Urbit.Vere.Dawn import Urbit.Vere.Pier -import Urbit.Vere.Eyre.Multi (multiEyre, MultiEyreApi, MultiEyreConf(..)) +import Urbit.Vere.Ports +import Urbit.Vere.Eyre.Multi (multiEyre, MultiEyreConf(..)) import Urbit.Vere.Pier.Types import Urbit.Vere.Serf import Urbit.King.App import Control.Concurrent (myThreadId) import Control.Exception (AsyncException(UserInterrupt)) -import Control.Lens ((&)) import System.Process (system) -import Text.Show.Pretty (pPrint) -import Urbit.Noun.Conversions (cordToUW) +import System.IO (hPutStrLn) import Urbit.Noun.Time (Wen) import Urbit.Vere.LockFile (lockFile) @@ -139,12 +138,12 @@ toSerfFlags CLI.Opts{..} = catMaybes m setFrom True flag = Just flag setFrom False _ = Nothing -toPierConfig :: FilePath -> CLI.Opts -> PierConfig -toPierConfig pierPath o@(CLI.Opts{..}) = PierConfig { .. } +toPierConfig :: FilePath -> Maybe Text -> CLI.Opts -> PierConfig +toPierConfig pierPath serfExe o@(CLI.Opts{..}) = PierConfig { .. } where _pcPierPath = pierPath _pcDryRun = oDryRun || isJust oDryFrom - _pcSerfExe = fromMaybe "urbit-worker" oSerfExe + _pcSerfExe = serfExe _pcSerfFlags = toSerfFlags o toNetworkConfig :: CLI.Opts -> NetworkConfig @@ -173,10 +172,10 @@ logStderr action = do logFunc <- view stderrLogFuncL runRIO logFunc action -logSlogs :: HasStderrLogFunc e => RIO e (TVar (Text -> IO ())) +logSlogs :: HasStderrLogFunc e => RIO e (TVar ((Atom, Tank) -> IO ())) logSlogs = logStderr $ do env <- ask - newTVarIO (runRIO env . logOther "serf" . display . T.strip) + newTVarIO (runRIO env . logOther "serf" . display . T.strip . tankToText . 
snd) tryBootFromPill :: Bool @@ -184,69 +183,69 @@ tryBootFromPill -> Bool -> Ship -> LegacyBootEvent - -> MultiEyreApi -> RIO PierEnv () -tryBootFromPill oExit pill lite ship boot multi = do +tryBootFromPill oExit pill lite ship boot = do mStart <- newEmptyMVar vSlog <- logSlogs - runOrExitImmediately vSlog (bootedPier vSlog) oExit mStart multi + runOrExitImmediately vSlog (bootedPier vSlog) oExit mStart [] where bootedPier vSlog = do view pierPathL >>= lockFile - rio $ logDebug "Starting boot" + rio $ logInfo "Starting boot" sls <- Pier.booted vSlog pill lite ship boot - rio $ logDebug "Completed boot" + rio $ logInfo "Completed boot" pure sls runOrExitImmediately - :: TVar (Text -> IO ()) + :: TVar ((Atom, Tank) -> IO ()) -> RAcquire PierEnv (Serf, Log.EventLog) -> Bool -> MVar () - -> MultiEyreApi + -> [Ev] -> RIO PierEnv () -runOrExitImmediately vSlog getPier oExit mStart multi = do +runOrExitImmediately vSlog getPier oExit mStart injected = do rwith getPier (if oExit then shutdownImmediately else runPier) where shutdownImmediately :: (Serf, Log.EventLog) -> RIO PierEnv () shutdownImmediately (serf, log) = do - logDebug "Sending shutdown signal" + logInfo "Sending shutdown signal" Serf.stop serf - logDebug "Shutdown!" + logInfo "Shutdown!" runPier :: (Serf, Log.EventLog) -> RIO PierEnv () runPier serfLog = do - runRAcquire (Pier.pier serfLog vSlog mStart multi) + runRAcquire (Pier.pier serfLog vSlog mStart injected) tryPlayShip :: Bool -> Bool -> Maybe Word64 -> MVar () - -> MultiEyreApi + -> [Ev] -> RIO PierEnv () -tryPlayShip exitImmediately fullReplay playFrom mStart multi = do +tryPlayShip exitImmediately fullReplay playFrom mStart injected = do when fullReplay wipeSnapshot vSlog <- logSlogs - runOrExitImmediately vSlog (resumeShip vSlog) exitImmediately mStart multi + runOrExitImmediately vSlog (resumeShip vSlog) exitImmediately mStart injected where wipeSnapshot = do shipPath <- view pierPathL - logDebug "wipeSnapshot" - logDebug $ display $ pack @Text ("Wiping " <> north shipPath) - logDebug $ display $ pack @Text ("Wiping " <> south shipPath) + logInfo "wipeSnapshot" + logInfo $ display $ pack @Text ("Wiping " <> north shipPath) + logInfo $ display $ pack @Text ("Wiping " <> south shipPath) removeFileIfExists (north shipPath) removeFileIfExists (south shipPath) north shipPath = shipPath <> "/.urb/chk/north.bin" south shipPath = shipPath <> "/.urb/chk/south.bin" - resumeShip :: TVar (Text -> IO ()) -> RAcquire PierEnv (Serf, Log.EventLog) + resumeShip :: TVar ((Atom, Tank) -> IO ()) + -> RAcquire PierEnv (Serf, Log.EventLog) resumeShip vSlog = do view pierPathL >>= lockFile - rio $ logDebug "RESUMING SHIP" + rio $ logInfo "RESUMING SHIP" sls <- Pier.resumed vSlog playFrom - rio $ logDebug "SHIP RESUMED" + rio $ logInfo "SHIP RESUMED" pure sls runRAcquire :: (MonadUnliftIO (m e), MonadIO (m e), MonadReader e (m e)) @@ -261,7 +260,7 @@ checkEvs pierPath first last = do rwith (Log.existing logPath) $ \log -> do let ident = Log.identity log let pbSty = PB.defStyle { PB.stylePostfix = PB.exact } - logDebug (displayShow ident) + logInfo (displayShow ident) last <- atomically $ Log.lastEv log <&> \lastReal -> min last lastReal @@ -286,7 +285,7 @@ checkEvs pierPath first last = do showEvents pb eId cycle = await >>= \case Nothing -> do lift $ PB.killProgressBar pb - lift $ logDebug "Everything checks out." + lift $ logInfo "Everything checks out." 
Just bs -> do lift $ PB.incProgress pb 1 lift $ do @@ -315,10 +314,10 @@ collectAllFx = error "TODO" -} collectAllFx :: FilePath -> RIO KingEnv () collectAllFx top = do - logDebug $ display $ pack @Text top + logInfo $ display $ pack @Text top vSlog <- logSlogs rwith (collectedFX vSlog) $ \() -> - logDebug "Done collecting effects!" + logInfo "Done collecting effects!" where tmpDir :: FilePath tmpDir = top ".tmpdir" @@ -339,10 +338,10 @@ collectAllFx top = do replayPartEvs :: FilePath -> Word64 -> RIO KingEnv () replayPartEvs top last = do - logDebug $ display $ pack @Text top + logInfo $ display $ pack @Text top fetchSnapshot rwith replayedEvs $ \() -> - logDebug "Done replaying events!" + logInfo "Done replaying events!" where fetchSnapshot :: RIO KingEnv () fetchSnapshot = do @@ -385,57 +384,57 @@ replayPartEvs top last = do -} testPill :: HasLogFunc e => FilePath -> Bool -> Bool -> RIO e () testPill pax showPil showSeq = do - logDebug "Reading pill file." + logInfo "Reading pill file." pillBytes <- readFile pax - logDebug "Cueing pill file." + logInfo "Cueing pill file." pillNoun <- io $ cueBS pillBytes & either throwIO pure - logDebug "Parsing pill file." + logInfo "Parsing pill file." pill <- fromNounErr pillNoun & either (throwIO . uncurry ParseErr) pure - logDebug "Using pill to generate boot sequence." + logInfo "Using pill to generate boot sequence." bootSeq <- genBootSeq (Ship 0) pill False (Fake (Ship 0)) - logDebug "Validate jam/cue and toNoun/fromNoun on pill value" + logInfo "Validate jam/cue and toNoun/fromNoun on pill value" reJam <- validateNounVal pill - logDebug "Checking if round-trip matches input file:" + logInfo "Checking if round-trip matches input file:" unless (reJam == pillBytes) $ do - logDebug " Our jam does not match the file...\n" - logDebug " This is surprising, but it is probably okay." + logInfo " Our jam does not match the file...\n" + logInfo " This is surprising, but it is probably okay." when showPil $ do - logDebug "\n\n== Pill ==\n" + logInfo "\n\n== Pill ==\n" io $ pPrint pill when showSeq $ do - logDebug "\n\n== Boot Sequence ==\n" + logInfo "\n\n== Boot Sequence ==\n" io $ pPrint bootSeq validateNounVal :: (HasLogFunc e, Eq a, ToNoun a, FromNoun a) => a -> RIO e ByteString validateNounVal inpVal = do - logDebug " jam" + logInfo " jam" inpByt <- evaluate $ jamBS $ toNoun inpVal - logDebug " cue" + logInfo " cue" outNon <- cueBS inpByt & either throwIO pure - logDebug " fromNoun" + logInfo " fromNoun" outVal <- fromNounErr outNon & either (throwIO . 
uncurry ParseErr) pure - logDebug " toNoun" + logInfo " toNoun" outNon <- evaluate (toNoun outVal) - logDebug " jam" + logInfo " jam" outByt <- evaluate $ jamBS outNon - logDebug "Checking if: x == cue (jam x)" + logInfo "Checking if: x == cue (jam x)" unless (inpVal == outVal) $ error "Value fails test: x == cue (jam x)" - logDebug "Checking if: jam x == jam (cue (jam x))" + logInfo "Checking if: jam x == jam (cue (jam x))" unless (inpByt == outByt) $ error "Value fails test: jam x == jam (cue (jam x))" @@ -444,14 +443,14 @@ validateNounVal inpVal = do -------------------------------------------------------------------------------- -pillFrom :: CLI.PillSource -> RIO KingEnv Pill +pillFrom :: CLI.PillSource -> RIO HostEnv Pill pillFrom = \case CLI.PillSourceFile pillPath -> do - logDebug $ display $ "boot: reading pill from " ++ (pack pillPath :: Text) + logInfo $ display $ "boot: reading pill from " ++ (pack pillPath :: Text) io (loadFile pillPath >>= either throwIO pure) CLI.PillSourceURL url -> do - logDebug $ display $ "boot: retrieving pill from " ++ (pack url :: Text) + logInfo $ display $ "boot: retrieving pill from " ++ (pack url :: Text) -- Get the jamfile with the list of stars accepting comets right now. manager <- io $ C.newManager tlsManagerSettings request <- io $ C.parseRequest url @@ -475,7 +474,12 @@ newShip CLI.New{..} opts = do -} multi <- multiEyre (MultiEyreConf Nothing Nothing True) - case nBootType of + -- TODO: We hit the same problem as above: we need a host env to boot a ship + -- because it may autostart the ship, so build an inactive port configuration. + let ports = buildInactivePorts + + -- here we are with a king env, and we now need a multi env. + runHostEnv multi ports $ case nBootType of CLI.BootComet -> do pill <- pillFrom nPillSource putStrLn "boot: retrieving list of stars currently accepting comets" @@ -486,12 +490,13 @@ newShip CLI.New{..} opts = do eny <- io $ Sys.randomIO let seed = mineComet (Set.fromList starList) eny putStrLn ("boot: found comet " ++ renderShip (sShip seed)) - bootFromSeed multi pill seed + putStrLn ("code: " ++ (tshow $ deriveCode $ sRing seed)) + bootFromSeed pill seed CLI.BootFake name -> do pill <- pillFrom nPillSource ship <- shipFrom name - runTryBootFromPill multi pill name ship (Fake ship) + runTryBootFromPill pill name ship (Fake ship) CLI.BootFromKeyfile keyFile -> do text <- readFileUtf8 keyFile @@ -506,10 +511,10 @@ newShip CLI.New{..} opts = do pill <- pillFrom nPillSource - bootFromSeed multi pill seed + bootFromSeed pill seed where - shipFrom :: Text -> RIO KingEnv Ship + shipFrom :: Text -> RIO HostEnv Ship shipFrom name = case Ob.parsePatp name of Left x -> error "Invalid ship name" Right p -> pure $ Ship $ fromIntegral $ Ob.fromPatp p @@ -519,7 +524,7 @@ newShip CLI.New{..} opts = do Just x -> x Nothing -> "./" <> unpack name - nameFromShip :: Ship -> RIO KingEnv Text + nameFromShip :: HasKingEnv e => Ship -> RIO e Text nameFromShip s = name where nameWithSig = Ob.renderPatp $ Ob.patp $ fromIntegral s @@ -527,37 +532,42 @@ newShip CLI.New{..} opts = do Nothing -> error "Urbit.ob didn't produce string with ~" Just x -> pure x - bootFromSeed :: MultiEyreApi -> Pill -> Seed -> RIO KingEnv () - bootFromSeed multi pill seed = do - ethReturn <- dawnVent seed + bootFromSeed :: Pill -> Seed -> RIO HostEnv () + bootFromSeed pill seed = do + ethReturn <- dawnVent nEthNode seed case ethReturn of Left x -> error $ unpack x Right dawn -> do let ship = sShip $ dSeed dawn name <- nameFromShip ship - runTryBootFromPill multi 
pill name ship (Dawn dawn) + runTryBootFromPill pill name ship (Dawn dawn) -- Now that we have all the information for running an application with a -- PierConfig, do so. - runTryBootFromPill multi pill name ship bootEvent = do - vKill <- view kingEnvKillSignal - let pierConfig = toPierConfig (pierPath name) opts + runTryBootFromPill :: Pill + -> Text + -> Ship + -> LegacyBootEvent + -> RIO HostEnv () + runTryBootFromPill pill name ship bootEvent = do + vKill <- view (kingEnvL . kingEnvKillSignal) + let pierConfig = toPierConfig (pierPath name) nSerfExe opts let networkConfig = toNetworkConfig opts runPierEnv pierConfig networkConfig vKill $ - tryBootFromPill True pill nLite ship bootEvent multi ------- tryBootFromPill (CLI.oExit opts) pill nLite flags ship bootEvent + tryBootFromPill True pill nLite ship bootEvent -runShipEnv :: CLI.Run -> CLI.Opts -> TMVar () -> RIO PierEnv a -> RIO KingEnv a -runShipEnv (CLI.Run pierPath) opts vKill act = do +runShipEnv :: Maybe Text -> CLI.Run -> CLI.Opts -> TMVar () -> RIO PierEnv a + -> RIO HostEnv a +runShipEnv serfExe (CLI.Run pierPath) opts vKill act = do runPierEnv pierConfig netConfig vKill act where - pierConfig = toPierConfig pierPath opts + pierConfig = toPierConfig pierPath serfExe opts netConfig = toNetworkConfig opts runShip - :: CLI.Run -> CLI.Opts -> Bool -> MultiEyreApi -> RIO PierEnv () -runShip (CLI.Run pierPath) opts daemon multi = do + :: CLI.Run -> CLI.Opts -> Bool -> RIO PierEnv () +runShip (CLI.Run pierPath) opts daemon = do mStart <- newEmptyMVar if daemon then runPier mStart @@ -575,13 +585,38 @@ runShip (CLI.Run pierPath) opts daemon multi = do where runPier :: MVar () -> RIO PierEnv () runPier mStart = do + injections <- loadInjections (CLI.oInjectEvents opts) tryPlayShip (CLI.oExit opts) (CLI.oFullReplay opts) (CLI.oDryFrom opts) mStart - multi + injections + loadInjections :: [CLI.Injection] -> RIO PierEnv [Ev] + loadInjections injections = do + perInjection :: [[Ev]] <- for injections $ \case + CLI.InjectOneEvent filePath -> do + logInfo $ display $ "boot: reading injected event from " ++ + (pack filePath :: Text) + io (loadFile filePath >>= either throwIO (pure . singleton)) + + CLI.InjectManyEvents filePath -> do + logInfo $ display $ "boot: reading injected event list from " ++ + (pack filePath :: Text) + io (loadFile filePath >>= either throwIO pure) + pure $ concat perInjection + + + +buildPortHandler :: HasLogFunc e => CLI.Nat -> RIO e PortControlApi +buildPortHandler CLI.NatNever = pure buildInactivePorts +-- TODO: Figure out what to do about logging here. The "port: " messages are +-- the sort of thing that should be put on the muxed terminal log, but we don't +-- have that at this layer. +buildPortHandler CLI.NatAlways = buildNatPorts (io . hPutStrLn stderr . unpack) +buildPortHandler CLI.NatWhenPrivateNetwork = + buildNatPortsWhenPrivate (io . hPutStrLn stderr . 
unpack) startBrowser :: HasLogFunc e => FilePath -> RIO e () startBrowser pierPath = runRAcquire $ do @@ -589,8 +624,8 @@ startBrowser pierPath = runRAcquire $ do log <- Log.existing (pierPath <> "/.urb/log") rio $ EventBrowser.run log -checkDawn :: HasLogFunc e => FilePath -> RIO e () -checkDawn keyfilePath = do +checkDawn :: HasLogFunc e => String -> FilePath -> RIO e () +checkDawn provider keyfilePath = do -- The keyfile is a jammed Seed then rendered in UW format text <- readFileUtf8 keyfilePath asAtom <- case cordToUW (Cord $ T.strip text) of @@ -604,7 +639,7 @@ checkDawn keyfilePath = do print $ show seed - e <- dawnVent seed + e <- dawnVent provider seed print $ show e @@ -621,12 +656,12 @@ checkComet = do main :: IO () main = do - args <- CLI.parseArgs + (args, log) <- CLI.parseArgs hSetBuffering stdout NoBuffering setupSignalHandlers - runKingEnv args $ case args of + runKingEnv args log $ case args of CLI.CmdRun ko ships -> runShips ko ships CLI.CmdNew n o -> newShip n o CLI.CmdBug (CLI.CollectAllFX pax ) -> collectAllFx pax @@ -635,16 +670,19 @@ main = do CLI.CmdBug (CLI.ValidateEvents pax f l) -> checkEvs pax f l CLI.CmdBug (CLI.ValidateFX pax f l) -> checkFx pax f l CLI.CmdBug (CLI.ReplayEvents pax l ) -> replayPartEvs pax l - CLI.CmdBug (CLI.CheckDawn pax ) -> checkDawn pax + CLI.CmdBug (CLI.CheckDawn provider pax ) -> checkDawn provider pax CLI.CmdBug CLI.CheckComet -> checkComet CLI.CmdCon pier -> connTerm pier where - runKingEnv args = - let verb = verboseLogging args - in if willRunTerminal args - then runKingEnvLogFile verb - else runKingEnvStderr verb + runKingEnv args log = + let + verb = verboseLogging args + CLI.Log {..} = log + in case logTarget lTarget args of + CLI.LogFile f -> runKingEnvLogFile verb lLevel f + CLI.LogStderr -> runKingEnvStderr verb lLevel + CLI.LogOff -> runKingEnvNoLog setupSignalHandlers = do mainTid <- myThreadId @@ -657,12 +695,23 @@ main = do CLI.CmdRun ko ships -> any CLI.oVerbose (ships <&> \(_, o, _) -> o) _ -> False - willRunTerminal :: CLI.Cmd -> Bool - willRunTerminal = \case - CLI.CmdCon _ -> True - CLI.CmdRun ko [(_,_,daemon)] -> not daemon - CLI.CmdRun ko _ -> False - _ -> False + -- If the user hasn't specified where to log, what we do depends on what + -- command she has issued. Notably, the LogFile Nothing outcome means that + -- runKingEnvLogFile should run an IO action to get the official app data + -- directory and open a canonically named log file there. + logTarget :: Maybe (CLI.LogTarget FilePath) + -> CLI.Cmd + -> CLI.LogTarget (Maybe FilePath) + logTarget = \case + Just (CLI.LogFile f) -> const $ CLI.LogFile (Just f) + Just CLI.LogStderr -> const $ CLI.LogStderr + Just CLI.LogOff -> const $ CLI.LogOff + Nothing -> \case + CLI.CmdCon _ -> CLI.LogFile Nothing + CLI.CmdRun ko [(_,_,daemon)] | daemon -> CLI.LogStderr + | otherwise -> CLI.LogFile Nothing + CLI.CmdRun ko _ -> CLI.LogStderr + _ -> CLI.LogStderr {- @@ -670,19 +719,17 @@ main = do Once `waitForKillRequ` returns, the ship will be terminated and this routine will exit. - - TODO Use logging system instead of printing. 
-} runShipRestarting - :: CLI.Run -> CLI.Opts -> MultiEyreApi -> RIO KingEnv () -runShipRestarting r o multi = do + :: Maybe Text -> CLI.Run -> CLI.Opts -> RIO HostEnv () +runShipRestarting serfExe r o = do let pier = pack (CLI.rPierPath r) - loop = runShipRestarting r o multi + loop = runShipRestarting serfExe r o onKill <- view onKillKingSigL vKillPier <- newEmptyTMVarIO - tid <- asyncBound $ runShipEnv r o vKillPier $ runShip r o True multi + tid <- asyncBound $ runShipEnv serfExe r o vKillPier $ runShip r o True let onShipExit = Left <$> waitCatchSTM tid onKillRequ = Right <$> onKill @@ -697,9 +744,10 @@ runShipRestarting r o multi = do loop Right () -> do logTrace $ display (pier <> " shutdown requested") + atomically $ putTMVar vKillPier () race_ (wait tid) $ do threadDelay 5_000_000 - logDebug $ display (pier <> " not down after 5s, killing with fire.") + logInfo $ display (pier <> " not down after 5s, killing with fire.") cancel tid logTrace $ display ("Ship terminated: " <> pier) @@ -707,10 +755,11 @@ runShipRestarting r o multi = do TODO This is messy and shared a lot of logic with `runShipRestarting`. -} runShipNoRestart - :: CLI.Run -> CLI.Opts -> Bool -> MultiEyreApi -> RIO KingEnv () -runShipNoRestart r o d multi = do - vKill <- view kingEnvKillSignal -- killing ship same as killing king - tid <- asyncBound (runShipEnv r o vKill $ runShip r o d multi) + :: Maybe Text -> CLI.Run -> CLI.Opts -> Bool -> RIO HostEnv () +runShipNoRestart serfExe r o d = do + -- killing ship same as killing king + vKill <- view (kingEnvL . kingEnvKillSignal) + tid <- asyncBound (runShipEnv serfExe r o vKill $ runShip r o d) onKill <- view onKillKingSigL let pier = pack (CLI.rPierPath r) @@ -731,40 +780,32 @@ runShipNoRestart r o d multi = do cancel tid logTrace $ display (pier <> " terminated.") -runShips :: CLI.KingOpts -> [(CLI.Run, CLI.Opts, Bool)] -> RIO KingEnv () -runShips CLI.KingOpts {..} ships = do +runShips :: CLI.Host -> [(CLI.Run, CLI.Opts, Bool)] -> RIO KingEnv () +runShips CLI.Host {..} ships = do let meConf = MultiEyreConf - { mecHttpPort = fromIntegral <$> koSharedHttpPort - , mecHttpsPort = fromIntegral <$> koSharedHttpsPort + { mecHttpPort = fromIntegral <$> hSharedHttpPort + , mecHttpsPort = fromIntegral <$> hSharedHttpsPort , mecLocalhostOnly = False -- TODO Localhost-only needs to be -- a king-wide option. } - - {- - TODO Need to rework RIO environment to fix this. Should have a - bunch of nested contexts: - - - King has started. King has Id. Logging available. - - In running environment. MultiEyre and global config available. - - In pier environment: pier path and config available. - - In running ship environment: serf state, event queue available. - -} multi <- multiEyre meConf - go multi ships + ports <- buildPortHandler hUseNatPmp + + runHostEnv multi ports (go ships) where - go :: MultiEyreApi -> [(CLI.Run, CLI.Opts, Bool)] -> RIO KingEnv () - go me = \case + go :: [(CLI.Run, CLI.Opts, Bool)] -> RIO HostEnv () + go = \case [] -> pure () - [rod] -> runSingleShip rod me - ships -> runMultipleShips (ships <&> \(r, o, _) -> (r, o)) me + [rod] -> runSingleShip hSerfExe rod + ships -> runMultipleShips hSerfExe (ships <&> \(r, o, _) -> (r, o)) -- TODO Duplicated logic. 
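-- The two ship runners above share one supervision pattern: race the ship
-- thread's own exit against the king-wide kill signal, and when shutdown is
-- requested, give the pier a short grace period before cancelling its thread
-- outright.  The sketch below is illustrative only and is not part of this
-- patch: `superviseWorker` is a made-up name, it omits the pier kill-signal
-- plumbing and logging, and it assumes only the async/stm primitives the
-- code above already relies on (async, waitCatchSTM, orElse, race_, wait,
-- cancel, threadDelay).

superviseWorker :: IO () -> STM () -> IO ()
superviseWorker work onKill = do
  tid <- async work
  res <- atomically $ (Left <$> waitCatchSTM tid) `orElse` (Right <$> onKill)
  case res of
    -- The worker exited on its own; the caller decides whether to restart,
    -- as runShipRestarting does in its `loop`.
    Left _   -> pure ()
    -- Shutdown was requested: wait up to five seconds, then cancel.
    Right () -> race_ (wait tid) $ do
      threadDelay 5000000  -- five-second grace period
      cancel tid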
-runSingleShip :: (CLI.Run, CLI.Opts, Bool) -> MultiEyreApi -> RIO KingEnv () -runSingleShip (r, o, d) multi = do - shipThread <- async (runShipNoRestart r o d multi) +runSingleShip :: Maybe Text -> (CLI.Run, CLI.Opts, Bool) -> RIO HostEnv () +runSingleShip serfExe (r, o, d) = do + shipThread <- async (runShipNoRestart serfExe r o d) {- Wait for the ship to go down. @@ -784,10 +825,10 @@ runSingleShip (r, o, d) multi = do pure () -runMultipleShips :: [(CLI.Run, CLI.Opts)] -> MultiEyreApi -> RIO KingEnv () -runMultipleShips ships multi = do +runMultipleShips :: Maybe Text -> [(CLI.Run, CLI.Opts)] -> RIO HostEnv () +runMultipleShips serfExe ships = do shipThreads <- for ships $ \(r, o) -> do - async (runShipRestarting r o multi) + async (runShipRestarting serfExe r o) {- Since `spin` never returns, this will run until the main @@ -812,7 +853,7 @@ runMultipleShips ships multi = do -------------------------------------------------------------------------------- -connTerm :: ∀e. HasLogFunc e => FilePath -> RIO e () +connTerm :: forall e. HasLogFunc e => FilePath -> RIO e () connTerm = Term.runTerminalClient diff --git a/pkg/hs/urbit-king/lib/Urbit/King/Scry.hs b/pkg/hs/urbit-king/lib/Urbit/King/Scry.hs new file mode 100644 index 000000000..f2a989be3 --- /dev/null +++ b/pkg/hs/urbit-king/lib/Urbit/King/Scry.hs @@ -0,0 +1,31 @@ +{-| + Scry helpers +-} + +module Urbit.King.Scry (scryNow) where + +import Urbit.Prelude +import Urbit.Vere.Serf.Types + +import qualified Urbit.Noun.Time as Time + +scryNow :: forall e n + . (HasLogFunc e, FromNoun n) + => (Time.Wen -> Gang -> Path -> IO (Maybe (Term, Noun))) + -> Text -- ^ vane + care as two-letter string + -> Ship -- ^ ship in scry path, usually the local ship + -> Text -- ^ desk in scry path + -> [Text] -- ^ resource path to scry for + -> RIO e (Maybe n) +scryNow scry vare ship desk path = do + env <- ask + wen <- io Time.now + let wan = tshow $ Time.MkDate wen + let pax = Path $ fmap MkKnot $ vare : (tshow ship) : desk : wan : path + io (scry wen Nothing pax) >>= \case + Just (_, fromNoun @n -> Just v) -> pure $ Just v + Just (_, n) -> do + logError $ displayShow ("uncanny scry result", vare, pax, n) + pure Nothing + Nothing -> pure Nothing + diff --git a/pkg/hs/urbit-king/lib/Urbit/Prelude.hs b/pkg/hs/urbit-king/lib/Urbit/Prelude.hs index a19d44cc5..fb8f59963 100644 --- a/pkg/hs/urbit-king/lib/Urbit/Prelude.hs +++ b/pkg/hs/urbit-king/lib/Urbit/Prelude.hs @@ -15,15 +15,15 @@ module Urbit.Prelude , module RIO , io, rio , logTrace + , acquireWorker, acquireWorkerBound ) where import ClassyPrelude import Urbit.Noun -import Control.Lens hiding (Each, Index, cons, index, snoc, uncons, unsnoc, - (<.>), (<|)) - import Control.Arrow ((<<<), (>>>)) +import Control.Lens hiding (Each, Index, cons, index, snoc, uncons, unsnoc, + (<.>), (<|)) import Data.Acquire (Acquire, mkAcquire, with) import Data.RAcquire (RAcquire, mkRAcquire, rwith) import Data.RAcquire (MonadAcquire(..), MonadRIO(..)) @@ -34,10 +34,9 @@ import Text.Show.Pretty (pPrint, ppShow) import RIO (RIO, runRIO) import RIO (Utf8Builder, display, displayShow) import RIO (threadDelay) - -import RIO (HasLogFunc, LogFunc, logDebug, logError, logFuncL, logInfo, - logOptionsHandle, logOther, logWarn, mkLogFunc, setLogUseLoc, - setLogUseTime, withLogFunc) +import RIO (HasLogFunc, LogFunc, LogLevel(..), logDebug, logError, logFuncL, + logInfo, logOptionsHandle, logOther, logWarn, mkLogFunc, + setLogMinLevel, setLogUseLoc, setLogUseTime, withLogFunc) io :: MonadIO m => IO a -> m a io = liftIO @@ -47,3 +46,21 
@@ rio = liftRIO logTrace :: HasLogFunc e => Utf8Builder -> RIO e () logTrace = logOther "trace" + + +-- Utils for Spawning Worker Threads ------------------------------------------- + +acquireWorker :: HasLogFunc e => Text -> RIO e () -> RAcquire e (Async ()) +acquireWorker nam act = mkRAcquire (async act) kill + where + kill tid = do + logInfo ("Killing worker thread: " <> display nam) + cancel tid + +acquireWorkerBound :: HasLogFunc e => Text -> RIO e () -> RAcquire e (Async ()) +acquireWorkerBound nam act = mkRAcquire (asyncBound act) kill + where + kill tid = do + logInfo ("Killing worker thread: " <> display nam) + cancel tid + diff --git a/pkg/hs/urbit-king/lib/Urbit/Vere/Ames.hs b/pkg/hs/urbit-king/lib/Urbit/Vere/Ames.hs index 146389810..251eb7b14 100644 --- a/pkg/hs/urbit-king/lib/Urbit/Vere/Ames.hs +++ b/pkg/hs/urbit-king/lib/Urbit/Vere/Ames.hs @@ -1,3 +1,7 @@ +-- This is required due to the use of 'Void' in a constructor slot in +-- combination with 'deriveNoun' which generates an unreachable pattern. +{-# OPTIONS_GHC -Wno-overlapping-patterns #-} + {-| Ames IO Driver -} @@ -9,13 +13,20 @@ import Urbit.Prelude import Network.Socket hiding (recvFrom, sendTo) import Urbit.Arvo hiding (Fake) import Urbit.King.Config +import Urbit.King.Scry +import Urbit.Vere.Ames.LaneCache +import Urbit.Vere.Ames.Packet import Urbit.Vere.Pier.Types +import Urbit.Vere.Ports +import Data.Serialize (decode, encode) import Urbit.King.App (HasKingId(..), HasPierEnv(..)) import Urbit.Vere.Ames.DNS (NetworkMode(..), ResolvServ(..)) import Urbit.Vere.Ames.DNS (galaxyPort, resolvServ) import Urbit.Vere.Ames.UDP (UdpServ(..), fakeUdpServ, realUdpServ) +import qualified Urbit.Noun.Time as Time + -- Constants ------------------------------------------------------------------- @@ -32,11 +43,15 @@ packetsDroppedPerComplaint = 1000 -- Types ----------------------------------------------------------------------- +type Version = Word8 + data AmesDrv = AmesDrv { aTurfs :: TVar (Maybe [Turf]) , aDropped :: TVar Word + , aVersion :: TVar (Maybe Version) , aUdpServ :: UdpServ , aResolvr :: ResolvServ + , aVersTid :: Async () , aRecvTid :: Async () } @@ -56,6 +71,7 @@ localhost = tupleToHostAddress (127, 0, 0, 1) inaddrAny :: HostAddress inaddrAny = tupleToHostAddress (0, 0, 0, 0) + modeAddress :: NetworkMode -> Maybe HostAddress modeAddress = \case @@ -81,9 +97,10 @@ bornEv inst = EvBlip $ BlipEvNewt $ NewtEvBorn (fromIntegral inst, ()) () hearEv :: PortNumber -> HostAddress -> ByteString -> Ev hearEv p a bs = - EvBlip $ BlipEvAmes $ AmesEvHear () dest (MkBytes bs) - where - dest = EachNo $ Jammed $ AAIpv4 (Ipv4 a) (fromIntegral p) + EvBlip $ BlipEvAmes $ AmesEvHear () (ipDest p a) (MkBytes bs) + +ipDest :: PortNumber -> HostAddress -> AmesDest +ipDest p a = EachNo $ Jammed $ AAIpv4 (Ipv4 a) (fromIntegral p) -------------------------------------------------------------------------------- @@ -105,7 +122,10 @@ udpPort isFake who = do mPort <- view (networkConfigL . 
ncAmesPort) pure $ maybe (listenPort mode who) fromIntegral mPort -udpServ :: (HasLogFunc e, HasNetworkConfig e) => Bool -> Ship -> RIO e UdpServ +udpServ :: (HasLogFunc e, HasNetworkConfig e, HasPortControlApi e) + => Bool + -> Ship + -> RIO e UdpServ udpServ isFake who = do mode <- netMode isFake port <- udpPort isFake who @@ -121,9 +141,10 @@ ames' :: HasPierEnv e => Ship -> Bool + -> (Time.Wen -> Gang -> Path -> IO (Maybe (Term, Noun))) -> (Text -> RIO e ()) -> RIO e ([Ev], RAcquire e (DriverApi NewtEf)) -ames' who isFake stderr = do +ames' who isFake scry stderr = do -- Unfortunately, we cannot use TBQueue because the only behavior -- provided for when full is to block the writer. The implementation -- below uses materially the same data structures as TBQueue, however. @@ -134,7 +155,7 @@ ames' who isFake stderr = do vail <- readTVar avail if vail > 0 then do - modifyTVar avail (subtract 1) + modifyTVar' avail (subtract 1) writeTQueue ventQ p pure Intake else do @@ -147,7 +168,7 @@ ames' who isFake stderr = do pure pM env <- ask - let (bornEvs, startDriver) = ames env who isFake enqueuePacket stderr + let (bornEvs, startDriver) = ames env who isFake scry enqueuePacket stderr let runDriver = do diOnEffect <- startDriver @@ -170,14 +191,15 @@ ames' who isFake stderr = do -} ames :: forall e - . (HasLogFunc e, HasNetworkConfig e, HasKingId e) + . (HasLogFunc e, HasNetworkConfig e, HasPortControlApi e, HasKingId e) => e -> Ship -> Bool + -> (Time.Wen -> Gang -> Path -> IO (Maybe (Term, Noun))) -> (EvErr -> STM PacketOutcome) -> (Text -> RIO e ()) -> ([Ev], RAcquire e (NewtEf -> IO ())) -ames env who isFake enqueueEv stderr = (initialEvents, runAmes) +ames env who isFake scry enqueueEv stderr = (initialEvents, runAmes) where king = fromIntegral (env ^. kingIdL) @@ -190,36 +212,106 @@ ames env who isFake enqueueEv stderr = (initialEvents, runAmes) drv <- mkRAcquire start stop pure (handleEffect drv mode) - start :: HasLogFunc e => RIO e AmesDrv + start :: RIO e AmesDrv start = do + mode <- rio (netMode isFake) + cachedScryLane <- cache scryLane + aTurfs <- newTVarIO Nothing aDropped <- newTVarIO 0 + aVersion <- newTVarIO Nothing + aVersTid <- trackVersionThread aVersion aUdpServ <- udpServ isFake who - aRecvTid <- queuePacketsThread aDropped aUdpServ aResolvr <- resolvServ aTurfs (usSend aUdpServ) stderr + aRecvTid <- queuePacketsThread + aDropped + aVersion + cachedScryLane + (send aUdpServ aResolvr mode) + aUdpServ + pure (AmesDrv { .. 
}) hearFailed _ = pure () - queuePacketsThread :: HasLogFunc e => TVar Word -> UdpServ -> RIO e (Async ()) - queuePacketsThread dropCtr UdpServ {..} = async $ forever $ do - outcome <- atomically $ do - (p, a, b) <- usRecv - enqueueEv (EvErr (hearEv p a b) hearFailed) - case outcome of - Intake -> pure () - Ouster -> do - d <- atomically $ do - d <- readTVar dropCtr - writeTVar dropCtr (d + 1) - pure d - when (d `rem` packetsDroppedPerComplaint == 0) $ - logWarn "ames: queue full; dropping inbound packets" + trackVersionThread :: HasLogFunc e => TVar (Maybe Version) -> RIO e (Async ()) + trackVersionThread versSlot = async $ forever do + scryVersion >>= \case + Just v -> do + v0 <- readTVarIO versSlot + atomically $ writeTVar versSlot (Just v) + if (v0 == Just v) + then logInfo $ displayShow ("ames: proto version unchanged at", v) + else stderr ("ames: protocol version now " <> tshow v) - stop :: AmesDrv -> RIO e () + Nothing -> logError "ames: could not scry for version" + + threadDelay (10 * 60 * 1_000_000) -- 10m + + queuePacketsThread :: HasLogFunc e + => TVar Word + -> TVar (Maybe Version) + -> (Ship -> RIO e (Maybe [AmesDest])) + -> (AmesDest -> ByteString -> RIO e ()) + -> UdpServ + -> RIO e (Async ()) + queuePacketsThread dropCtr vers lan forward UdpServ{..} = async $ forever $ do + -- port number, host address, bytestring + (p, a, b) <- atomically usRecv + ver <- readTVarIO vers + + case decode b of + Right (pkt@Packet {..}) | ver == Nothing || ver == Just pktVersion -> do + logDebug $ displayShow ("ames: bon packet", pkt, showUD $ bytesAtom b) + + if pktRcvr == who + then serfsUp p a b + else lan pktRcvr >>= \case + Just ls + | dest:_ <- filter notSelf ls + -> forward dest $ encode pkt + { pktOrigin = pktOrigin <|> Just (ipDest p a) } + where + notSelf (EachYes g) = who /= Ship (fromIntegral g) + notSelf (EachNo _) = True + _ -> logInfo $ displayShow ("ames: dropping unroutable", pkt) + + Right pkt -> logInfo $ displayShow ("ames: dropping ill-versed", pkt, ver) + + -- XX better handle misversioned or illegible packets. + -- Remarks from 67f06ce5, pkg/urbit/vere/io/ames.c, L1010: + -- + -- [There are] two protocol-change scenarios [which we must think about]: + -- + -- - packets using old protocol versions from our sponsees + -- these must be let through, and this is a transitive condition; + -- they must also be forwarded where appropriate + -- they can be validated, as we know their semantics + -- + -- - packets using newer protocol versions + -- these should probably be let through, or at least + -- trigger printfs suggesting upgrade. + -- they cannot be filtered, as we do not know their semantics + -- + Left e -> logInfo $ displayShow ("ames: dropping malformed", e) + + where + serfsUp p a b = + atomically (enqueueEv (EvErr (hearEv p a b) hearFailed)) >>= \case + Intake -> pure () + Ouster -> do + d <- atomically $ do + d <- readTVar dropCtr + writeTVar dropCtr (d + 1) + pure d + when (d `rem` packetsDroppedPerComplaint == 0) $ + logWarn "ames: queue full; dropping inbound packets" + + stop :: forall e. 
AmesDrv -> RIO e () stop AmesDrv {..} = io $ do usKill aUdpServ rsKill aResolvr + cancel aVersTid cancel aRecvTid handleEffect :: AmesDrv -> NetworkMode -> NewtEf -> IO () @@ -229,20 +321,33 @@ ames env who isFake enqueueEv stderr = (initialEvents, runAmes) NewtEfSend (_id, ()) dest (MkBytes bs) -> do atomically (readTVar aTurfs) >>= \case - Nothing -> pure () - Just turfs -> sendPacket drv mode dest bs + Nothing -> stderr "ames: send before turfs" >> pure () + Just turfs -> send aUdpServ aResolvr mode dest bs - sendPacket :: AmesDrv -> NetworkMode -> AmesDest -> ByteString -> RIO e () - sendPacket AmesDrv {..} mode dest byt = do - let to adr = io (usSend aUdpServ adr byt) + send :: UdpServ + -> ResolvServ + -> NetworkMode + -> AmesDest + -> ByteString + -> RIO e () + send udpServ resolvr mode dest byt = do + let to adr = io (usSend udpServ adr byt) case (mode, dest) of (NoNetwork, _ ) -> pure () (Fake , _ ) -> when (okFakeAddr dest) $ to (localAddr Fake dest) (Localhost, _ ) -> to (localAddr Localhost dest) (Real , ra) -> ra & \case - EachYes gala -> io (rsSend aResolvr gala byt) + EachYes gala -> io (rsSend resolvr gala byt) EachNo addr -> to (ipv4Addr addr) + scryVersion :: HasLogFunc e => RIO e (Maybe Version) + scryVersion = scryNow scry "ax" who "" ["protocol", "version"] + + scryLane :: HasLogFunc e + => Ship + -> RIO e (Maybe [AmesDest]) + scryLane ship = scryNow scry "ax" who "" ["peers", tshow ship, "forward-lane"] + ipv4Addr (Jammed (AAVoid v )) = absurd v ipv4Addr (Jammed (AAIpv4 a p)) = SockAddrInet (fromIntegral p) (unIpv4 a) diff --git a/pkg/hs/urbit-king/lib/Urbit/Vere/Ames/DNS.hs b/pkg/hs/urbit-king/lib/Urbit/Vere/Ames/DNS.hs index f16f6a55a..307864612 100644 --- a/pkg/hs/urbit-king/lib/Urbit/Vere/Ames/DNS.hs +++ b/pkg/hs/urbit-king/lib/Urbit/Vere/Ames/DNS.hs @@ -64,7 +64,7 @@ import Urbit.Prelude import Network.Socket hiding (recvFrom, sendTo) import Urbit.Arvo hiding (Fake) -import qualified Data.Map as M +import qualified Data.Map.Strict as M import qualified Urbit.Noun.Time as Time import qualified Urbit.Ob as Ob @@ -131,11 +131,11 @@ doResolv gal (prevWen, prevIP) turfs stderr = do io (resolv gal turfs) >>= \case Nothing -> do stderr $ "ames: czar at " ++ galStr ++ ": not found" - logDebug $ displayShow ("(ames) Failed to lookup IP for ", gal) + logInfo $ displayShow ("(ames) Failed to lookup IP for ", gal) pure (prevIP, tim) Just (turf, host, port, addr) -> do when (Just addr /= prevIP) (printCzar addr) - logDebug $ displayShow ("(ames) Looked up ", host, port, turf, addr) + logInfo $ displayShow ("(ames) Looked up ", host, port, turf, addr) pure (Just addr, tim) where galStr = renderGalaxy gal @@ -155,7 +155,7 @@ resolvWorker resolvWorker gal vTurfs vLast waitMsg send stderr = async (forever go) where logDrop = - logDebug $ displayShow ("(ames) Dropping packet; no ip for galaxy ", gal) + logInfo $ displayShow ("(ames) Dropping packet; no ip for galaxy ", gal) go :: RIO e () go = do diff --git a/pkg/hs/urbit-king/lib/Urbit/Vere/Ames/LaneCache.hs b/pkg/hs/urbit-king/lib/Urbit/Vere/Ames/LaneCache.hs new file mode 100644 index 000000000..519f5845d --- /dev/null +++ b/pkg/hs/urbit-king/lib/Urbit/Vere/Ames/LaneCache.hs @@ -0,0 +1,32 @@ +module Urbit.Vere.Ames.LaneCache (cache) where + +import Urbit.Prelude + +import Urbit.Noun.Time + +expiry :: Gap +expiry = (2 * 60) ^. from secs + +cache :: forall a b m n + . 
(Ord a, MonadIO m, MonadIO n) + => (a -> m b) + -> n (a -> m b) +cache act = do + cas <- newTVarIO (mempty :: Map a (Wen, b)) + + let fun x = lookup x <$> readTVarIO cas >>= \case + Nothing -> thru + Just (t, v) -> do + t' <- io now + if gap t' t > expiry + then thru + else pure v + where + thru :: m b + thru = do + t <- io now + v <- act x + atomically $ modifyTVar' cas (insertMap x (t, v)) + pure v + + pure fun diff --git a/pkg/hs/urbit-king/lib/Urbit/Vere/Ames/Packet.hs b/pkg/hs/urbit-king/lib/Urbit/Vere/Ames/Packet.hs new file mode 100644 index 000000000..193a1da36 --- /dev/null +++ b/pkg/hs/urbit-king/lib/Urbit/Vere/Ames/Packet.hs @@ -0,0 +1,103 @@ +{-| + Parsing of Ames packets +-} + +module Urbit.Vere.Ames.Packet where + +import Urbit.Prelude + +import Control.Monad.Fail +import Data.Bits +import Data.LargeWord +import Data.Serialize + +import Urbit.Arvo (AmesDest) + +data Packet = Packet + { pktVersion :: Word8 + , pktEncrypted :: Bool + -- + , pktSndr :: Ship + , pktRcvr :: Ship + , pktOrigin :: Maybe AmesDest + , pktContent :: Bytes + } + deriving Eq + +instance Show Packet where + show Packet {..} + = "Packet {pktVersion = " + <> show pktVersion + <> ", pktEncrypted = " + <> show pktEncrypted + <> ", pktSndr = " + <> show pktSndr + <> ", pktRcvr = " + <> show pktRcvr + <> ", pktOrigin = " + <> show pktOrigin + <> ", pktContent = " + <> showUD (bytesAtom $ unBytes pktContent) + <> "}" + +instance Serialize Packet where + get = do + -- header + head <- getWord32le + let pktVersion = head .&. 0b111 & fromIntegral + let checksum = shiftR head 3 .&. (2 ^ 20 - 1) + let sndrRank = shiftR head 23 .&. 0b11 + let rcvrRank = shiftR head 25 .&. 0b11 + let pktEncrypted = testBit head 27 & not -- loobean + -- verify checksum + lookAhead $ do + len <- remaining + body <- getBytes len + -- XX mug (marked "TODO") is implemented as "slowMug" in U.N.Tree. Ominous + -- Also, toNoun will copy the bytes into an atom. We probably want a mugBS + let chk = fromIntegral (mug $ toNoun $ MkBytes body) .&. (2 ^ 20 - 1) + when (checksum /= chk) $ + fail ("checksum mismatch: expected " <> show checksum + <> "; got " <> show chk) + -- body + pktSndr <- getShip sndrRank + pktRcvr <- getShip rcvrRank + len <- remaining + payload <- getBytes len + -- data ("payload") + (pktOrigin, pktContent) <- case cueBS payload of + Left e -> fail (show e) + Right n -> case fromNounErr n of + Left e -> fail (show e) + Right c -> pure c + pure Packet {..} + where + getShip = fmap Ship . \case + 0 -> fromIntegral <$> getWord16le -- galaxy / star + 1 -> fromIntegral <$> getWord32le -- planet + 2 -> fromIntegral <$> getWord64le -- moon + 3 -> LargeKey <$> getWord64le <*> getWord64le -- comet + _ -> fail "impossibiru" + + put Packet {..} = do + let load = jamBS $ toNoun (pktOrigin, pktContent) + let (sndR, putSndr) = putShipGetRank pktSndr + let (rcvR, putRcvr) = putShipGetRank pktRcvr + let body = runPut (putSndr <> putRcvr <> putByteString load) + -- XX again maybe mug can be made better here + let chek = fromIntegral (mug $ toNoun $ MkBytes body) .&. (2 ^ 20 - 1) + let encr = pktEncrypted + let vers = fromIntegral pktVersion .&. 0b111 + let head = vers + .|. shiftL chek 3 + .|. shiftL sndR 23 + .|. shiftL rcvR 25 + .|. if encr then 0 else bit 27 + putWord32le head + putByteString body -- XX can we avoid copy? 
+ where + putShipGetRank s@(Ship (LargeKey p q)) = case () of + _ | s < 2 ^ 16 -> (0, putWord16le $ fromIntegral s) -- gar + | s < 2 ^ 32 -> (1, putWord32le $ fromIntegral s) -- pan + | s < 2 ^ 64 -> (2, putWord64le $ fromIntegral s) -- mon + | otherwise -> (3, putWord64le p >> putWord64le q) -- com diff --git a/pkg/hs/urbit-king/lib/Urbit/Vere/Ames/UDP.hs b/pkg/hs/urbit-king/lib/Urbit/Vere/Ames/UDP.hs index 47a9b24fb..75014dd0d 100644 --- a/pkg/hs/urbit-king/lib/Urbit/Vere/Ames/UDP.hs +++ b/pkg/hs/urbit-king/lib/Urbit/Vere/Ames/UDP.hs @@ -33,6 +33,7 @@ module Urbit.Vere.Ames.UDP where import Urbit.Prelude +import Urbit.Vere.Ports import Network.Socket hiding (recvFrom, sendTo) @@ -79,14 +80,14 @@ forceBind :: HasLogFunc e => PortNumber -> HostAddress -> RIO e Socket forceBind por hos = go where go = do - logDebug (display ("AMES: UDP: Opening socket on port " <> tshow por)) + logInfo (display ("AMES: UDP: Opening socket on port " <> tshow por)) io (doBind por hos) >>= \case Right sk -> do - logDebug (display ("AMES: UDP: Opened socket on port " <> tshow por)) + logInfo (display ("AMES: UDP: Opened socket on port " <> tshow por)) pure sk Left err -> do - logDebug (display ("AMES: UDP: " <> tshow err)) - logDebug ("AMES: UDP: Failed to open UDP socket. Waiting") + logInfo (display ("AMES: UDP: " <> tshow err)) + logInfo ("AMES: UDP: Failed to open UDP socket. Waiting") threadDelay 250_000 go @@ -137,7 +138,7 @@ recvPacket sok = do -} fakeUdpServ :: HasLogFunc e => RIO e UdpServ fakeUdpServ = do - logDebug $ displayShow ("AMES", "UDP", "\"Starting\" fake UDP server.") + logInfo $ displayShow ("AMES", "UDP", "\"Starting\" fake UDP server.") pure UdpServ { .. } where usSend = \_ _ -> pure () @@ -151,9 +152,13 @@ fakeUdpServ = do Real UDP server. See module-level docs. -} realUdpServ - :: forall e . HasLogFunc e => PortNumber -> HostAddress -> RIO e UdpServ + :: forall e + . (HasLogFunc e, HasPortControlApi e) + => PortNumber + -> HostAddress + -> RIO e UdpServ realUdpServ por hos = do - logDebug $ displayShow ("AMES", "UDP", "Starting real UDP server.") + logInfo $ displayShow ("AMES", "UDP", "Starting real UDP server.") env <- ask @@ -173,7 +178,7 @@ realUdpServ por hos = do -} let signalBrokenSocket :: Socket -> RIO e () signalBrokenSocket sock = do - logDebug $ displayShow ("AMES", "UDP" + logInfo $ displayShow ("AMES", "UDP" , "Socket broken. Requesting new socket" ) atomically $ do @@ -197,11 +202,21 @@ realUdpServ por hos = do logWarn "AMES: UDP: Dropping outbound packet because queue is full." tOpen <- async $ forever $ do - sk <- forceBind por hos - atomically (writeTVar vSock (Just sk)) - broken <- atomically (takeTMVar vFail) - logWarn "AMES: UDP: Closing broken socket." - io (close broken) + sk <- forceBind por hos + sn <- io $ getSocketName sk + + let waitForRelease = do + atomically (writeTVar vSock (Just sk)) + broken <- atomically (takeTMVar vFail) + logWarn "AMES: UDP: Closing broken socket." + io (close broken) + + case sn of + (SockAddrInet boundPort _) -> + -- When we're on IPv4, maybe port forward at the NAT. + rwith (requestPortAccess $ fromIntegral boundPort) $ + \() -> waitForRelease + _ -> waitForRelease tSend <- async $ forever $ join $ atomically $ do (adr, byt) <- readTBQueue qSend @@ -227,11 +242,11 @@ realUdpServ por hos = do enqueueRecvPacket p a b let shutdown = do - logDebug "AMES: UDP: Shutting down. (killing threads)" + logInfo "AMES: UDP: Shutting down. (killing threads)" cancel tOpen cancel tSend cancel tRecv - logDebug "AMES: UDP: Shutting down. 
(closing socket)" + logInfo "AMES: UDP: Shutting down. (closing socket)" io $ join $ atomically $ do res <- readTVar vSock <&> maybe (pure ()) close writeTVar vSock Nothing diff --git a/pkg/hs/urbit-king/lib/Urbit/Vere/Behn.hs b/pkg/hs/urbit-king/lib/Urbit/Vere/Behn.hs index 72ae5218f..aab87ebff 100644 --- a/pkg/hs/urbit-king/lib/Urbit/Vere/Behn.hs +++ b/pkg/hs/urbit-king/lib/Urbit/Vere/Behn.hs @@ -1,3 +1,7 @@ +-- This is required due to the use of 'Void' in a constructor slot in +-- combination with 'deriveNoun', which will generate an unreachable pattern. +{-# OPTIONS_GHC -Wno-overlapping-patterns #-} + {-| Behn: Timer Driver -} diff --git a/pkg/hs/urbit-king/lib/Urbit/Vere/Clay.hs b/pkg/hs/urbit-king/lib/Urbit/Vere/Clay.hs index 6b8272266..4bf767fa3 100644 --- a/pkg/hs/urbit-king/lib/Urbit/Vere/Clay.hs +++ b/pkg/hs/urbit-king/lib/Urbit/Vere/Clay.hs @@ -163,7 +163,7 @@ clay env plan = handleEffect :: ClayDrv -> SyncEf -> IO () handleEffect cd = runRIO env . \case SyncEfHill _ mountPoints -> do - logDebug $ displayShow ("(clay) known mount points:", mountPoints) + logInfo $ displayShow ("(clay) known mount points:", mountPoints) pierPath <- view pierPathL mountPairs <- flip mapM mountPoints $ \desk -> do ss <- takeFilesystemSnapshot (pierPath (deskToPath desk)) @@ -171,14 +171,14 @@ clay env plan = atomically $ writeTVar (cdMountPoints cd) (M.fromList mountPairs) SyncEfDirk p desk -> do - logDebug $ displayShow ("(clay) dirk:", p, desk) + logInfo $ displayShow ("(clay) dirk:", p, desk) m <- atomically $ readTVar (cdMountPoints cd) let snapshot = M.findWithDefault M.empty desk m pierPath <- view pierPathL let dir = pierPath deskToPath desk actions <- buildActionListFromDifferences dir snapshot - logDebug $ displayShow ("(clay) dirk actions: ", actions) + logInfo $ displayShow ("(clay) dirk actions: ", actions) let !intoList = map (actionsToInto dir) actions @@ -191,12 +191,12 @@ clay env plan = atomically $ plan (EvErr syncEv syncFailed) - atomically $ modifyTVar + atomically $ modifyTVar' (cdMountPoints cd) (applyActionsToMountPoints desk actions) SyncEfErgo p desk actions -> do - logDebug $ displayShow ("(clay) ergo:", p, desk, actions) + logInfo $ displayShow ("(clay) ergo:", p, desk, actions) m <- atomically $ readTVar (cdMountPoints cd) let mountPoint = M.findWithDefault M.empty desk m @@ -206,15 +206,15 @@ clay env plan = let hashedActions = map (calculateActionHash dir) actions for_ hashedActions (performAction mountPoint) - atomically $ modifyTVar + atomically $ modifyTVar' (cdMountPoints cd) (applyActionsToMountPoints desk hashedActions) SyncEfOgre p desk -> do - logDebug $ displayShow ("(clay) ogre:", p, desk) + logInfo $ displayShow ("(clay) ogre:", p, desk) pierPath <- view pierPathL removeDirectoryRecursive $ pierPath deskToPath desk - atomically $ modifyTVar (cdMountPoints cd) (M.delete desk) + atomically $ modifyTVar' (cdMountPoints cd) (M.delete desk) -- Change the structures off of the event into something we can work with @@ -229,13 +229,13 @@ clay env plan = performAction :: (Map FilePath Int) -> (FilePath, Maybe (Mime, Int)) -> RIO e () performAction m (fp, Nothing) = do - logDebug $ displayShow ("(clay) deleting file ", fp) + logInfo $ displayShow ("(clay) deleting file ", fp) removeFile fp performAction m (fp, Just ((Mime _ (File (Octs bs)), hash))) - | skip = logDebug $ + | skip = logInfo $ displayShow ("(clay) skipping unchanged file update " , fp) | otherwise = do - logDebug $ displayShow ("(clay) updating file " , fp) + logInfo $ displayShow ("(clay) updating file 
" , fp) createDirectoryIfMissing True $ takeDirectory fp writeFile fp bs where diff --git a/pkg/hs/urbit-king/lib/Urbit/Vere/Dawn.hs b/pkg/hs/urbit-king/lib/Urbit/Vere/Dawn.hs index ae79088da..b30ade42b 100644 --- a/pkg/hs/urbit-king/lib/Urbit/Vere/Dawn.hs +++ b/pkg/hs/urbit-king/lib/Urbit/Vere/Dawn.hs @@ -2,54 +2,56 @@ Use etherium to access PKI information. -} -module Urbit.Vere.Dawn where +module Urbit.Vere.Dawn ( dawnVent + , dawnCometList + , renderShip + , mineComet + -- Used only in testing + , mix + , shas + , shaf + , deriveCode + , cometFingerprintBS + , cometFingerprint + ) where import Urbit.Arvo.Common import Urbit.Arvo.Event hiding (Address) -import Urbit.Prelude hiding (Call, rights, to) +import Urbit.Prelude hiding (Call, rights, to, (.=)) import Data.Bits (xor) import Data.List (nub) import Data.Text (splitOn) -import Network.Ethereum.Account -import Network.Ethereum.Api.Eth -import Network.Ethereum.Api.Provider -import Network.Ethereum.Api.Types hiding (blockNumber) -import Network.Ethereum.Web3 -import Network.HTTP.Client.TLS +import Data.Aeson +import Data.HexString +import Numeric (showHex) import qualified Crypto.Hash.SHA256 as SHA256 import qualified Crypto.Hash.SHA512 as SHA512 import qualified Crypto.Sign.Ed25519 as Ed import qualified Data.Binary as B -import qualified Data.ByteArray as BA import qualified Data.ByteString as BS import qualified Data.ByteString.Char8 as C -import qualified Network.Ethereum.Ens as Ens +import qualified Data.ByteString.Lazy as L import qualified Network.HTTP.Client as C -import qualified Urbit.Azimuth as AZ import qualified Urbit.Ob as Ob --- During boot, use the infura provider -provider = HttpProvider - "https://mainnet.infura.io/v3/196a7f37c7d54211b4a07904ec73ad87" +import qualified Network.HTTP.Client.TLS as TLS +import qualified Network.HTTP.Types as HT + +-- The address of the azimuth contract as a string. +azimuthAddr :: Text +azimuthAddr = "0x223c067f8cf28ae173ee5cafea60ca44c335fecb" -- Conversion Utilities -------------------------------------------------------- --- Takes the web3's bytes representation and changes the endianness. -bytes32ToBS :: BytesN 32 -> ByteString -bytes32ToBS = reverse . BA.pack . BA.unpack +passFromBS :: ByteString -> ByteString -> ByteString -> Pass +passFromBS enc aut sut + | bytesAtom sut /= 1 = Pass (Ed.PublicKey mempty) (Ed.PublicKey mempty) + | otherwise = Pass (Ed.PublicKey aut) (Ed.PublicKey enc) -toBloq :: Quantity -> Bloq -toBloq = fromIntegral . unQuantity - -passFromEth :: BytesN 32 -> BytesN 32 -> UIntN 32 -> Pass -passFromEth enc aut sut | sut /= 1 = - Pass (Ed.PublicKey mempty) (Ed.PublicKey mempty) -passFromEth enc aut sut = - Pass (decode aut) (decode enc) - where - decode = Ed.PublicKey . bytes32ToBS +bsToBool :: ByteString -> Bool +bsToBool bs = bytesAtom bs == 1 clanFromShip :: Ship -> Ob.Class clanFromShip = Ob.clan . Ob.patp . fromIntegral @@ -60,6 +62,13 @@ shipSein = Ship . fromIntegral . Ob.fromPatp . Ob.sein . Ob.patp . fromIntegral renderShip :: Ship -> Text renderShip = Ob.renderPatp . Ob.patp . fromIntegral +hexStrToAtom :: Text -> Atom +hexStrToAtom = + bytesAtom . reverse . toBytes . hexString . removePrefix . 
encodeUtf8 + +onLeft :: (a -> b) -> Either a c -> Either b c +onLeft fun = bimap fun id + -- Data Validation ------------------------------------------------------------- -- Derive public key structure from the key derivation seed structure @@ -73,85 +82,261 @@ ringToPass Ring{..} = Pass{..} Nothing -> error "Invalid seed passed to createKeypairFromSeed" Just x -> x +-- JSONRPC Functions ----------------------------------------------------------- + +-- The big problem here is that we can't really use the generated web3 wrappers +-- around the azimuth contracts, especially for the galaxy table request. They +-- make multiple rpc invocations per galaxy request (which aren't even +-- batched!), while Vere built a single batched rpc call to fetch the entire +-- galaxy table. +-- +-- The included Network.JsonRpc.TinyClient that Network.Web3 embeds can't do +-- batches, so calling that directly is out. +-- +-- Network.JSONRPC appears to not like something about the JSON that Infura +-- returns; it's just hanging? Also no documentation. +-- +-- So, like with Vere, we roll our own. + +dawnSendHTTP :: String -> L.ByteString -> RIO e (Either Int L.ByteString) +dawnSendHTTP endpoint requestData = liftIO do + manager <- C.newManager TLS.tlsManagerSettings + + initialRequest <- C.parseRequest endpoint + let request = initialRequest + { C.method = "POST" + , C.requestBody = C.RequestBodyLBS $ requestData + , C.requestHeaders = [("Accept", "application/json"), + ("Content-Type", "application/json"), + ("Charsets", "utf-8")] + } + + response <- C.httpLbs request manager + + -- Return body if 200. + let code = HT.statusCode $ C.responseStatus response + case code of + 200 -> pure $ Right $ C.responseBody response + _ -> pure $ Left code + +class RequestMethod m where + getRequestMethod :: m -> Text + +data RawResponse = RawResponse + { rrId :: Int + , rrResult :: Text + } + deriving (Show) + +instance FromJSON RawResponse where + parseJSON = withObject "Response" $ \v -> do + rrId <- v .: "id" + rrResult <- v .: "result" + pure RawResponse{..} + + +-- Given a list of methods and parameters, return a list of decoded responses. +dawnPostRequests :: forall req e resp + . (ToJSON req, RequestMethod req) + => String + -> (req -> Text -> resp) + -> [req] + -> RIO e [resp] +dawnPostRequests endpoint responseBuilder requests = do + -- Encode our input requests + let requestPayload = + encode $ Array $ fromList $ fmap toFullRequest $ zip [0..] requests + + -- Send to the server + responses <- dawnSendHTTP endpoint requestPayload >>= \case + Left err -> error $ "error fetching " <> endpoint <> ": HTTP " <> (show err) + Right x -> pure x + + -- Get a list of the result texts in the order of the submitted requests + rawSorted <- case decode responses of + Nothing -> error $ "couldn't decode json" + Just x -> pure $ map rrResult $ sortOn rrId x + + -- Build the final result structure by calling the passed in builder with the + -- request (some outputs need data from the request structure, eitherwise, + -- we'd lean on FromJSON). 
+ let results = map (uncurry responseBuilder) (zip requests rawSorted) + pure results + + where + toFullRequest :: (Int, req) -> Value + toFullRequest (rid, req) = object [ "jsonrpc" .= ("2.0" :: Text) + , "method" .= getRequestMethod req + , "params" .= req + , "id" .= rid + ] + +-- Azimuth JSON Requests ------------------------------------------------------- + +-- Not a full implementation of the Ethereum ABI, but just the ability to call +-- a method by encoded id (like 0x63fa9a87 for `points(uint32)`), and a single +-- UIntN 32 parameter. +encodeCall :: Text -> Int -> Text +encodeCall method idx = method <> leadingZeroes <> renderedNumber + where + renderedNumber = pack $ showHex idx "" + leadingZeroes = replicate (64 - length renderedNumber) '0' + +data BlockRequest = BlockRequest + deriving (Show, Eq) + +instance RequestMethod BlockRequest where + getRequestMethod BlockRequest = "eth_blockNumber" + +instance ToJSON BlockRequest where + toJSON BlockRequest = Array $ fromList [] + +-- No need to parse, it's already in the format we'll pass as an argument to +-- eth calls which take a block number. +parseBlockRequest :: BlockRequest -> Text -> TextBlockNum +parseBlockRequest _ txt = txt + +type TextBlockNum = Text + +data PointRequest = PointRequest + { grqHexBlockNum :: TextBlockNum + , grqPointId :: Int + } deriving (Show, Eq) + +instance RequestMethod PointRequest where + getRequestMethod PointRequest{..} = "eth_call" + +instance ToJSON PointRequest where + -- 0x63fa9a87 is the points(uint32) call. + toJSON PointRequest{..} = + Array $ fromList [object [ "to" .= azimuthAddr + , "data" .= encodeCall "0x63fa9a87" grqPointId], + String grqHexBlockNum + ] + +parseAndChunkResultToBS :: Text -> [ByteString] +parseAndChunkResultToBS result = + map reverse $ + chunkBytestring 32 $ + toBytes $ + hexString $ + removePrefix $ + encodeUtf8 result + +-- The incoming result is a text bytestring. We need to take that text, and +-- spit out the parsed data. +-- +-- We're sort of lucky here. After removing the front "0x", we can just chop +-- the incoming text string into 10 different 64 character chunks and then +-- parse them as numbers. +parseEthPoint :: PointRequest -> Text -> EthPoint +parseEthPoint PointRequest{..} result = EthPoint{..} + where + [rawEncryptionKey, + rawAuthenticationKey, + rawHasSponsor, + rawActive, + rawEscapeRequested, + rawSponsor, + rawEscapeTo, + rawCryptoSuite, + rawKeyRevision, + rawContinuityNum] = parseAndChunkResultToBS result + + escapeState = if bsToBool rawEscapeRequested + then Just $ Ship $ fromIntegral $ bytesAtom rawEscapeTo + else Nothing + + -- Vere doesn't set ownership information, neither did the old Dawn.hs + -- implementation. + epOwn = (0, 0, 0, 0) + + epNet = if not $ bsToBool rawActive + then Nothing + else Just + ( fromIntegral $ bytesAtom rawKeyRevision + , passFromBS rawEncryptionKey rawAuthenticationKey rawCryptoSuite + , fromIntegral $ bytesAtom rawContinuityNum + , (bsToBool rawHasSponsor, + Ship (fromIntegral $ bytesAtom rawSponsor)) + , escapeState + ) + + -- I don't know what this is supposed to be, other than the old Dawn.hs and + -- dawn.c do the same thing. + epKid = case clanFromShip (Ship $ fromIntegral grqPointId) of + Ob.Galaxy -> Just (0, setToHoonSet mempty) + Ob.Star -> Just (0, setToHoonSet mempty) + _ -> Nothing + +-- Preprocess data from a point request into the form used in the galaxy table. 
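-- For illustration only (the block number below is made up): a batch of two
-- PointRequests goes out through dawnSendHTTP as a single JSON array, one
-- object per request, with its list index as the "id":
--
--   [ { "jsonrpc": "2.0"
--     , "method":  "eth_call"
--     , "params":  [ { "to":   "0x223c067f8cf28ae173ee5cafea60ca44c335fecb"
--                    , "data": "0x63fa9a87" <> 62 zeroes <> "ff" }
--                  , "0xbd39a3" ]
--     , "id": 0 }
--   , { ... same shape for the second request, "id": 1 } ]
--
-- i.e. the selector for points(uint32) followed by the point number (here
-- 255) as a single left-padded 32-byte word of 64 hex digits, evaluated
-- against a fixed block; the galaxy-table fetch batches 256 of these in one
-- round trip.  The node answers with an array of { "id", "result" } objects;
-- RawResponse decodes those, `sortOn rrId` restores request order, and the
-- per-request builder (parseEthPoint, parseGalaxyTableEntry, ...) turns each
-- hex result back into structured data.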
+parseGalaxyTableEntry :: PointRequest -> Text -> (Ship, (Rift, Life, Pass)) +parseGalaxyTableEntry PointRequest{..} result = (ship, (rift, life, pass)) + where + [rawEncryptionKey, + rawAuthenticationKey, + _, _, _, _, _, + rawCryptoSuite, + rawKeyRevision, + rawContinuityNum] = parseAndChunkResultToBS result + + ship = Ship $ fromIntegral grqPointId + rift = fromIntegral $ bytesAtom rawContinuityNum + life = fromIntegral $ bytesAtom rawKeyRevision + pass = passFromBS rawEncryptionKey rawAuthenticationKey rawCryptoSuite + +removePrefix :: ByteString -> ByteString +removePrefix withOhEx + | prefix == "0x" = suffix + | otherwise = error "not prefixed with 0x" + where + (prefix, suffix) = splitAt 2 withOhEx + +chunkBytestring :: Int -> ByteString -> [ByteString] +chunkBytestring size bs + | null rest = [cur] + | otherwise = (cur : chunkBytestring size rest) + where + (cur, rest) = splitAt size bs + +data TurfRequest = TurfRequest + { trqHexBlockNum :: TextBlockNum + , trqTurfId :: Int + } deriving (Show, Eq) + +instance RequestMethod TurfRequest where + getRequestMethod TurfRequest{..} = "eth_call" + +instance ToJSON TurfRequest where + -- 0xeccc8ff1 is the dnsDomains(uint32) call. + toJSON TurfRequest{..} = + Array $ fromList [object [ "to" .= azimuthAddr + , "data" .= encodeCall "0xeccc8ff1" trqTurfId], + String trqHexBlockNum + ] + +-- This is another hack instead of a full Ethereum ABI response. +parseTurfResponse :: TurfRequest -> Text -> Turf +parseTurfResponse a raw = turf + where + without0x = removePrefix $ encodeUtf8 raw + (_, blRest) = splitAt 64 without0x + (utfLenStr, utfStr) = splitAt 64 blRest + utfLen = fromIntegral $ bytesAtom $ reverse $ toBytes $ hexString utfLenStr + dnsStr = decodeUtf8 $ BS.take utfLen $ toBytes $ hexString utfStr + turf = Turf $ fmap Cord $ reverse $ splitOn "." dnsStr + -- Azimuth Functions ----------------------------------------------------------- --- Perform a request to azimuth at a certain block number -withAzimuth :: Quantity - -> Address - -> DefaultAccount Web3 a - -> Web3 a -withAzimuth bloq azimuth action = - withAccount () $ - withParam (to .~ azimuth) $ - withParam (block .~ BlockWithNumber bloq) - action +retrievePoint :: String -> TextBlockNum -> Ship -> RIO e EthPoint +retrievePoint endpoint block ship = + dawnPostRequests endpoint parseEthPoint + [PointRequest block (fromIntegral ship)] >>= \case + [x] -> pure x + _ -> error "JSON server returned multiple return values." --- Retrieves the EthPoint information for an individual point. -retrievePoint :: Quantity -> Address -> Ship -> Web3 EthPoint -retrievePoint bloq azimuth ship = - withAzimuth bloq azimuth $ do - (encryptionKey, - authenticationKey, - hasSponsor, - active, - escapeRequested, - sponsor, - escapeTo, - cryptoSuite, - keyRevision, - continuityNum) <- AZ.points (fromIntegral ship) - - let escapeState = if escapeRequested - then Just $ Ship $ fromIntegral escapeTo - else Nothing - - -- The hoon version also sets this to all 0s and then does nothing with it. - let epOwn = (0, 0, 0, 0) - - let epNet = if not active - then Nothing - else Just - ( fromIntegral keyRevision - , passFromEth encryptionKey authenticationKey cryptoSuite - , fromIntegral continuityNum - , (hasSponsor, Ship (fromIntegral sponsor)) - , escapeState - ) - - -- TODO: wtf? - let epKid = case clanFromShip ship of - Ob.Galaxy -> Just (0, setToHoonSet mempty) - Ob.Star -> Just (0, setToHoonSet mempty) - _ -> Nothing - - pure EthPoint{..} - --- Retrieves information about all the galaxies from Ethereum. 
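-- A worked example of the hand-rolled ABI decoding above (the domain value
-- is illustrative).  dnsDomains(uint32) returns a dynamic string, encoded as
-- an offset word, a byte-length word, and the UTF-8 data padded out to a
-- full 32-byte word:
--
--   "0x" <> offset word (0x20, i.e. 32)
--        <> length word (0x09 for a nine-byte domain)
--        <> "75726269742e6f7267" ("urbit.org") zero-padded to 64 hex digits
--
-- parseTurfResponse drops the offset word, reads the length, takes that many
-- data bytes, and reverses the dot-separated segments, yielding
-- Turf [Cord "org", Cord "urbit"] with the top-level domain first.  The
-- points(uint32) results handled by parseAndChunkResultToBS are simpler: ten
-- fixed-width 32-byte words, each byte-reversed before bytesAtom so the
-- big-endian ABI word becomes the expected numeric atom.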
-retrieveGalaxyTable :: Quantity -> Address -> Web3 (Map Ship (Rift, Life, Pass)) -retrieveGalaxyTable bloq azimuth = - withAzimuth bloq azimuth $ mapFromList <$> mapM getRow [0..255] - where - getRow idx = do - (encryptionKey, authenticationKey, _, _, _, _, _, cryptoSuite, - keyRev, continuity) <- AZ.points idx - pure ( fromIntegral idx - , ( fromIntegral continuity - , fromIntegral keyRev - , passFromEth encryptionKey authenticationKey cryptoSuite - ) - ) - --- Reads the three Ames domains from Ethereum, removing duplicates -readAmesDomains :: Quantity -> Address -> Web3 [Turf] -readAmesDomains bloq azimuth = - withAzimuth bloq azimuth $ nub <$> mapM getTurf [0..2] - where - getTurf idx = - Turf . fmap Cord . reverse . splitOn "." <$> AZ.dnsDomains idx - - -validateShipAndGetImmediateSponsor :: Quantity -> Address -> Seed -> Web3 Ship -validateShipAndGetImmediateSponsor block azimuth (Seed ship life ring oaf) = +validateShipAndGetSponsor :: String -> TextBlockNum -> Seed -> RIO e Ship +validateShipAndGetSponsor endpoint block (Seed ship life ring oaf) = case clanFromShip ship of Ob.Comet -> validateComet Ob.Moon -> validateMoon @@ -161,10 +346,10 @@ validateShipAndGetImmediateSponsor block azimuth (Seed ship life ring oaf) = -- A comet address is the fingerprint of the keypair let shipFromPass = cometFingerprint $ ringToPass ring when (ship /= shipFromPass) $ - fail ("comet name doesn't match fingerprint " ++ show ship ++ " vs " ++ + error ("comet name doesn't match fingerprint " <> show ship <> " vs " <> show shipFromPass) when (life /= 1) $ - fail ("comet can never be re-keyed") + error ("comet can never be re-keyed") pure (shipSein ship) validateMoon = do @@ -174,18 +359,18 @@ validateShipAndGetImmediateSponsor block azimuth (Seed ship life ring oaf) = pure $ shipSein ship validateRest = do - putStrLn ("boot: retrieving " ++ renderShip ship ++ "'s public keys") + putStrLn ("boot: retrieving " <> renderShip ship <> "'s public keys") - whoP <- retrievePoint block azimuth ship + whoP <- retrievePoint endpoint block ship case epNet whoP of - Nothing -> fail "ship not keyed" + Nothing -> error "ship not keyed" Just (netLife, pass, contNum, (hasSponsor, who), _) -> do when (netLife /= life) $ - fail ("keyfile life mismatch; keyfile claims life " ++ - show life ++ ", but Azimuth claims life " ++ + error ("keyfile life mismatch; keyfile claims life " <> + show life <> ", but Azimuth claims life " <> show netLife) when ((ringToPass ring) /= pass) $ - fail "keyfile does not match blockchain" + error "keyfile does not match blockchain" -- TODO: The hoon code does a breach check, but the C code never -- supplies the data necessary for it to function. pure who @@ -193,62 +378,68 @@ validateShipAndGetImmediateSponsor block azimuth (Seed ship life ring oaf) = -- Walk through the sponsorship chain retrieving the actual sponsorship chain -- as it exists on Ethereum. 
-getSponsorshipChain :: Quantity -> Address -> Ship -> Web3 [(Ship,EthPoint)] -getSponsorshipChain block azimuth = loop +getSponsorshipChain :: String -> TextBlockNum -> Ship -> RIO e [(Ship,EthPoint)] +getSponsorshipChain endpoint block = loop where loop ship = do - putStrLn ("boot: retrieving keys for sponsor " ++ renderShip ship) - ethPoint <- retrievePoint block azimuth ship + putStrLn ("boot: retrieving keys for sponsor " <> renderShip ship) + ethPoint <- retrievePoint endpoint block ship case (clanFromShip ship, epNet ethPoint) of - (Ob.Comet, _) -> fail "Comets cannot be sponsors" - (Ob.Moon, _) -> fail "Moons cannot be sponsors" + (Ob.Comet, _) -> error "Comets cannot be sponsors" + (Ob.Moon, _) -> error "Moons cannot be sponsors" (_, Nothing) -> - fail $ unpack ("Ship " ++ renderShip ship ++ " not booted") + error $ unpack ("Ship " <> renderShip ship <> " not booted") (Ob.Galaxy, Just _) -> pure [(ship, ethPoint)] (_, Just (_, _, _, (False, _), _)) -> - fail $ unpack ("Ship " ++ renderShip ship ++ " has no sponsor") + error $ unpack ("Ship " <> renderShip ship <> " has no sponsor") (_, Just (_, _, _, (True, sponsor), _)) -> do chain <- loop sponsor - pure $ chain ++ [(ship, ethPoint)] - + pure $ chain <> [(ship, ethPoint)] -- Produces either an error or a validated boot event structure. -dawnVent :: Seed -> RIO e (Either Text Dawn) -dawnVent dSeed@(Seed ship life ring oaf) = do - ret <- runWeb3' provider $ do - block <- blockNumber - putStrLn ("boot: ethereum block #" ++ tshow block) +dawnVent :: HasLogFunc e => String -> Seed -> RIO e (Either Text Dawn) +dawnVent provider dSeed@(Seed ship life ring oaf) = + -- The type checker can't figure this out on its own. + (onLeft tshow :: Either SomeException Dawn -> Either Text Dawn) <$> try do + putStrLn ("boot: requesting ethereum information from " <> pack provider) + blockResponses + <- dawnPostRequests provider parseBlockRequest [BlockRequest] - putStrLn "boot: retrieving azimuth contract" - azimuth <- withAccount () $ Ens.resolve "azimuth.eth" + hexStrBlock <- case blockResponses of + [num] -> pure num + x -> error "Unexpected multiple returns from block # request" - immediateSponsor <- validateShipAndGetImmediateSponsor block azimuth dSeed - dSponsor <- getSponsorshipChain block azimuth immediateSponsor + let dBloq = hexStrToAtom hexStrBlock + putStrLn ("boot: ethereum block #" <> tshow dBloq) + + immediateSponsor <- validateShipAndGetSponsor provider hexStrBlock dSeed + dSponsor <- getSponsorshipChain provider hexStrBlock immediateSponsor putStrLn "boot: retrieving galaxy table" - dCzar <- mapToHoonMap <$> retrieveGalaxyTable block azimuth + dCzar <- (mapToHoonMap . mapFromList) <$> + (dawnPostRequests provider parseGalaxyTableEntry $ + map (PointRequest hexStrBlock) [0..255]) putStrLn "boot: retrieving network domains" - dTurf <- readAmesDomains block azimuth + dTurf <- nub <$> (dawnPostRequests provider parseTurfResponse $ + map (TurfRequest hexStrBlock) [0..2]) - let dBloq = toBloq block let dNode = Nothing + pure $ MkDawn{..} - case ret of - Left x -> pure $ Left $ tshow x - Right y -> pure $ Right y +-- Comet List ------------------------------------------------------------------ dawnCometList :: RIO e [Ship] dawnCometList = do -- Get the jamfile with the list of stars accepting comets right now. 
- manager <- io $ C.newManager tlsManagerSettings + manager <- io $ C.newManager TLS.tlsManagerSettings request <- io $ C.parseRequest "https://bootstrap.urbit.org/comet-stars.jam" response <- io $ C.httpLbs (C.setRequestCheckStatus request) manager let body = toStrict $ C.responseBody response @@ -267,8 +458,11 @@ mix a b = BS.pack $ loop (BS.unpack a) (BS.unpack b) loop [] b = b loop (x:xs) (y:ys) = (xor x y) : loop xs ys +shax :: BS.ByteString -> BS.ByteString +shax = SHA256.hash + shas :: BS.ByteString -> BS.ByteString -> BS.ByteString -shas salt = SHA256.hash . mix salt . SHA256.hash +shas salt = shax . mix salt . shax shaf :: BS.ByteString -> BS.ByteString -> BS.ByteString shaf salt ruz = (mix a b) @@ -277,6 +471,18 @@ shaf salt ruz = (mix a b) a = (take 16 haz) b = (drop 16 haz) +-- Given a ring, derives the network login code. +-- +-- Note that the network code is a patp, not a patq: the bytes have been +-- scrambled. +deriveCode :: Ring -> Ob.Patp +deriveCode Ring {..} = Ob.patp $ + bytesAtom $ + take 8 $ + shaf (C.pack "pass") $ + shax $ + C.singleton 'B' <> ringSign <> ringCrypt + cometFingerprintBS :: Pass -> ByteString cometFingerprintBS = (shaf $ C.pack "bfig") . passToBS diff --git a/pkg/hs/urbit-king/lib/Urbit/Vere/Eyre.hs b/pkg/hs/urbit-king/lib/Urbit/Vere/Eyre.hs index 2aeb33f63..2aea99056 100644 --- a/pkg/hs/urbit-king/lib/Urbit/Vere/Eyre.hs +++ b/pkg/hs/urbit-king/lib/Urbit/Vere/Eyre.hs @@ -11,7 +11,7 @@ where import Urbit.Prelude hiding (Builder) import Urbit.Arvo hiding (ServerId, reqUrl, secure) -import Urbit.King.App (HasKingId(..), HasPierEnv(..)) +import Urbit.King.App (HasKingId(..), HasMultiEyreApi(..), HasPierEnv(..)) import Urbit.King.Config import Urbit.Vere.Eyre.Multi import Urbit.Vere.Eyre.PortsFile @@ -20,11 +20,12 @@ import Urbit.Vere.Eyre.Service import Urbit.Vere.Eyre.Wai import Urbit.Vere.Pier.Types -import Data.List.NonEmpty (NonEmpty((:|))) -import Data.PEM (pemParseBS, pemWriteBS) -import RIO.Prelude (decodeUtf8Lenient) -import System.Random (randomIO) -import Urbit.Vere.Http (convertHeaders, unconvertHeaders) +import Data.List.NonEmpty (NonEmpty((:|))) +import Data.PEM (pemParseBS, pemWriteBS) +import RIO.Prelude (decodeUtf8Lenient) +import System.Random (randomIO) +import Urbit.Vere.Http (convertHeaders, unconvertHeaders) +import Urbit.Vere.Eyre.KingSubsite (KingSubsite) import qualified Network.HTTP.Types as H @@ -170,15 +171,18 @@ execRespActs (Drv v) who reqId ev = readMVar v >>= \case atomically (routeRespAct who (sLiveReqs sv) reqId act) startServ - :: (HasPierConfig e, HasLogFunc e, HasNetworkConfig e) - => MultiEyreApi - -> Ship + :: (HasPierConfig e, HasLogFunc e, HasMultiEyreApi e, HasNetworkConfig e) + => Ship -> Bool -> HttpServerConf -> (EvErr -> STM ()) + -> (Text -> RIO e ()) + -> KingSubsite -> RIO e Serv -startServ multi who isFake conf plan = do - logDebug (displayShow ("EYRE", "startServ")) +startServ who isFake conf plan stderr sub = do + logInfo (displayShow ("EYRE", "startServ")) + + multi <- view multiEyreApiL let vLive = meaLive multi @@ -219,42 +223,42 @@ startServ multi who isFake conf plan = do let onKilReq :: Ship -> Word64 -> STM () onKilReq _ship = plan . cancelEv srvId . 
fromIntegral - logDebug (displayShow ("EYRE", "joinMultiEyre", who, mTls, mCre)) + logInfo (displayShow ("EYRE", "joinMultiEyre", who, mTls, mCre)) - atomically (joinMultiEyre multi who mCre onReq onKilReq) + atomically (joinMultiEyre multi who mCre onReq onKilReq sub) - logDebug $ displayShow ("EYRE", "Starting loopback server") + logInfo $ displayShow ("EYRE", "Starting loopback server") lop <- serv vLive $ ServConf { scHost = soHost (pttLop ptt) , scPort = soWhich (pttLop ptt) , scRedi = Nothing , scFake = False - , scType = STHttp who $ ReqApi + , scType = STHttp who sub $ ReqApi { rcReq = onReq Loopback , rcKil = onKilReq } } - logDebug $ displayShow ("EYRE", "Starting insecure server") + logInfo $ displayShow ("EYRE", "Starting insecure server") ins <- serv vLive $ ServConf { scHost = soHost (pttIns ptt) , scPort = soWhich (pttIns ptt) , scRedi = secRedi , scFake = noHttp - , scType = STHttp who $ ReqApi + , scType = STHttp who sub $ ReqApi { rcReq = onReq Insecure , rcKil = onKilReq } } mSec <- for mTls $ \tls -> do - logDebug "Starting secure server" + logInfo "Starting secure server" serv vLive $ ServConf { scHost = soHost (pttSec ptt) , scPort = soWhich (pttSec ptt) , scRedi = Nothing , scFake = noHttps - , scType = STHttps who tls $ ReqApi + , scType = STHttps who tls sub $ ReqApi { rcReq = onReq Secure , rcKil = onKilReq } @@ -269,7 +273,11 @@ startServ multi who isFake conf plan = do let por = Ports secPor insPor lopPor fil = pierPath <> "/.http.ports" - logDebug $ displayShow ("EYRE", "All Servers Started.", srvId, por, fil) + logInfo $ displayShow ("EYRE", "All Servers Started.", srvId, por, fil) + for secPor $ \p -> + stderr ("http: secure web interface live on https://localhost:" <> tshow p) + stderr ("http: web interface live on http://localhost:" <> tshow insPor) + stderr ("http: loopback live on http://localhost:" <> tshow lopPor) pure (Serv srvId conf lop ins mSec por fil vLive) @@ -281,16 +289,19 @@ _bornFailed env _ = runRIO env $ do pure () -- TODO What should this do? eyre' - :: HasPierEnv e - => MultiEyreApi - -> Ship + :: (HasPierEnv e, HasMultiEyreApi e) + => Ship -> Bool + -> (Text -> RIO e ()) + -> KingSubsite -> RIO e ([Ev], RAcquire e (DriverApi HttpServerEf)) -eyre' multi who isFake = do + +eyre' who isFake stderr sub = do ventQ :: TQueue EvErr <- newTQueueIO env <- ask - let (bornEvs, startDriver) = eyre env multi who (writeTQueue ventQ) isFake + let (bornEvs, startDriver) = + eyre env who (writeTQueue ventQ) isFake stderr sub let runDriver = do diOnEffect <- startDriver @@ -315,14 +326,16 @@ eyre :: forall e . (HasPierEnv e) => e - -> MultiEyreApi -> Ship -> (EvErr -> STM ()) -> Bool + -> (Text -> RIO e ()) + -> KingSubsite -> ([Ev], RAcquire e (HttpServerEf -> IO ())) -eyre env multi who plan isFake = (initialEvents, runHttpServer) +eyre env who plan isFake stderr sub = (initialEvents, runHttpServer) where king = fromIntegral (env ^. kingIdL) + multi = env ^. 
multiEyreApiL initialEvents :: [Ev] initialEvents = [bornEv king] @@ -335,17 +348,17 @@ eyre env multi who plan isFake = (initialEvents, runHttpServer) kill :: HasLogFunc e => Serv -> RIO e () kill Serv{..} = do atomically (leaveMultiEyre multi who) - atomically (saKil sLop) - atomically (saKil sIns) - for_ sSec (\sec -> atomically (saKil sec)) + io (saKil sLop) + io (saKil sIns) + io $ for_ sSec (\sec -> (saKil sec)) io (removePortsFile sPortsFile) restart :: Drv -> HttpServerConf -> RIO e Serv restart (Drv var) conf = do - logDebug "Restarting http server" - let startAct = startServ multi who isFake conf plan + logInfo "Restarting http server" + let startAct = startServ who isFake conf plan stderr sub res <- fromEither =<< restartService var startAct kill - logDebug "Done restating http server" + logInfo "Done restating http server" pure res liveFailed _ = pure () @@ -353,11 +366,11 @@ eyre env multi who plan isFake = (initialEvents, runHttpServer) handleEf :: Drv -> HttpServerEf -> IO () handleEf drv = runRIO env . \case HSESetConfig (i, ()) conf -> do - logDebug (displayShow ("EYRE", "%set-config")) + logInfo (displayShow ("EYRE", "%set-config")) Serv {..} <- restart drv conf - logDebug (displayShow ("EYRE", "%set-config", "Sending %live")) + logInfo (displayShow ("EYRE", "%set-config", "Sending %live")) atomically $ plan (EvErr (liveEv sServId sPorts) liveFailed) - logDebug "Write ports file" + logInfo "Write ports file" io (writePortsFile sPortsFile sPorts) HSEResponse (i, req, _seq, ()) ev -> do logDebug (displayShow ("EYRE", "%response")) diff --git a/pkg/hs/urbit-king/lib/Urbit/Vere/Eyre/KingSubsite.hs b/pkg/hs/urbit-king/lib/Urbit/Vere/Eyre/KingSubsite.hs new file mode 100644 index 000000000..3430c1612 --- /dev/null +++ b/pkg/hs/urbit-king/lib/Urbit/Vere/Eyre/KingSubsite.hs @@ -0,0 +1,112 @@ +{-| + KingSubsite: runtime-exclusive HTTP request handling, for /~_~ +-} + +module Urbit.Vere.Eyre.KingSubsite + ( KingSubsite + , kingSubsite + , runKingSubsite + , fourOhFourSubsite + ) where + +import Urbit.Prelude hiding (Builder) + +import Data.ByteString.Builder +import Urbit.King.Scry +import Urbit.Vere.Serf.Types + +import Data.Conduit (ConduitT, Flush(..), yield) +import Data.Text.Encoding (encodeUtf8Builder) + +import qualified Data.Text.Encoding as E +import qualified Network.HTTP.Types as H +import qualified Network.Wai as W +import qualified Network.Wai.Conduit as W +import qualified Urbit.Noun.Time as Time + +newtype KingSubsite = KS { runKingSubsite :: W.Application } + +data SlogAction + = KeepAlive + | Slog (Atom, Tank) + +streamSlog :: Monad m => SlogAction -> ConduitT () (Flush Builder) m () +streamSlog a = do + case a of + KeepAlive -> pure () + Slog (_, t) -> for_ (wash (WashCfg 0 80) (tankTree t)) $ \l -> do + yield $ Chunk "data:" + yield $ Chunk $ encodeUtf8Builder $ unTape l + yield $ Chunk "\n" + yield $ Chunk "\n" + yield $ Flush + +kingSubsite :: HasLogFunc e + => Ship + -> (Time.Wen -> Gang -> Path -> IO (Maybe (Term, Noun))) + -> TVar ((Atom, Tank) -> IO ()) + -> RAcquire e KingSubsite +kingSubsite who scry func = do + clients <- newTVarIO (mempty :: Map Word (SlogAction -> IO ())) + nextId <- newTVarIO (0 :: Word) + baton <- newTMVarIO () + env <- ask + + atomically $ writeTVar func $ \s -> readTVarIO clients >>= traverse_ ($ Slog s) + + acquireWorker "Runtime subsite keep-alive" $ forever $ do + threadDelay 20_000_000 + io $ readTVarIO clients >>= traverse_ ($ KeepAlive) + + pure $ KS $ \req respond -> case W.pathInfo req of + ["~_~", "slog"] -> bracket + (do 
+ id <- atomically $ do + id <- readTVar nextId + modifyTVar' nextId (+ 1) + pure id + slogQ <- newTQueueIO + atomically $ + modifyTVar' clients (insertMap id (atomically . writeTQueue slogQ)) + pure (id, slogQ)) + (\(id, _) -> atomically $ modifyTVar' clients (deleteMap id)) + (\(_, q) -> do + authed <- authenticated env req + if not authed + then respond $ emptyResponse 403 "Permission Denied" + else + let loop = yield Flush + >> forever (atomically (readTQueue q) >>= streamSlog) + in respond $ W.responseSource (H.mkStatus 200 "OK") heads loop) + + _ -> respond $ emptyResponse 404 "Not Found" + + where + heads = [ ("Content-Type" , "text/event-stream") + , ("Cache-Control", "no-cache") + , ("Connection" , "keep-alive") + ] + + emptyResponse cod mes = W.responseLBS (H.mkStatus cod mes) [] "" + + authenticated env req = runRIO env + $ (scryAuth $ getCookie req) + >>= pure . fromMaybe False + + getCookie req = intercalate "; " + $ fmap (E.decodeUtf8 . snd) + $ filter ((== "cookie") . fst) + $ W.requestHeaders req + + scryAuth :: HasLogFunc e + => Text + -> RIO e (Maybe Bool) + scryAuth cookie = + scryNow scry "ex" who "" ["authenticated", "cookie", textAsTa cookie] + +fourOhFourSubsite :: Ship -> KingSubsite +fourOhFourSubsite who = KS $ \req respond -> + respond $ W.responseLBS (H.mkStatus 404 "Not Found") [] body + where + body = toLazyByteString $ foldMap charUtf8 $ msg + msg = "Ship " <> show who <> " not docked." diff --git a/pkg/hs/urbit-king/lib/Urbit/Vere/Eyre/Multi.hs b/pkg/hs/urbit-king/lib/Urbit/Vere/Eyre/Multi.hs index 706afb8f3..667e686d2 100644 --- a/pkg/hs/urbit-king/lib/Urbit/Vere/Eyre/Multi.hs +++ b/pkg/hs/urbit-king/lib/Urbit/Vere/Eyre/Multi.hs @@ -20,7 +20,8 @@ import Urbit.Arvo hiding (ServerId, reqUrl, secure) import Urbit.Vere.Eyre.Serv import Urbit.Vere.Eyre.Wai -import Network.TLS (Credential) +import Network.TLS (Credential) +import Urbit.Vere.Eyre.KingSubsite (KingSubsite, fourOhFourSubsite) -- Types ----------------------------------------------------------------------- @@ -45,7 +46,8 @@ data MultiEyreApi = MultiEyreApi , meaPlan :: TVar (Map Ship OnMultiReq) , meaCanc :: TVar (Map Ship OnMultiKil) , meaTlsC :: TVar (Map Ship (TlsConfig, Credential)) - , meaKill :: STM () + , meaSite :: TVar (Map Ship KingSubsite) + , meaKill :: IO () } @@ -57,27 +59,36 @@ joinMultiEyre -> Maybe (TlsConfig, Credential) -> OnMultiReq -> OnMultiKil + -> KingSubsite -> STM () -joinMultiEyre api who mTls onReq onKil = do +joinMultiEyre api who mTls onReq onKil sub = do modifyTVar' (meaPlan api) (insertMap who onReq) modifyTVar' (meaCanc api) (insertMap who onKil) for_ mTls $ \creds -> do modifyTVar' (meaTlsC api) (insertMap who creds) + modifyTVar' (meaSite api) (insertMap who sub) leaveMultiEyre :: MultiEyreApi -> Ship -> STM () leaveMultiEyre MultiEyreApi {..} who = do modifyTVar' meaCanc (deleteMap who) modifyTVar' meaPlan (deleteMap who) modifyTVar' meaTlsC (deleteMap who) + modifyTVar' meaSite (deleteMap who) multiEyre :: HasLogFunc e => MultiEyreConf -> RIO e MultiEyreApi multiEyre conf@MultiEyreConf {..} = do - logDebug (displayShow ("EYRE", "MULTI", conf)) + logInfo (displayShow ("EYRE", "MULTI", conf)) vLive <- io emptyLiveReqs >>= newTVarIO vPlan <- newTVarIO mempty vCanc <- newTVarIO (mempty :: Map Ship (Ship -> Word64 -> STM ())) vTlsC <- newTVarIO mempty + vSite <- newTVarIO mempty + + let site :: Ship -> STM KingSubsite + site who = do + sites <- readTVar vSite + pure $ maybe (fourOhFourSubsite who) id $ lookup who sites let host = if mecLocalhostOnly then SHLocalhost 
else SHAnyHostOk @@ -96,26 +107,26 @@ multiEyre conf@MultiEyreConf {..} = do Just cb -> cb who reqId mIns <- for mecHttpPort $ \por -> do - logDebug (displayShow ("EYRE", "MULTI", "HTTP", por)) + logInfo (displayShow ("EYRE", "MULTI", "HTTP", por)) serv vLive $ ServConf { scHost = host , scPort = SPChoices $ singleton $ fromIntegral por , scRedi = Nothing -- TODO , scFake = False - , scType = STMultiHttp $ ReqApi + , scType = STMultiHttp site $ ReqApi { rcReq = onReq Insecure , rcKil = onKil } } mSec <- for mecHttpsPort $ \por -> do - logDebug (displayShow ("EYRE", "MULTI", "HTTPS", por)) + logInfo (displayShow ("EYRE", "MULTI", "HTTPS", por)) serv vLive $ ServConf { scHost = host , scPort = SPChoices $ singleton $ fromIntegral por , scRedi = Nothing , scFake = False - , scType = STMultiHttps (MTC vTlsC) $ ReqApi + , scType = STMultiHttps (MTC vTlsC) site $ ReqApi { rcReq = onReq Secure , rcKil = onKil } @@ -126,6 +137,7 @@ multiEyre conf@MultiEyreConf {..} = do , meaPlan = vPlan , meaCanc = vCanc , meaTlsC = vTlsC + , meaSite = vSite , meaConf = conf , meaKill = traverse_ saKil (toList mIns <> toList mSec) } diff --git a/pkg/hs/urbit-king/lib/Urbit/Vere/Eyre/Serv.hs b/pkg/hs/urbit-king/lib/Urbit/Vere/Eyre/Serv.hs index e007d6331..052c11e0c 100644 --- a/pkg/hs/urbit-king/lib/Urbit/Vere/Eyre/Serv.hs +++ b/pkg/hs/urbit-king/lib/Urbit/Vere/Eyre/Serv.hs @@ -16,8 +16,6 @@ TODO How to detect socket closed during server run? -} -{-# OPTIONS_GHC -Wno-deprecations #-} - module Urbit.Vere.Eyre.Serv ( ServApi(..) , TlsConfig(..) @@ -35,11 +33,15 @@ where import Urbit.Prelude hiding (Builder) -import Data.Default (def) -import Data.List.NonEmpty (NonEmpty((:|))) -import Network.TLS (Credential, Credentials(..), ServerHooks(..)) -import Network.TLS (credentialLoadX509ChainFromMemory) -import RIO.Prelude (decodeUtf8Lenient) +import Data.Default (def) +import Data.List.NonEmpty (NonEmpty((:|))) +import Network.TLS ( Credential + , Credentials(..) + , ServerHooks(..) 
+ ) +import Network.TLS (credentialLoadX509ChainFromMemory) +import RIO.Prelude (decodeUtf8Lenient) +import Urbit.Vere.Eyre.KingSubsite (KingSubsite) import qualified Control.Monad.STM as STM import qualified Data.Char as C @@ -54,7 +56,7 @@ import qualified Urbit.Vere.Eyre.Wai as E -- Internal Types -------------------------------------------------------------- data ServApi = ServApi - { saKil :: STM () + { saKil :: IO () , saPor :: STM W.Port } @@ -67,23 +69,23 @@ data TlsConfig = TlsConfig newtype MultiTlsConfig = MTC (TVar (Map Ship (TlsConfig, Credential))) -instance Show MultiTlsConfig where - show = const "MultiTlsConfig" - data ReqApi = ReqApi { rcReq :: Ship -> Word64 -> E.ReqInfo -> STM () , rcKil :: Ship -> Word64 -> STM () } -instance Show ReqApi where - show = const "ReqApi" - data ServType - = STHttp Ship ReqApi - | STHttps Ship TlsConfig ReqApi - | STMultiHttp ReqApi - | STMultiHttps MultiTlsConfig ReqApi - deriving (Show) + = STHttp Ship KingSubsite ReqApi + | STHttps Ship TlsConfig KingSubsite ReqApi + | STMultiHttp (Ship -> STM KingSubsite) ReqApi + | STMultiHttps MultiTlsConfig (Ship -> STM KingSubsite) ReqApi + +instance Show ServType where + show = \case + STHttp who _ _ -> "STHttp " <> show who + STHttps who tls _ _ -> "STHttps " <> show who <> " " <> show tls + STMultiHttp _ _ -> "STMultiHttp" + STMultiHttps tls _ _ -> "STMultiHttps" data ServPort = SPAnyPort @@ -140,8 +142,12 @@ openFreePort hos = do Right ps -> pure (Right ps) where doBind sok = do - adr <- Net.inet_addr hos - Net.bind sok (Net.SockAddrInet Net.defaultPort adr) + adr <- + Net.getAddrInfo Nothing (Just hos) Nothing >>= \case + [] -> error ("unable to determine numeric hostname from " ++ hos) + ip : _ -> pure (Net.addrAddress ip) + + Net.bind sok adr Net.listen sok 1 port <- Net.socketPort sok pure (fromIntegral port, sok) @@ -164,7 +170,7 @@ tryOpenChoices tryOpenChoices hos = go where go (p :| ps) = do - logDebug (displayShow ("EYRE", "Trying to open port.", p)) + logInfo (displayShow ("EYRE", "Trying to open port.", p)) io (tryOpen hos p) >>= \case Left err -> do logError (displayShow ("EYRE", "Failed to open port.", p)) @@ -185,7 +191,7 @@ tryOpenAny hos = do pure (Right (p, s)) logDbg :: (HasLogFunc e, Show a) => [Text] -> a -> RIO e () -logDbg ctx msg = logDebug (prefix <> suffix) +logDbg ctx msg = logInfo (prefix <> suffix) where prefix = display (concat $ fmap (<> ": ") ctx) suffix = displayShow msg @@ -262,26 +268,28 @@ startServer typ hos por sok red vLive = do & W.setPort (fromIntegral por) & W.setTimeout (5 * 60) + -- TODO build Eyre.Site.app in pier, thread through here let runAppl who = E.app envir who vLive reqShip = hostShip . 
W.requestHeaderHost case typ of - STHttp who api -> do - let app = runAppl who (rcReq api who) (rcKil api who) + STHttp who sub api -> do + let app = runAppl who (rcReq api who) (rcKil api who) sub io (W.runSettingsSocket opts sok app) - STHttps who TlsConfig {..} api -> do + STHttps who TlsConfig {..} sub api -> do let tls = W.tlsSettingsChainMemory tcCerti tcChain tcPrKey - let app = runAppl who (rcReq api who) (rcKil api who) + let app = runAppl who (rcReq api who) (rcKil api who) sub io (W.runTLSSocket tls opts sok app) - STMultiHttp api -> do + STMultiHttp fub api -> do let app req resp = do who <- reqShip req - runAppl who (rcReq api who) (rcKil api who) req resp + sub <- atomically $ fub who + runAppl who (rcReq api who) (rcKil api who) sub req resp io (W.runSettingsSocket opts sok app) - STMultiHttps mtls api -> do + STMultiHttps mtls fub api -> do TlsConfig {..} <- atomically (getFirstTlsConfig mtls) let sni = def { onServerNameIndication = onSniHdr envir mtls } @@ -296,7 +304,8 @@ startServer typ hos por sok red vLive = do runRIO envir $ logDbg ctx "Got request" who <- reqShip req runRIO envir $ logDbg ctx ("Parsed HOST", who) - runAppl who (rcReq api who) (rcKil api who) req resp + sub <- atomically $ fub who + runAppl who (rcReq api who) (rcKil api who) sub req resp io (W.runTLSSocket tlsMany opts sok app) @@ -312,7 +321,7 @@ configCreds TlsConfig {..} = fakeServ :: HasLogFunc e => ServConf -> RIO e ServApi fakeServ conf = do let por = fakePort (scPort conf) - logDebug (displayShow ("EYRE", "SERV", "Running Fake Server", por)) + logInfo (displayShow ("EYRE", "SERV", "Running Fake Server", por)) pure $ ServApi { saKil = pure () , saPor = pure por @@ -331,20 +340,18 @@ getFirstTlsConfig (MTC var) = do realServ :: HasLogFunc e => TVar E.LiveReqs -> ServConf -> RIO e ServApi realServ vLive conf@ServConf {..} = do - logDebug (displayShow ("EYRE", "SERV", "Running Real Server")) - kil <- newEmptyTMVarIO + logInfo (displayShow ("EYRE", "SERV", "Running Real Server")) por <- newEmptyTMVarIO tid <- async (runServ por) - _ <- async (atomically (takeTMVar kil) >> cancel tid) pure $ ServApi - { saKil = void (tryPutTMVar kil ()) + { saKil = cancel tid , saPor = readTMVar por } where runServ vPort = do - logDebug (displayShow ("EYRE", "SERV", "runServ")) + logInfo (displayShow ("EYRE", "SERV", "runServ")) rwith (forceOpenSocket scHost scPort) $ \(por, sok) -> do atomically (putTMVar vPort por) startServer scType scHost por sok scRedi vLive diff --git a/pkg/hs/urbit-king/lib/Urbit/Vere/Eyre/Service.hs b/pkg/hs/urbit-king/lib/Urbit/Vere/Eyre/Service.hs index ce3bc01a9..af571afc9 100644 --- a/pkg/hs/urbit-king/lib/Urbit/Vere/Eyre/Service.hs +++ b/pkg/hs/urbit-king/lib/Urbit/Vere/Eyre/Service.hs @@ -31,21 +31,21 @@ restartService -> (s -> RIO e ()) -> RIO e (Either SomeException s) restartService vServ sstart kkill = do - logDebug "restartService" + logInfo "restartService" modifyMVar vServ $ \case Nothing -> doStart Just sv -> doRestart sv where doRestart :: s -> RIO e (Maybe s, Either SomeException s) doRestart serv = do - logDebug "doStart" + logInfo "doStart" try (kkill serv) >>= \case Left exn -> pure (Nothing, Left exn) Right () -> doStart doStart :: RIO e (Maybe s, Either SomeException s) doStart = do - logDebug "doStart" + logInfo "doStart" try sstart <&> \case Right s -> (Just s, Right s) Left exn -> (Nothing, Left exn) @@ -59,7 +59,7 @@ stopService -> (s -> RIO e ()) -> RIO e (Either SomeException ()) stopService vServ kkill = do - logDebug "stopService" + logInfo "stopService" 
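Editor's note: restartService above captures the runtime's restart idiom — a service slot guarded by an MVar, where a restart must kill the old instance before a new one is recorded, and a failed start leaves the slot empty. A minimal sketch of the same idiom in plain IO; restartSketch is an illustrative name and not part of the patch, and the real code runs in RIO and also logs around each step.

{-# LANGUAGE LambdaCase #-}

import Control.Concurrent.MVar (MVar, modifyMVar)
import Control.Exception (SomeException, try)

-- The MVar holds the running service, if any; a restart kills the old
-- instance first and only records the new one when the start succeeds.
restartSketch :: MVar (Maybe s) -> IO s -> (s -> IO ()) -> IO (Either SomeException s)
restartSketch var start kill = modifyMVar var $ \case
  Nothing -> doStart
  Just sv -> try (kill sv) >>= \case
    Left err -> pure (Nothing, Left err)
    Right () -> doStart
 where
  doStart = try start >>= \case
    Right s  -> pure (Just s, Right s)
    Left err -> pure (Nothing, Left err)
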
modifyMVar vServ $ \case Nothing -> pure (Nothing, Right ()) Just sv -> do diff --git a/pkg/hs/urbit-king/lib/Urbit/Vere/Eyre/Wai.hs b/pkg/hs/urbit-king/lib/Urbit/Vere/Eyre/Wai.hs index 3815e1ca9..b47725498 100644 --- a/pkg/hs/urbit-king/lib/Urbit/Vere/Eyre/Wai.hs +++ b/pkg/hs/urbit-king/lib/Urbit/Vere/Eyre/Wai.hs @@ -26,12 +26,13 @@ where import Urbit.Prelude hiding (Builder) -import Data.Binary.Builder (Builder, fromByteString) -import Data.Bits (shiftL, (.|.)) -import Data.Conduit (ConduitT, Flush(Chunk, Flush), yield) -import Network.Socket (SockAddr(..)) -import System.Random (newStdGen, randoms) -import Urbit.Arvo (Address(..), Ipv4(..), Ipv6(..), Method) +import Data.Binary.Builder (Builder, fromByteString) +import Data.Bits (shiftL, (.|.)) +import Data.Conduit (ConduitT, Flush(Chunk, Flush), yield) +import Network.Socket (SockAddr(..)) +import System.Random (newStdGen, randoms) +import Urbit.Arvo (Address(..), Ipv4(..), Ipv6(..), Method) +import Urbit.Vere.Eyre.KingSubsite (KingSubsite, runKingSubsite) import qualified Network.HTTP.Types as H import qualified Network.Wai as W @@ -179,7 +180,7 @@ streamBlocks env init getAct = send init >> loop send "" = pure () send c = do - runRIO env (logTrace (display ("sending chunk " <> tshow c))) + runRIO env (logDebug (display ("sending chunk " <> tshow c))) yield $ Chunk $ fromByteString c yield Flush @@ -209,21 +210,25 @@ app -> TVar LiveReqs -> (Word64 -> ReqInfo -> STM ()) -> (Word64 -> STM ()) + -> KingSubsite -> W.Application -app env who liv inform cancel req respond = - runRIO env $ rwith (liveReq who liv) $ \(reqId, respApi) -> do - bod <- io (toStrict <$> W.strictRequestBody req) - met <- maybe (error "bad method") pure (cookMeth req) +app env who liv inform cancel sub req respond = + case W.pathInfo req of + ("~_~":_) -> runKingSubsite sub req respond + _ -> + runRIO env $ rwith (liveReq who liv) $ \(reqId, respApi) -> do + bod <- io (toStrict <$> W.strictRequestBody req) + met <- maybe (error "bad method") pure (cookMeth req) - let adr = reqAddr req - hdr = W.requestHeaders req - url = reqUrl req + let adr = reqAddr req + hdr = W.requestHeaders req + url = reqUrl req - atomically $ inform reqId $ ReqInfo adr met url hdr bod + atomically $ inform reqId $ ReqInfo adr met url hdr bod - try (sendResponse respond respApi) >>= \case - Right rr -> pure rr - Left exn -> do - atomically (cancel reqId) - logError $ display ("Exception during request" <> tshow exn) - throwIO (exn :: SomeException) + try (sendResponse respond respApi) >>= \case + Right rr -> pure rr + Left exn -> do + atomically (cancel reqId) + logError $ display ("Exception during request" <> tshow exn) + throwIO (exn :: SomeException) diff --git a/pkg/hs/urbit-king/lib/Urbit/Vere/Http/Client.hs b/pkg/hs/urbit-king/lib/Urbit/Vere/Http/Client.hs index 011ce86ac..b3a24ddff 100644 --- a/pkg/hs/urbit-king/lib/Urbit/Vere/Http/Client.hs +++ b/pkg/hs/urbit-king/lib/Urbit/Vere/Http/Client.hs @@ -17,7 +17,7 @@ import Urbit.Arvo (BlipEv(..), Ev(..), HttpClientEf(..), HttpClientEv(..), HttpClientReq(..), HttpEvent(..), KingId, ResponseHeader(..)) -import qualified Data.Map as M +import qualified Data.Map.Strict as M import qualified Network.HTTP.Client as H import qualified Network.HTTP.Client.TLS as TLS import qualified Network.HTTP.Types as HT @@ -126,7 +126,7 @@ client env plan = (initialEvents, runHttpClient) newReq :: HttpClientDrv -> ReqId -> HttpClientReq -> RIO e () newReq drv id req = do async <- runReq drv id req - atomically $ modifyTVar (hcdLive drv) (insertMap id async) 
+ atomically $ modifyTVar' (hcdLive drv) (insertMap id async) -- The problem with the original http client code was that it was written -- to the idea of what the events "should have" been instead of what they @@ -140,7 +140,7 @@ client env plan = (initialEvents, runHttpClient) runReq HttpClientDrv{..} id req = async $ case cvtReq req of Nothing -> do - logDebug $ displayShow ("(malformed http client request)", id, req) + logInfo $ displayShow ("(malformed http client request)", id, req) planEvent id (Cancel ()) Just r -> do logDebug $ displayShow ("(http client request)", id, req) diff --git a/pkg/hs/urbit-king/lib/Urbit/Vere/LMDB.hs b/pkg/hs/urbit-king/lib/Urbit/Vere/LMDB.hs index 8e544647b..8e32246df 100644 --- a/pkg/hs/urbit-king/lib/Urbit/Vere/LMDB.hs +++ b/pkg/hs/urbit-king/lib/Urbit/Vere/LMDB.hs @@ -169,7 +169,7 @@ streamEvents log first = do for_ batch yield streamEvents log (first + word (length batch)) -streamEffectsRows :: ∀e. HasLogFunc e +streamEffectsRows :: forall e. HasLogFunc e => EventLog -> EventId -> ConduitT () (Word64, ByteString) (RIO e) () streamEffectsRows log = go @@ -221,12 +221,12 @@ readBatch log first = start {- Read 1000 rows from the database, starting from key `first`. -} -readRowsBatch :: ∀e. HasLogFunc e +readRowsBatch :: forall e. HasLogFunc e => Env -> Dbi -> Word64 -> RIO e (V.Vector (Word64, ByteString)) readRowsBatch env dbi first = readRows where readRows = do - logDebug $ display ("(readRowsBatch) From: " <> tshow first) + logInfo $ display ("(readRowsBatch) From: " <> tshow first) withWordPtr first $ \pIdx -> withKVPtrs' (MDB_val 8 (castPtr pIdx)) nullVal $ \pKey pVal -> rwith (readTxn env) $ \txn -> diff --git a/pkg/hs/urbit-king/lib/Urbit/Vere/NounServ.hs b/pkg/hs/urbit-king/lib/Urbit/Vere/NounServ.hs index f23f82aa3..4f60929ac 100644 --- a/pkg/hs/urbit-king/lib/Urbit/Vere/NounServ.hs +++ b/pkg/hs/urbit-king/lib/Urbit/Vere/NounServ.hs @@ -44,7 +44,7 @@ data Server i o a = Server -------------------------------------------------------------------------------- -withRIOThread ∷ RIO e a → RIO e (Async a) +withRIOThread :: RIO e a -> RIO e (Async a) withRIOThread act = do env <- ask io $ async $ runRIO env $ act @@ -82,12 +82,12 @@ wsConn pre inp out wsc = do flip finally cleanup $ do res <- atomically (waitCatchSTM writer <|> waitCatchSTM reader) - logDebug $ displayShow (res :: Either SomeException ()) + logInfo $ displayShow (res :: Either SomeException ()) -------------------------------------------------------------------------------- -wsClient :: ∀i o e. (ToNoun o, FromNoun i, Show o, Show i, HasLogFunc e) +wsClient :: forall i o e. (ToNoun o, FromNoun i, Show o, Show i, HasLogFunc e) => Text -> W.Port -> RIO e (Client i o) wsClient pax por = do env <- ask @@ -95,7 +95,7 @@ wsClient pax por = do out <- io $ newTBMChanIO 5 con <- pure (mkConn inp out) - logDebug "NOUNSERV (wsClie) Trying to connect" + logInfo "NOUNSERV (wsClie) Trying to connect" tid <- io $ async $ WS.runClient "127.0.0.1" por (unpack pax) @@ -111,24 +111,24 @@ wsServApp :: (HasLogFunc e, ToNoun o, FromNoun i, Show i, Show o) -> WS.PendingConnection -> RIO e () wsServApp cb pen = do - logDebug "NOUNSERV (wsServer) Got connection!" + logInfo "NOUNSERV (wsServer) Got connection!" wsc <- io $ WS.acceptRequest pen inp <- io $ newTBMChanIO 5 out <- io $ newTBMChanIO 5 atomically $ cb (mkConn inp out) wsConn "NOUNSERV (wsServ) " inp out wsc -wsServer :: ∀i o e. (ToNoun o, FromNoun i, Show i, Show o, HasLogFunc e) +wsServer :: forall i o e. 
(ToNoun o, FromNoun i, Show i, Show o, HasLogFunc e) => RIO e (Server i o W.Port) wsServer = do con <- io $ newTBMChanIO 5 tid <- async $ do env <- ask - logDebug "NOUNSERV (wsServer) Starting server" + logInfo "NOUNSERV (wsServer) Starting server" io $ WS.runServer "127.0.0.1" 9999 $ runRIO env . wsServApp (writeTBMChan con) - logDebug "NOUNSERV (wsServer) Server died" + logInfo "NOUNSERV (wsServer) Server died" atomically $ closeTBMChan con pure $ Server (readTBMChan con) tid 9999 diff --git a/pkg/hs/urbit-king/lib/Urbit/Vere/Pier.hs b/pkg/hs/urbit-king/lib/Urbit/Vere/Pier.hs index 78b6d9e5d..8f1169e88 100644 --- a/pkg/hs/urbit-king/lib/Urbit/Vere/Pier.hs +++ b/pkg/hs/urbit-king/lib/Urbit/Vere/Pier.hs @@ -25,28 +25,30 @@ import Urbit.King.App import Urbit.Vere.Pier.Types import Control.Monad.STM (retry) +import System.Environment (getExecutablePath) +import System.FilePath (splitFileName) import System.Posix.Files (ownerModes, setFileMode) import Urbit.EventLog.LMDB (EventLog) import Urbit.King.API (TermConn) import Urbit.Noun.Time (Wen) -import Urbit.TermSize (TermSize(..)) -import Urbit.Vere.Eyre.Multi (MultiEyreApi) +import Urbit.TermSize (TermSize(..), termSize) import Urbit.Vere.Serf (Serf) -import qualified Data.Text as T -import qualified System.Entropy as Ent -import qualified Urbit.EventLog.LMDB as Log -import qualified Urbit.King.API as King -import qualified Urbit.Noun.Time as Time -import qualified Urbit.Vere.Ames as Ames -import qualified Urbit.Vere.Behn as Behn -import qualified Urbit.Vere.Clay as Clay -import qualified Urbit.Vere.Eyre as Eyre -import qualified Urbit.Vere.Http.Client as Iris -import qualified Urbit.Vere.Serf as Serf -import qualified Urbit.Vere.Term as Term -import qualified Urbit.Vere.Term.API as Term -import qualified Urbit.Vere.Term.Demux as Term +import qualified Data.Text as T +import qualified System.Entropy as Ent +import qualified Urbit.EventLog.LMDB as Log +import qualified Urbit.King.API as King +import qualified Urbit.Noun.Time as Time +import qualified Urbit.Vere.Ames as Ames +import qualified Urbit.Vere.Behn as Behn +import qualified Urbit.Vere.Clay as Clay +import qualified Urbit.Vere.Eyre as Eyre +import qualified Urbit.Vere.Eyre.KingSubsite as Site +import qualified Urbit.Vere.Http.Client as Iris +import qualified Urbit.Vere.Serf as Serf +import qualified Urbit.Vere.Term as Term +import qualified Urbit.Vere.Term.API as Term +import qualified Urbit.Vere.Term.Demux as Term -- Initialize pier directory. -------------------------------------------------- @@ -110,35 +112,38 @@ writeJobs log !jobs = do -- Acquire a running serf. ----------------------------------------------------- -printTank :: (Text -> IO ()) -> Atom -> Tank -> IO () -printTank f _priority = f . unlines . fmap unTape . wash (WashCfg 0 80) . tankTree - where - tankTree (Tank t) = t - runSerf :: HasPierEnv e - => TVar (Text -> IO ()) + => TVar ((Atom, Tank) -> IO ()) -> FilePath -> RAcquire e Serf runSerf vSlog pax = do env <- ask - Serf.withSerf (config env) + serfProg <- io getSerfProg + Serf.withSerf (config env serfProg) where - slog txt = atomically (readTVar vSlog) >>= (\f -> f txt) - config env = Serf.Config - { scSerf = env ^. pierConfigL . pcSerfExe . to unpack + slog s = atomically (readTVar vSlog) >>= (\f -> f s) + config env serfProg = Serf.Config + { scSerf = env ^. pierConfigL . pcSerfExe . to (maybe serfProg unpack) , scPier = pax , scFlag = env ^. pierConfigL . 
pcSerfFlags - , scSlog = \(pri, tank) -> printTank slog pri tank - , scStdr = \txt -> slog (txt <> "\r\n") + , scSlog = slog + , scStdr = \txt -> slog (0, (textToTank txt)) , scDead = pure () -- TODO: What can be done? } + getSerfProg :: IO FilePath + getSerfProg = do + (path, filename) <- splitFileName <$> getExecutablePath + pure $ case filename of + "urbit" -> path "urbit-worker" + "urbit-king" -> path "urbit-worker" + _ -> "urbit-worker" -- Boot a new ship. ------------------------------------------------------------ booted - :: TVar (Text -> IO ()) + :: TVar ((Atom, Tank) -> IO ()) -> Pill -> Bool -> Ship @@ -169,27 +174,27 @@ bootNewShip -> RIO e () bootNewShip pill lite ship bootEv = do seq@(BootSeq ident x y) <- genBootSeq ship pill lite bootEv - logDebug "BootSeq Computed" + logInfo "BootSeq Computed" pierPath <- view pierPathL rio (setupPierDirectory pierPath) - logDebug "Directory setup." + logInfo "Directory setup." let logPath = (pierPath ".urb/log") rwith (Log.new logPath ident) $ \log -> do - logDebug "Event log onitialized." + logInfo "Event log initialized." jobs <- (\now -> bootSeqJobs now seq) <$> io Time.now writeJobs log (fromList jobs) - logDebug "Finsihed populating event log with boot sequence" + logInfo "Finsihed populating event log with boot sequence" -- Resume an existing ship. ---------------------------------------------------- resumed - :: TVar (Text -> IO ()) + :: TVar ((Atom, Tank) -> IO ()) -> Maybe Word64 -> RAcquire PierEnv (Serf, EventLog) resumed vSlog replayUntil = do @@ -207,16 +212,16 @@ resumed vSlog replayUntil = do serf <- runSerf vSlog tap rio $ do - logDebug "Replaying events" + logInfo "Replaying events" Serf.execReplay serf log replayUntil >>= \case Left err -> error (show err) Right 0 -> do - logDebug "No work during replay so no snapshot" + logInfo "No work during replay so no snapshot" pure () Right _ -> do - logDebug "Taking snapshot" + logInfo "Taking snapshot" io (Serf.snapshot serf) - logDebug "SNAPSHOT TAKEN" + logInfo "SNAPSHOT TAKEN" pure (serf, log) @@ -236,40 +241,21 @@ getSnapshot top last = do pure $ sort (filter (<= fromIntegral last) snapshotNums) --- Utils for Spawning Worker Threads ------------------------------------------- - -acquireWorker :: HasLogFunc e => Text -> RIO e () -> RAcquire e (Async ()) -acquireWorker nam act = mkRAcquire (async act) kill - where - kill tid = do - logDebug ("Killing worker thread: " <> display nam) - cancel tid - -acquireWorkerBound :: HasLogFunc e => Text -> RIO e () -> RAcquire e (Async ()) -acquireWorkerBound nam act = mkRAcquire (asyncBound act) kill - where - kill tid = do - logDebug ("Killing worker thread: " <> display nam) - cancel tid - - - -- Run Pier -------------------------------------------------------------------- pier :: (Serf, EventLog) - -> TVar (Text -> IO ()) + -> TVar ((Atom, Tank) -> IO ()) -> MVar () - -> MultiEyreApi + -> [Ev] -> RAcquire PierEnv () -pier (serf, log) vSlog startedSig multi = do +pier (serf, log) vSlog startedSig injected = do let logId = Log.identity log :: LogIdentity let ship = who logId :: Ship -- TODO Instead of using a TMVar, pull directly from the IO driver -- event sources. 
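Editor's note: runSerf above takes its slog sink as a TVar ((Atom, Tank) -> IO ()) precisely so the pier can re-point serf output — stderr, the terminal mux, the runtime subsite — after the serf is already running. A small self-contained sketch of that indirection, with a String standing in for the real (Atom, Tank) payload; newSink, emit and demo are illustrative names only.

import Control.Concurrent.STM

-- A mutable sink: start as a no-op, swap in the real consumer once it exists.
newSink :: IO (TVar (String -> IO ()))
newSink = newTVarIO (const (pure ()))

emit :: TVar (String -> IO ()) -> String -> IO ()
emit sink msg = readTVarIO sink >>= \f -> f msg

demo :: IO ()
demo = do
  sink <- newSink
  emit sink "dropped"                   -- swallowed by the no-op sink
  atomically (writeTVar sink putStrLn)  -- re-point output once the consumer is ready
  emit sink "printed"                   -- now reaches the new consumer
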
computeQ :: TMVar RunReq <- newEmptyTMVarIO - persistQ :: TQueue (Fact, FX) <- newTQueueIO executeQ :: TQueue FX <- newTQueueIO saveSig :: TMVar () <- newEmptyTMVarIO @@ -280,22 +266,21 @@ pier (serf, log) vSlog startedSig multi = do writeTVar (King.kTermConn kingApi) (Just $ writeTQueue q) pure q + initialTermSize <- io $ termSize + (demux :: Term.Demux, muxed :: Term.Client) <- atomically $ do - res <- Term.mkDemux + res <- Term.mkDemux initialTermSize pure (res, Term.useDemux res) void $ acquireWorker "TERMSERV Listener" $ forever $ do - logDebug "TERMSERV Waiting for external terminal." + logInfo "TERMSERV Waiting for external terminal." atomically $ do ext <- Term.connClient <$> readTQueue termApiQ Term.addDemux ext demux - logDebug "TERMSERV External terminal connected." + logInfo "TERMSERV External terminal connected." - -- Slogs go to both stderr and to the terminal. - env <- ask - atomically $ writeTVar vSlog $ \txt -> runRIO env $ do - atomically $ Term.trace muxed txt - logOther "serf" (display $ T.strip txt) + scryQ <- newTQueueIO + onKill <- view onKillPierSigL -- Our call above to set the logging function which echos errors from the -- Serf doesn't have the appended \r\n because those \r\n s are added in @@ -305,21 +290,33 @@ pier (serf, log) vSlog startedSig multi = do let execute = writeTQueue executeQ let persist = writeTQueue persistQ let sigint = Serf.sendSIGINT serf + let scry = \w b g -> do + res <- newEmptyMVar + atomically $ writeTQueue scryQ (w, b, g, putMVar res) + takeMVar res + + -- Set up the runtime subsite server and its capability to slog + siteSlog <- newTVarIO (const $ pure ()) + runtimeSubsite <- Site.kingSubsite ship scry siteSlog + + -- Slogs go to stderr, to the runtime subsite, and to the terminal. + env <- ask + atomically $ writeTVar vSlog $ \s@(_, tank) -> runRIO env $ do + atomically $ Term.slog muxed s + io $ readTVarIO siteSlog >>= ($ s) + logOther "serf" (display $ T.strip $ tankToText tank) (bootEvents, startDrivers) <- do env <- ask let err = atomically . Term.trace muxed . (<> "\r\n") - let siz = TermSize { tsWide = 80, tsTall = 24 } + siz <- atomically $ Term.curDemuxSize demux let fak = isFake logId - drivers env multi ship fak compute (siz, muxed) err sigint - - scrySig <- newEmptyTMVarIO - onKill <- view onKillPierSigL + drivers env ship fak compute scry (siz, muxed) err sigint runtimeSubsite let computeConfig = ComputeConfig { ccOnWork = takeTMVar computeQ , ccOnKill = onKill , ccOnSave = takeTMVar saveSig - , ccOnScry = takeTMVar scrySig + , ccOnScry = readTQueue scryQ , ccPutResult = persist , ccShowSpinner = Term.spin muxed , ccHideSpinner = Term.stopSpin muxed @@ -349,31 +346,41 @@ pier (serf, log) vSlog startedSig multi = do let slog :: Text -> IO () slog txt = do fn <- atomically (readTVar vSlog) - fn txt + fn (0, textToTank txt) drivz <- startDrivers tExec <- acquireWorker "Effects" (router slog (readTQueue executeQ) drivz) tDisk <- acquireWorkerBound "Persist" (runPersist log persistQ execute) + -- Now that the Serf is configured, the IO drivers are hooked up, their + -- starting events have been dispatched, and the terminal is live, we can now + -- handle injecting events requested from the command line. + for_ (zip [1..] 
injected) $ \(num, ev) -> rio $ do + logTrace $ display @Text ("Injecting event " ++ (tshow num) ++ " of " ++ + (tshow $ length injected) ++ "...") + okaySig :: MVar (Either [Goof] ()) <- newEmptyMVar + + let inject = atomically $ compute $ RRWork $ EvErr ev $ cb + cb :: WorkError -> IO () + cb = \case + RunOkay _ -> putMVar okaySig (Right ()) + RunSwap _ _ _ _ _ -> putMVar okaySig (Right ()) + RunBail goofs -> putMVar okaySig (Left goofs) + + io inject + + takeMVar okaySig >>= \case + Left goof -> logError $ display @Text ("Goof in injected event: " <> + tshow goof) + Right () -> pure () + + let snapshotEverySecs = 120 void $ acquireWorker "Save" $ forever $ do threadDelay (snapshotEverySecs * 1_000_000) void $ atomically $ tryPutTMVar saveSig () - -- TODO bullshit scry tester - when False $ do - void $ acquireWorker "bullshit scry tester" $ do - env <- ask - forever $ do - threadDelay 15_000_000 - wen <- io Time.now - let kal = \mTermNoun -> runRIO env $ do - logDebug $ displayShow ("scry result: ", mTermNoun) - let nkt = MkKnot $ tshow $ Time.MkDate wen - let pax = Path ["j", "~zod", "life", nkt, "~zod"] - atomically $ putTMVar scrySig (wen, Nothing, pax, kal) - putMVar startedSig () -- Wait for something to die. @@ -412,22 +419,25 @@ data Drivers = Drivers drivers :: HasPierEnv e => e - -> MultiEyreApi -> Ship -> Bool -> (RunReq -> STM ()) + -> (Wen -> Gang -> Path -> IO (Maybe (Term, Noun))) -> (TermSize, Term.Client) -> (Text -> RIO e ()) -> IO () + -> Site.KingSubsite -> RAcquire e ([Ev], RAcquire e Drivers) -drivers env multi who isFake plan termSys stderr serfSIGINT = do +drivers env who isFake plan scry termSys stderr serfSIGINT sub = do (behnBorn, runBehn) <- rio Behn.behn' (termBorn, runTerm) <- rio (Term.term' termSys serfSIGINT) - (amesBorn, runAmes) <- rio (Ames.ames' who isFake stderr) - (httpBorn, runEyre) <- rio (Eyre.eyre' multi who isFake) + (amesBorn, runAmes) <- rio (Ames.ames' who isFake scry stderr) + (httpBorn, runEyre) <- rio (Eyre.eyre' who isFake stderr sub) (clayBorn, runClay) <- rio Clay.clay' (irisBorn, runIris) <- rio Iris.client' + putStrLn ("ship is " <> tshow who) + let initialEvents = mconcat [behnBorn,clayBorn,amesBorn,httpBorn,irisBorn,termBorn] let runDrivers = do @@ -494,7 +504,7 @@ router slog waitFx Drivers {..} = do logEvent :: HasLogFunc e => Ev -> RIO e () logEvent ev = do - logTrace $ "<- " <> display (summarizeEvent ev) + --logInfo $ "<- " <> display (summarizeEvent ev) logDebug $ "[EVENT]\n" <> display pretty where pretty :: Text @@ -502,7 +512,7 @@ logEvent ev = do logEffect :: HasLogFunc e => Lenient Ef -> RIO e () logEffect ef = do - logTrace $ " -> " <> display (summarizeEffect ef) + --logInfo $ " -> " <> display (summarizeEffect ef) logDebug $ display $ "[EFFECT]\n" <> pretty ef where pretty :: Lenient Ef -> Text diff --git a/pkg/hs/urbit-king/lib/Urbit/Vere/Pier/Types.hs b/pkg/hs/urbit-king/lib/Urbit/Vere/Pier/Types.hs index 6dbffa34c..ab5adba73 100644 --- a/pkg/hs/urbit-king/lib/Urbit/Vere/Pier/Types.hs +++ b/pkg/hs/urbit-king/lib/Urbit/Vere/Pier/Types.hs @@ -45,13 +45,13 @@ instance Show Nock where -------------------------------------------------------------------------------- data Pill = Pill - { pBootFormulas :: [Nock] - , pKernelOvums :: [Ev] - , pUserspaceOvums :: [Ev] + { pBootFormulas :: ![Nock] + , pKernelOvums :: ![Ev] + , pUserspaceOvums :: ![Ev] } deriving (Eq, Show) -data BootSeq = BootSeq LogIdentity [Nock] [Ev] +data BootSeq = BootSeq !LogIdentity ![Nock] ![Ev] deriving (Eq, Show) deriveNoun ''Pill diff --git 
a/pkg/hs/urbit-king/lib/Urbit/Vere/Ports.hs b/pkg/hs/urbit-king/lib/Urbit/Vere/Ports.hs new file mode 100644 index 000000000..024bd391f --- /dev/null +++ b/pkg/hs/urbit-king/lib/Urbit/Vere/Ports.hs @@ -0,0 +1,314 @@ +module Urbit.Vere.Ports (HasPortControlApi(..), + PortControlApi, + buildInactivePorts, + buildNatPortsWhenPrivate, + buildNatPorts, + requestPortAccess) where + +import Control.Monad.STM (check) +import Urbit.Prelude +import Network.NatPmp +import Data.Time.Clock.POSIX +import Network.Socket + +import qualified Data.Heap as DH + +-- This module deals with ports and port requests. When a component wants to +-- ensure that it is externally reachable, possibly from outside a NAT, it +-- makes a request to this module to hole-punch. + +class HasPortControlApi a where + portControlApiL :: Lens' a PortControlApi + +data PortControlApi = PortControlApi + { pAddPortRequest :: Word16 -> IO () + , pRemovePortRequest :: Word16 -> IO () + } + +-- | Builds a PortControlApi struct which does nothing when called. +buildInactivePorts :: PortControlApi +buildInactivePorts = PortControlApi noop noop + where + noop x = pure () + +-- | Builds a PortControlApi struct which tries to hole-punch by talking to the +-- NAT gateway over NAT-PMP iff we are on a private network ip. +buildNatPortsWhenPrivate :: (HasLogFunc e) + => (Text -> RIO e ()) + -> RIO e PortControlApi +buildNatPortsWhenPrivate stderr = do + behind <- likelyBehindRouter + if behind + then buildNatPorts stderr + else pure buildInactivePorts + +-- | Builds a PortControlApi struct which tries to hole-punch by talking to the +-- NAT gateway over NAT-PMP. +buildNatPorts :: (HasLogFunc e) + => (Text -> RIO e ()) + -> RIO e PortControlApi +buildNatPorts stderr = do + q <- newTQueueIO + async $ portThread q stderr + + let addRequest port = do + resp <- newEmptyTMVarIO + atomically $ + writeTQueue q (PTMOpen port (putTMVar resp True)) + atomically $ takeTMVar resp + pure () + + let removeRequest port = atomically $ writeTQueue q (PTMClose port) + + pure $ PortControlApi addRequest removeRequest + +portLeaseLifetime :: Word32 +portLeaseLifetime = 15 * 60 + +-- Be paranoid and renew leases a full minute before they would naturally expire. +portRenewalTime :: Word32 +portRenewalTime = portLeaseLifetime - 60 + +-- Number of retries before we give up on performing nat operations. +maxRetries :: Int +maxRetries = 3 + +-- How long to wait between retries. +networkRetryDelay :: Int +networkRetryDelay = 5 * 1_000_000 + +-- Messages sent from the main thread to the port mapping communication thread. +data PortThreadMsg + = PTMOpen Word16 (STM ()) + -- ^ Does the open request, and then runs the passed in stm action to + -- signal completion to the main thread. We want to block on the initial + -- setting opening because we want the forwarding set up before we actually + -- start using the port. + + | PTMClose Word16 + -- ^ Close command. No synchronization because there's nothing we can do if + -- it fails. + +-- We get requests to acquire a port as an RAII condition, but the actual APIs +-- are timeout based, so we have to maintain a heap of the next timer to +-- rerequest port access. +data RenewAction = RenewAction Word16 + +-- The port thread is an async which reads commands from an STM queue and then +-- executes them. This thread is here to bind the semantics that we want to how +-- NAT-PMP sees the world. We want for an RAcquire to be able to start a +-- request for port forwarding and then to release it when it goes out of +-- scope. 
OTOH, NAT-PMP is all timeout based, and we want that timeout to be +-- fairly short, such as 15 minutes, so the portThread needs to keep track of +-- the time of the next port request. +portThread :: forall e. (HasLogFunc e) + => TQueue PortThreadMsg + -> (Text -> RIO e ()) + -> RIO e () +portThread q stderr = do + initNatPmp >>= \case + Left ErrCannotGetGateway -> do + assumeOnPublicInternet + Left err -> do + likelyIPAddress >>= \case + Just ip@(192, 168, _, _) -> warnBehindRouterAndErr ip err + Just ip@(172, x, _, _) + | (x >= 16 && x <= 31) -> warnBehindRouterAndErr ip err + Just ip@(10, _, _, _) -> warnBehindRouterAndErr ip err + _ -> assumeOnPublicInternet + Right pmp -> foundRouter pmp + where + warnBehindRouterAndErr (a, b, c, d) err = do + stderr $ "port: you appear to be behind a router since your ip " ++ + "is " ++ (tshow a) ++ "." ++ (tshow b) ++ "." ++ (tshow c) ++ + "." ++ (tshow d) ++ ", but " ++ + "we could not request port forwarding (NAT-PMP error: " ++ + (tshow err) ++ ")" + stderr $ "port: urbit performance will be degregaded unless you " ++ + "manually forward your ames port." + loopErr q + + assumeOnPublicInternet = do + stderr $ "port: couldn't find router; assuming on public internet" + loopErr q + + foundRouter :: NatPmpHandle -> RIO e () + foundRouter pmp = do + getPublicAddress pmp >>= \case + Left ErrCannotGetGateway -> assumeOnPublicInternet + Left ErrNoGatewaySupport -> assumeOnPublicInternet + Left err -> do + stderr $ "port: received error when asking router for public ip: " ++ + (tshow err) + loopErr q + Right addr -> do + let (a, b, c, d) = hostAddressToTuple addr + stderr $ "port: router reports that our public IP is " ++ (tshow a) ++ + "." ++ (tshow b) ++ "." ++ (tshow c) ++ "." ++ (tshow d) + loop pmp mempty + + loop :: NatPmpHandle -> DH.MinPrioHeap POSIXTime RenewAction -> RIO e () + loop pmp nextRenew = do + now <- io $ getPOSIXTime + delay <- case DH.viewHead nextRenew of + Nothing -> newTVarIO False + Just (fireTime, _) -> do + let timeTo = fireTime - now + let ms = round $ timeTo * 1000000 + registerDelay ms + command <- atomically $ + (Left <$> fini delay) <|> (Right <$> readTQueue q) + case command of + Left () -> handleRenew pmp nextRenew + Right msg -> handlePTM pmp msg nextRenew + + handlePTM :: NatPmpHandle + -> PortThreadMsg + -> DH.MinPrioHeap POSIXTime RenewAction + -> RIO e () + handlePTM pmp msg nextRenew = case msg of + PTMOpen p notifyComplete -> do + logInfo $ + displayShow ("port: sending initial request to NAT-PMP for port ", p) + setPortMapping pmp PTUdp p p portLeaseLifetime >>= \case + Left err | isResetAndRetry err -> do + closeNatPmp pmp + attemptReestablishNatPmpThen (\pmp -> handlePTM pmp msg nextRenew) + Left err -> do + logError $ + displayShow ("port: failed to request NAT-PMP for port ", p, + ":", err, ", disabling NAT-PMP") + loopErr q + Right _ -> do + -- Filter any existing references to this port on the heap to ensure + -- we don't double up on tasks. 
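Editor's note: the comment above states the invariant portThread maintains — at most one pending renewal per port in the MinPrioHeap, keyed by the POSIX time at which the lease must be refreshed. The same bookkeeping in isolation, using a bare Word16 where the real code wraps the port in RenewAction, and with 840/900 seconds mirroring portRenewalTime/portLeaseLifetime; reschedule is an illustrative name, not part of the patch.

import qualified Data.Heap as DH
import Data.Time.Clock.POSIX (POSIXTime)
import Data.Word (Word16)

-- Drop any pending entry for this port, then schedule a fresh renewal one
-- minute before the 900-second lease would expire.
reschedule :: POSIXTime -> Word16 -> DH.MinPrioHeap POSIXTime Word16
           -> DH.MinPrioHeap POSIXTime Word16
reschedule now port heap =
  DH.insert (now + 840, port) (DH.filter (\(_, p) -> p /= port) heap)
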
+ let filteredHeap = filterPort p nextRenew + now <- io $ getPOSIXTime + let withRenew = + DH.insert (now + fromIntegral portRenewalTime, RenewAction p) + filteredHeap + atomically notifyComplete + loop pmp withRenew + + PTMClose p -> do + logInfo $ + displayShow ("port: releasing lease for ", p) + setPortMapping pmp PTUdp p p 0 + let removed = filterPort p nextRenew + loop pmp removed + + handleRenew :: NatPmpHandle + -> DH.MinPrioHeap POSIXTime RenewAction + -> RIO e () + handleRenew pmp nextRenew = do + case (DH.view nextRenew) of + Nothing -> error "Internal heap managing error." + Just ((_, RenewAction p), rest) -> do + logInfo $ + displayShow ("port: sending renewing request to NAT-PMP for port ", + p) + setPortMapping pmp PTUdp p p portLeaseLifetime >>= \case + Left err | isResetAndRetry err -> do + closeNatPmp pmp + attemptReestablishNatPmpThen (\pmp -> handleRenew pmp nextRenew) + Left err -> do + logError $ + displayShow ("port: failed to request NAT-PMP for port ", p, + ":", err, ". disabling NAT-PMP") + loopErr q + Right _ -> do + -- We don't need to filter the port because we just did. + now <- io $ getPOSIXTime + let withRenew = + DH.insert (now + fromIntegral portRenewalTime, RenewAction p) + rest + loop pmp withRenew + + -- If the internal natpmp socket is closed (laptop lid closed, network + -- change, etc), attempt to reestablish a connection. + attemptReestablishNatPmpThen :: (NatPmpHandle -> RIO e ()) + -> RIO e () + attemptReestablishNatPmpThen andThen = do + logInfo $ + displayShow ("port: network changed. Attempting NAT reconnect"); + loop 0 + where + loop :: Int -> RIO e () + loop tryNum = do + initNatPmp >>= \case + Left err -> do + if tryNum == maxRetries + then do + stderr $ "port: failed to reestablish a connection to your router" + loopErr q + else do + threadDelay networkRetryDelay + loop (tryNum + 1) + Right pmp -> do + andThen pmp + + filterPort :: Word16 + -> DH.MinPrioHeap POSIXTime RenewAction + -> DH.MinPrioHeap POSIXTime RenewAction + filterPort p = DH.filter okPort + where + okPort (_, RenewAction x) = p /= x + + -- block (retry) until the delay TVar is set to True + fini :: TVar Bool -> STM () + fini = check <=< readTVar + + -- The NAT system is considered "off" but we still need to signal back to + -- the main thread that blocking actions are complete. + loopErr q = forever $ do + (atomically $ readTQueue q) >>= \case + PTMOpen _ onComplete -> atomically onComplete + PTMClose _ -> pure () + +-- When we were unable to connect to a router, get the ip address on the +-- default ipv4 interface to check if we look like we're on an internal network +-- or not. +likelyIPAddress :: MonadIO m => m (Maybe (Word8, Word8, Word8, Word8)) +likelyIPAddress = liftIO do + -- Try opening a socket to 1.1.1.1 to get our own IP address. Since UDP is + -- stateless and we aren't sending anything, we aren't actually contacting + -- them in any way. + sock <- socket AF_INET Datagram 0 + connect sock (SockAddrInet 53 (tupleToHostAddress (1, 1, 1, 1))) + sockAddr <- getSocketName sock + case sockAddr of + SockAddrInet _ addr -> pure $ Just $ hostAddressToTuple addr + _ -> pure $ Nothing + +likelyBehindRouter :: MonadIO m => m Bool +likelyBehindRouter = do + likelyIPAddress >>= \case + Just ip@(192, 168, _, _) -> pure True + Just ip@(172, x, _, _) + | (x >= 16 && x <= 31) -> pure True + Just ip@(10, _, _, _) -> pure True + _ -> pure False + +-- Some of the errors that we encounter happen when the underlying sockets have +-- closed out from under us. 
When this happens, we want to wait a short time +-- and reset the system. +isResetAndRetry :: Error -> Bool +isResetAndRetry ErrRecvFrom = True +isResetAndRetry ErrSendErr = True +isResetAndRetry _ = False + +-- Acquire a port for the duration of the RAcquire. +requestPortAccess :: forall e. (HasPortControlApi e) => Word16 -> RAcquire e () +requestPortAccess port = do + mkRAcquire request release + where + request :: RIO e () + request = do + api <- view portControlApiL + io $ pAddPortRequest api port + + release :: () -> RIO e () + release _ = do + api <- view portControlApiL + io $ pRemovePortRequest api port + diff --git a/pkg/hs/urbit-king/lib/Urbit/Vere/Serf.hs b/pkg/hs/urbit-king/lib/Urbit/Vere/Serf.hs index e03512883..6a65f2d23 100644 --- a/pkg/hs/urbit-king/lib/Urbit/Vere/Serf.hs +++ b/pkg/hs/urbit-king/lib/Urbit/Vere/Serf.hs @@ -40,7 +40,7 @@ withSerf config = mkRAcquire startup kill where startup = do (serf, st) <- io $ start config - logDebug (displayShow ("serf state", st)) + logInfo (displayShow ("serf state", st)) pure serf kill serf = do void $ rio $ stop serf @@ -58,7 +58,7 @@ execReplay serf log last = do where doBoot :: RIO e (Either PlayBail Word) doBoot = do - logDebug "Beginning boot sequence" + logInfo "Beginning boot sequence" let bootSeqLen = lifecycleLen (Log.identity log) @@ -72,14 +72,14 @@ execReplay serf log last = do when (numEvs /= bootSeqLen) $ do throwIO (MissingBootEventsInEventLog numEvs bootSeqLen) - logDebug $ display ("Sending " <> tshow numEvs <> " boot events to serf") + logInfo $ display ("Sending " <> tshow numEvs <> " boot events to serf") io (boot serf evs) >>= \case Just err -> do - logDebug "Error on replay, exiting" + logInfo "Error on replay, exiting" pure (Left err) Nothing -> do - logDebug "Finished boot events, moving on to more events from log." + logInfo "Finished boot events, moving on to more events from log." 
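Editor's note: requestPortAccess and withSerf above both rely on the acquire/release shape of RAcquire. The same guarantee in plain IO is just bracket_, sketched here with hypothetical addPort/removePort callbacks standing in for PortControlApi; withPortMapping is an illustrative name, not part of the patch.

import Control.Exception (bracket_)
import Data.Word (Word16)

-- Hold a port mapping exactly as long as an action runs, releasing it even if
-- the action throws; the RAcquire version ties this to the Ames driver's scope.
withPortMapping :: (Word16 -> IO ()) -> (Word16 -> IO ()) -> Word16 -> IO a -> IO a
withPortMapping addPort removePort port = bracket_ (addPort port) (removePort port)
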
doReplay <&> \case Left err -> Left err Right num -> Right (num + numEvs) diff --git a/pkg/hs/urbit-king/lib/Urbit/Vere/Serf/IPC.hs b/pkg/hs/urbit-king/lib/Urbit/Vere/Serf/IPC.hs index 3b0d5b140..71c2ef2f3 100644 --- a/pkg/hs/urbit-king/lib/Urbit/Vere/Serf/IPC.hs +++ b/pkg/hs/urbit-king/lib/Urbit/Vere/Serf/IPC.hs @@ -74,6 +74,7 @@ import Data.Bits import Data.Conduit import System.Process import Urbit.Vere.Serf.Types +import Urbit.Vere.Serf.IPC.Types import Control.Monad.STM (retry) import Control.Monad.Trans.Resource (MonadResource, allocate, runResourceT) @@ -103,55 +104,6 @@ data Serf = Serf } --- Internal Protocol Types ----------------------------------------------------- - -data Live - = LExit Atom -- exit status code - | LSave EventId - | LCram EventId - | LPack () - deriving (Show) - -data Play - = PDone Mug - | PBail PlayBail - deriving (Show) - -data Scry - = SDone (Maybe (Term, Noun)) - | SBail Goof - deriving (Show) - -data Work - = WDone EventId Mug FX - | WSwap EventId Mug (Wen, Noun) FX - | WBail [Goof] - deriving (Show) - -data Writ - = WLive Live - | WPeek Atom Wen Gang Path - | WPlay EventId [Noun] - | WWork Atom Wen Ev - deriving (Show) - -data Plea - = PLive () - | PRipe SerfInfo - | PSlog Slog - | PPeek Scry - | PPlay Play - | PWork Work - deriving (Show) - -deriveNoun ''Live -deriveNoun ''Play -deriveNoun ''Scry -deriveNoun ''Work -deriveNoun ''Writ -deriveNoun ''Plea - - -- Access Current Serf State --------------------------------------------------- serfLastEventBlocking :: Serf -> IO EventId @@ -250,8 +202,8 @@ recvPeek :: Serf -> IO (Maybe (Term, Noun)) recvPeek serf = do recvPleaHandlingSlog serf >>= \case PPeek (SDone peek) -> pure peek - -- XX produce error - PPeek (SBail dud) -> throwIO (PeekBail dud) + -- XX surface error content + PPeek (SBail dud) -> pure Nothing plea -> throwIO (UnexpectedPlea (toNoun plea) "expecting %peek") diff --git a/pkg/hs/urbit-king/lib/Urbit/Vere/Serf/IPC/Types.hs b/pkg/hs/urbit-king/lib/Urbit/Vere/Serf/IPC/Types.hs new file mode 100644 index 000000000..88fed803e --- /dev/null +++ b/pkg/hs/urbit-king/lib/Urbit/Vere/Serf/IPC/Types.hs @@ -0,0 +1,58 @@ +{-# LANGUAGE StrictData #-} + +module Urbit.Vere.Serf.IPC.Types where + +import Urbit.Prelude hiding ((<|)) +import Urbit.Arvo (Ev, FX) +import Urbit.Noun.Time (Wen) +import Urbit.Vere.Serf.Types + +-- Private data structures for Urbit.Vere.Serf.IPC, but made StrictData without +-- making the rest of Urbit.Vere.Serf.IPC strict. 
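Editor's note: the point of splitting these types into their own module is the StrictData pragma at the top of this file — under it every field of the declarations below is strict without per-field bangs. A tiny illustration with throwaway names, not part of the patch:

{-# LANGUAGE StrictData #-}

-- Under StrictData these two declarations mean the same thing: every field is
-- strict without writing the bangs by hand.
data Lease  = Lease  Int Int
data Lease' = Lease' !Int !Int
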
+ +data Live + = LExit Atom -- exit status code + | LSave EventId + | LCram EventId + | LPack () + deriving (Show) + +data Play + = PDone Mug + | PBail PlayBail + deriving (Show) + +data Scry + = SDone (Maybe (Term, Noun)) + | SBail Goof + deriving (Show) + +data Work + = WDone EventId Mug FX + | WSwap EventId Mug (Wen, Noun) FX + | WBail [Goof] + deriving (Show) + +data Writ + = WLive Live + | WPeek Atom Wen Gang Path + | WPlay EventId [Noun] + | WWork Atom Wen Ev + deriving (Show) + +data Plea + = PLive () + | PRipe SerfInfo + | PSlog Slog + | PPeek Scry + | PPlay Play + | PWork Work + deriving (Show) + +deriveNoun ''Live +deriveNoun ''Play +deriveNoun ''Scry +deriveNoun ''Work +deriveNoun ''Writ +deriveNoun ''Plea + diff --git a/pkg/hs/urbit-king/lib/Urbit/Vere/Serf/Types.hs b/pkg/hs/urbit-king/lib/Urbit/Vere/Serf/Types.hs index 1544a56fe..91f8a659e 100644 --- a/pkg/hs/urbit-king/lib/Urbit/Vere/Serf/Types.hs +++ b/pkg/hs/urbit-king/lib/Urbit/Vere/Serf/Types.hs @@ -15,21 +15,21 @@ type PlayBail = (EventId, Mug, Goof) type Slog = (Atom, Tank) data SerfState = SerfState - { ssLast :: EventId - , ssHash :: Mug + { ssLast :: !EventId + , ssHash :: !Mug } deriving (Show, Eq) data RipeInfo = RipeInfo - { riProt :: Atom - , riHoon :: Atom - , riNock :: Atom + { riProt :: !Atom + , riHoon :: !Atom + , riNock :: !Atom } deriving (Show) data SerfInfo = SerfInfo - { siRipe :: RipeInfo - , siStat :: SerfState + { siRipe :: !RipeInfo + , siStat :: !SerfState } deriving (Show) diff --git a/pkg/hs/urbit-king/lib/Urbit/Vere/Term.hs b/pkg/hs/urbit-king/lib/Urbit/Vere/Term.hs index 06082c142..b6c8db0d2 100644 --- a/pkg/hs/urbit-king/lib/Urbit/Vere/Term.hs +++ b/pkg/hs/urbit-king/lib/Urbit/Vere/Term.hs @@ -28,8 +28,9 @@ import Data.List ((!!)) import RIO.Directory (createDirectoryIfMissing) import Urbit.King.API (readPortsFile) import Urbit.TermSize (TermSize(TermSize)) -import Urbit.Vere.Term.API (Client(Client)) +import Urbit.Vere.Term.API (Client(Client), ClientTake(..)) +import qualified Data.Set as S import qualified Data.ByteString.Internal as BS import qualified Data.ByteString.UTF8 as BS import qualified System.Console.ANSI as ANSI @@ -70,7 +71,7 @@ data Private = Private -- Utils ----------------------------------------------------------------------- -initialBlew w h = EvBlip $ BlipEvTerm $ TermEvBlew (UD 1, ()) w h +blewEvent w h = EvBlip $ BlipEvTerm $ TermEvBlew (UD 1, ()) w h initialHail = EvBlip $ BlipEvTerm $ TermEvHail (UD 1, ()) () @@ -97,13 +98,13 @@ isTerminalBlit _ = True -------------------------------------------------------------------------------- -connClient :: Serv.Conn Belt [Term.Ev] -> Client +connClient :: Serv.Conn ClientTake [Term.Ev] -> Client connClient c = Client { give = Serv.cSend c , take = Serv.cRecv c } -connectToRemote :: ∀e. HasLogFunc e +connectToRemote :: forall e. HasLogFunc e => Port -> Client -> RAcquire e (Async (), Async ()) @@ -129,12 +130,12 @@ data HackConfigDir = HCD { _hcdPax :: FilePath } makeLenses ''HackConfigDir instance HasPierPath HackConfigDir where pierPathL = hcdPax -runTerminalClient :: ∀e. HasLogFunc e => FilePath -> RIO e () +runTerminalClient :: forall e. 
HasLogFunc e => FilePath -> RIO e () runTerminalClient pier = runRAcquire $ do mPort <- runRIO (HCD pier) readPortsFile port <- maybe (error "Can't connect") pure mPort mExit <- io newEmptyTMVarIO - (siz, cli) <- localClient (putTMVar mExit ()) + cli <- localClient (putTMVar mExit ()) (tid, sid) <- connectToRemote (Port $ fromIntegral port) cli atomically $ waitSTM tid <|> waitSTM sid <|> takeTMVar mExit @@ -172,16 +173,32 @@ _spin_idle_us = 500000 {-| Initializes the generalized input/output parts of the terminal. -} -localClient :: ∀e. HasLogFunc e +localClient :: forall e. HasLogFunc e => STM () - -> RAcquire e (TermSize, Client) + -> RAcquire e Client localClient doneSignal = fst <$> mkRAcquire start stop where - start :: HasLogFunc e => RIO e ((TermSize, Client), Private) + start :: HasLogFunc e => RIO e (Client, Private) start = do tsWriteQueue <- newTQueueIO :: RIO e (TQueue [Term.Ev]) spinnerMVar <- newEmptyTMVarIO :: RIO e (TMVar ()) - pWriterThread <- asyncBound (writeTerminal tsWriteQueue spinnerMVar) + + -- Track the terminal size, keeping track of the size of the local + -- terminal for our own printing, as well as putting size changes into an + -- event queue so we can send changes to the terminal muxing system. + tsizeTVar <- newTVarIO (TermSize 80 24) -- Value doesn't matter. + tsSizeChange <- newEmptyTMVarIO + io $ T.liveTermSize (\ts -> atomically $ do + -- We keep track of the console's local size for + -- our own tank washing. + writeTVar tsizeTVar ts + + -- We queue up changes so we can broadcast them + -- to the muxing client. + putTMVar tsSizeChange ts) + + pWriterThread <- asyncBound + (writeTerminal tsWriteQueue spinnerMVar tsizeTVar) pPreviousConfiguration <- io $ getTerminalAttributes stdInput @@ -198,17 +215,18 @@ localClient doneSignal = fst <$> mkRAcquire start stop pReaderThread <- asyncBound (readTerminal tsReadQueue tsWriteQueue (bell tsWriteQueue)) - let client = Client { take = Just <$> readTQueue tsReadQueue + let client = Client { take = Just <$> asum + [ readTQueue tsReadQueue <&> ClientTakeBelt, + takeTMVar tsSizeChange <&> ClientTakeSize + ] , give = writeTQueue tsWriteQueue } - tsize <- io $ T.termSize - - pure ((tsize, client), Private{..}) + pure (client, Private{..}) stop :: HasLogFunc e - => ((TermSize, Client), Private) -> RIO e () - stop ((_, Client{..}), Private{..}) = do + => (Client, Private) -> RIO e () + stop (Client{..}, Private{..}) = do -- Note that we don't `cancel pReaderThread` here. This is a deliberate -- decision because fdRead calls into a native function which the runtime -- can't kill. If we were to cancel here, the internal `waitCatch` would @@ -244,8 +262,8 @@ localClient doneSignal = fst <$> mkRAcquire start stop -- Writes data to the terminal. Both the terminal reading, normal logging, -- and effect handling can all emit bytes which go to the terminal. - writeTerminal :: TQueue [Term.Ev] -> TMVar () -> RIO e () - writeTerminal q spinner = do + writeTerminal :: TQueue [Term.Ev] -> TMVar () -> TVar TermSize -> RIO e () + writeTerminal q spinner termSizeVar = do currentTime <- io $ now loop (LineState "" 0 Nothing Nothing True 0 currentTime) where @@ -259,6 +277,17 @@ localClient doneSignal = fst <$> mkRAcquire start stop putStr p termRefreshLine ls + writeSlog :: LineState -> (Atom, Tank) -> RIO e LineState + writeSlog ls slog = do + putStr "\r" + T.clearLine + TermSize width _ <- atomically $ readTVar termSizeVar + -- TODO: Ignoring priority for now. 
Priority changes the color of, + -- and adds a prefix of '>' to, the output. + let lines = fmap unTape $ wash (WashCfg 0 width) $ tankTree $ snd slog + forM lines $ \line -> putStr (line <> "\r\n") + termRefreshLine ls + {- Figure out how long to wait to show the spinner. When we don't have a vane name to display, we assume its a user @@ -309,6 +338,7 @@ localClient doneSignal = fst <$> mkRAcquire start stop execEv ls = \case Term.Blits bs -> foldM writeBlit ls bs Term.Trace p -> writeTrace ls (unCord p) + Term.Slog s -> writeSlog ls s Term.Blank -> writeBlank ls Term.Spinr (Just txt) -> doSpin ls (unCord <$> txt) Term.Spinr Nothing -> unspin ls @@ -342,6 +372,8 @@ localClient doneSignal = fst <$> mkRAcquire start stop Clr () -> do T.clearScreen termRefreshLine ls Hop w -> termShowCursor ls (fromIntegral w) + Klr s -> do ls2 <- termShowClear ls + termShowStub ls2 s Lin c -> do ls2 <- termShowClear ls termShowLine ls2 (pack c) Mor () -> termShowMore ls @@ -349,6 +381,55 @@ localClient doneSignal = fst <$> mkRAcquire start stop Sav path atom -> pure ls Url url -> pure ls + termRenderDeco :: Deco -> Char + termRenderDeco = \case + DecoBr -> '1' + DecoUn -> '4' + DecoBl -> '5' + DecoNull -> '0' + + termRenderTint :: Tint -> Char + termRenderTint = \case + TintK -> '0' + TintR -> '1' + TintG -> '2' + TintY -> '3' + TintB -> '4' + TintM -> '5' + TintC -> '6' + TintW -> '7' + TintNull -> '9' + + -- Wraps the appropriate escape sequence around a piece of styled text + termRenderStubSegment :: Stye -> [Char] -> [Char] + termRenderStubSegment Stye {..} tape = + case (S.null decoset, back, fore) of + (True, TintNull, TintNull) -> tape + _ -> styled + where + decoset = setFromHoonSet deco + escape = [chr 27, '['] + + styles = intercalate ";" $ filter (not . null) + [ intersperse ';' $ fmap termRenderDeco $ toList decoset + , case back of + TintNull -> [] + tint -> ['4', termRenderTint tint] + , case fore of + TintNull -> [] + tint -> ['3', termRenderTint tint] + ] + + styled = mconcat [escape, styles, "m", tape, escape, "0m"] + + -- Displays and sets styled text as the current line + termShowStub :: LineState -> Stub -> RIO e LineState + termShowStub ls (Stub s) = do + let visualLength = sum $ fmap (length . snd) s + let outText = pack $ mconcat $ fmap (uncurry termRenderStubSegment) s + putStr outText + pure ls { lsLine = outText, lsCurPos = visualLength } + -- Moves the cursor to the requested position termShowCursor :: LineState -> Int -> RIO e LineState termShowCursor ls@LineState{..} {-line pos)-} newPos = do @@ -363,7 +444,7 @@ localClient doneSignal = fst <$> mkRAcquire start stop -- Moves the cursor left without any mutation of the LineState. Used only -- in cursor spinning. 
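Editor's note: termRenderStubSegment above assembles a standard SGR escape sequence — ESC [ <decorations;background;foreground> m around the text, closed by ESC [ 0 m. As a worked example (not new runtime code), bold text on a blue background with a red foreground comes out as:

-- '\ESC' is chr 27; "1" = bold, "44" = blue background, "31" = red foreground.
styledHello :: String
styledHello = "\ESC[1;44;31m" <> "hello" <> "\ESC[0m"
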
- _termSpinnerMoveLeft :: Int → RIO e () + _termSpinnerMoveLeft :: Int -> RIO e () _termSpinnerMoveLeft = T.cursorLeft -- Displays and sets the current line @@ -472,7 +553,7 @@ localClient doneSignal = fst <$> mkRAcquire start stop loop rd else if w == 3 then do -- ETX (^C) - logDebug $ displayShow "Ctrl-c interrupt" + logInfo $ displayShow "Ctrl-c interrupt" atomically $ do writeTQueue wq [Term.Trace "interrupt\r\n"] writeTQueue rq $ Ctl $ Cord "c" @@ -517,7 +598,7 @@ term' -> RIO e ([Ev], RAcquire e (DriverApi TermEf)) term' (tsize, client) serfSIGINT = do let TermSize wi hi = tsize - initEv = [initialBlew wi hi, initialHail] + initEv = [blewEvent wi hi, initialHail] pure (initEv, runDriver) where @@ -553,13 +634,16 @@ term env (tsize, Client{..}) plan serfSIGINT = runTerm readLoop :: RIO e () readLoop = forever $ do atomically take >>= \case - Nothing -> pure () - Just b -> do + Nothing -> pure () + Just (ClientTakeBelt b) -> do when (b == Ctl (Cord "c")) $ do io serfSIGINT let beltEv = EvBlip $ BlipEvTerm $ TermEvBelt (UD 1, ()) $ b let beltFailed _ = pure () atomically $ plan (EvErr beltEv beltFailed) + Just (ClientTakeSize ts@(TermSize w h)) -> do + let blewFailed _ = pure () + atomically $ plan (EvErr (blewEvent w h) blewFailed) handleEffect :: TermEf -> RIO e () handleEffect = \case diff --git a/pkg/hs/urbit-king/lib/Urbit/Vere/Term/API.hs b/pkg/hs/urbit-king/lib/Urbit/Vere/Term/API.hs index 0d38b8ffd..7e65d49e5 100644 --- a/pkg/hs/urbit-king/lib/Urbit/Vere/Term/API.hs +++ b/pkg/hs/urbit-king/lib/Urbit/Vere/Term/API.hs @@ -1,12 +1,20 @@ {-| Interface Terminal API. -} -module Urbit.Vere.Term.API (Ev(..), Client(..), trace, spin, stopSpin) where +module Urbit.Vere.Term.API (Ev(..), + Client(..), + ClientTake(..), + trace, + slog, + spin, + stopSpin) where import Urbit.Prelude hiding (trace) import Urbit.Arvo (Belt, Blit) +import Control.Monad.Fail (fail) +import Urbit.TermSize -- External Types -------------------------------------------------------------- @@ -15,17 +23,42 @@ import Urbit.Arvo (Belt, Blit) %blits -- list of blits from arvo. %trace -- stderr line from runtime. + %slog -- nock worker logging with priority %blank -- print a blank line %spinr -- Start or stop the spinner -} -data Ev = Blits [Blit] - | Trace Cord +data Ev = Blits ![Blit] + | Trace !Cord + | Slog !(Atom, Tank) | Blank - | Spinr (Maybe (Maybe Cord)) + | Spinr !(Maybe (Maybe Cord)) deriving (Show) +data ClientTake + = ClientTakeBelt Belt + | ClientTakeSize TermSize + deriving (Show) + +instance ToNoun ClientTake where + toNoun = \case + ClientTakeBelt b -> toNoun $ (Cord "belt", b) + ClientTakeSize (TermSize w h) -> toNoun $ (Cord "size", (w, h)) + +instance FromNoun ClientTake where + parseNoun n = named "ClientTake" $ do + (Cord name, rest) <- parseNoun n + case name of + "belt" -> do + b <- parseNoun rest + pure (ClientTakeBelt b) + "size" -> do + (w, h) <- parseNoun rest + pure (ClientTakeSize (TermSize w h)) + _ -> fail "weird client take" + + data Client = Client - { take :: STM (Maybe Belt) + { take :: STM (Maybe ClientTake) , give :: [Ev] -> STM () } @@ -37,6 +70,9 @@ deriveNoun ''Ev trace :: Client -> Text -> STM () trace ts = give ts . singleton . Trace . Cord +slog :: Client -> (Atom, Tank) -> STM () +slog ts = give ts . singleton . Slog + spin :: Client -> Maybe Text -> STM () spin ts = give ts . singleton . Spinr . Just . 
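-- [Editor's note: illustration only, not part of the patch.] The instances
-- above encode a take as a tag cord plus a payload -- roughly [%belt ...] or
-- [%size w h] on the noun side -- and the parser dispatches on that tag,
-- rejecting anything else with `fail "weird client take"`. A round-trip check
-- would look like the sketch below; running it under QuickCheck would also
-- need an Arbitrary instance (and Eq, or the Show comparison used here):

prop_clientTakeRoundTrip :: ClientTake -> Bool
prop_clientTakeRoundTrip t =
  fmap show (fromNoun (toNoun t) :: Maybe ClientTake) == Just (show t)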
fmap Cord diff --git a/pkg/hs/urbit-king/lib/Urbit/Vere/Term/Demux.hs b/pkg/hs/urbit-king/lib/Urbit/Vere/Term/Demux.hs index 5a7bd23e2..e9e774867 100644 --- a/pkg/hs/urbit-king/lib/Urbit/Vere/Term/Demux.hs +++ b/pkg/hs/urbit-king/lib/Urbit/Vere/Term/Demux.hs @@ -4,12 +4,15 @@ given full event history since the creation of the demuxer. -} -module Urbit.Vere.Term.Demux (Demux, mkDemux, addDemux, useDemux) where +module Urbit.Vere.Term.Demux (Demux, + mkDemux, + addDemux, + useDemux, + curDemuxSize) where import Urbit.Prelude - -import Urbit.Arvo (Belt) -import Urbit.Vere.Term.API (Client(Client)) +import Urbit.TermSize +import Urbit.Vere.Term.API (Client(Client), ClientTake(..)) import qualified Urbit.Vere.Term.API as Term import qualified Urbit.Vere.Term.Logic as Logic @@ -42,11 +45,17 @@ ksDelete k (KeyedSet t n) = KeyedSet (deleteMap k t) n data Demux = Demux { dConns :: TVar (KeyedSet Client) + , dSizes :: TVar (IntMap TermSize) , dStash :: TVar Logic.St + , dMinSize :: TVar TermSize } -mkDemux :: STM Demux -mkDemux = Demux <$> newTVar mempty <*> newTVar Logic.init +mkDemux :: TermSize -> STM Demux +mkDemux ts = Demux <$> + newTVar mempty <*> + newTVar mempty <*> + newTVar Logic.init <*> + newTVar ts addDemux :: Client -> Demux -> STM () addDemux conn Demux{..} = do @@ -57,6 +66,8 @@ addDemux conn Demux{..} = do useDemux :: Demux -> Client useDemux d = Client { give = dGive d, take = dTake d } +curDemuxSize :: Demux -> STM TermSize +curDemuxSize Demux{..} = readTVar dMinSize -- Internal -------------------------------------------------------------------- @@ -77,16 +88,45 @@ dGive Demux{..} evs = do If there are no attached clients, this will not return until one is attached. -} -dTake :: Demux -> STM (Maybe Belt) +dTake :: Demux -> STM (Maybe ClientTake) dTake Demux{..} = do conns <- readTVar dConns - waitForBelt conns >>= \case - (_, Just b ) -> pure (Just b) - (k, Nothing) -> do writeTVar dConns (ksDelete k conns) - pure Nothing + waitForTake conns >>= \case + (_, Just (ClientTakeBelt b)) -> pure (Just (ClientTakeBelt b)) + + (k, Just (ClientTakeSize s)) -> do + newSizeTree <- modifyAndReadTVar' dSizes (insertMap k s) + maybeUpdateTerminalSize newSizeTree + + (k, Nothing) -> do + writeTVar dConns (ksDelete k conns) + newSizeTree <- modifyAndReadTVar' dSizes (deleteMap k) + maybeUpdateTerminalSize newSizeTree + where - waitForBelt :: KeyedSet Client -> STM (Int, Maybe Belt) - waitForBelt ks = asum + waitForTake :: KeyedSet Client -> STM (Int, Maybe ClientTake) + waitForTake ks = asum $ fmap (\(k,c) -> (k,) <$> Term.take c) $ mapToList $ _ksTable ks + + maybeUpdateTerminalSize :: IntMap TermSize -> STM (Maybe ClientTake) + maybeUpdateTerminalSize newSizeTree = do + let termSize = foldr minTermSize (TermSize 1024 1024) newSizeTree + curSize <- readTVar dMinSize + if curSize == termSize + then pure Nothing + else do + writeTVar dMinSize termSize + pure $ Just (ClientTakeSize termSize) + + modifyAndReadTVar' :: TVar a -> (a -> a) -> STM a + modifyAndReadTVar' var fun = do + pre <- readTVar var + let !post = fun pre + writeTVar var post + pure post + + minTermSize :: TermSize -> TermSize -> TermSize + minTermSize (TermSize wa ha) (TermSize wb hb) = + TermSize (min wa wb) (min ha hb) diff --git a/pkg/hs/urbit-king/lib/Urbit/Vere/Term/Logic.hs b/pkg/hs/urbit-king/lib/Urbit/Vere/Term/Logic.hs index 8e739b88a..770742a68 100644 --- a/pkg/hs/urbit-king/lib/Urbit/Vere/Term/Logic.hs +++ b/pkg/hs/urbit-king/lib/Urbit/Vere/Term/Logic.hs @@ -37,6 +37,7 @@ type SpinnerState = Maybe SpinnerCause -} data 
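-- [Editor's sketch, not part of the patch.] maybeUpdateTerminalSize above
-- folds every attached client's size down to a per-axis minimum, so the muxed
-- terminal is only as large as its smallest viewer. The same fold on a
-- stand-in type (TermSize' mirrors TermSize from urbit-termsize):

import qualified Data.IntMap as IntMap

data TermSize' = TermSize' Word Word deriving (Eq, Show)

minSize :: IntMap.IntMap TermSize' -> TermSize'
minSize = foldr (\(TermSize' wa ha) (TermSize' wb hb) ->
                   TermSize' (min wa wb) (min ha hb))
                (TermSize' 1024 1024)

-- minSize (IntMap.fromList [(1, TermSize' 80 24), (2, TermSize' 100 20)])
--   == TermSize' 80 20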
Ev = EvLine Text + | EvSlog (Atom, Tank) | EvSpin SpinnerState | EvMove Word | EvBell @@ -53,11 +54,16 @@ data Ef | EfSpin SpinnerState deriving (Show) +data History + = HistoryText !Text + | HistorySlog !(Atom, Tank) + deriving (Show) + data St = St - { sHistory :: Seq Text - , sLine :: Text - , sCurPos :: Word - , sSpinner :: SpinnerState + { sHistory :: !(Seq History) + , sLine :: !Text + , sCurPos :: !Word + , sSpinner :: !SpinnerState } deriving (Show) @@ -74,19 +80,27 @@ init = St mempty "" 0 Nothing -} step :: St -> Ev -> St step st@St{..} = \case - EvLine t -> st & record t + EvLine t -> st & recordText t + EvSlog s -> st & recordSlog s EvSpin s -> st { sSpinner = s } EvMove w -> st { sCurPos = min w (word $ length sLine) } EvEdit t -> st { sLine = t, sCurPos = word (length t) } - EvMore -> st { sLine = "", sCurPos = 0 } & record (sLine <> "\n") + EvMore -> st { sLine = "", sCurPos = 0 } & recordText (sLine <> "\n") EvBell -> st EvDraw -> st where word :: Integral i => i -> Word word = fromIntegral - record :: Text -> St -> St - record t st@St{..} = st { sHistory = trim (sHistory |> t) } + recordText :: Text -> St -> St + recordText !t st@St{..} = st { + sHistory = trim (sHistory |> (HistoryText t)) + } + + recordSlog :: (Atom, Tank) -> St -> St + recordSlog !t st@St{..} = st { + sHistory = trim (sHistory |> (HistorySlog t)) + } trim :: Seq a -> Seq a trim s | length s < 20 = s @@ -96,11 +110,14 @@ step st@St{..} = \case drawState :: St -> [Ev] drawState St{..} = hist <> out <> cur <> spin where - hist = EvLine <$> toList sHistory + hist = drawHistory <$> toList sHistory out = if null sLine then [] else [EvEdit sLine] cur = if 0 == sCurPos then [] else [EvMove $ fromIntegral $ sCurPos] spin = maybe [] (singleton . EvSpin . Just) sSpinner + drawHistory (HistoryText t) = EvLine t + drawHistory (HistorySlog s) = EvSlog s + -- Conversion ------------------------------------------------------------------ @@ -127,11 +144,13 @@ fromTermEv = \case Term.Trace t -> [EvLine $ unCord t] Term.Blank -> [EvLine ""] Term.Spinr s -> [EvSpin $ toCause <$> s] + Term.Slog s -> [EvSlog s] toTermEv :: Ev -> Term.Ev toTermEv = \case EvLine "" -> Term.Blank EvLine t -> Term.Trace (Cord t) + EvSlog s -> Term.Slog s EvSpin s -> Term.Spinr (fromCause <$> s) EvMove w -> Term.Blits [Arvo.Hop $ fromIntegral w] EvBell -> Term.Blits [Arvo.Bel ()] diff --git a/pkg/hs/urbit-king/lib/Urbit/Vere/Term/Render.hs b/pkg/hs/urbit-king/lib/Urbit/Vere/Term/Render.hs index a7751a005..3765f9cb2 100644 --- a/pkg/hs/urbit-king/lib/Urbit/Vere/Term/Render.hs +++ b/pkg/hs/urbit-king/lib/Urbit/Vere/Term/Render.hs @@ -16,17 +16,17 @@ import qualified System.Console.ANSI as ANSI -- Types ----------------------------------------------------------------------- -clearScreen ∷ MonadIO m ⇒ m () +clearScreen :: MonadIO m => m () clearScreen = liftIO $ ANSI.clearScreen -clearLine ∷ MonadIO m ⇒ m () +clearLine :: MonadIO m => m () clearLine = liftIO $ ANSI.clearLine -soundBell ∷ MonadIO m ⇒ m () +soundBell :: MonadIO m => m () soundBell = liftIO $ putStr "\a" -cursorLeft ∷ MonadIO m ⇒ Int → m () +cursorLeft :: MonadIO m => Int -> m () cursorLeft = liftIO . ANSI.cursorBackward -cursorRight ∷ MonadIO m ⇒ Int → m () +cursorRight :: MonadIO m => Int -> m () cursorRight = liftIO . 
ANSI.cursorForward diff --git a/pkg/hs/urbit-king/package.yaml b/pkg/hs/urbit-king/package.yaml index d1e8b6a77..10bbe8328 100644 --- a/pkg/hs/urbit-king/package.yaml +++ b/pkg/hs/urbit-king/package.yaml @@ -2,6 +2,9 @@ name: urbit-king version: 0.10.8 license: MIT license-file: LICENSE +data-files: + - test/gold/hoontree.gold + - test/gold/hoontree.pill library: source-dirs: lib @@ -31,6 +34,7 @@ dependencies: - binary - bytestring - case-insensitive + - cereal - classy-prelude - conduit - containers @@ -50,6 +54,8 @@ dependencies: - Glob - hashable - hashtables + - heap + - hexstring - http-client - http-client-tls - http-types @@ -64,6 +70,7 @@ dependencies: - mtl - multimap - murmur3 + - natpmp-static - network - optparse-applicative - para @@ -75,7 +82,6 @@ dependencies: - racquire - random - regex-tdfa - - regex-tdfa-text - resourcet - rio - semigroups @@ -99,7 +105,6 @@ dependencies: - unliftio-core - unordered-containers - urbit-atom - - urbit-azimuth - urbit-eventlog-lmdb - urbit-hob - urbit-noun @@ -112,12 +117,12 @@ dependencies: - wai-websockets - warp - warp-tls - - web3 - websockets default-extensions: - ApplicativeDo - BangPatterns + - BinaryLiterals - BlockArguments - ConstraintKinds - DataKinds @@ -144,6 +149,7 @@ default-extensions: - OverloadedStrings - PackageImports - PartialTypeSignatures + - PatternGuards - PatternSynonyms - QuasiQuotes - Rank2Types diff --git a/pkg/hs/urbit-king/test/AmesTests.hs b/pkg/hs/urbit-king/test/AmesTests.hs index 6be3e5c8c..01cfbcb97 100644 --- a/pkg/hs/urbit-king/test/AmesTests.hs +++ b/pkg/hs/urbit-king/test/AmesTests.hs @@ -12,142 +12,30 @@ import Urbit.EventLog.LMDB import Urbit.King.Config import Urbit.Noun import Urbit.Noun.Time -import Urbit.Prelude +import Urbit.Prelude hiding (elements) import Urbit.Vere.Ames +import Urbit.Vere.Ames.Packet import Urbit.Vere.Pier.Types +import Urbit.Vere.Ports import Control.Concurrent (runInBoundThread) +import Data.Serialize (decode, encode) import Data.LargeWord (LargeKey(..)) import GHC.Natural (Natural) import Network.Socket (tupleToHostAddress) import Urbit.King.App (HasKingId(..)) import qualified Urbit.EventLog.LMDB as Log +import qualified Urbit.Noun.Time as Time - --------------------------------------------------------------------------------- - -type HasAmes e = (HasLogFunc e, HasNetworkConfig e, HasKingId e) - --- Utils ----------------------------------------------------------------------- - -pid :: KingId -pid = KingId 0 - -turfEf :: NewtEf -turfEf = NewtEfTurf (0, ()) [] - -sendEf :: Galaxy -> Wen -> Bytes -> NewtEf -sendEf g w bs = NewtEfSend (0, ()) (EachYes g) bs - -data NetworkTestApp = NetworkTestApp - { _ntaLogFunc :: !LogFunc - , _ntaNetworkConfig :: !NetworkConfig - , _ntaKingId :: !Word16 - } - -makeLenses ''NetworkTestApp - -instance HasLogFunc NetworkTestApp where - logFuncL = ntaLogFunc - -instance HasNetworkConfig NetworkTestApp where - networkConfigL = ntaNetworkConfig - -instance HasKingId NetworkTestApp where - kingIdL = ntaKingId - -runNetworkApp :: RIO NetworkTestApp a -> IO a -runNetworkApp = runRIO NetworkTestApp - { _ntaLogFunc = mkLogFunc (\_ _ _ _ -> pure ()) - , _ntaKingId = 34 - , _ntaNetworkConfig = NetworkConfig { _ncNetMode = NMNormal - , _ncAmesPort = Nothing - , _ncNoAmes = False - , _ncNoHttp = False - , _ncNoHttps = False - , _ncHttpPort = Nothing - , _ncHttpsPort = Nothing - , _ncLocalPort = Nothing - } - } - -runGala - :: forall e - . 
HasAmes e - => Word8 - -> RAcquire e (TQueue EvErr, NewtEf -> IO ()) -runGala point = do - env <- ask - que <- newTQueueIO - let enqueue = \p -> writeTQueue que p $> Intake - let (_, runAmes) = ames env (fromIntegral point) True enqueue noStderr - cb <- runAmes - io (cb turfEf) - pure (que, cb) - where - noStderr _ = pure () - -waitForPacket :: TQueue EvErr -> Bytes -> IO Bool -waitForPacket q val = go - where - go = atomically (readTQueue q) >>= \case - EvErr (EvBlip (BlipEvNewt (NewtEvBorn (_, ()) ()))) _ -> go - EvErr (EvBlip (BlipEvAmes (AmesEvHear () _ bs))) _ -> pure (bs == val) - _ -> pure False - -runRAcquire :: RAcquire e a -> RIO e a -runRAcquire acq = rwith acq pure - -sendThread :: (NewtEf -> IO ()) -> (Galaxy, Bytes) -> RAcquire e () -sendThread cb (to, val) = void $ mkRAcquire start cancel - where - start = async $ forever $ do threadDelay 1_000 - wen <- io $ now - io $ cb (sendEf to wen val) - threadDelay 10_000 - -zodSelfMsg :: Property -zodSelfMsg = forAll arbitrary (ioProperty . runNetworkApp . runTest) - where - runTest - :: (HasLogFunc e, HasNetworkConfig e, HasKingId e) => Bytes -> RIO e Bool - runTest val = runRAcquire $ do - env <- ask - (zodQ, zod) <- runGala 0 - () <- sendThread zod (0, val) - liftIO (waitForPacket zodQ val) - -twoTalk :: Property -twoTalk = forAll arbitrary (ioProperty . runNetworkApp . runTest) - where - runTest :: (HasLogFunc e, HasNetworkConfig e, HasKingId e) - => (Word8, Word8, Bytes) -> RIO e Bool - runTest (aliceShip, bobShip, val) = - if aliceShip == bobShip - then pure True - else go aliceShip bobShip val - - go :: (HasLogFunc e, HasNetworkConfig e, HasKingId e) - => Word8 -> Word8 -> Bytes -> RIO e Bool - go aliceShip bobShip val = runRAcquire $ do - (aliceQ, alice) <- runGala aliceShip - (bobQ, bob) <- runGala bobShip - sendThread alice (Patp bobShip, val) - sendThread bob (Patp aliceShip, val) - liftIO (waitForPacket aliceQ val >> waitForPacket bobQ val) +packetSplitMorphism :: Packet -> Bool +packetSplitMorphism p = (decode . encode) p == Right p tests :: TestTree tests = testGroup "Ames" - [ localOption (QuickCheckTests 10) $ - testProperty "Zod can send a message to itself" $ - zodSelfMsg - - -- TODO Why doesn't this work in CI? 
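-- [Editor's sketch, not part of the patch.] packetSplitMorphism above is the
-- usual Data.Serialize round-trip law. The same shape on a made-up type, for
-- reference (Packet's real fields live in Urbit.Vere.Ames.Packet):

{-# LANGUAGE DeriveGeneric #-}

import Data.Serialize (Serialize, decode, encode)
import Data.Word (Word8)
import GHC.Generics (Generic)

data Ping = Ping Word8 Bool deriving (Eq, Show, Generic)
instance Serialize Ping

prop_pingRoundTrip :: Ping -> Bool
prop_pingRoundTrip p = decode (encode p) == Right p

-- Running it under QuickCheck also needs an Arbitrary instance, which the
-- Packet property gets from the Arbitrary Packet instance further down.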
- -- , localOption (QuickCheckTests 10) $ - -- testProperty "Two galaxies can talk" $ - -- twoTalk + [ testProperty "Packet coding looks good" $ + packetSplitMorphism ] @@ -203,7 +91,26 @@ instance Arbitrary AmesAddress where arbitrary = AAIpv4 <$> arb <*> arb instance Arbitrary Ship where - arbitrary = Ship <$> arb + arbitrary = Ship <$> elements + [ 0 + , 42 + , 256 + , 24_530 + , 2_071_856_128 + , 2_824_325_100 + , 430_648_908_188_615_680 + , 2^60 + 1337 + ] instance Arbitrary LogIdentity where arbitrary = LogIdentity <$> arb <*> arb <*> arb + +instance Arbitrary Packet where + arbitrary = do + pktVersion <- suchThat arb (< 8) + pktEncrypted <- arb + pktSndr <- arb + pktRcvr <- arb + pktOrigin <- arb + pktContent <- arb + pure Packet {..} diff --git a/pkg/hs/urbit-king/test/HoonMapSetTests.hs b/pkg/hs/urbit-king/test/HoonMapSetTests.hs index f5b797a9a..a012e16ea 100644 --- a/pkg/hs/urbit-king/test/HoonMapSetTests.hs +++ b/pkg/hs/urbit-king/test/HoonMapSetTests.hs @@ -1,17 +1,22 @@ module HoonMapSetTests (tests) where import RIO.Directory -import Urbit.Prelude hiding (encodeUtf8) +import Urbit.Prelude -import Data.Text.Lazy.Encoding (encodeUtf8) +import Data.ByteString.Lazy (ByteString) import Numeric.Natural (Natural) import Test.QuickCheck hiding ((.&.)) import Test.Tasty -import Test.Tasty.Golden as G import Test.Tasty.QuickCheck import Test.Tasty.TH -import qualified Data.ByteString.Lazy as L +import qualified Data.ByteString as ByteString +import qualified Data.ByteString.Lazy as ByteString.Lazy +import qualified Data.Text.Lazy as Text.Lazy +import qualified Data.Text.Lazy.Encoding as Text.Lazy.Encoding +import qualified Paths_urbit_king +import qualified Test.Tasty.Golden as Golden +import qualified Test.Tasty.Golden.Advanced as Golden.Advanced -- Types ----------------------------------------------------------------------- @@ -38,7 +43,7 @@ type TreeTests = [TreeTest] -- Utils ----------------------------------------------------------------------- -roundTrip :: ∀a. Eq a => (a -> a) -> a -> Bool +roundTrip :: forall a. Eq a => (a -> a) -> a -> Bool roundTrip f x = f x == x @@ -50,7 +55,6 @@ mapRoundtrip = roundTrip (mapFromHoonMap . mapToHoonMap) setRoundtrip :: Set SmallNoun -> Bool setRoundtrip = roundTrip (setFromHoonSet . setToHoonSet) - -- Golden Tests ---------------------------------------------------------------- treeTestsIdentity :: TreeTests -> TreeTests @@ -60,30 +64,72 @@ treeTestsIdentity = fmap go TTSet s -> (TTSet . setToHoonSet . setFromHoonSet) s TTMap m -> (TTMap . mapToHoonMap . mapFromHoonMap) m -treeRTMug :: FilePath -> IO L.ByteString +treeRTMug :: FilePath -> IO ByteString.Lazy.ByteString treeRTMug inp = do byt <- readFile inp non <- cueBSExn byt tee <- fromNounExn non mug <- evaluate $ mug $ toNoun $ treeTestsIdentity tee - pure $ encodeUtf8 $ tlshow (mug :: Natural) + pure $ Text.Lazy.Encoding.encodeUtf8 $ tlshow (mug :: Natural) - -goldenFile :: String -> String -> (FilePath -> IO L.ByteString) -> TestTree -goldenFile testName testFileName action = - goldenVsString testName gold (action pill) +goldenPill + :: TestName + -> String + -> (FilePath -> IO ByteString.Lazy.ByteString) + -> TestTree +goldenPill test name action = + goldenVsString test gold (action pill) + where + gold = "test/gold" name <.> "gold" + pill = "test/gold" name <.> "pill" + +-- | Compare a given string against the golden file's contents. +goldenVsString + :: TestName + -- ^ Test name + -> String + -- ^ The «golden» file that will be retrieved via 'getDataFileName'. 
+ -> IO ByteString.Lazy.ByteString + -- ^ Action that returns the string for comparison. + -> TestTree + -- ^ Verifies the golden file contents is identical to the returned string. +goldenVsString test name action = + askOption $ \cutoff -> + Golden.Advanced.goldenTest name acquire action (comparator cutoff) update where - root = "pkg/hs/urbit-king/test/gold" testFileName - gold = root <.> "gold" - pill = root <.> "pill" + acquire = do + path <- Paths_urbit_king.getDataFileName name + bytes <- ByteString.readFile path + pure (ByteString.Lazy.fromStrict bytes) + + comparator cutoff x y = + pure $ + if x == y + then Nothing + else Just + ( printf "Test output was different from '%s'. It was:\n" name + <> unpackUTF8 (truncate cutoff y) + ) + unpackUTF8 = Text.Lazy.unpack . Text.Lazy.Encoding.decodeUtf8 + + truncate (Golden.SizeCutoff cutoff) bytes = + if ByteString.Lazy.length bytes <= cutoff + then bytes + else ByteString.Lazy.take cutoff bytes + <> "" + <> "\nUse --accept or increase --size-cutoff to see full output." + + -- The update function is a noop as we don't have the golden file name. + update _ = pure () + -- Test Tree ------------------------------------------------------------------- tests :: TestTree tests = testGroup "Map/Set Conversions" - [ goldenFile "Golden Map Roundtrip" "hoontree" treeRTMug + [ goldenPill "Golden Map Roundtrip" "hoontree" treeRTMug , testProperty "Map Rountrip" mapRoundtrip , testProperty "Set Rountrip" setRoundtrip ] diff --git a/pkg/hs/urbit-king/test/LogTests.hs b/pkg/hs/urbit-king/test/LogTests.hs index d4317069e..ef34e2c95 100644 --- a/pkg/hs/urbit-king/test/LogTests.hs +++ b/pkg/hs/urbit-king/test/LogTests.hs @@ -16,6 +16,7 @@ import Data.LargeWord (LargeKey(..)) import GHC.Natural (Natural) import Urbit.King.App (KingEnv, runKingEnvNoLog) +import qualified Options import qualified Urbit.EventLog.LMDB as Log @@ -123,15 +124,17 @@ tryAppend = forAll arbitrary (ioProperty . runApp . runTest) readDb log >>= assertEqual db' pure True -tryAppendHuge :: Property -tryAppendHuge = forAll arbitrary (ioProperty . runApp . runTest) +tryAppendHuge :: Options.Brass -> Property +tryAppendHuge brass = + forAll arbitrary (ioProperty . runApp . 
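-- [Editor's note, not part of the patch.] Since hoontree.gold/.pill now ship
-- as Cabal data-files (see the package.yaml hunk above), the golden test
-- resolves them through the generated Paths_urbit_king module instead of
-- assuming a particular working directory, e.g.
--
--   path <- Paths_urbit_king.getDataFileName "test/gold/hoontree.gold"
--
-- which lines up with the setCurrentDirectory call being dropped from
-- test/Main.hs below.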
runTest) where runTest :: ([ByteString], Db) -> RIO KingEnv Bool runTest (extra, db) = do env <- ask io $ runInBoundThread $ runRIO env $ do - extra <- do b <- readFile "./bin/brass.pill" - pure (extra <> [b] <> extra) + extra <- do + b <- readFile =<< Options.getPillPath brass + pure (extra <> [b] <> extra) withTestDir $ \dir -> do db' <- pure (addEvents db extra) withDb dir db $ \log -> do @@ -147,20 +150,21 @@ tests :: TestTree tests = testGroup "Log" [ localOption (QuickCheckTests 10) $ - testProperty "Read/Write Log Identity" $ - tryReadIdentity + testProperty "Read/Write Log Identity" $ + tryReadIdentity , localOption (QuickCheckTests 15) $ - testProperty "Read/Write Database" $ - tryReadDatabase + testProperty "Read/Write Database" $ + tryReadDatabase , localOption (QuickCheckTests 5) $ - testProperty "Read/Write Database Multiple Times" $ - tryReadDatabaseFuzz + testProperty "Read/Write Database Multiple Times" $ + tryReadDatabaseFuzz , localOption (QuickCheckTests 10) $ - testProperty "Append Random Events" $ - tryAppend + testProperty "Append Random Events" $ + tryAppend , localOption (QuickCheckTests 1) $ + askOption $ \path -> testProperty "Append Huge Events" $ - tryAppendHuge + tryAppendHuge path ] diff --git a/pkg/hs/urbit-king/test/Main.hs b/pkg/hs/urbit-king/test/Main.hs index cc76dd6db..f58e2bb4e 100644 --- a/pkg/hs/urbit-king/test/Main.hs +++ b/pkg/hs/urbit-king/test/Main.hs @@ -2,14 +2,14 @@ module Main (main) where import ClassyPrelude +import Control.Concurrent (runInBoundThread) +import Data.Proxy (Proxy (Proxy)) import RIO.Directory import Test.QuickCheck hiding ((.&.)) import Test.Tasty import Test.Tasty.QuickCheck import Test.Tasty.TH - -import Control.Concurrent (runInBoundThread) -import System.Environment (setEnv) +import Test.Tasty.Options (OptionDescription (Option)) import qualified AmesTests import qualified ArvoTests @@ -21,20 +21,28 @@ import qualified HoonMapSetTests import qualified JamTests import qualified LogTests import qualified NounConversionTests +import qualified Options +import qualified Test.Tasty.Runners as Runners main :: IO () main = do - makeAbsolute "../../.." 
>>= setCurrentDirectory - setEnv "TASTY_NUM_THREADS" "1" - runInBoundThread $ defaultMain $ testGroup "Urbit" - [ AmesTests.tests - , ArvoTests.tests - , BehnTests.tests - , ClayTests.tests - , DawnTests.tests - , DeriveNounTests.tests - , HoonMapSetTests.tests - , JamTests.tests - , LogTests.tests - , NounConversionTests.tests - ] + let ingredients = + includingOptions + [ Option (Proxy @Options.Brass) + ] : defaultIngredients + + runInBoundThread $ + defaultMainWithIngredients ingredients $ + localOption (Runners.NumThreads 1) $ + testGroup "Urbit" + [ AmesTests.tests + , ArvoTests.tests + , BehnTests.tests + , ClayTests.tests + , DawnTests.tests + , DeriveNounTests.tests + , HoonMapSetTests.tests + , JamTests.tests + , LogTests.tests + , NounConversionTests.tests + ] diff --git a/pkg/hs/urbit-king/test/Options.hs b/pkg/hs/urbit-king/test/Options.hs new file mode 100644 index 000000000..21f1f826a --- /dev/null +++ b/pkg/hs/urbit-king/test/Options.hs @@ -0,0 +1,48 @@ +module Options + ( Brass + , Pill + , getPillPath + ) where + +import Control.Monad.IO.Class (MonadIO) +import Data.Proxy (Proxy (Proxy)) +import Data.String (IsString) +import GHC.TypeLits (KnownSymbol, Symbol) +import Prelude + +import qualified GHC.TypeLits as TypeLits +import qualified RIO.Directory as Directory +import qualified Test.Tasty as Tasty +import qualified Test.Tasty.Options as Options + +type Brass = Pill "brass" + +-- | A file-system path tagged by the pill name. +newtype Pill (name :: Symbol) = Pill FilePath + deriving stock (Eq, Show) + deriving newtype (IsString) + +instance KnownSymbol name => Options.IsOption (Pill name) where + optionName = + pure ( TypeLits.symbolVal (Proxy @name) + ++ "-pill" + ) + + optionHelp = + pure ( "The file path to the " + ++ TypeLits.symbolVal (Proxy @name) + ++ " pill" + ) + + defaultValue = + Pill ( "../../../bin" + ++ TypeLits.symbolVal (Proxy @name) + ++ ".pill" + ) + + parseValue = \case + "" -> Nothing + path -> Just (Pill path) + +getPillPath :: MonadIO m => Pill name -> m FilePath +getPillPath (Pill path) = Directory.canonicalizePath path diff --git a/pkg/hs/urbit-noun-core/lib/Urbit/Noun/Convert.hs b/pkg/hs/urbit-noun-core/lib/Urbit/Noun/Convert.hs index f72a465df..d78a097b9 100644 --- a/pkg/hs/urbit-noun-core/lib/Urbit/Noun/Convert.hs +++ b/pkg/hs/urbit-noun-core/lib/Urbit/Noun/Convert.hs @@ -12,95 +12,22 @@ module Urbit.Noun.Convert import ClassyPrelude hiding (hash) +import Control.Monad.Fail (MonadFail (fail)) import Urbit.Noun.Core -import qualified Control.Monad.Fail as Fail - -- Types ----------------------------------------------------------------------- type ParseStack = [Text] --- IResult --------------------------------------------------------------------- - -data IResult a = IError ParseStack String | ISuccess a - deriving (Eq, Show, Typeable, Functor, Foldable, Traversable) - -instance Applicative IResult where - pure = ISuccess - (<*>) = ap - -instance Fail.MonadFail IResult where - fail err = IError [] err - -instance Monad IResult where - return = pure - fail = Fail.fail - ISuccess a >>= k = k a - IError path err >>= _ = IError path err - -instance MonadPlus IResult where - mzero = fail "mzero" - mplus a@(ISuccess _) _ = a - mplus _ b = b - -instance Alternative IResult where - empty = mzero - (<|>) = mplus - -instance Semigroup (IResult a) where - (<>) = mplus - -instance Monoid (IResult a) where - mempty = fail "mempty" - mappend = (<>) - - --- Result ---------------------------------------------------------------------- - -data Result a = Error 
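-- [Editor's note: usage sketch, not part of the patch.] With the ingredient
-- registered in test/Main.hs above, the pill path becomes an ordinary Tasty
-- option named "brass-pill" (defaulting to the repository's bin/brass.pill),
-- so it can be overridden per run, e.g.
--
--   --brass-pill /path/to/brass.pill
--
-- or via the matching TASTY_BRASS_PILL environment variable, the same
-- mechanism the old TASTY_NUM_THREADS setEnv relied on. Another pill flavour
-- would just be another alias over the same newtype, e.g. (hypothetical):
--
--   type Solid = Pill "solid"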
String | Success a - deriving (Eq, Show, Typeable, Functor, Foldable, Traversable) - -instance Applicative Result where - pure = Success - (<*>) = ap - -instance Fail.MonadFail Result where - fail err = Error err - -instance Monad Result where - return = pure - fail = Fail.fail - - Success a >>= k = k a - Error err >>= _ = Error err - -instance MonadPlus Result where - mzero = fail "mzero" - mplus a@(Success _) _ = a - mplus _ b = b - -instance Alternative Result where - empty = mzero - (<|>) = mplus - -instance Semigroup (Result a) where - (<>) = mplus - {-# INLINE (<>) #-} - -instance Monoid (Result a) where - mempty = fail "mempty" - mappend = (<>) - - -- "Parser" -------------------------------------------------------------------- -type Failure f r = ParseStack -> String -> f r -type Success a f r = a -> f r +type Failure a = ParseStack -> String -> a +type Success a b = a -> b newtype Parser a = Parser { - runParser :: forall f r. ParseStack -> Failure f r -> Success a f r -> f r + runParser :: forall r. ParseStack -> Failure r -> Success a r -> r } named :: Text -> Parser a -> Parser a @@ -111,9 +38,8 @@ instance Monad Parser where m >>= g = Parser $ \path kf ks -> let ks' a = runParser (g a) path kf ks in runParser m path kf ks' return = pure - fail = Fail.fail -instance Fail.MonadFail Parser where +instance MonadFail Parser where fail msg = Parser $ \path kf _ks -> kf (reverse path) msg instance Functor Parser where @@ -160,13 +86,13 @@ fromNoun :: FromNoun a => Noun -> Maybe a fromNoun n = runParser (parseNoun n) [] onFail onSuccess where onFail p m = Nothing - onSuccess x = Just x + onSuccess !x = Just x fromNounErr :: FromNoun a => Noun -> Either ([Text], Text) a fromNounErr n = runParser (parseNoun n) [] onFail onSuccess where onFail p m = Left (p, pack m) - onSuccess x = Right x + onSuccess !x = Right x data BadNoun = BadNoun [Text] String deriving (Eq, Ord) @@ -186,7 +112,7 @@ fromNounExn :: MonadIO m => FromNoun a => Noun -> m a fromNounExn n = runParser (parseNoun n) [] onFail onSuccess where onFail p m = throwIO (BadNoun p m) - onSuccess x = pure x + onSuccess !x = pure x -- Cord Conversions ------------------------------------------------------------ diff --git a/pkg/hs/urbit-noun-core/lib/Urbit/Noun/Core.hs b/pkg/hs/urbit-noun-core/lib/Urbit/Noun/Core.hs index fba92b0c4..0aeca303d 100644 --- a/pkg/hs/urbit-noun-core/lib/Urbit/Noun/Core.hs +++ b/pkg/hs/urbit-noun-core/lib/Urbit/Noun/Core.hs @@ -34,8 +34,8 @@ import qualified Data.Char as C -- Types ----------------------------------------------------------------------- data Noun - = NCell Int Word Noun Noun - | NAtom Int Atom + = NCell Int Word !Noun !Noun + | NAtom Int !Atom pattern Cell x y <- NCell _ _ x y where Cell = mkCell pattern Atom a <- NAtom _ a where Atom = mkAtom diff --git a/pkg/hs/urbit-noun-core/lib/Urbit/Noun/Cue.hs b/pkg/hs/urbit-noun-core/lib/Urbit/Noun/Cue.hs index 0bc122fb3..7fcc79ff4 100644 --- a/pkg/hs/urbit-noun-core/lib/Urbit/Noun/Cue.hs +++ b/pkg/hs/urbit-noun-core/lib/Urbit/Noun/Cue.hs @@ -12,14 +12,15 @@ import ClassyPrelude import Urbit.Atom import Urbit.Noun.Core -import Data.Bits (shiftL, shiftR, (.&.), (.|.)) -import Data.Function ((&)) -import Foreign.Ptr (Ptr, castPtr, plusPtr, ptrToWordPtr) -import Foreign.Storable (peek) -import GHC.Prim (ctz#) -import GHC.Word (Word(..)) -import System.IO.Unsafe (unsafePerformIO) -import Text.Printf (printf) +import Control.Monad.Fail (MonadFail (fail)) +import Data.Bits (shiftL, shiftR, (.&.), (.|.)) +import Data.Function ((&)) +import Foreign.Ptr 
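-- [Editor's sketch, not part of the patch.] The rewritten Parser above is in
-- continuation-passing style: callers supply a failure continuation (parse
-- stack plus message) and a success continuation, and thereby choose the
-- result type -- which is exactly how fromNoun, fromNounErr and fromNounExn
-- differ. Reduced to its core:

{-# LANGUAGE RankNTypes #-}

newtype P a = P
  { runP :: forall r. [String] -> ([String] -> String -> r) -> (a -> r) -> r }

toMaybe :: P a -> Maybe a
toMaybe p = runP p [] (\_ _ -> Nothing) Just

toEither :: P a -> Either ([String], String) a
toEither p = runP p [] (\stack msg -> Left (stack, msg)) Right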
(Ptr, castPtr, plusPtr, ptrToWordPtr) +import Foreign.Storable (peek) +import GHC.Prim (ctz#) +import GHC.Word (Word(..)) +import System.IO.Unsafe (unsafePerformIO) +import Text.Printf (printf) import qualified Data.ByteString.Unsafe as BS import qualified Data.HashTable.IO as H @@ -136,6 +137,7 @@ instance Monad Get where runGet (f x') end tbl s' {-# INLINE (>>=) #-} +instance MonadFail Get where fail msg = Get $ \end tbl s -> do badEncoding end s msg {-# INLINE fail #-} diff --git a/pkg/hs/urbit-noun-core/lib/Urbit/Noun/Jam.hs b/pkg/hs/urbit-noun-core/lib/Urbit/Noun/Jam.hs index 3b5d02f44..e9f2a64c2 100644 --- a/pkg/hs/urbit-noun-core/lib/Urbit/Noun/Jam.hs +++ b/pkg/hs/urbit-noun-core/lib/Urbit/Noun/Jam.hs @@ -1,7 +1,7 @@ {-# OPTIONS_GHC -O2 #-} {-| - Fast implementation of Jam (Noun → Atom). + Fast implementation of Jam (Noun -> Atom). This is based on the implementation of `flat`. -} diff --git a/pkg/hs/urbit-noun-core/lib/Urbit/Noun/TH.hs b/pkg/hs/urbit-noun-core/lib/Urbit/Noun/TH.hs index 641161d1d..f2a97569a 100644 --- a/pkg/hs/urbit-noun-core/lib/Urbit/Noun/TH.hs +++ b/pkg/hs/urbit-noun-core/lib/Urbit/Noun/TH.hs @@ -4,6 +4,7 @@ module Urbit.Noun.TH (deriveNoun, deriveToNoun, deriveFromNoun) where import ClassyPrelude hiding (fromList) +import Control.Monad.Fail (fail) import Language.Haskell.TH import Language.Haskell.TH.Syntax import Urbit.Noun.Convert diff --git a/pkg/hs/urbit-noun/lib/Urbit/Noun.hs b/pkg/hs/urbit-noun/lib/Urbit/Noun.hs index 7e443992a..ac62f5c3f 100644 --- a/pkg/hs/urbit-noun/lib/Urbit/Noun.hs +++ b/pkg/hs/urbit-noun/lib/Urbit/Noun.hs @@ -49,7 +49,7 @@ data LoadErr instance Exception LoadErr -loadFile :: ∀a. FromNoun a => FilePath -> IO (Either LoadErr a) +loadFile :: forall a. FromNoun a => FilePath -> IO (Either LoadErr a) loadFile pax = try $ do byt <- try (readFile pax) >>= either (throwIO . FileErr) pure non <- cueBS byt & either (throwIO . CueErr) pure diff --git a/pkg/hs/urbit-noun/lib/Urbit/Noun/Conversions.hs b/pkg/hs/urbit-noun/lib/Urbit/Noun/Conversions.hs index 6a8b5fddf..499712fc7 100644 --- a/pkg/hs/urbit-noun/lib/Urbit/Noun/Conversions.hs +++ b/pkg/hs/urbit-noun/lib/Urbit/Noun/Conversions.hs @@ -12,11 +12,14 @@ module Urbit.Noun.Conversions , UD(..), UV(..), UW(..), cordToUW , Mug(..), Path(..), EvilPath(..), Ship(..) , Lenient(..), pathToFilePath, filePathToPath + , showUD, tshowUD + , textAsTa ) where import ClassyPrelude hiding (hash) import Control.Lens hiding (Each, Index, (<.>)) +import Control.Monad.Fail (fail) import Data.Void import Data.Word import Text.Regex.TDFA @@ -33,12 +36,15 @@ import GHC.Types (Char(C#)) import GHC.Word (Word32(W32#)) import Prelude ((!!)) import RIO.FilePath (joinPath, splitDirectories, takeBaseName, - takeDirectory, takeExtension, (<.>)) + takeDirectory, takeExtension) import Urbit.Noun.Cue (cue) import Urbit.Noun.Jam (jam) +import Urbit.Ob (patp) import qualified Data.Char as C +import qualified Data.Text as T import qualified Data.Text.Encoding as T +import qualified Numeric as N -- Noun ------------------------------------------------------------------------ @@ -97,22 +103,28 @@ instance FromNoun UD where Nothing -> fail ("invalid decimal atom: " <> unpack (filter (/= '.') t)) Just vl -> pure (UD vl) +showUD :: (Show i, Integral i) => i -> String +showUD = uTypeAddDots 3 . show + +tshowUD :: (Show i, Integral i) => i -> Text +tshowUD = pack . uTypeAddDots 3 . show + -------------------------------------------------------------------------------- -uTypeAddDots :: String -> String -uTypeAddDots = reverse . go . 
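-- [Editor's note: worked example, not part of the patch.] uTypeAddDots now
-- takes the group width as an argument, so showUD/tshowUD render @ud-style
-- dotted decimals (convertToU, just below, keeps its original five-character
-- groups):
--
--   showUD  (1000000 :: Integer)  ==  "1.000.000"
--   tshowUD (65536   :: Integer)  ==  "65.536"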
reverse +uTypeAddDots :: Int -> String -> String +uTypeAddDots n = reverse . go . reverse where go s = if null tel then hed else hed <> "." <> go tel where - hed = take 5 s - tel = drop 5 s + hed = take n s + tel = drop n s convertToU :: [Char] -> [Char] -> Atom -> String convertToU baseMap prefix = go [] where - go acc 0 = "0" <> prefix <> uTypeAddDots acc + go acc 0 = "0" <> prefix <> uTypeAddDots 5 acc go acc n = go (char n : acc) (n `div` len) char n = baseMap !! (fromIntegral (n `mod` len)) @@ -543,6 +555,18 @@ instance FromNoun Knot where then pure (MkKnot txt) else fail ("Non-ASCII chars in knot: " <> unpack txt) +-- equivalent of (cury scot %t) +textAsTa :: Text -> Text +textAsTa = ("~~" <>) . concatMap \case + ' ' -> "." + '.' -> "~." + '~' -> "~~" + c -> + if C.isAlphaNum c || (c == '-') then + T.singleton c + else + if C.ord c < 0x10 then "~0" else "~" + <> (pack $ N.showHex (C.ord c) ".") -- Term ------------------------------------------------------------------------ @@ -571,7 +595,10 @@ instance FromNoun Term where -- XX TODO -- Ship ------------------------------------------------------------------------ newtype Ship = Ship Word128 -- @p - deriving newtype (Eq, Ord, Show, Enum, Real, Integral, Num, ToNoun, FromNoun) + deriving newtype (Eq, Ord, Enum, Real, Integral, Num, ToNoun, FromNoun) + +instance Show Ship where + show = show . patp . fromIntegral -- Path ------------------------------------------------------------------------ diff --git a/pkg/hs/urbit-noun/lib/Urbit/Noun/Tank.hs b/pkg/hs/urbit-noun/lib/Urbit/Noun/Tank.hs index bfba684c7..9dbbdf2f9 100644 --- a/pkg/hs/urbit-noun/lib/Urbit/Noun/Tank.hs +++ b/pkg/hs/urbit-noun/lib/Urbit/Noun/Tank.hs @@ -85,6 +85,12 @@ ram = \case loop [x] = ram x <> r loop (x:xs) = ram x <> p <> loop xs +tankToText :: Tank -> Text +tankToText (Tank t) = unlines $ fmap unTape $ wash (WashCfg 0 80) t + +textToTank :: Text -> Tank +textToTank = Tank . Leaf . Tape + {- ++ win |= {tab/@ edg/@} diff --git a/pkg/hs/urbit-noun/lib/Urbit/Noun/Tree.hs b/pkg/hs/urbit-noun/lib/Urbit/Noun/Tree.hs index 61751cca5..6ff6d7586 100644 --- a/pkg/hs/urbit-noun/lib/Urbit/Noun/Tree.hs +++ b/pkg/hs/urbit-noun/lib/Urbit/Noun/Tree.hs @@ -27,14 +27,14 @@ import GHC.Natural (Natural) -- Types ----------------------------------------------------------------------- data NounVal a = NounVal - { non ∷ Noun - , val ∷ !a + { non :: !Noun + , val :: !a } data HoonTreeNode a = NTN - { n ∷ NounVal a - , l ∷ HoonTree a - , r ∷ HoonTree a + { n :: !(NounVal a) + , l :: !(HoonTree a) + , r :: !(HoonTree a) } deriving (Eq, Ord, Show) @@ -43,10 +43,10 @@ data HoonTree a = E | Node (HoonTreeNode a) pattern N n l r = Node (NTN n l r) -newtype HoonSet a = HoonSet { unHoonSet ∷ HoonTree a } +newtype HoonSet a = HoonSet { unHoonSet :: HoonTree a } deriving newtype (Eq, Ord, Show, FromNoun, ToNoun) -newtype HoonMap k v = HoonMap { unHoonMap ∷ HoonTree (k, v) } +newtype HoonMap k v = HoonMap { unHoonMap :: HoonTree (k, v) } deriving newtype (Eq, Ord, Show, FromNoun, ToNoun) @@ -61,17 +61,17 @@ instance Ord (NounVal a) where instance ToNoun (NounVal a) where toNoun = non -instance Show a ⇒ Show (NounVal a) where +instance Show a => Show (NounVal a) where show = show . 
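-- [Editor's note: worked example, not part of the patch.] textAsTa escapes
-- arbitrary text into knot-safe form, mirroring (cury scot %t) in Hoon:
--
--   textAsTa "hello world"  ==  "~~hello.world"    -- space becomes '.'
--   textAsTa "tilde~dot."   ==  "~~tilde~~dot~."   -- '~' and '.' are escaped
--
-- and the new `Show Ship` instance above renders a ship through `patp`, i.e.
-- as its @p name rather than as a raw Word128.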
val -instance FromNoun a ⇒ FromNoun (NounVal a) where +instance FromNoun a => FromNoun (NounVal a) where parseNoun x = NounVal x <$> parseNoun x -instance ToNoun a ⇒ ToNoun (HoonTree a) where +instance ToNoun a => ToNoun (HoonTree a) where toNoun E = A 0 toNoun (Node n) = toNoun n -instance FromNoun a ⇒ FromNoun (HoonTree a) where +instance FromNoun a => FromNoun (HoonTree a) where parseNoun (A 0) = pure E parseNoun n = Node <$> parseNoun n @@ -82,60 +82,60 @@ deriveNoun ''HoonTreeNode type Nat = Natural -slowMug ∷ Noun → Nat +slowMug :: Noun -> Nat slowMug = trim 0xcafe_babe . \case - A a → a - C h t → mix (slowMug h) $ mix 0x7fff_ffff (slowMug t) + A a -> a + C h t -> mix (slowMug h) $ mix 0x7fff_ffff (slowMug t) where - trim ∷ Nat → Nat → Nat + trim :: Nat -> Nat -> Nat trim syd key = if 0/=ham then ham else trim (succ syd) key where haz = muk syd (met 3 key) key ham = mix (rsh 0 31 haz) (end 0 31 haz) -mix ∷ Nat → Nat → Nat +mix :: Nat -> Nat -> Nat mix = xor -- Murmur3 -muk ∷ Nat → Nat → Nat → Nat +muk :: Nat -> Nat -> Nat -> Nat muk seed len = fromIntegral . murmur3 (word32 seed) . resize . atomBytes where - resize ∷ ByteString → ByteString + resize :: ByteString -> ByteString resize buf = case compare (length buf) (int len) of - EQ → buf - LT → error "bad-muk" - GT → error "bad-muk" --- LT → buf <> replicate (len - length buf) 0 --- GT → take len buf + EQ -> buf + LT -> error "bad-muk" + GT -> error "bad-muk" +-- LT -> buf <> replicate (len - length buf) 0 +-- GT -> take len buf -int ∷ Integral i ⇒ i → Int +int :: Integral i => i -> Int int = fromIntegral -word32 ∷ Integral i ⇒ i → Word32 +word32 :: Integral i => i -> Word32 word32 = fromIntegral -bex ∷ Nat → Nat +bex :: Nat -> Nat bex = (2^) -end ∷ Nat → Nat → Nat → Nat +end :: Nat -> Nat -> Nat -> Nat end blockSize blocks n = n `mod` (bex (bex blockSize * blocks)) -rsh ∷ Nat → Nat → Nat → Nat +rsh :: Nat -> Nat -> Nat -> Nat rsh blockSize blocks n = shiftR n $ fromIntegral $ (bex blockSize * blocks) -met ∷ Nat → Nat → Nat +met :: Nat -> Nat -> Nat met bloq = go 0 where go c 0 = c go c n = go (succ c) (rsh bloq 1 n) -- XX TODO -mug ∷ Noun → Nat +mug :: Noun -> Nat mug = slowMug @@ -144,7 +144,7 @@ mug = slowMug {- Orders in ascending double mug hash order, collisions fall back to dor. -} -mor ∷ Noun → Noun → Bool +mor :: Noun -> Noun -> Bool mor a b = if c == d then dor a b else c < d where c = mug $ A $ mug a @@ -153,7 +153,7 @@ mor a b = if c == d then dor a b else c < d {- Orders in ascending tree depth. -} -dor ∷ Noun → Noun → Bool +dor :: Noun -> Noun -> Bool dor a b | a == b = True dor (A a) (C _ _) = True dor (C x y) (A b) = False @@ -166,80 +166,80 @@ dor (C x y) (C p q) = dor x p Collisions fall back to dor. -} -gor ∷ Noun → Noun → Bool +gor :: Noun -> Noun -> Bool gor a b = if c==d then dor a b else c NounVal a -> Bool morVal = on mor non gorVal = on gor non -------------------------------------------------------------------------------- -nounVal ∷ ToNoun a ⇒ Iso' a (NounVal a) +nounVal :: ToNoun a => Iso' a (NounVal a) nounVal = iso to val where to x = NounVal (toNoun x) x -treeToList ∷ ∀a. HoonTree a → [a] +treeToList :: forall a. HoonTree a -> [a] treeToList = go [] where - go ∷ [a] → HoonTree a → [a] + go :: [a] -> HoonTree a -> [a] go acc = \case - E → acc - Node (NTN v l r) → go (go (val v : acc) l) r + E -> acc + Node (NTN v l r) -> go (go (val v : acc) l) r -setFromHoonSet ∷ Ord a ⇒ HoonSet a → Set a +setFromHoonSet :: Ord a => HoonSet a -> Set a setFromHoonSet = setFromList . treeToList . 
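-- [Editor's note: worked example, not part of the patch.] The helpers above
-- mirror their Hoon namesakes; with bloq size 3 (bytes):
--
--   bex 3       ==  8            -- 2^3
--   met 3 256   ==  2            -- 256 occupies two bytes
--   rsh 0 31 x  ==  x `shiftR` 31
--   end 0 31 x  ==  x `mod` (2 ^ 31)
--
-- so `trim` in slowMug xors the murmur3 hash shifted right by 31 bits with its
-- low 31 bits, reseeding until the result is nonzero.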
unHoonSet -mapFromHoonMap ∷ Ord k ⇒ HoonMap k v → Map k v +mapFromHoonMap :: Ord k => HoonMap k v -> Map k v mapFromHoonMap = mapFromList . treeToList . unHoonMap -setToHoonSet ∷ ∀a. (Ord a, ToNoun a) ⇒ Set a → HoonSet a +setToHoonSet :: forall a. (Ord a, ToNoun a) => Set a -> HoonSet a setToHoonSet = HoonSet . foldr put E . fmap (view nounVal) . setToList where put x = \case - E → N x E E - Node a | x == n a → Node a - Node a | gorVal x (n a) → lef x a - Node a → rit x a + E -> N x E E + Node a | x == n a -> Node a + Node a | gorVal x (n a) -> lef x a + Node a -> rit x a rit x a = put x (r a) & \case - E → error "bad-put-set" - Node c | morVal (n a) (n c) → N (n a) (l a) (Node c) - Node c → N (n c) (N (n a) (l a) (l c)) (r c) + E -> error "bad-put-set" + Node c | morVal (n a) (n c) -> N (n a) (l a) (Node c) + Node c -> N (n c) (N (n a) (l a) (l c)) (r c) lef x a = put x (l a) & \case - E → error "bad-put-set" - Node c | morVal (n a) (n c) → N (n a) (Node c) (r a) - Node c → N (n c) (l c) (N (n a) (r c) (r a)) + E -> error "bad-put-set" + Node c | morVal (n a) (n c) -> N (n a) (Node c) (r a) + Node c -> N (n c) (l c) (N (n a) (r c) (r a)) -p ∷ (ToNoun a, ToNoun b) ⇒ NounVal (a,b) → NounVal a +p :: (ToNoun a, ToNoun b) => NounVal (a,b) -> NounVal a p = view (from nounVal . to fst . nounVal) -pq ∷ (ToNoun a, ToNoun b) ⇒ NounVal (a,b) → (NounVal a, NounVal b) +pq :: (ToNoun a, ToNoun b) => NounVal (a,b) -> (NounVal a, NounVal b) pq = boof . view (from nounVal) where boof (x, y) = (x ^. nounVal, y ^. nounVal) -mapToHoonMap ∷ ∀k v. (ToNoun k, ToNoun v, Ord k, Ord v) ⇒ Map k v → HoonMap k v +mapToHoonMap :: forall k v. (ToNoun k, ToNoun v, Ord k, Ord v) => Map k v -> HoonMap k v mapToHoonMap = HoonMap . foldr put E . fmap (view nounVal) . mapToList where - put ∷ NounVal (k, v) → HoonTree (k, v) → HoonTree (k, v) + put :: NounVal (k, v) -> HoonTree (k, v) -> HoonTree (k, v) put kv@(pq -> (b, c)) = \case - E → N kv E E - Node a | kv == n a → Node a - Node a | b == p (n a) → N kv (l a) (r a) - Node a | gorVal b (p $ n a) → lef kv a - Node a → rit kv a + E -> N kv E E + Node a | kv == n a -> Node a + Node a | b == p (n a) -> N kv (l a) (r a) + Node a | gorVal b (p $ n a) -> lef kv a + Node a -> rit kv a lef kv@(pq -> (b, c)) a = put kv (l a) & \case - E → error "bad-put-map" - Node d | morVal (p $ n a) (p $ n d) → N (n a) (Node d) (r a) - Node d → N (n d) (l d) (N (n a) (r d) (r a)) + E -> error "bad-put-map" + Node d | morVal (p $ n a) (p $ n d) -> N (n a) (Node d) (r a) + Node d -> N (n d) (l d) (N (n a) (r d) (r a)) rit kv@(pq -> (b, c)) a = put kv (r a) & \case - E → error "bad-put-map" - Node d | morVal (p $ n a) (p $ n d) → N (n a) (l a) (Node d) - Node d → N (n d) (N (n a) (l a) (l d)) (r d) + E -> error "bad-put-map" + Node d | morVal (p $ n a) (p $ n d) -> N (n a) (l a) (Node d) + Node d -> N (n d) (N (n a) (l a) (l d)) (r d) diff --git a/pkg/hs/urbit-noun/package.yaml b/pkg/hs/urbit-noun/package.yaml index d94be31f6..03d08a117 100644 --- a/pkg/hs/urbit-noun/package.yaml +++ b/pkg/hs/urbit-noun/package.yaml @@ -20,11 +20,11 @@ dependencies: - lens - murmur3 - regex-tdfa - - regex-tdfa-text - rio - text - time - urbit-atom + - urbit-hob - urbit-noun-core default-extensions: diff --git a/pkg/hs/urbit-termsize/lib/Urbit/TermSize.hs b/pkg/hs/urbit-termsize/lib/Urbit/TermSize.hs index 33f2d78cf..af4e1f0b0 100644 --- a/pkg/hs/urbit-termsize/lib/Urbit/TermSize.hs +++ b/pkg/hs/urbit-termsize/lib/Urbit/TermSize.hs @@ -37,4 +37,6 @@ termSize = size <&> \case liveTermSize :: (TermSize -> IO ()) -> IO 
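-- [Editor's note, not part of the patch.] setToHoonSet / mapToHoonMap above
-- rebuild Hoon's hash-ordered treaps (`gor` orders keys, `mor` balances), and
-- they invert the corresponding *FromHoon* conversions -- which is exactly
-- what the QuickCheck properties in HoonMapSetTests assert:
--
--   setFromHoonSet (setToHoonSet s)  ==  s
--   mapFromHoonMap (mapToHoonMap m)  ==  m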
TermSize liveTermSize cb = do Sys.installHandler Sys.sigWINCH (Sys.Catch (termSize >>= cb)) Nothing - termSize + ts <- termSize + cb ts + pure ts diff --git a/pkg/interface/package-lock.json b/pkg/interface/package-lock.json index 812ebc329..1adb54bcd 100644 --- a/pkg/interface/package-lock.json +++ b/pkg/interface/package-lock.json @@ -1585,22 +1585,6 @@ } } }, - "@react-dnd/asap": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/@react-dnd/asap/-/asap-4.0.0.tgz", - "integrity": "sha512-0XhqJSc6pPoNnf8DhdsPHtUhRzZALVzYMTzRwV4VI6DJNJ/5xxfL9OQUwb8IH5/2x7lSf7nAZrnzUD+16VyOVQ==" - }, - "@react-dnd/invariant": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/@react-dnd/invariant/-/invariant-2.0.0.tgz", - "integrity": "sha512-xL4RCQBCBDJ+GRwKTFhGUW8GXa4yoDfJrPbLblc3U09ciS+9ZJXJ3Qrcs/x2IODOdIE5kQxvMmE2UKyqUictUw==" - }, - "@react-dnd/shallowequal": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/@react-dnd/shallowequal/-/shallowequal-2.0.0.tgz", - "integrity": "sha512-Pc/AFTdwZwEKJxFJvlxrSmGe/di+aAOBn60sremrpLo6VI/6cmiUYNNwlI5KNYttg7uypzA3ILPMPgxB2GYZEg==", - "dev": true - }, "@styled-system/background": { "version": "5.1.2", "resolved": "https://registry.npmjs.org/@styled-system/background/-/background-5.1.2.tgz", @@ -1709,8 +1693,9 @@ "integrity": "sha512-3OPSdf9cejP/TSzWXuBaYbzLtAfBzQnc75SlPLkoPfwpxnv1Bvy9hiWngLY0WnKRR6lMOldnkYQCCuNWeDibYQ==" }, "@tlon/indigo-react": { - "version": "github:urbit/indigo-react#a9ad1e2ca3c318b7455ed942d288340400e2481d", - "from": "github:urbit/indigo-react#lf/1.2.9", + "version": "1.2.13", + "resolved": "https://registry.npmjs.org/@tlon/indigo-react/-/indigo-react-1.2.13.tgz", + "integrity": "sha512-6qYLjVcGZtDjI+BqS2PRrfAh9mUCDtYwDOHuYuPyV87mdVRAhduBlQ/3tDVlTNWICF9DeAhozeClxalACs5Ipw==", "requires": { "@reach/menu-button": "^0.10.5", "react": "^16.13.1", @@ -1718,9 +1703,9 @@ }, "dependencies": { "tslib": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.0.2.tgz", - "integrity": "sha512-wAH28hcEKwna96/UacuWaVspVLkg4x1aDM9JlzqaQTOFczCktkVAb5fmXChgandR1EraDPs2w8P+ozM+oafwxg==" + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.0.3.tgz", + "integrity": "sha512-uZtkfKblCEQtZKBF6EBXVZeQNl82yqtDQdv+eck8u7tdPxjLu2/lp5/uPW+um2tpuxINHWy3GhiccY7QgEaVHQ==" } } }, @@ -2682,6 +2667,11 @@ "integrity": "sha1-3DQxT05nkxgJP8dgJyUl+UvyXBY=", "dev": true }, + "big-integer": { + "version": "1.6.48", + "resolved": "https://registry.npmjs.org/big-integer/-/big-integer-1.6.48.tgz", + "integrity": "sha512-j51egjPa7/i+RdiRuJbPdJ2FIUYYPhvYLjzoYbcMMm62ooO6F94fETG4MTs46zPAF9Brs04OajboA/qTGuz78w==" + }, "big.js": { "version": "5.2.2", "resolved": "https://registry.npmjs.org/big.js/-/big.js-5.2.2.tgz", @@ -3909,21 +3899,6 @@ "randombytes": "^2.0.0" } }, - "dnd-core": { - "version": "11.1.3", - "resolved": "https://registry.npmjs.org/dnd-core/-/dnd-core-11.1.3.tgz", - "integrity": "sha512-QugF55dNW+h+vzxVJ/LSJeTeUw9MCJ2cllhmVThVPEtF16ooBkxj0WBE5RB+AceFxMFo1rO6bJKXtqKl+JNnyA==", - "requires": { - "@react-dnd/asap": "^4.0.0", - "@react-dnd/invariant": "^2.0.0", - "redux": "^4.0.4" - } - }, - "dnd-multi-backend": { - "version": "6.0.0", - "resolved": "https://registry.npmjs.org/dnd-multi-backend/-/dnd-multi-backend-6.0.0.tgz", - "integrity": "sha512-qfUO4V0IACs24xfE9m9OUnwIzoL+SWzSiFbKVIHE0pFddJeZ93BZOdHS1XEYr8X3HNh+CfnfjezXgOMgjvh74g==" - }, "dns-equal": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/dns-equal/-/dns-equal-1.0.0.tgz", @@ -4876,6 +4851,11 @@ } } }, + 
"file-saver": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/file-saver/-/file-saver-2.0.2.tgz", + "integrity": "sha512-Wz3c3XQ5xroCxd1G8b7yL0Ehkf0TC9oYC6buPFkNnU9EnaPlifeAFCyCh+iewXTyFRcg0a6j3J7FmJsIhlhBdw==" + }, "file-uri-to-path": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/file-uri-to-path/-/file-uri-to-path-1.0.0.tgz", @@ -7872,53 +7852,6 @@ "resolved": "https://registry.npmjs.org/react-codemirror2/-/react-codemirror2-6.0.1.tgz", "integrity": "sha512-rutEKVgvFhWcy/GeVA1hFbqrO89qLqgqdhUr7YhYgIzdyICdlRQv+ztuNvOFQMXrO0fLt0VkaYOdMdYdQgsSUA==" }, - "react-dnd": { - "version": "11.1.3", - "resolved": "https://registry.npmjs.org/react-dnd/-/react-dnd-11.1.3.tgz", - "integrity": "sha512-8rtzzT8iwHgdSC89VktwhqdKKtfXaAyC4wiqp0SywpHG12TTLvfOoL6xNEIUWXwIEWu+CFfDn4GZJyynCEuHIQ==", - "dev": true, - "requires": { - "@react-dnd/shallowequal": "^2.0.0", - "@types/hoist-non-react-statics": "^3.3.1", - "dnd-core": "^11.1.3", - "hoist-non-react-statics": "^3.3.0" - } - }, - "react-dnd-html5-backend": { - "version": "11.1.3", - "resolved": "https://registry.npmjs.org/react-dnd-html5-backend/-/react-dnd-html5-backend-11.1.3.tgz", - "integrity": "sha512-/1FjNlJbW/ivkUxlxQd7o3trA5DE33QiRZgxent3zKme8DwF4Nbw3OFVhTRFGaYhHFNL1rZt6Rdj1D78BjnNLw==", - "requires": { - "dnd-core": "^11.1.3" - } - }, - "react-dnd-multi-backend": { - "version": "6.0.2", - "resolved": "https://registry.npmjs.org/react-dnd-multi-backend/-/react-dnd-multi-backend-6.0.2.tgz", - "integrity": "sha512-SwpqRv0HkJYu244FbHf9NbvGzGy14Ir9wIAhm909uvOVaHgsOq6I1THMSWSgpwUI31J3Bo5uS19tuvGpVPjzZw==", - "requires": { - "dnd-multi-backend": "^6.0.0", - "prop-types": "^15.7.2", - "react-dnd-preview": "^6.0.2" - } - }, - "react-dnd-preview": { - "version": "6.0.2", - "resolved": "https://registry.npmjs.org/react-dnd-preview/-/react-dnd-preview-6.0.2.tgz", - "integrity": "sha512-F2+uK4Be+q+7mZfNh9kaZols7wp1hX6G7UBTVaTpDsBpMhjFvY7/v7odxYSerSFBShh23MJl33a4XOVRFj1zoQ==", - "requires": { - "prop-types": "^15.7.2" - } - }, - "react-dnd-touch-backend": { - "version": "11.1.3", - "resolved": "https://registry.npmjs.org/react-dnd-touch-backend/-/react-dnd-touch-backend-11.1.3.tgz", - "integrity": "sha512-8lz4fxfYwUuJ6Y2seQYwh8+OfwKcbBX0CIbz7AwXfBYz54Wg2nIDU6CP8Dyybt/Wyx4D3oXmTPEaOMB62uqJvQ==", - "requires": { - "@react-dnd/invariant": "^2.0.0", - "dnd-core": "^11.1.3" - } - }, "react-dom": { "version": "16.13.1", "resolved": "https://registry.npmjs.org/react-dom/-/react-dom-16.13.1.tgz", @@ -8095,15 +8028,6 @@ "picomatch": "^2.2.1" } }, - "redux": { - "version": "4.0.5", - "resolved": "https://registry.npmjs.org/redux/-/redux-4.0.5.tgz", - "integrity": "sha512-VSz1uMAH24DM6MF72vcojpYPtrTUu3ByVWfPL1nPfVRb5mZVTve5GnNCUV53QM/BZ66xfWrm0CTWoM+Xlz8V1w==", - "requires": { - "loose-envify": "^1.4.0", - "symbol-observable": "^1.2.0" - } - }, "regenerate": { "version": "1.4.0", "resolved": "https://registry.npmjs.org/regenerate/-/regenerate-1.4.0.tgz", @@ -9446,11 +9370,6 @@ "xml-reader": "2.4.3" } }, - "symbol-observable": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/symbol-observable/-/symbol-observable-1.2.0.tgz", - "integrity": "sha512-e900nM8RRtGhlV36KGEU9k65K3mPb1WV70OdjfxlG2EAuM1noi/E/BaW/uMhL7bPEssK8QV57vN3esixjUvcXQ==" - }, "synchronous-promise": { "version": "2.0.13", "resolved": "https://registry.npmjs.org/synchronous-promise/-/synchronous-promise-2.0.13.tgz", diff --git a/pkg/interface/package.json b/pkg/interface/package.json index 619ccae0f..5786f7494 100644 --- a/pkg/interface/package.json +++ 
b/pkg/interface/package.json @@ -9,12 +9,14 @@ "@reach/menu-button": "^0.10.5", "@reach/tabs": "^0.10.5", "@tlon/indigo-light": "^1.0.3", - "@tlon/indigo-react": "urbit/indigo-react#lf/1.2.9", + "@tlon/indigo-react": "1.2.13", "@tlon/sigil-js": "^1.4.2", "aws-sdk": "^2.726.0", + "big-integer": "^1.6.48", "classnames": "^2.2.6", "codemirror": "^5.55.0", "css-loader": "^3.5.3", + "file-saver": "^2.0.2", "formik": "^2.1.4", "lodash": "^4.17.15", "markdown-to-jsx": "^6.11.4", @@ -26,9 +28,6 @@ "prop-types": "^15.7.2", "react": "^16.5.2", "react-codemirror2": "^6.0.1", - "react-dnd-html5-backend": "^11.1.3", - "react-dnd-multi-backend": "^6.0.2", - "react-dnd-touch-backend": "^11.1.3", "react-dom": "^16.8.6", "react-helmet": "^6.1.0", "react-markdown": "^4.3.1", @@ -73,7 +72,6 @@ "file-loader": "^6.0.0", "html-webpack-plugin": "^4.2.0", "moment-locales-webpack-plugin": "^1.2.0", - "react-dnd": "^11.1.3", "react-hot-loader": "^4.12.21", "sass": "^1.26.5", "sass-loader": "^8.0.2", diff --git a/pkg/interface/src/logic/api/global.ts b/pkg/interface/src/logic/api/global.ts index ac4cc8a50..5388259a2 100644 --- a/pkg/interface/src/logic/api/global.ts +++ b/pkg/interface/src/logic/api/global.ts @@ -9,9 +9,9 @@ import MetadataApi from './metadata'; import ContactsApi from './contacts'; import GroupsApi from './groups'; import LaunchApi from './launch'; -import PublishApi from './publish'; import GraphApi from './graph'; import S3Api from './s3'; +import {HarkApi} from './hark'; export default class GlobalApi extends BaseApi { chat = new ChatApi(this.ship, this.channel, this.store); @@ -21,10 +21,9 @@ export default class GlobalApi extends BaseApi { contacts = new ContactsApi(this.ship, this.channel, this.store); groups = new GroupsApi(this.ship, this.channel, this.store); launch = new LaunchApi(this.ship, this.channel, this.store); - publish = new PublishApi(this.ship, this.channel, this.store); s3 = new S3Api(this.ship, this.channel, this.store); graph = new GraphApi(this.ship, this.channel, this.store); - + hark = new HarkApi(this.ship, this.channel, this.store); constructor( public ship: Patp, diff --git a/pkg/interface/src/logic/api/graph.ts b/pkg/interface/src/logic/api/graph.ts index 98097016e..6f9cec4a2 100644 --- a/pkg/interface/src/logic/api/graph.ts +++ b/pkg/interface/src/logic/api/graph.ts @@ -3,13 +3,56 @@ import { StoreState } from '../store/type'; import { Patp, Path, PatpNoSig } from '~/types/noun'; import _ from 'lodash'; import {makeResource, resourceFromPath} from '../lib/group'; -import {GroupPolicy, Enc, Post} from '~/types'; -import { deSig } from '~/logic/lib/util'; +import {GroupPolicy, Enc, Post, NodeMap, Content} from '~/types'; +import { numToUd, unixToDa } from '~/logic/lib/util'; -export const createPost = (contents: Object[], parentIndex: string = '') => { +export const createBlankNodeWithChildPost = ( + parentIndex: string = '', + childIndex: string = '', + contents: Content[] +) => { + const date = unixToDa(Date.now()).toString(); + const nodeIndex = parentIndex + '/' + date; + + const childGraph = {}; + childGraph[childIndex] = { + post: { + author: `~${window.ship}`, + index: nodeIndex + '/' + childIndex, + 'time-sent': Date.now(), + contents, + hash: null, + signatures: [] + }, + children: { empty: null } + }; + + return { + post: { + author: `~${window.ship}`, + index: nodeIndex, + 'time-sent': Date.now(), + contents: [], + hash: null, + signatures: [] + }, + children: { + graph: childGraph + } + }; +}; + +export const createPost = ( + contents: Content[], + 
parentIndex: string = '', + childIndex:string = 'DATE_PLACEHOLDER' +) => { + if (childIndex === 'DATE_PLACEHOLDER') { + childIndex = unixToDa(Date.now()).toString(); + } return { author: `~${window.ship}`, - index: parentIndex + '/' + Date.now(), + index: parentIndex + '/' + childIndex, 'time-sent': Date.now(), contents, hash: null, @@ -17,6 +60,16 @@ export const createPost = (contents: Object[], parentIndex: string = '') => { }; }; +function moduleToMark(mod: string): string | undefined { + if(mod === 'link') { + return 'graph-validator-link'; + } + if(mod === 'publish') { + return 'graph-validator-publish'; + } + return undefined; +} + export default class GraphApi extends BaseApi { private storeAction(action: any): Promise { @@ -47,7 +100,8 @@ export default class GraphApi extends BaseApi { title, description, associated, - "module": mod + "module": mod, + mark: moduleToMark(mod) } }); } @@ -67,7 +121,8 @@ export default class GraphApi extends BaseApi { title, description, associated: { policy }, - "module": mod + "module": mod, + mark: moduleToMark(mod) } }); } @@ -138,8 +193,21 @@ export default class GraphApi extends BaseApi { }); } + addNode(ship: Patp, name: string, node: Object) { + let nodes = {}; + const resource = { ship, name }; + nodes[node.post.index] = node; + + return this.hookAction(ship, { + 'add-nodes': { + resource, + nodes + } + }); + } + addNodes(ship: Patp, name: string, nodes: Object) { - this.hookAction(ship, { + return this.hookAction(ship, { 'add-nodes': { resource: { ship, name }, nodes @@ -204,9 +272,10 @@ export default class GraphApi extends BaseApi { } getNode(ship: string, resource: string, index: string) { + const idx = index.split('/').map(numToUd).join('/'); return this.scry( 'graph-store', - `/node/${ship}/${resource}/${index}` + `/node/${ship}/${resource}${idx}` ).then((node) => { this.store.handleEvent({ data: node diff --git a/pkg/interface/src/logic/api/hark.ts b/pkg/interface/src/logic/api/hark.ts new file mode 100644 index 000000000..8f33fc480 --- /dev/null +++ b/pkg/interface/src/logic/api/hark.ts @@ -0,0 +1,180 @@ +import BaseApi from "./base"; +import { StoreState } from "../store/type"; +import { dateToDa, decToUd } from "../lib/util"; +import {NotifIndex, IndexedNotification} from "~/types"; +import { BigInteger } from 'big-integer'; +import {getParentIndex} from "../lib/notification"; + +export class HarkApi extends BaseApi { + private harkAction(action: any): Promise { + return this.action("hark-store", "hark-action", action); + } + + private graphHookAction(action: any) { + return this.action("hark-graph-hook", "hark-graph-hook-action", action); + } + + private groupHookAction(action: any) { + return this.action("hark-group-hook", "hark-group-hook-action", action); + } + + private chatHookAction(action: any) { + return this.action("hark-chat-hook", "hark-chat-hook-action", action); + } + + private actOnNotification(frond: string, intTime: BigInteger, index: NotifIndex) { + const time = decToUd(intTime.toString()); + return this.harkAction({ + [frond]: { + time, + index + } + }); + } + + async setMentions(mentions: boolean) { + await this.graphHookAction({ + 'set-mentions': mentions + }); + return this.chatHookAction({ + 'set-mentions': mentions + }); + } + + setWatchOnSelf(watchSelf: boolean) { + return this.graphHookAction({ + 'set-watch-on-self': watchSelf + }); + } + + setDoNotDisturb(dnd: boolean) { + return this.harkAction({ + 'set-dnd': dnd + }); + } + + archive(time: BigInteger, index: NotifIndex) { + return 
this.actOnNotification('archive', time, index); + } + + read(time: BigInteger, index: NotifIndex) { + return this.actOnNotification('read', time, index); + } + + readIndex(index: NotifIndex) { + return this.harkAction({ + 'read-index': index + }); + } + + unread(time: BigInteger, index: NotifIndex) { + return this.actOnNotification('unread', time, index); + } + + seen() { + return this.harkAction({ seen: null }); + } + + mute(notif: IndexedNotification) { + if('graph' in notif.index && 'graph' in notif.notification.contents) { + const { index } = notif; + const parentIndex = getParentIndex(index.graph, notif.notification.contents.graph) + if(!parentIndex) { + return Promise.resolve(); + } + return this.ignoreGraph(index.graph.graph, parentIndex); + } + if('group' in notif.index) { + const { group } = notif.index.group; + return this.ignoreGroup(group); + } + if('chat' in notif.index) { + return this.ignoreChat(notif.index.chat.chat); + } + return Promise.resolve(); + } + + unmute(notif: IndexedNotification) { + if('graph' in notif.index && 'graph' in notif.notification.contents) { + const { index } = notif; + const parentIndex = getParentIndex(index.graph, notif.notification.contents.graph) + if(!parentIndex) { + return Promise.resolve(); + } + return this.listenGraph(index.graph.graph, parentIndex); + } + if('group' in notif.index) { + return this.listenGroup(notif.index.group.group); + } + if('chat' in notif.index) { + return this.listenChat(notif.index.chat.chat); + } + return Promise.resolve(); + } + + ignoreGroup(group: string) { + return this.groupHookAction({ + ignore: group + }) + } + + ignoreGraph(graph: string, index: string) { + return this.graphHookAction({ + ignore: { + graph, + index + } + }) + } + + ignoreChat(chat: string) { + return this.chatHookAction({ + ignore: chat + }); + } + + + listenGroup(group: string) { + return this.groupHookAction({ + listen: group + }) + } + + listenGraph(graph: string, index: string) { + return this.graphHookAction({ + listen: { + graph, + index + } + }) + } + + listenChat(chat: string) { + return this.chatHookAction({ + listen: chat + }); + } + + getMore(archive = false) { + const offset = this.store.state[ + archive ? 'archivedNotifications' : 'notifications' + ].size; + const count = 3; + return this.getSubset(offset,count, archive); + } + + async getSubset(offset:number, count:number, isArchive: boolean) { + const where = isArchive ? 'archive' : 'inbox'; + const data = await this.scry("hark-store", `/recent/${where}/${offset}/${count}`); + this.store.handleEvent({ data }); + } + + async getTimeSubset(start?: Date, end?: Date) { + const s = start ? dateToDa(start) : "-"; + const e = end ? 
dateToDa(end) : "-"; + const result = await this.scry("hark-hook", `/recent/${s}/${e}`); + this.store.handleEvent({ + data: result, + }); + } +} diff --git a/pkg/interface/src/logic/api/invite.ts b/pkg/interface/src/logic/api/invite.ts index 432a266bc..89a730768 100644 --- a/pkg/interface/src/logic/api/invite.ts +++ b/pkg/interface/src/logic/api/invite.ts @@ -3,25 +3,25 @@ import { StoreState } from "../store/type"; import { Serial, Path } from "~/types/noun"; export default class InviteApi extends BaseApi { - accept(app: Path, uid: Serial) { + accept(app: string, uid: Serial) { return this.inviteAction({ accept: { - path: app, + term: app, uid } }); } - decline(app: Path, uid: Serial) { + decline(app: string, uid: Serial) { return this.inviteAction({ decline: { - path: app, + term: app, uid } }); } private inviteAction(action) { - return this.action('invite-store', 'json', action); + return this.action('invite-store', 'invite-action', action); } } diff --git a/pkg/interface/src/logic/api/launch.ts b/pkg/interface/src/logic/api/launch.ts index 30661df83..bbe49db73 100644 --- a/pkg/interface/src/logic/api/launch.ts +++ b/pkg/interface/src/logic/api/launch.ts @@ -1,9 +1,7 @@ import BaseApi from './base'; import { StoreState } from '../store/type'; - export default class LaunchApi extends BaseApi { - add(name: string, tile = { basic : { title: '', linkedUrl: '', iconUrl: '' }}) { return this.launchAction({ add: { name, tile } }); } @@ -12,10 +10,6 @@ export default class LaunchApi extends BaseApi { return this.launchAction({ remove: name }); } - changeOrder(orderedTiles: string[] = []) { - return this.launchAction({ 'change-order': orderedTiles }); - } - changeFirstTime(firstTime = true) { return this.launchAction({ 'change-first-time': firstTime }); } @@ -31,6 +25,5 @@ export default class LaunchApi extends BaseApi { private launchAction(data) { return this.action('launch', 'launch-action', data); } - } diff --git a/pkg/interface/src/logic/api/publish.ts b/pkg/interface/src/logic/api/publish.ts deleted file mode 100644 index 27d4bfdad..000000000 --- a/pkg/interface/src/logic/api/publish.ts +++ /dev/null @@ -1,224 +0,0 @@ -import BaseApi from './base'; - -import { PublishResponse } from '~/types/publish-response'; -import { PatpNoSig, Path } from '~/types/noun'; -import { BookId, NoteId } from '~/types/publish-update'; - -export default class PublishApi extends BaseApi { - handleEvent(data: PublishResponse) { - this.store.handleEvent({ data: { 'publish-response' : data } }); - } - - fetchNotebooks() { - return fetch('/publish-view/notebooks.json') - .then(response => response.json()) - .then((json) => { - this.handleEvent({ - type: 'notebooks', - data: json - }); - }); - } - - fetchNotebook(host: PatpNoSig, book: BookId) { - return fetch(`/publish-view/${host}/${book}.json`) - .then(response => response.json()) - .then((json) => { - this.handleEvent({ - type: 'notebook', - data: json, - host: host, - notebook: book - }); - }); - } - - fetchNote(host: PatpNoSig, book: BookId, note: NoteId) { - return fetch(`/publish-view/${host}/${book}/${note}.json`) - .then(response => response.json()) - .then((json) => { - this.handleEvent({ - type: 'note', - data: json, - host: host, - notebook: book, - note: note - }); - }); - } - - fetchNotesPage(host: PatpNoSig, book: BookId, start: number, length: number) { - return fetch(`/publish-view/notes/${host}/${book}/${start}/${length}.json`) - .then(response => response.json()) - .then((json) => { - this.handleEvent({ - type: 'notes-page', - data: json, - 
host: host, - notebook: book, - startIndex: start, - length: length - }); - }); - } - - fetchCommentsPage(host: PatpNoSig, book: BookId, note: NoteId, start: number, length: number) { - return fetch(`/publish-view/comments/${host}/${book}/${note}/${start}/${length}.json`) - .then(response => response.json()) - .then((json) => { - this.handleEvent({ - type: 'comments-page', - data: json, - host: host, - notebook: book, - note: note, - startIndex: start, - length: length - }); - }); - } - - subscribeNotebook(who: PatpNoSig, book: BookId) { - return this.publishAction({ - subscribe: { - who, - book - } - }); - } - - unsubscribeNotebook(who: PatpNoSig, book: BookId) { - return this.publishAction({ - unsubscribe: { - who, - book - } - }); - } - - publishAction(act: any) { - return this.action('publish', 'publish-action', act); - } - - groupify(bookId: string, group: Path | null) { - return this.publishAction({ - groupify: { - book: bookId, - target: group, - inclusive: false - } - }); - } - - - newBook(bookId: string, title: string, description: string, group?: Path) { - const groupInfo = group ? { 'group-path': group, - invitees: [], - 'use-preexisting': true, - 'make-managed': true - } : { - 'group-path': `/ship/~${window.ship}/${bookId}`, - invitees: [], - 'use-preexisting': false, - 'make-managed': false - }; - return this.publishAction({ - "new-book": { - book: bookId, - title: title, - about: description, - coms: true, - group: groupInfo - } - }); - } - - editBook(bookId: string, title: string, description: string, coms: boolean) { - return this.publishAction({ - "edit-book": { - book: bookId, - title: title, - about: description, - coms, - group: null - } - }); - } - - delBook(book: string) { - return this.publishAction({ - "del-book": { - book - } - }); - } - - newNote(who: PatpNoSig, book: string, note: string, title: string, body: string) { - return this.publishAction({ - 'new-note': { - who, - book, - note, - title, - body - } - }); - } - - editNote(who: PatpNoSig, book: string, note: string, title: string, body: string) { - return this.publishAction({ - 'edit-note': { - who, - book, - note, - title, - body - } - }); - } - - delNote(who: PatpNoSig, book: string, note: string) { - return this.publishAction({ - 'del-note': { - who, - book, - note - } - }); - } - - readNote(who: PatpNoSig, book: string, note: string) { - return this.publishAction({ - read: { - who, - book, - note - } - }); - } - - updateComment(who: PatpNoSig, book: string, note: string, comment: Path, body: string) { - return this.publishAction({ - 'edit-comment': { - who, - book, - note, - comment, - body - } - }); - } - - deleteComment(who: PatpNoSig, book: string, note: string, comment: Path ) { - return this.publishAction({ - "del-comment": { - who, - book, - note, - comment - }, - }); - } - -} - diff --git a/pkg/interface/src/logic/lib/BigIntOrderedMap.ts b/pkg/interface/src/logic/lib/BigIntOrderedMap.ts new file mode 100644 index 000000000..cf7883d97 --- /dev/null +++ b/pkg/interface/src/logic/lib/BigIntOrderedMap.ts @@ -0,0 +1,197 @@ +import bigInt, { BigInteger } from "big-integer"; + +interface NonemptyNode { + n: [BigInteger, V]; + l: MapNode; + r: MapNode; +} + +type MapNode = NonemptyNode | null; + +/** + * An implementation of ordered maps for JS + * Plagiarised wholesale from sys/zuse + */ +export class BigIntOrderedMap implements Iterable<[BigInteger, V]> { + private root: MapNode = null; + size: number = 0; + + constructor(initial: [BigInteger, V][] = []) { + initial.forEach(([key, val]) => { + 
this.set(key, val); + }); + } + + /** + * Retrieve an value for a key + */ + get(key: BigInteger): V | null { + const inner = (node: MapNode) => { + if (!node) { + return node; + } + const [k, v] = node.n; + if (key.eq(k)) { + return v; + } + if (key.gt(k)) { + return inner(node.l); + } else { + return inner(node.r); + } + }; + + return inner(this.root); + } + + /** + * Put an item by a key + */ + set(key: BigInteger, value: V): void { + + const inner = (node: MapNode) => { + if (!node) { + return { + n: [key, value], + l: null, + r: null, + }; + } + const [k] = node.n; + if (key.eq(k)) { + this.size--; + return { + ...node, + n: [k, value], + }; + } + if (key.gt(k)) { + const l = inner(node.l); + if (!l) { + throw new Error("invariant violation"); + } + return { + ...node, + l, + }; + } + const r = inner(node.r); + if (!r) { + throw new Error("invariant violation"); + } + + return { ...node, r }; + }; + this.size++; + this.root = inner(this.root); + } + + /** + * Remove all entries + */ + clear() { + this.root = null; + } + + /** + * Predicate testing if map contains key + */ + has(key: BigInteger): boolean { + const inner = (node: MapNode) => { + if (!node) { + return false; + } + const [k] = node.n; + + if (k.eq(key)) { + return true; + } + if (key.gt(k)) { + return inner(node.l); + } + return inner(node.r); + }; + return inner(this.root); + } + + /** + * Remove value associated with key, returning whether that key + * existed in the first place + */ + delete(key: BigInteger) { + const inner = (node: MapNode): [boolean, MapNode] => { + if (!node) { + return [false, null]; + } + const [k] = node.n; + if (k.eq(key)) { + return [true, this.nip(node)]; + } + if (key.gt(k)) { + const [bool, l] = inner(node.l); + return [ + bool, + { + ...node, + l, + }, + ]; + } + + const [bool, r] = inner(node.r); + return [ + bool, + { + ...node, + r, + }, + ]; + }; + const [ret, newRoot] = inner(this.root); + if(ret) { + this.size--; + } + this.root = newRoot; + return ret; + } + + private nip(nod: NonemptyNode): MapNode { + const inner = (node: NonemptyNode) => { + if (!node.l) { + return node.r; + } + if (!node.r) { + return node.l; + } + return { + ...node.l, + r: inner(node.r), + }; + }; + return inner(nod); + } + + [Symbol.iterator](): IterableIterator<[BigInteger, V]> { + let result: [BigInteger, V][] = []; + const inner = (node: MapNode) => { + if (!node) { + return; + } + inner(node.l); + result.push(node.n); + inner(node.r); + }; + inner(this.root); + + let idx = 0; + return { + [Symbol.iterator]: this[Symbol.iterator], + next: (): IteratorResult<[BigInteger, V]> => { + if (idx < result.length) { + return { value: result[idx++], done: false }; + } + return { done: true, value: null }; + }, + }; + } +} diff --git a/pkg/interface/src/logic/lib/OrderedMap.ts b/pkg/interface/src/logic/lib/OrderedMap.ts index a640c2906..d66346ff6 100644 --- a/pkg/interface/src/logic/lib/OrderedMap.ts +++ b/pkg/interface/src/logic/lib/OrderedMap.ts @@ -6,7 +6,6 @@ export class OrderedMap extends Map const sorted = Array.from(super[Symbol.iterator]()).sort( ([a], [b]) => b - a ); - let index = 0; return { [Symbol.iterator]: this[Symbol.iterator], diff --git a/pkg/interface/src/logic/lib/bel.js b/pkg/interface/src/logic/lib/bel.js new file mode 100644 index 000000000..77ee14507 --- /dev/null +++ b/pkg/interface/src/logic/lib/bel.js @@ -0,0 +1 @@ +export default new 
Audio('data:@file/ogg;base64,T2dnUwACAAAAAAAAAAAu9RJ+AAAAAO+u/l4BHgF2b3JiaXMAAAAAAUAfAAAAAAAAYG0AAAAAAACZAU9nZ1MAAAAAAAAAAAAALvUSfgEAAACXEgK4Czv///////////+1A3ZvcmJpcysAAABYaXBoLk9yZyBsaWJWb3JiaXMgSSAyMDEyMDIwMyAoT21uaXByZXNlbnQpAAAAAAEFdm9yYmlzEkJDVgEAAAEADFIUISUZU0pjCJVSUikFHWNQW0cdY9Q5RiFkEFOISRmle08qlVhKyBFSWClFHVNMU0mVUpYpRR1jFFNIIVPWMWWhcxRLhkkJJWxNrnQWS+iZY5YxRh1jzlpKnWPWMUUdY1JSSaFzGDpmJWQUOkbF6GJ8MDqVokIovsfeUukthYpbir3XGlPrLYQYS2nBCGFz7bXV3EpqxRhjjDHGxeJTKILQkFUAAAEAAEAEAUJDVgEACgAAwlAMRVGA0JBVAEAGAIAAFEVxFMdxHEeSJMsCQkNWAQBAAAACAAAojuEokiNJkmRZlmVZlqZ5lqi5qi/7ri7rru3qug6EhqwEAMgAABiGIYfeScyQU5BJJilVzDkIofUOOeUUZNJSxphijFHOkFMMMQUxhtAphRDUTjmlDCIIQ0idZM4gSz3o4GLnOBAasiIAiAIAAIxBjCHGkHMMSgYhco5JyCBEzjkpnZRMSiittJZJCS2V1iLnnJROSialtBZSy6SU1kIrBQAABDgAAARYCIWGrAgAogAAEIOQUkgpxJRiTjGHlFKOKceQUsw5xZhyjDHoIFTMMcgchEgpxRhzTjnmIGQMKuYchAwyAQAAAQ4AAAEWQqEhKwKAOAEAgyRpmqVpomhpmih6pqiqoiiqquV5pumZpqp6oqmqpqq6rqmqrmx5nml6pqiqnimqqqmqrmuqquuKqmrLpqvatumqtuzKsm67sqzbnqrKtqm6sm6qrm27smzrrizbuuR5quqZput6pum6quvasuq6su2ZpuuKqivbpuvKsuvKtq3Ksq5rpum6oqvarqm6su3Krm27sqz7puvqturKuq7Ksu7btq77sq0Lu+i6tq7Krq6rsqzrsi3rtmzbQsnzVNUzTdf1TNN1Vde1bdV1bVszTdc1XVeWRdV1ZdWVdV11ZVv3TNN1TVeVZdNVZVmVZd12ZVeXRde1bVWWfV11ZV+Xbd33ZVnXfdN1dVuVZdtXZVn3ZV33hVm3fd1TVVs3XVfXTdfVfVvXfWG2bd8XXVfXVdnWhVWWdd/WfWWYdZ0wuq6uq7bs66os676u68Yw67owrLpt/K6tC8Or68ax676u3L6Patu+8Oq2Mby6bhy7sBu/7fvGsamqbZuuq+umK+u6bOu+b+u6cYyuq+uqLPu66sq+b+u68Ou+Lwyj6+q6Ksu6sNqyr8u6Lgy7rhvDatvC7tq6cMyyLgy37yvHrwtD1baF4dV1o6vbxm8Lw9I3dr4AAIABBwCAABPKQKEhKwKAOAEABiEIFWMQKsYghBBSCiGkVDEGIWMOSsYclBBKSSGU0irGIGSOScgckxBKaKmU0EoopaVQSkuhlNZSai2m1FoMobQUSmmtlNJaaim21FJsFWMQMuekZI5JKKW0VkppKXNMSsagpA5CKqWk0kpJrWXOScmgo9I5SKmk0lJJqbVQSmuhlNZKSrGl0kptrcUaSmktpNJaSam11FJtrbVaI8YgZIxByZyTUkpJqZTSWuaclA46KpmDkkopqZWSUqyYk9JBKCWDjEpJpbWSSiuhlNZKSrGFUlprrdWYUks1lJJaSanFUEprrbUaUys1hVBSC6W0FkpprbVWa2ottlBCa6GkFksqMbUWY22txRhKaa2kElspqcUWW42ttVhTSzWWkmJsrdXYSi051lprSi3W0lKMrbWYW0y5xVhrDSW0FkpprZTSWkqtxdZaraGU1koqsZWSWmyt1dhajDWU0mIpKbWQSmyttVhbbDWmlmJssdVYUosxxlhzS7XVlFqLrbVYSys1xhhrbjXlUgAAwIADAECACWWg0JCVAEAUAABgDGOMQWgUcsw5KY1SzjknJXMOQggpZc5BCCGlzjkIpbTUOQehlJRCKSmlFFsoJaXWWiwAAKDAAQAgwAZNicUBCg1ZCQBEAQAgxijFGITGIKUYg9AYoxRjECqlGHMOQqUUY85ByBhzzkEpGWPOQSclhBBCKaWEEEIopZQCAAAKHAAAAmzQlFgcoNCQFQFAFAAAYAxiDDGGIHRSOikRhExKJ6WREloLKWWWSoolxsxaia3E2EgJrYXWMmslxtJiRq3EWGIqAADswAEA7MBCKDRkJQCQBwBAGKMUY845ZxBizDkIITQIMeYchBAqxpxzDkIIFWPOOQchhM455yCEEELnnHMQQgihgxBCCKWU0kEIIYRSSukghBBCKaV0EEIIoZRSCgAAKnAAAAiwUWRzgpGgQkNWAgB5AACAMUo5JyWlRinGIKQUW6MUYxBSaq1iDEJKrcVYMQYhpdZi7CCk1FqMtXYQUmotxlpDSq3FWGvOIaXWYqw119RajLXm3HtqLcZac865AADcBQcAsAMbRTYnGAkqNGQlAJAHAEAgpBRjjDmHlGKMMeecQ0oxxphzzinGGHPOOecUY4w555xzjDHnnHPOOcaYc84555xzzjnnoIOQOeecc9BB6JxzzjkIIXTOOecchBAKAAAqcAAACLBRZHOCkaBCQ1YCAOEAAIAxlFJKKaWUUkqoo5RSSimllFICIaWUUkoppZRSSimllFJKKaWUUkoppZRSSimllFJKKaWUUkoppZRSSimllFJKKaWUUkoppZRSSimllFJKKaWUUkoppZRSSimllFJKKaWUUkoppZRSSimllFJKKaWUUkoppZRSSimllFJKKZVSSimllFJKKaWUUkoppQAg3woHAP8HG2dYSTorHA0uNGQlABAOAAAYwxiEjDknJaWGMQildE5KSSU1jEEopXMSUkopg9BaaqWk0lJKGYSUYgshlZRaCqW0VmspqbWUUigpxRpLSqml1jLnJKSSWkuttpg5B6Wk1lpqrcUQQkqxtdZSa7F1UlJJrbXWWm0tpJRaay3G1mJsJaWWWmupxdZaTKm1FltLLcbWYkutxdhiizHGGgsA4G5wAIBIsHGGlaSzwtHgQkNWAgAhAQAEMko555yDEEIIIVKKMeeggxBCCCFESjHmnIMQQgghhIwx5yCEEEIIoZSQMeYchBBCCCGEUjrnIIRQSgmllFJK5xyEEEIIpZRSSgkhhBBCKKWUUkopIYQQSimllFJKKSWEEEIopZRSSimlhBBCKKWUUkoppZQQQiillFJKKaWUEkIIoZRSSimllFJCCKWUUkoppZRSSighhFJKKaWUUkoJJZRSSimllFJKKSGUUkoppZRSSimlAACAAwcAgAAj6CSjyiJsNOHCAxAAAAACAAJMAIEBgoJRCAKEEQgAAAAAAAgA+AAASAqAiIho5gwOEBIUFhgaHB4gIiQAAAAAAAAAAAAAAAAET2dnUwAE1RQAAAAAAA
Au9RJ+AgAAAI+1UkUWbC8oJiQmKTc/RUVKWVRZVFZSUlJUV7qmd+oLx6QAgM0PiQSIMgmD9wDA8XvSzu5GNsva2WY+G85HV47eGBjur0ytj7Wzzewi2GBPz+5b5X5VkUKKy+P8uLzGfVyHdViHddimbdqm+vy4btM2bZ807djY2NjYmGmapmlWldmyNVsAAKbnXqjLAQBgrioAKJAAb4YBnDhIAAC27/UAAAAA45QIAAC7yTUBAADiQvsHAQAAot2slRd+/38AgHMlAEjBmx0AbNebLgAAgELltBYAAKCevTMAAMApA6ZROkjlAADw70oAkIIW6jMGALj7mNICAAC4yb6wAAAA5AL8fgcApk062Be+/wAA/AYAAgwQThMFAACArWpvCQAAdgKupkwAjRoAps1ukZcDAMAtAEAABjMCALC6GAAAgGj+odwAAEAB+7LvDnsP2ACWzMqoP/z7/wWADgBjwHY/xAAAAMy+IgAANBy/qbrb7Pbss8JZflyWAZJLw6WXAQAwDBDkAkwOAwBQfkkAAADcl8vVBAAAmA8+jGyL2BYT0nbND21ideV3xyy/U4qXlgCSSeNKfvjxNQHAMMAMKiBsbroBAABqor8AAAA6aM89+Rq6bvnk8LNKyvW8jyhPT0aIqTiefOpxfN3z2tGPswCGCW+WuQAAVJUF+uoAADx8dxcAAMBXAMBZqNm60vW9d44pypUD2Tl6luUkHdG5J5lxL0aFvMpY9y7OZ7Kj0NP2j2okAQCKCd8aEhwAAAOrCubTAADuuhZTAACwBktTUYYsypUTa++md34yaa8b31WXw4LZ/dB8ts9kOjOBgcl+OxzDxOtjXdfTAQB2C59ZJvj+zwcgKg5g8281AADAO8mGdLGzJy8kGD3ERJHSb8+KRreqsOe5iOA9+xk5RWYymr8t9fFcQ7UYPsf2CXO954oPXuRGAHoJPwokOACABBB9GJcCcDgMkr9TdStxcpHkOhDXOM40AADwtFHUzvluekfz0aP9mJ3KRw3ySu6e3k716lNJD845W/XCUnx96lspKQF7uni6YS5qaskPgwAAbkmqK5Xg958CMAAAlRyw848OAABGZmJHO2stuZprN5F4lw9/TAzNcykrrzAGQKT90Hzs9r7xON7h+h6Zh6owHipmsZi9HkcWrez9yFa25VxKjKcAcgnfCiQ4AIAK4PvQVQCcdOjsxOnO6qfsJ3fjALg3jw8MAAA4+pwLvc++cP6Da+BtgBK2Vchno/edw7oW9qfD6fCyfroxFvJA1J66eKe6X3caZA80fZoqIQBiCluUZIK/vwYgxgAIxWABAACzX87PD/e+ypZoZFzUgnT++Vfp0fDw+dEiJHzSHi4Jx9zOop6xOOLsMJv7o2RE6rgPC7QJCbg+iH2+j5OvYnTfZwBiieULScgAAGICCMAMAJTm0o7SZPPNrK4DMVjQnAAAoNazHL/ohsVDDnj1B1OiRQleU7ja3e2WOJqRCOfRlPtCOd1Xx4golDIpbZ458jXWa69PzEdWAl6L5caU4PODAKhVnoBdv4oBAAAxjs0+Ot/dzjg+fzV8Ll86PhGvL0dScur4fEpmFL7LtyA4Nl/lyJPLSxTD6GB2yR6n0xYDZ/Ki8AelZfcaMgBeiuWCJAzH3QEgRh/Ax9sXACBA5nuEprLWUgjNyZV1K0F59+b2ekLV6ygkgbeuJuF2mIlXES5VwEoYIydHWAcdfNrynQPbVs45Vnz0W5H4NsEAXorpzpTguRsArDEAQsVfDQAAai1bx861KN9Kd+gh5gcxYnuummTspfSig6uKfBD7ktk84S407NtlEEiIwPnIFpc0c1mn6l8NIwrqmFZHTOQGAGKK9H2ZQX8AgOgHABonCQAIoSiMDtY3DcIRbnuH6zxUxkQwrhJ2Fpywr6ZfxvDqKyW58VRo40fXz5/cpf7H3YRUgr9LRT8pNxJaVHecnCt+ZV2tAWJK8dUqwVUdACVGkCL93e/IyMrIukLnyg3MMbZ+9/OKc/Z7JuYqlQlMVOODRxW/vKQqdZPbQui0ML9ZSJo33YhFgCY5BjIkne70UjNULgVNfirfm7+RSw=='); diff --git a/pkg/interface/src/logic/lib/default-apps.js b/pkg/interface/src/logic/lib/default-apps.js index 4fcbe3733..c9e1708fb 100644 --- a/pkg/interface/src/logic/lib/default-apps.js +++ b/pkg/interface/src/logic/lib/default-apps.js @@ -1,3 +1,3 @@ -const defaultApps = ['chat', 'dojo', 'groups', 'link', 'publish']; +const defaultApps = ['chat', 'term', 'groups', 'link', 'publish']; export default defaultApps; diff --git a/pkg/interface/src/logic/lib/graph.ts b/pkg/interface/src/logic/lib/graph.ts new file mode 100644 index 000000000..b17848ded --- /dev/null +++ b/pkg/interface/src/logic/lib/graph.ts @@ -0,0 +1,24 @@ +import { Content } from "~/types"; +import urbitOb from "urbit-ob"; + +export function scanForMentions(text: string) { + const regex = /~([a-z]|-)+/g; + let result: Content[] = []; + let match: RegExpExecArray | null; + let lastPos = 0; + while ((match = regex.exec(text)) !== null) { + const newPos = match.index + match[0].length; + if (urbitOb.isValidPatp(match[0])) { + if (match.index !== lastPos) { + result.push({ text: text.slice(lastPos, match.index) }); + } + result.push({ mention: match[0] }); + } + lastPos = newPos; + } + const remainder = text.slice(lastPos, text.length); + if (remainder) { + result.push({ text: remainder }); + } + return result; +} diff --git a/pkg/interface/src/logic/lib/group.ts b/pkg/interface/src/logic/lib/group.ts index bd4939c2f..7d554cc1b 100644 --- a/pkg/interface/src/logic/lib/group.ts 
+++ b/pkg/interface/src/logic/lib/group.ts @@ -4,7 +4,7 @@ import { PatpNoSig, Path } from '~/types/noun'; export function roleForShip(group: Group, ship: PatpNoSig): RoleTags | undefined { return roleTags.reduce((currRole, role) => { - const roleShips = group.tags.role[role]; + const roleShips = group?.tags?.role?.[role]; return roleShips && roleShips.has(ship) ? role : currRole; }, undefined as RoleTags | undefined); } diff --git a/pkg/interface/src/logic/lib/notification.ts b/pkg/interface/src/logic/lib/notification.ts new file mode 100644 index 000000000..87288d83d --- /dev/null +++ b/pkg/interface/src/logic/lib/notification.ts @@ -0,0 +1,21 @@ +import { GraphNotifIndex, GraphNotificationContents } from "~/types"; + +export function getParentIndex( + idx: GraphNotifIndex, + contents: GraphNotificationContents +) { + const origIndex = contents[0].index.slice(1).split("/"); + const ret = (i: string[]) => `/${i.join("/")}`; + switch (idx.description) { + case "link": + return "/"; + case "comment": + return ret(origIndex.slice(0, 1)); + case "note": + return "/"; + case "mention": + return undefined; + default: + return undefined; + } +} diff --git a/pkg/interface/src/logic/lib/omnibox.js b/pkg/interface/src/logic/lib/omnibox.js index 3c3de5dce..1bc38a4e2 100644 --- a/pkg/interface/src/logic/lib/omnibox.js +++ b/pkg/interface/src/logic/lib/omnibox.js @@ -54,7 +54,8 @@ const appIndex = function (apps) { const otherIndex = function() { const other = []; - other.push(result('Home', '/~landscape/home', 'home', null)); + other.push(result('DMs + Drafts', '/~landscape/home', 'home', null)); + other.push(result('Notifications', '/~notifications', 'inbox', null)); other.push(result('Profile and Settings', '/~profile/identity', 'profile', null)); other.push(result('Log Out', '/~/logout', 'logout', null)); diff --git a/pkg/interface/src/logic/lib/post.ts b/pkg/interface/src/logic/lib/post.ts new file mode 100644 index 000000000..1a311de55 --- /dev/null +++ b/pkg/interface/src/logic/lib/post.ts @@ -0,0 +1,18 @@ +import { Post, GraphNode } from "~/types"; + +export const buntPost = (): Post => ({ + author: '', + contents: [], + hash: null, + index: '', + signatures: [], + 'time-sent': 0 +}); + +export function makeNodeMap(posts: Post[]): Record { + let nodes = {}; + posts.forEach((p) => { + nodes[p.index] = { children: { empty: null }, post: p }; + }); + return nodes; +} diff --git a/pkg/interface/src/logic/lib/publish.ts b/pkg/interface/src/logic/lib/publish.ts new file mode 100644 index 000000000..8e44e423a --- /dev/null +++ b/pkg/interface/src/logic/lib/publish.ts @@ -0,0 +1,118 @@ +import { Post, GraphNode, TextContent, Graph, NodeMap } from "~/types"; +import { buntPost } from '~/logic/lib/post'; +import { unixToDa } from "~/logic/lib/util"; +import {BigIntOrderedMap} from "./BigIntOrderedMap"; +import bigInt, {BigInteger} from 'big-integer'; + +export function newPost( + title: string, + body: string +): [BigInteger, NodeMap] { + const now = Date.now(); + const nowDa = unixToDa(now); + const root: Post = { + author: `~${window.ship}`, + index: "/" + nowDa.toString(), + "time-sent": now, + contents: [], + hash: null, + signatures: [], + }; + + const revContainer: Post = { ...root, index: root.index + "/1" }; + const commentsContainer = { ...root, index: root.index + "/2" }; + + const firstRevision: Post = { + ...revContainer, + index: revContainer.index + "/1", + contents: [{ text: title }, { text: body }], + }; + + const nodes = { + [root.index]: { + post: root, + children: { + graph: { + 1: { 
+ post: revContainer, + children: { + graph: { + 1: { + post: firstRevision, + children: { empty: null }, + }, + }, + }, + }, + 2: { + post: commentsContainer, + children: { empty: null }, + }, + }, + }, + }, + }; + + return [nowDa, nodes]; +} + +export function editPost(rev: number, noteId: BigInteger, title: string, body: string) { + const now = Date.now(); + const newRev: Post = { + author: `~${window.ship}`, + index: `/${noteId.toString()}/1/${rev}`, + "time-sent": now, + contents: [{ text: title }, { text: body }], + hash: null, + signatures: [], + }; + const nodes = { + [newRev.index]: { + post: newRev, + children: { empty: null } + } + }; + + return nodes; +} + +export function getLatestRevision(node: GraphNode): [number, string, string, Post] { + const revs = node.children.get(bigInt(1)); + const empty = [1, "", "", buntPost()] as [number, string, string, Post]; + if(!revs) { + return empty; + } + const [revNum, rev] = [...revs.children][0]; + if(!rev) { + return empty; + } + const [title, body] = rev.post.contents as TextContent[]; + return [revNum.toJSNumber(), title.text, body.text, rev.post]; +} + +export function getLatestCommentRevision(node: GraphNode): [number, Post] { + const empty = [1, buntPost()] as [number, Post]; + if (node.children.size <= 0) { + return empty; + } + const [revNum, rev] = [...node.children][0]; + if(!rev) { + return empty; + } + return [revNum.toJSNumber(), rev.post]; +} + + +export function getComments(node: GraphNode): GraphNode { + const comments = node.children.get(bigInt(2)); + if(!comments) { + return { post: buntPost(), children: new BigIntOrderedMap() } + } + return comments; +} + +export function getSnippet(body: string) { + const start = body.slice(0, 400); + return start === body ? start : `${start}...`; +} + diff --git a/pkg/interface/src/logic/lib/sigil.js b/pkg/interface/src/logic/lib/sigil.js index 6e606be90..067711236 100644 --- a/pkg/interface/src/logic/lib/sigil.js +++ b/pkg/interface/src/logic/lib/sigil.js @@ -25,6 +25,7 @@ export const Sigil = memo(({ classes = '', color, foreground = '', ship, size, s display='inline-block' height={size} width={size} + className={classes} />) : ( ( candidates: C[], key: (c: C) => string, - searchPred: (query: string, c: C) => boolean + searchPred: (query: string, c: C) => boolean, + isExact: (query: string) => C | undefined ) { const [options, setOptions] = useState(candidates); const [selected, setSelected] = useState(); const search = useCallback( (s: string) => { - const opts = candidates.filter((c) => searchPred(s, c)); + const exactMatch = isExact(s); + const exact = exactMatch ? 
[exactMatch] : []; + const opts = [...new Set([...exact, ...candidates.filter((c) => searchPred(s, c))])]; setOptions(opts); if (selected) { const idx = opts.findIndex((c) => key(c) === key(selected)); diff --git a/pkg/interface/src/logic/lib/util.js b/pkg/interface/src/logic/lib/util.js deleted file mode 100644 index 8a1a4b8da..000000000 --- a/pkg/interface/src/logic/lib/util.js +++ /dev/null @@ -1,258 +0,0 @@ -import _ from 'lodash'; -import f from 'lodash/fp'; - -export const MOBILE_BROWSER_REGEX = /Android|webOS|iPhone|iPad|iPod|BlackBerry/i; - -export function parentPath(path) { - return _.dropRight(path.split('/'), 1).join('/'); -} - -export function clamp(x,min,max) { - return Math.max(min, Math.min(max, x)); -} - -// color is a #000000 color -export function adjustHex(color, amount) { - const res = f.flow( - f.split(''), f.chunk(2), // get individual color channels - f.map(c => parseInt(c.join(''), 16)), // as hex - f.map(c => clamp(c + amount, 0, 255).toString(16)), // adjust - f.join('') - )(color.slice(1)) - return `#${res}`; -} - - -export function resourceAsPath(resource) { - const { name, ship } = resource; - return `/ship/~${ship}/${name}`; -} - -export function uuid() { - let str = '0v'; - str += Math.ceil(Math.random()*8)+'.'; - for (let i = 0; i < 5; i++) { - let _str = Math.ceil(Math.random()*10000000).toString(32); - _str = ('00000'+_str).substr(-5,5); - str += _str+'.'; - } - - return str.slice(0,-1); -} - -/* - Goes from: - ~2018.7.17..23.15.09..5be5 // urbit @da - To: - (javascript Date object) -*/ -export function daToDate(st) { - const dub = function(n) { - return parseInt(n) < 10 ? '0' + parseInt(n) : n.toString(); - }; - const da = st.split('..'); - const bigEnd = da[0].split('.'); - const lilEnd = da[1].split('.'); - const ds = `${bigEnd[0].slice(1)}-${dub(bigEnd[1])}-${dub(bigEnd[2])}T${dub(lilEnd[0])}:${dub(lilEnd[1])}:${dub(lilEnd[2])}Z`; - return new Date(ds); -} - -/* - Goes from: - (javascript Date object) - To: - ~2018.7.17..23.15.09..5be5 // urbit @da -*/ - -export function dateToDa(d, mil) { -  const fil = function(n) { -    return n >= 10 ? n : '0' + n; -  }; -  return ( -    `~${d.getUTCFullYear()}.` + -    `${(d.getUTCMonth() + 1)}.` + -    `${fil(d.getUTCDate())}..` + -    `${fil(d.getUTCHours())}.` + -    `${fil(d.getUTCMinutes())}.` + -    `${fil(d.getUTCSeconds())}` + - `${mil ? '..0000' : ''}` -  ); -} - -export function deSig(ship) { - if(!ship) { - return null; - } - return ship.replace('~', ''); -} - -export function uxToHex(ux) { - if (ux.length > 2 && ux.substr(0,2) === '0x') { - const value = ux.substr(2).replace('.', '').padStart(6, '0'); - return value; - } - - const value = ux.replace('.', '').padStart(6, '0'); - return value; -} - -export function hexToUx(hex) { - const ux = f.flow( - f.chunk(4), - f.map(x => _.dropWhile(x, y => y === 0).join('')), - f.join('.') - )(hex.split('')) - return `0x${ux}`; -} - -export function writeText(str) { - return new Promise(((resolve, reject) => { - const range = document.createRange(); - range.selectNodeContents(document.body); - document.getSelection().addRange(range); - - let success = false; - function listener(e) { - e.clipboardData.setData('text/plain', str); - e.preventDefault(); - success = true; - } - document.addEventListener('copy', listener); - document.execCommand('copy'); - document.removeEventListener('copy', listener); - - document.getSelection().removeAllRanges(); - - success ? 
resolve() : reject(); - })).catch((error) => { - console.error(error); - });; -}; - -// trim patps to match dojo, chat-cli -export function cite(ship) { - let patp = ship, shortened = ''; - if (patp === null || patp === '') { - return null; - } - if (patp.startsWith('~')) { - patp = patp.substr(1); - } - // comet - if (patp.length === 56) { - shortened = '~' + patp.slice(0, 6) + '_' + patp.slice(50, 56); - return shortened; - } - // moon - if (patp.length === 27) { - shortened = '~' + patp.slice(14, 20) + '^' + patp.slice(21, 27); - return shortened; - } - return `~${patp}`; -} - -export function alphabeticalOrder(a,b) { - return a.toLowerCase().localeCompare(b.toLowerCase()); -} - -// TODO: deprecated -export function alphabetiseAssociations(associations) { - const result = {}; - Object.keys(associations).sort((a, b) => { - let aName = a.substr(1); - let bName = b.substr(1); - if (associations[a].metadata && associations[a].metadata.title) { - aName = associations[a].metadata.title !== '' - ? associations[a].metadata.title - : a.substr(1); - } - if (associations[b].metadata && associations[b].metadata.title) { - bName = associations[b].metadata.title !== '' - ? associations[b].metadata.title - : b.substr(1); - } - return alphabeticalOrder(aName,bName); - }).map((each) => { - result[each] = associations[each]; - }); - return result; -} - -// encode the string into @ta-safe format, using logic from +wood. -// for example, 'some Chars!' becomes '~.some.~43.hars~21.' -// -export function stringToTa(string) { - let out = ''; - for (let i = 0; i < string.length; i++) { - const char = string[i]; - let add = ''; - switch (char) { - case ' ': - add = '.'; - break; - case '.': - add = '~.'; - break; - case '~': - add = '~~'; - break; - default: - const charCode = string.charCodeAt(i); - if ( - (charCode >= 97 && charCode <= 122) || // a-z - (charCode >= 48 && charCode <= 57) || // 0-9 - char === '-' - ) { - add = char; - } else { - // TODO behavior for unicode doesn't match +wood's, - // but we can probably get away with that for now. - add = '~' + charCode.toString(16) + '.'; - } - } - out = out + add; - } - return '~.' 
+ out; -} - -export function amOwnerOfGroup(groupPath) { - if (!groupPath) -return false; - const groupOwner = /(\/~)?\/~([a-z-]{3,})\/.*/.exec(groupPath)[2]; - return window.ship === groupOwner; -} - -export function getContactDetails(contact) { - const member = !contact; - contact = contact || { - nickname: '', - avatar: null, - color: '0x0' - }; - const nickname = contact.nickname || ''; - const color = uxToHex(contact.color || '0x0'); - const avatar = contact.avatar || null; - return { nickname, color, member, avatar }; -} - -export function stringToSymbol(str) { - let result = ''; - for (let i = 0; i < str.length; i++) { - const n = str.charCodeAt(i); - if (((n >= 97) && (n <= 122)) || - ((n >= 48) && (n <= 57))) { - result += str[i]; - } else if ((n >= 65) && (n <= 90)) { - result += String.fromCharCode(n + 32); - } else { - result += '-'; - } - } - result = result.replace(/^[\-\d]+|\-+/g, '-'); - result = result.replace(/^\-+|\-+$/g, ''); - if (result === '') { - return dateToDa(new Date()); - } - return result; -} - diff --git a/pkg/interface/src/logic/lib/util.ts b/pkg/interface/src/logic/lib/util.ts new file mode 100644 index 000000000..1ebe6e48b --- /dev/null +++ b/pkg/interface/src/logic/lib/util.ts @@ -0,0 +1,356 @@ +import { useEffect } from 'react'; +import _ from "lodash"; +import f from "lodash/fp"; +import bigInt, { BigInteger } from "big-integer"; + +export const MOBILE_BROWSER_REGEX = /Android|webOS|iPhone|iPad|iPod|BlackBerry/i; + +export const MOMENT_CALENDAR_DATE = { + sameDay: "[Today]", + nextDay: "[Tomorrow]", + nextWeek: "dddd", + lastDay: "[Yesterday]", + lastWeek: "[Last] dddd", + sameElse: "DD/MM/YYYY", +}; + +export function appIsGraph(app: string) { + return app === 'publish' || app == 'link'; +} + +export function parentPath(path: string) { + return _.dropRight(path.split('/'), 1).join('/'); +} + +const DA_UNIX_EPOCH = bigInt("170141184475152167957503069145530368000"); // `@ud` ~1970.1.1 +const DA_SECOND = bigInt("18446744073709551616"); // `@ud` ~s1 +export function daToUnix(da: BigInteger) { + // ported from +time:enjs:format in hoon.hoon + const offset = DA_SECOND.divide(bigInt(2000)); + const epochAdjusted = offset.add(da.subtract(DA_UNIX_EPOCH)); + + return Math.round( + epochAdjusted.multiply(bigInt(1000)).divide(DA_SECOND).toJSNumber() + ); +} + +export function unixToDa(unix: number) { + const timeSinceEpoch = bigInt(unix).multiply(DA_SECOND).divide(bigInt(1000)); + return DA_UNIX_EPOCH.add(timeSinceEpoch); +} + +export function makePatDa(patda: string) { + return bigInt(udToDec(patda)); +} + +export function udToDec(ud: string): string { + return ud.replace(/\./g, ""); +} + +export function decToUd(str: string): string { + return _.trimStart( + f.flow( + f.split(""), + f.reverse, + f.chunk(3), + f.map(f.flow(f.reverse, f.join(""))), + f.reverse, + f.join(".") + )(str), + "0." 
+ ); +} + +/** + * Clamp a number between a min and max + */ +export function clamp(x: number, min: number, max: number) { + return Math.max(min, Math.min(max, x)); +} + +// color is a #000000 color +export function adjustHex(color: string, amount: number): string { + return f.flow( + f.split(""), + f.chunk(2), // get RGB channels + f.map((c) => parseInt(c.join(""), 16)), // as hex + f.map((c) => clamp(c + amount, 0, 255).toString(16)), // adjust + f.join(""), + (res) => `#${res}` //format + )(color.slice(1)); +} + +export function resourceAsPath(resource: any) { + const { name, ship } = resource; + return `/ship/~${ship}/${name}`; +} + +export function uuid() { + let str = "0v"; + str += Math.ceil(Math.random() * 8) + "."; + for (let i = 0; i < 5; i++) { + let _str = Math.ceil(Math.random() * 10000000).toString(32); + _str = ("00000" + _str).substr(-5, 5); + str += _str + "."; + } + + return str.slice(0, -1); +} + +/* + Goes from: + ~2018.7.17..23.15.09..5be5 // urbit @da + To: + (javascript Date object) +*/ +export function daToDate(st: string) { + const dub = function (n: string) { + return parseInt(n) < 10 ? "0" + parseInt(n) : n.toString(); + }; + const da = st.split(".."); + const bigEnd = da[0].split("."); + const lilEnd = da[1].split("."); + const ds = `${bigEnd[0].slice(1)}-${dub(bigEnd[1])}-${dub(bigEnd[2])}T${dub( + lilEnd[0] + )}:${dub(lilEnd[1])}:${dub(lilEnd[2])}Z`; + return new Date(ds); +} + +/* + Goes from: + (javascript Date object) + To: + ~2018.7.17..23.15.09..5be5 // urbit @da +*/ + +export function dateToDa(d: Date, mil: boolean = false) { + const fil = function (n: number) { + return n >= 10 ? n : "0" + n; + }; + return ( + `~${d.getUTCFullYear()}.` + + `${d.getUTCMonth() + 1}.` + + `${fil(d.getUTCDate())}..` + + `${fil(d.getUTCHours())}.` + + `${fil(d.getUTCMinutes())}.` + + `${fil(d.getUTCSeconds())}` + + `${mil ? "..0000" : ""}` + ); +} + +export function deSig(ship: string) { + if (!ship) { + return null; + } + return ship.replace("~", ""); +} + +export function uxToHex(ux: string) { + if (ux.length > 2 && ux.substr(0, 2) === "0x") { + const value = ux.substr(2).replace(".", "").padStart(6, "0"); + return value; + } + + const value = ux.replace(".", "").padStart(6, "0"); + return value; +} + +export const hexToUx = (hex) => { + const ux = f.flow( + f.chunk(4), + f.map(x => _.dropWhile(x, y => y === 0).join('')), + f.join('.') + )(hex.split('')); + return `0x${ux}`; +}; + +export function writeText(str: string) { + return new Promise((resolve, reject) => { + const range = document.createRange(); + range.selectNodeContents(document.body); + document?.getSelection()?.addRange(range); + + let success = false; + function listener(e) { + e.clipboardData.setData("text/plain", str); + e.preventDefault(); + success = true; + } + document.addEventListener("copy", listener); + document.execCommand("copy"); + document.removeEventListener("copy", listener); + + document?.getSelection()?.removeAllRanges(); + + success ? 
resolve() : reject(); + }).catch((error) => { + console.error(error); + }); +} + +// trim patps to match dojo, chat-cli +export function cite(ship: string) { + let patp = ship, + shortened = ""; + if (patp === null || patp === "") { + return null; + } + if (patp.startsWith("~")) { + patp = patp.substr(1); + } + // comet + if (patp.length === 56) { + shortened = "~" + patp.slice(0, 6) + "_" + patp.slice(50, 56); + return shortened; + } + // moon + if (patp.length === 27) { + shortened = "~" + patp.slice(14, 20) + "^" + patp.slice(21, 27); + return shortened; + } + return `~${patp}`; +} + +export function alphabeticalOrder(a: string, b: string) { + return a.toLowerCase().localeCompare(b.toLowerCase()); +} + +// TODO: deprecated +export function alphabetiseAssociations(associations: any) { + const result = {}; + Object.keys(associations) + .sort((a, b) => { + let aName = a.substr(1); + let bName = b.substr(1); + if (associations[a].metadata && associations[a].metadata.title) { + aName = + associations[a].metadata.title !== "" + ? associations[a].metadata.title + : a.substr(1); + } + if (associations[b].metadata && associations[b].metadata.title) { + bName = + associations[b].metadata.title !== "" + ? associations[b].metadata.title + : b.substr(1); + } + return alphabeticalOrder(aName, bName); + }) + .map((each) => { + result[each] = associations[each]; + }); + return result; +} + +// encode the string into @ta-safe format, using logic from +wood. +// for example, 'some Chars!' becomes '~.some.~43.hars~21.' +// +export function stringToTa(str: string) { + let out = ""; + for (let i = 0; i < str.length; i++) { + const char = str[i]; + let add = ""; + switch (char) { + case " ": + add = "."; + break; + case ".": + add = "~."; + break; + case "~": + add = "~~"; + break; + default: + const charCode = str.charCodeAt(i); + if ( + (charCode >= 97 && charCode <= 122) || // a-z + (charCode >= 48 && charCode <= 57) || // 0-9 + char === "-" + ) { + add = char; + } else { + // TODO behavior for unicode doesn't match +wood's, + // but we can probably get away with that for now. + add = "~" + charCode.toString(16) + "."; + } + } + out = out + add; + } + return "~." 
+ out; +} + +export function amOwnerOfGroup(groupPath: string) { + if (!groupPath) return false; + const groupOwner = /(\/~)?\/~([a-z-]{3,})\/.*/.exec(groupPath)?.[2]; + return window.ship === groupOwner; +} + +export function getContactDetails(contact: any) { + const member = !contact; + contact = contact || { + nickname: "", + avatar: null, + color: "0x0", + }; + const nickname = contact.nickname || ""; + const color = uxToHex(contact.color || "0x0"); + const avatar = contact.avatar || null; + return { nickname, color, member, avatar }; +} + +export function stringToSymbol(str: string) { + let result = ""; + for (let i = 0; i < str.length; i++) { + const n = str.charCodeAt(i); + if ((n >= 97 && n <= 122) || (n >= 48 && n <= 57)) { + result += str[i]; + } else if (n >= 65 && n <= 90) { + result += String.fromCharCode(n + 32); + } else { + result += "-"; + } + } + result = result.replace(/^[\-\d]+|\-+/g, "-"); + result = result.replace(/^\-+|\-+$/g, ""); + if (result === "") { + return dateToDa(new Date()); + } + return result; +} + + + +/** + * Formats a numbers as a `@ud` inserting dot where needed + */ +export function numToUd(num: number) { + return f.flow( + f.split(''), + f.reverse, + f.chunk(3), + f.reverse, + f.map(s => s.join('')), + f.join('.') + )(num.toString()) +} + +export function usePreventWindowUnload(shouldPreventDefault: boolean, message = "You have unsaved changes. Are you sure you want to exit?") { + useEffect(() => { + if (!shouldPreventDefault) return; + const handleBeforeUnload = event => { + event.preventDefault(); + return message; + } + window.addEventListener("beforeunload", handleBeforeUnload); + window.onbeforeunload = handleBeforeUnload; + return () => { + window.removeEventListener("beforeunload", handleBeforeUnload); + // @ts-ignore + window.onbeforeunload = undefined; + } + }, [shouldPreventDefault]); +} + +export function pluralize(text: string, isPlural = false, vowel = false) { + return isPlural ? `${text}s`: `${vowel ? 
'an' : 'a'} ${text}`; +} diff --git a/pkg/interface/src/logic/lib/workspace.ts b/pkg/interface/src/logic/lib/workspace.ts index a5ba59bb1..2ab8ce65b 100644 --- a/pkg/interface/src/logic/lib/workspace.ts +++ b/pkg/interface/src/logic/lib/workspace.ts @@ -6,7 +6,7 @@ export function getTitleFromWorkspace( ) { switch (workspace.type) { case "home": - return "Home"; + return "DMs + Drafts"; case "group": const association = associations.contacts[workspace.group]; return association?.metadata?.title || ""; diff --git a/pkg/interface/src/logic/reducers/graph-update.js b/pkg/interface/src/logic/reducers/graph-update.js index ead7c8209..d5884b4fe 100644 --- a/pkg/interface/src/logic/reducers/graph-update.js +++ b/pkg/interface/src/logic/reducers/graph-update.js @@ -1,16 +1,6 @@ import _ from 'lodash'; -import { OrderedMap } from "~/logic/lib/OrderedMap"; - -const DA_UNIX_EPOCH = 170141184475152167957503069145530368000; -const normalizeKey = (key) => { - if(key > DA_UNIX_EPOCH) { - // new links uses milliseconds since unix epoch - // old (pre-graph-store) use @da - // ported from +time:enjs:format in hoon.hoon - return Math.round((1000 * (9223372036854775 + (key - DA_UNIX_EPOCH))) / 18446744073709551616); - } - return key; -} +import { BigIntOrderedMap } from "~/logic/lib/BigIntOrderedMap"; +import bigInt, { BigInteger } from "big-integer"; export const GraphReducer = (json, state) => { const data = _.get(json, 'graph-update', false); @@ -38,33 +28,26 @@ const addGraph = (json, state) => { const _processNode = (node) => { // is empty if (!node.children) { - node.children = new OrderedMap(); - node.post.originalIndex = node.post.index; - node.post.index = node.post.index.split('/').map(x => x.length === 0 ? '' : normalizeKey(parseInt(x, 10))).join('/'); + node.children = new BigIntOrderedMap(); return node; } // is graph - let converted = new OrderedMap(); + let converted = new BigIntOrderedMap(); for (let i in node.children) { let item = node.children[i]; let index = item[0].split('/').slice(1).map((ind) => { - return parseInt(ind, 10); + return bigInt(ind); }); if (index.length === 0) { break; } - - const normalKey = normalizeKey(index[index.length - 1]); - item[1].post.originalKey = index[index.length - 1]; converted.set( - normalKey, + index[index.length - 1], _processNode(item[1]) ); } node.children = converted; - node.post.originalIndex = node.post.index; - node.post.index = node.post.index.split('/').map(x => x.length === 0 ? 
'' : normalizeKey(parseInt(x, 10))).join('/'); return node; }; @@ -75,21 +58,22 @@ const addGraph = (json, state) => { } let resource = data.resource.ship + '/' + data.resource.name; - state.graphs[resource] = new OrderedMap(); + state.graphs[resource] = new BigIntOrderedMap(); for (let i in data.graph) { let item = data.graph[i]; let index = item[0].split('/').slice(1).map((ind) => { - return parseInt(ind, 10); + return bigInt(ind); }); if (index.length === 0) { break; } let node = _processNode(item[1]); - const normalKey = normalizeKey(index[index.length - 1]) - node.post.originalKey = index[index.length - 1]; - state.graphs[resource].set(normalKey, node); + state.graphs[resource].set( + index[index.length - 1], + node + ); } state.graphKeys.add(resource); } @@ -102,16 +86,16 @@ const removeGraph = (json, state) => { if (!('graphs' in state)) { state.graphs = {}; } - let resource = data.resource.ship + '/' + data.resource.name; + let resource = data.ship + '/' + data.name; delete state.graphs[resource]; } }; const mapifyChildren = (children) => { - return new OrderedMap( + return new BigIntOrderedMap( children.map(([idx, node]) => { const nd = {...node, children: mapifyChildren(node.children || []) }; - return [normalizeKey(parseInt(idx.slice(1), 10)), nd]; + return [bigInt(idx.slice(1)), nd]; })); }; @@ -119,23 +103,18 @@ const addNodes = (json, state) => { const _addNode = (graph, index, node) => { // set child of graph if (index.length === 1) { - node.post.originalIndex = node.post.index; - node.post.index = node.post.index.split('/').map(x => x.length === 0 ? '' : normalizeKey(parseInt(x, 10))).join('/'); - - const normalKey = normalizeKey(index[0]) - node.post.originalKey = index[0]; - graph.set(normalKey, node); + graph.set(index[0], node); return graph; } // set parent of graph - let parNode = graph.get(normalizeKey(index[0])); + let parNode = graph.get(index[0]); if (!parNode) { console.error('parent node does not exist, cannot add child'); return; } parNode.children = _addNode(parNode.children, index.slice(1), node); - graph.set(normalizeKey(index[0]), parNode); + graph.set(index[0], parNode); return graph; }; @@ -151,7 +130,7 @@ const addNodes = (json, state) => { if (item[0].split('/').length === 0) { return; } let index = item[0].split('/').slice(1).map((ind) => { - return parseInt(ind, 10); + return bigInt(ind); }); if (index.length === 0) { return; } @@ -174,9 +153,9 @@ const removeNodes = (json, state) => { if (index.length === 1) { graph.delete(index[0]); } else { - const child = graph.get(normalizeKey(index[0])); + const child = graph.get(index[0]); _remove(child.children, index.slice(1)); - graph.set(normalizeKey(index[0]), child); + graph.set(index[0], child); } }; const data = _.get(json, 'remove-nodes', false); @@ -188,7 +167,7 @@ const removeNodes = (json, state) => { data.indices.forEach((index) => { if (index.split('/').length === 0) { return; } let indexArr = index.split('/').slice(1).map((ind) => { - return parseInt(ind, 10); + return bigInt(ind); }); _remove(state.graphs[res], indexArr); }); diff --git a/pkg/interface/src/logic/reducers/group-update.ts b/pkg/interface/src/logic/reducers/group-update.ts index 5862aaca1..0b0ad8ec0 100644 --- a/pkg/interface/src/logic/reducers/group-update.ts +++ b/pkg/interface/src/logic/reducers/group-update.ts @@ -103,7 +103,7 @@ export default class GroupReducer { const resourcePath = resourceAsPath(resource); state.groups[resourcePath] = { members: new Set(), - tags: { role: {} }, + tags: { role: { admin: new 
Set([window.ship]) } }, policy: decodePolicy(policy), hidden, }; diff --git a/pkg/interface/src/logic/reducers/hark-update.ts b/pkg/interface/src/logic/reducers/hark-update.ts new file mode 100644 index 000000000..590afde13 --- /dev/null +++ b/pkg/interface/src/logic/reducers/hark-update.ts @@ -0,0 +1,306 @@ +import { + Notifications, + NotifIndex, + NotificationGraphConfig, + GroupNotificationsConfig, +} from "~/types"; +import { makePatDa } from "~/logic/lib/util"; +import _ from "lodash"; +import { StoreState } from "../store/type"; + +type HarkState = Pick; + +export const HarkReducer = (json: any, state: HarkState) => { + const data = _.get(json, "harkUpdate", false); + if (data) { + reduce(data, state); + } + const graphHookData = _.get(json, "hark-graph-hook-update", false); + if (graphHookData) { + graphInitial(graphHookData, state); + graphIgnore(graphHookData, state); + graphListen(graphHookData, state); + graphWatchSelf(graphHookData, state); + graphMentions(graphHookData, state); + } + const groupHookData = _.get(json, "hark-group-hook-update", false); + if (groupHookData) { + groupInitial(groupHookData, state); + groupListen(groupHookData, state); + groupIgnore(groupHookData, state); + } + + const chatHookData = _.get(json, "hark-chat-hook-update", false); + if(chatHookData) { + + chatInitial(chatHookData, state); + chatListen(chatHookData, state); + chatIgnore(chatHookData, state); + + } +}; + +function chatInitial(json: any, state: HarkState) { + const data = _.get(json, "initial", false); + if (data) { + state.notificationsChatConfig = data; + } +} + + +function chatListen(json: any, state: HarkState) { + const data = _.get(json, "listen", false); + if (data) { + state.notificationsChatConfig = [...state.notificationsChatConfig, data]; + } +} + +function chatIgnore(json: any, state: HarkState) { + const data = _.get(json, "ignore", false); + if (data) { + state.notificationsChatConfig = state.notificationsChatConfig.filter(x => x !== data); + } +} + +function groupInitial(json: any, state: HarkState) { + const data = _.get(json, "initial", false); + if (data) { + state.notificationsGroupConfig = data; + } +} + +function graphInitial(json: any, state: HarkState) { + const data = _.get(json, "initial", false); + if (data) { + state.notificationsGraphConfig = data; + } +} + +function graphListen(json: any, state: HarkState) { + const data = _.get(json, "listen", false); + if (data) { + state.notificationsGraphConfig.watching = [ + ...state.notificationsGraphConfig.watching, + data, + ]; + } +} + +function graphIgnore(json: any, state: HarkState) { + const data = _.get(json, "ignore", false); + if (data) { + state.notificationsGraphConfig.watching = state.notificationsGraphConfig.watching.filter( + ({ graph, index }) => !(graph === data.graph && index === data.index) + ); + } +} + +function groupListen(json: any, state: HarkState) { + const data = _.get(json, "listen", false); + if (data) { + state.notificationsGroupConfig = [...state.notificationsGroupConfig, data]; + } +} + +function groupIgnore(json: any, state: HarkState) { + const data = _.get(json, "ignore", false); + if (data) { + state.notificationsGroupConfig = state.notificationsGroupConfig.filter( + (n) => n !== data + ); + } +} + +function graphMentions(json: any, state: HarkState) { + const data = _.get(json, "set-mentions", undefined); + if (!_.isUndefined(data)) { + state.notificationsGraphConfig.mentions = data; + } +} + +function graphWatchSelf(json: any, state: HarkState) { + const data = _.get(json, 
"set-watch-on-self", undefined); + if (!_.isUndefined(data)) { + state.notificationsGraphConfig.watchOnSelf = data; + } +} + +function reduce(data: any, state: HarkState) { + unread(data, state); + read(data, state); + archive(data, state); + timebox(data, state); + more(data, state); + dnd(data, state); + added(data, state); + unreads(data, state); +} + +function unreads(json: any, state: HarkState) { + const data = _.get(json, 'unreads'); + if(data) { + data.forEach(({ index, unread }) => { + updateUnreads(state, index, x => x + unread); + }); + } +} + +function updateUnreads(state: HarkState, index: NotifIndex, f: (u: number) => number) { + state.notificationsCount = f(state.notificationsCount); + if('graph' in index) { + const curr = state.unreads.graph[index.graph.graph] || 0; + state.unreads.graph[index.graph.graph] = f(curr); + } else if('group' in index) { + const curr = state.unreads.group[index.group.group] || 0; + state.unreads.group[index.group.group] = f(curr); + } else if('chat' in index) { + const curr = state.unreads.chat[index.chat.chat] || 0 + state.unreads.chat[index.chat.chat] = f(curr); + } +} + +function added(json: any, state: HarkState) { + const data = _.get(json, "added", false); + if (data) { + const { index, notification } = data; + const time = makePatDa(data.time); + const timebox = state.notifications.get(time) || []; + + const arrIdx = timebox.findIndex((idxNotif) => + notifIdxEqual(index, idxNotif.index) + ); + if (arrIdx !== -1) { + if(timebox[arrIdx]?.notification?.read) { + updateUnreads(state, index, x => x+1); + } + timebox[arrIdx] = { index, notification }; + state.notifications.set(time, timebox); + } else { + updateUnreads(state, index, x => x+1); + state.notifications.set(time, [...timebox, { index, notification }]); + } + } +} + +const dnd = (json: any, state: HarkState) => { + const data = _.get(json, "set-dnd", undefined); + if (!_.isUndefined(data)) { + state.doNotDisturb = data; + } +}; + +const timebox = (json: any, state: HarkState) => { + const data = _.get(json, "timebox", false); + if (data) { + const time = makePatDa(data.time); + if (data.archive) { + state.archivedNotifications.set(time, data.notifications); + } else { + state.notifications.set(time, data.notifications); + } + } +}; + +function more(json: any, state: HarkState) { + const data = _.get(json, "more", false); + if (data) { + _.forEach(data, (d) => reduce(d, state)); + } +} + +function notifIdxEqual(a: NotifIndex, b: NotifIndex) { + if ("graph" in a && "graph" in b) { + return ( + a.graph.graph === b.graph.graph && + a.graph.group === b.graph.group && + a.graph.module === b.graph.module && + a.graph.description === b.graph.description + ); + } else if ("group" in a && "group" in b) { + return ( + a.group.group === b.group.group && + a.group.description === b.group.description + ); + } else if ("chat" in a && "chat" in b) { + return a.chat.chat === b.chat.chat && + a.chat.mention === b.chat.mention; + } + return false; +} + +function setRead( + time: string, + index: NotifIndex, + read: boolean, + state: HarkState +) { + const patDa = makePatDa(time); + const timebox = state.notifications.get(patDa); + if (_.isNull(timebox)) { + console.warn("Modifying nonexistent timebox"); + return; + } + const arrIdx = timebox.findIndex((idxNotif) => + notifIdxEqual(index, idxNotif.index) + ); + if (arrIdx === -1) { + console.warn("Modifying nonexistent index"); + return; + } + timebox[arrIdx].notification.read = read; + state.notifications.set(patDa, timebox); +} + +function read(json: 
any, state: HarkState) { + const data = _.get(json, "read", false); + if (data) { + const { time, index } = data; + updateUnreads(state, index, x => x-1); + setRead(time, index, true, state); + } +} + +function unread(json: any, state: HarkState) { + const data = _.get(json, "unread", false); + if (data) { + const { time, index } = data; + updateUnreads(state, index, x => x+1); + setRead(time, index, false, state); + } +} + +function archive(json: any, state: HarkState) { + const data = _.get(json, "archive", false); + if (data) { + const { index } = data; + const time = makePatDa(data.time); + const timebox = state.notifications.get(time); + if (!timebox) { + console.warn("Modifying nonexistent timebox"); + return; + } + const [archived, unarchived] = _.partition(timebox, (idxNotif) => + notifIdxEqual(index, idxNotif.index) + ); + state.notifications.set(time, unarchived); + const archiveBox = state.archivedNotifications.get(time) || []; + const readCount = archived.filter( + ({ notification }) => !notification.read + ).length; + updateUnreads(state, index, x => x - readCount); + state.archivedNotifications.set(time, [ + ...archiveBox, + ...archived.map(({ notification, index }) => ({ + notification: { ...notification, read: true }, + index, + })), + ]); + } +} diff --git a/pkg/interface/src/logic/reducers/invite-update.ts b/pkg/interface/src/logic/reducers/invite-update.ts index bdeba0e4f..30fb61042 100644 --- a/pkg/interface/src/logic/reducers/invite-update.ts +++ b/pkg/interface/src/logic/reducers/invite-update.ts @@ -29,35 +29,35 @@ export default class InviteReducer { create(json: InviteUpdate, state: S) { const data = _.get(json, 'create', false); if (data) { - state.invites[data.path] = {}; + state.invites[data] = {}; } } delete(json: InviteUpdate, state: S) { const data = _.get(json, 'delete', false); if (data) { - delete state.invites[data.path]; + delete state.invites[data]; } } invite(json: InviteUpdate, state: S) { const data = _.get(json, 'invite', false); if (data) { - state.invites[data.path][data.uid] = data.invite; + state.invites[data.term][data.uid] = data.invite; } } accepted(json: InviteUpdate, state: S) { const data = _.get(json, 'accepted', false); if (data) { - delete state.invites[data.path][data.uid]; + delete state.invites[data.term][data.uid]; } } decline(json: InviteUpdate, state: S) { const data = _.get(json, 'decline', false); if (data) { - delete state.invites[data.path][data.uid]; + delete state.invites[data.term][data.uid]; } } } diff --git a/pkg/interface/src/logic/reducers/publish-response.ts b/pkg/interface/src/logic/reducers/publish-response.ts deleted file mode 100644 index 073efa399..000000000 --- a/pkg/interface/src/logic/reducers/publish-response.ts +++ /dev/null @@ -1,205 +0,0 @@ -import _ from 'lodash'; -import { StoreState } from '../../store/type'; -import { Cage } from '~/types/cage'; - -type PublishState = Pick; - -export default class PublishResponseReducer { - reduce(json: Cage, state: S) { - const data = _.get(json, 'publish-response', false); - if (!data) { return; } - switch(data.type) { - case "notebooks": - this.handleNotebooks(data, state); - break; - case "notebook": - this.handleNotebook(data, state); - break; - case "note": - this.handleNote(data, state); - break; - case "notes-page": - this.handleNotesPage(data, state); - break; - case "comments-page": - this.handleCommentsPage(data, state); - break; - default: - break; - } - } - - handleNotebooks(json, state) { - for (var host in state.notebooks) { - if (json.data[host]) { - for 
(var book in state.notebooks[host]) { - if (!json.data[host][book]) { - delete state.notebooks[host][book]; - } - } - } else { - delete state.notebooks[host]; - } - } - - for (var host in json.data) { - if (state.notebooks[host]) { - for (var book in json.data[host]) { - if (state.notebooks[host][book]) { - state.notebooks[host][book]["title"] = json.data[host][book]["title"]; - state.notebooks[host][book]["date-created"] = - json.data[host][book]["date-created"]; - state.notebooks[host][book]["num-notes"] = - json.data[host][book]["num-notes"]; - state.notebooks[host][book]["num-unread"] = - json.data[host][book]["num-unread"]; - } else { - state.notebooks[host][book] = json.data[host][book]; - } - } - } else { - state.notebooks[host] = json.data[host]; - } - } - } - - handleNotebook(json, state) { - if (state.notebooks[json.host]) { - if (state.notebooks[json.host][json.notebook]) { - state.notebooks[json.host][json.notebook]["notes-by-date"] = - json.data.notebook["notes-by-date"]; - state.notebooks[json.host][json.notebook].subscribers = - json.data.notebook.subscribers; - state.notebooks[json.host][json.notebook].writers = - json.data.notebook.writers; - state.notebooks[json.host][json.notebook].comments = - json.data.notebook.comments; - state.notebooks[json.host][json.notebook]["subscribers-group-path"] = - json.data.notebook["subscribers-group-path"]; - state.notebooks[json.host][json.notebook]["writers-group-path"] = - json.data.notebook["writers-group-path"]; - state.notebooks[json.host][json.notebook].about = - json.data.notebook.about; - if (state.notebooks[json.host][json.notebook].notes) { - for (var key in json.data.notebook.notes) { - let oldNote = state.notebooks[json.host][json.notebook].notes[key]; - if (!(oldNote)) { - state.notebooks[json.host][json.notebook].notes[key] = - json.data.notebook.notes[key]; - } else if (!(oldNote.build)) { - state.notebooks[json.host][json.notebook].notes[key]["author"] = - json.data.notebook.notes[key]["author"]; - state.notebooks[json.host][json.notebook].notes[key]["date-created"] = - json.data.notebook.notes[key]["date-created"]; - state.notebooks[json.host][json.notebook].notes[key]["note-id"] = - json.data.notebook.notes[key]["note-id"]; - state.notebooks[json.host][json.notebook].notes[key]["num-comments"] = - json.data.notebook.notes[key]["num-comments"]; - state.notebooks[json.host][json.notebook].notes[key]["title"] = - json.data.notebook.notes[key]["title"]; - } - } - } else { - state.notebooks[json.host][json.notebook].notes = - json.data.notebook.notes; - } - } else { - state.notebooks[json.host][json.notebook] = json.data.notebook; - } - } else { - state.notebooks[json.host] = {[json.notebook]: json.data.notebook}; - } - } - - handleNote(json, state) { - if (state.notebooks[json.host] && - state.notebooks[json.host][json.notebook]) { - state.notebooks[json.host][json.notebook]["notes-by-date"] = - json.data["notes-by-date"]; - if (state.notebooks[json.host][json.notebook].notes) { - for (var key in json.data.notes) { - let oldNote = state.notebooks[json.host][json.notebook].notes[key]; - if (!(oldNote && oldNote.build && key !== json.note)) { - state.notebooks[json.host][json.notebook].notes[key] = - json.data.notes[key]; - } - } - } else { - state.notebooks[json.host][json.notebook].notes = json.data.notes; - } - } else { - throw Error("tried to fetch note, but we don't have the notebook"); - } - } - - handleNotesPage(json, state) { - if (state.notebooks[json.host] && state.notebooks[json.host][json.notebook]) { - 
state.notebooks[json.host][json.notebook]["notes-by-date"] = - json.data["notes-by-date"]; - if (state.notebooks[json.host][json.notebook].notes) { - for (var key in json.data.notes) { - let oldNote = state.notebooks[json.host][json.notebook].notes[key]; - if (!(oldNote)) { - state.notebooks[json.host][json.notebook].notes[key] = - json.data.notes[key]; - } else if (!(oldNote.build)) { - state.notebooks[json.host][json.notebook].notes[key]["author"] = - json.data.notes[key]["author"]; - state.notebooks[json.host][json.notebook].notes[key]["date-created"] = - json.data.notes[key]["date-created"]; - state.notebooks[json.host][json.notebook].notes[key]["note-id"] = - json.data.notes[key]["note-id"]; - state.notebooks[json.host][json.notebook].notes[key]["num-comments"] = - json.data.notes[key]["num-comments"]; - state.notebooks[json.host][json.notebook].notes[key]["title"] = - json.data.notes[key]["title"]; - } - } - } else { - state.notebooks[json.host][json.notebook].notes = - json.data.notes; - } - } else { - throw Error("tried to fetch paginated notes, but we don't have the notebook"); - } - } - - handleCommentsPage(json, state) { - if (state.notebooks[json.host] && - state.notebooks[json.host][json.notebook] && - state.notebooks[json.host][json.notebook].notes[json.note]) - { - if (state.notebooks[json.host][json.notebook].notes[json.note].comments) { - json.data.forEach((val, i) => { - let newKey = Object.keys(val)[0]; - let newDate = val[newKey]["date-created"] - let oldComments = state.notebooks[json.host][json.notebook].notes[json.note].comments; - let insertIdx = -1; - - for (var j=0; j newDate) && - (j === oldComments.length-1)){ - insertIdx = j+1; - } - } - if (insertIdx !== -1) { - state.notebooks[json.host][json.notebook].notes[json.note].comments - .splice(insertIdx, 0, val); - } - }); - } else { - state.notebooks[json.host][json.notebook].notes[json.note].comments = - json.data; - } - } else { - throw Error("tried to fetch paginated comments, but we don't have the note"); - } - } -} diff --git a/pkg/interface/src/logic/reducers/publish-update.ts b/pkg/interface/src/logic/reducers/publish-update.ts deleted file mode 100644 index 3be297737..000000000 --- a/pkg/interface/src/logic/reducers/publish-update.ts +++ /dev/null @@ -1,269 +0,0 @@ -import _ from 'lodash'; - -import { PublishUpdate } from '~/types/publish-update'; -import { Cage } from '~/types/cage'; -import { StoreState } from '../../store/type'; -import { getTagFromFrond } from '~/types/noun'; - -type PublishState = Pick; - - -export default class PublishUpdateReducer { - reduce(data: Cage, state: S){ - let json = data["publish-update"]; - if(!json) { - return; - } - const tag = getTagFromFrond(json); - switch(tag){ - case "add-book": - this.addBook(json["add-book"], state); - break; - case "add-note": - this.addNote(json["add-note"], state); - break; - case "add-comment": - this.addComment(json["add-comment"], state); - break; - case "edit-book": - this.editBook(json["edit-book"], state); - break; - case "edit-note": - this.editNote(json["edit-note"], state); - break; - case "edit-comment": - this.editComment(json["edit-comment"], state); - break; - case "del-book": - this.delBook(json["del-book"], state); - break; - case "del-note": - this.delNote(json["del-note"], state); - break; - case "del-comment": - this.delComment(json["del-comment"], state); - break; - case "read": - this.read(json["read"], state); - break; - default: - break; - } - } - - addBook(json, state: S) { - let host = Object.keys(json)[0]; - let book 
= Object.keys(json[host])[0]; - if (state.notebooks[host]) { - state.notebooks[host][book] = json[host][book]; - } else { - state.notebooks[host] = json[host]; - } - } - - addNote(json, state: S) { - let host = Object.keys(json)[0]; - let book = Object.keys(json[host])[0]; - let noteId = json[host][book]["note-id"]; - if (state.notebooks[host] && state.notebooks[host][book]) { - if (state.notebooks[host][book].notes) { - if (state.notebooks[host][book].notes[noteId] && - state.notebooks[host][book].notes[noteId].pending) - { - state.notebooks[host][book].notes[noteId].pending = false; - return; - } - if (state.notebooks[host][book]["notes-by-date"]) { - state.notebooks[host][book]["notes-by-date"].unshift(noteId); - } else { - state.notebooks[host][book]["notes-by-date"] = [noteId]; - } - state.notebooks[host][book].notes[noteId] = json[host][book]; - } else { - state.notebooks[host][book].notes = {[noteId]: json[host][book]}; - } - state.notebooks[host][book]["num-notes"] += 1; - if (!json[host][book].read) { - state.notebooks[host][book]["num-unread"] += 1; - } - let prevNoteId = state.notebooks[host][book]["notes-by-date"][1] || null; - state.notebooks[host][book].notes[noteId]["prev-note"] = prevNoteId - state.notebooks[host][book].notes[noteId]["next-note"] = null; - if (prevNoteId && state.notebooks[host][book].notes[prevNoteId]) { - state.notebooks[host][book].notes[prevNoteId]["next-note"] = noteId; - } - } - } - - addComment(json, state: S) { - let host = json.host - let book = json.book - let note = json.note - let comment = json.comment; - if (state.notebooks[host] && - state.notebooks[host][book] && - state.notebooks[host][book].notes && - state.notebooks[host][book].notes[note]) - { - - if (state.notebooks[host][book].notes[note].comments) { - let limboCommentIdx = - _.findIndex(state.notebooks[host][book].notes[note].comments, (o) => { - let oldVal = o[getTagFromFrond(o)]; - let newVal = comment[Object.keys(comment)[0]]; - return (oldVal.pending && - (oldVal.author === newVal.author) && - (oldVal.content === newVal.content) - ); - }); - if (limboCommentIdx === -1) { - state.notebooks[host][book].notes[note]["num-comments"] += 1; - state.notebooks[host][book].notes[note].comments.unshift(comment); - } else { - state.notebooks[host][book].notes[note].comments[limboCommentIdx] = - comment; - } - } else if (state.notebooks[host][book].notes[note]["num-comments"] === 1) { - state.notebooks[host][book].notes[note]["num-comments"] += 1; - state.notebooks[host][book].notes[note].comments = [comment]; - } - } - } - - editBook(json, state) { - let host = Object.keys(json)[0]; - let book = Object.keys(json[host])[0]; - if (state.notebooks[host] && state.notebooks[host][book]) { - state.notebooks[host][book]["comments"] = json[host][book]["comments"]; - state.notebooks[host][book]["date-created"] = json[host][book]["date-created"]; - state.notebooks[host][book]["num-notes"] = json[host][book]["num-notes"]; - state.notebooks[host][book]["num-unread"] = json[host][book]["num-unread"]; - state.notebooks[host][book]["title"] = json[host][book]["title"]; - state.notebooks[host][book]["writers-group-path"] = - json[host][book]["writers-group-path"]; - state.notebooks[host][book]["subscribers-group-path"] = - json[host][book]["subscribers-group-path"]; - } - } - - editNote(json, state) { - let host = Object.keys(json)[0]; - let book = Object.keys(json[host])[0]; - let noteId = json[host][book]["note-id"]; - let note = json[host][book]; - if (state.notebooks[host] && - 
state.notebooks[host][book] && - state.notebooks[host][book].notes && - state.notebooks[host][book].notes[noteId]) - { - state.notebooks[host][book].notes[noteId]["author"] = note["author"]; - state.notebooks[host][book].notes[noteId]["build"] = note["build"]; - state.notebooks[host][book].notes[noteId]["file"] = note["file"]; - state.notebooks[host][book].notes[noteId]["title"] = note["title"]; - } - } - - editComment(json, state) { - let host = json.host - let book = json.book - let note = json.note - let comment = json.comment; - let commentId = Object.keys(comment)[0] - if (state.notebooks[host] && - state.notebooks[host][book] && - state.notebooks[host][book].notes && - state.notebooks[host][book].notes[note] && - state.notebooks[host][book].notes[note].comments) - { - let keys = state.notebooks[host][book].notes[note].comments.map((com) => { - return Object.keys(com)[0]; - }); - let index = keys.indexOf(commentId); - if (index > -1) { - state.notebooks[host][book].notes[note].comments[index] = comment; - } - } - } - - delBook(json, state) { - let host = json.host; - let book = json.book; - if (state.notebooks[host]) { - if (state.notebooks[host][book]) { - delete state.notebooks[host][book]; - } - if (Object.keys(state.notebooks[host]).length === 0) { - delete state.notebooks[host]; - } - } - } - - delNote(json, state) { - let host = json.host; - let book = json.book; - let note = json.note; - if (state.notebooks[host] && - state.notebooks[host][book] && - state.notebooks[host][book].notes) - { - if (state.notebooks[host][book].notes[note]) { - state.notebooks[host][book]["num-notes"] -= 1; - if (!state.notebooks[host][book].notes[note].read) { - state.notebooks[host][book]["num-unread"] -= 1; - } - - delete state.notebooks[host][book].notes[note]; - let index = state.notebooks[host][book]["notes-by-date"].indexOf(note); - if (index > -1) { - state.notebooks[host][book]["notes-by-date"].splice(index, 1); - } - - } - if (Object.keys(state.notebooks[host][book].notes).length === 0) { - delete state.notebooks[host][book].notes; - delete state.notebooks[host][book]["notes-by-date"]; - } - } - } - - delComment(json, state) { - let host = json.host - let book = json.book - let note = json.note - let comment = json.comment; - if (state.notebooks[host] && - state.notebooks[host][book] && - state.notebooks[host][book].notes && - state.notebooks[host][book].notes[note]) - { - state.notebooks[host][book].notes[note]["num-comments"] -= 1; - if (state.notebooks[host][book].notes[note].comments) { - let keys = state.notebooks[host][book].notes[note].comments.map((com) => { - return Object.keys(com)[0]; - }); - - let index = keys.indexOf(comment); - if (index > -1) { - state.notebooks[host][book].notes[note].comments.splice(index, 1); - } - } - } - } - - read(json, state){ - let host = json.host; - let book = json.book; - let noteId = json.note - if (state.notebooks[host] && - state.notebooks[host][book] && - state.notebooks[host][book].notes && - state.notebooks[host][book].notes[noteId]) - { - if (!state.notebooks[host][book].notes[noteId]["read"]) { - state.notebooks[host][book].notes[noteId]["read"] = true; - state.notebooks[host][book]["num-unread"] -= 1; - } - } - } - -} diff --git a/pkg/interface/src/logic/store/store.ts b/pkg/interface/src/logic/store/store.ts index 14f523e31..4b80ec83a 100644 --- a/pkg/interface/src/logic/store/store.ts +++ b/pkg/interface/src/logic/store/store.ts @@ -5,15 +5,17 @@ import LocalReducer from '../reducers/local'; import ChatReducer from 
'../reducers/chat-update'; import { StoreState } from './type'; +import { Timebox } from '~/types'; import { Cage } from '~/types/cage'; import ContactReducer from '../reducers/contact-update'; import S3Reducer from '../reducers/s3-update'; import { GraphReducer } from '../reducers/graph-update'; +import { HarkReducer } from '../reducers/hark-update'; import GroupReducer from '../reducers/group-update'; -import PublishUpdateReducer from '../reducers/publish-update'; -import PublishResponseReducer from '../reducers/publish-response'; import LaunchReducer from '../reducers/launch-update'; import ConnectionReducer from '../reducers/connection'; +import {OrderedMap} from '../lib/OrderedMap'; +import { BigIntOrderedMap } from '../lib/BigIntOrderedMap'; export const homeAssociation = { "app-path": "/home", @@ -21,7 +23,7 @@ export const homeAssociation = { "group-path": "/home", metadata: { color: "0x0", - title: "Home", + title: "DMs + Drafts", description: "", "date-created": "", module: "", @@ -37,8 +39,6 @@ export default class GlobalStore extends BaseStore { contactReducer = new ContactReducer(); s3Reducer = new S3Reducer(); groupReducer = new GroupReducer(); - publishUpdateReducer = new PublishUpdateReducer(); - publishResponseReducer = new PublishResponseReducer(); launchReducer = new LaunchReducer(); connReducer = new ConnectionReducer(); @@ -73,7 +73,6 @@ export default class GlobalStore extends BaseStore { chat: {}, contacts: {}, graph: {}, - publish: {} }, groups: {}, groupKeys: new Set(), @@ -98,6 +97,21 @@ export default class GlobalStore extends BaseStore { dark: false, inbox: {}, chatSynced: null, + notifications: new BigIntOrderedMap(), + archivedNotifications: new BigIntOrderedMap(), + notificationsGroupConfig: [], + notificationsChatConfig: [], + notificationsGraphConfig: { + watchOnSelf: false, + mentions: false, + watching: [], + }, + notificationsCount: 0, + unreads: { + graph: {}, + group: {}, + chat: {}, + } }; } @@ -109,10 +123,9 @@ export default class GlobalStore extends BaseStore { this.contactReducer.reduce(data, this.state); this.s3Reducer.reduce(data, this.state); this.groupReducer.reduce(data, this.state); - this.publishUpdateReducer.reduce(data, this.state); - this.publishResponseReducer.reduce(data, this.state); this.launchReducer.reduce(data, this.state); this.connReducer.reduce(data, this.state); GraphReducer(data, this.state); + HarkReducer(data, this.state); } } diff --git a/pkg/interface/src/logic/store/type.ts b/pkg/interface/src/logic/store/type.ts index a7b666b62..88ef6d3ad 100644 --- a/pkg/interface/src/logic/store/type.ts +++ b/pkg/interface/src/logic/store/type.ts @@ -4,13 +4,19 @@ import { Path } from '~/types/noun'; import { Invites } from '~/types/invite-update'; import { Associations } from '~/types/metadata-update'; import { Rolodex } from '~/types/contact-update'; -import { Notebooks } from '~/types/publish-update'; import { Groups } from '~/types/group-update'; import { S3State } from '~/types/s3-update'; import { LaunchState, WeatherState } from '~/types/launch-update'; import { ConnectionStatus } from '~/types/connection'; -import { BackgroundConfig, LocalUpdateRemoteContentPolicy } from '~/types/local-update'; import {Graphs} from '~/types/graph-update'; +import { + Notifications, + NotificationGraphConfig, + GroupNotificationsConfig, + LocalUpdateRemoteContentPolicy, + BackgroundConfig, + Unreads +} from "~/types"; export interface StoreState { // local state @@ -46,11 +52,20 @@ export interface StoreState { userLocation: string | null; // 
publish state - notebooks: Notebooks; + notebooks: any; // Chat state chatInitialized: boolean; chatSynced: ChatHookUpdate | null; inbox: Inbox; pendingMessages: Map; + + archivedNotifications: Notifications; + notifications: Notifications; + notificationsGraphConfig: NotificationGraphConfig; + notificationsGroupConfig: GroupNotificationsConfig; + notificationsChatConfig: string[]; + notificationsCount: number, + doNotDisturb: boolean; + unreads: Unreads; } diff --git a/pkg/interface/src/logic/subscription/global.ts b/pkg/interface/src/logic/subscription/global.ts index d3ad08e9e..4e6d6e1e9 100644 --- a/pkg/interface/src/logic/subscription/global.ts +++ b/pkg/interface/src/logic/subscription/global.ts @@ -51,6 +51,10 @@ export default class GlobalSubscription extends BaseSubscription { this.subscribe('/all', 'launch'); this.subscribe('/all', 'weather'); this.subscribe('/keys', 'graph-store'); + this.subscribe('/updates', 'hark-store'); + this.subscribe('/updates', 'hark-graph-hook'); + this.subscribe('/updates', 'hark-group-hook'); + this.subscribe('/updates', 'hark-chat-hook'); } restart() { diff --git a/pkg/interface/src/types/graph-update.ts b/pkg/interface/src/types/graph-update.ts index c9fb6a19c..2f452f908 100644 --- a/pkg/interface/src/types/graph-update.ts +++ b/pkg/interface/src/types/graph-update.ts @@ -1,30 +1,44 @@ -import {Patp} from "./noun"; +import { Patp } from "./noun"; +import { BigIntOrderedMap } from "~/logic/lib/BigIntOrderedMap"; - -export interface TextContent { text: string; }; -export interface UrlContent { url: string; } -export interface CodeContent { expresssion: string; output: string; }; -export interface ReferenceContent { uid: string; } -export type Content = TextContent | UrlContent | CodeContent | ReferenceContent; +export interface TextContent { + text: string; +} +export interface UrlContent { + url: string; +} +export interface CodeContent { + expresssion: string; + output: string; +} +export interface ReferenceContent { + uid: string; +} +export interface MentionContent { + mention: string; +} +export type Content = + | TextContent + | UrlContent + | CodeContent + | ReferenceContent + | MentionContent; export interface Post { author: Patp; contents: Content[]; - hash?: string; + hash: string | null; index: string; pending?: boolean; signatures: string[]; - 'time-sent': number; + "time-sent": number; } - export interface GraphNode { children: Graph; post: Post; } -export type Graph = Map; +export type Graph = BigIntOrderedMap; export type Graphs = { [rid: string]: Graph }; - - diff --git a/pkg/interface/src/types/hark-update.ts b/pkg/interface/src/types/hark-update.ts new file mode 100644 index 000000000..44935831f --- /dev/null +++ b/pkg/interface/src/types/hark-update.ts @@ -0,0 +1,73 @@ +import _ from "lodash"; +import { Post } from "./graph-update"; +import { GroupUpdate } from "./group-update"; +import { BigIntOrderedMap } from "~/logic/lib/BigIntOrderedMap"; +import { Envelope } from './chat-update'; + +type GraphNotifDescription = "link" | "comment" | "note" | "mention"; + +export interface GraphNotifIndex { + graph: string; + group: string; + description: GraphNotifDescription; + module: string; +} + +export interface GroupNotifIndex { + group: string; + description: string; +} + +export interface ChatNotifIndex { + chat: string; + mention: boolean; +} + +export type NotifIndex = + | { graph: GraphNotifIndex } + | { group: GroupNotifIndex } + | { chat: ChatNotifIndex }; + +export type GraphNotificationContents = Post[]; + +export type 
GroupNotificationContents = GroupUpdate[]; + +export type ChatNotificationContents = Envelope[]; + +export type NotificationContents = + | { graph: GraphNotificationContents } + | { group: GroupNotificationContents } + | { chat: ChatNotificationContents }; + +export interface Notification { + read: boolean; + time: number; + contents: NotificationContents; +} + +export interface IndexedNotification { + index: NotifIndex; + notification: Notification; +} + +export type Timebox = IndexedNotification[]; + +export type Notifications = BigIntOrderedMap; + +export interface NotificationGraphConfig { + watchOnSelf: boolean; + mentions: boolean; + watching: WatchedIndex[] +} + +export interface Unreads { + chat: Record; + group: Record; + graph: Record; +} + +interface WatchedIndex { + graph: string; + index: string; +} +export type GroupNotificationsConfig = string[]; diff --git a/pkg/interface/src/types/index.ts b/pkg/interface/src/types/index.ts index 10135b03d..684763c57 100644 --- a/pkg/interface/src/types/index.ts +++ b/pkg/interface/src/types/index.ts @@ -6,15 +6,11 @@ export * from './contact-update'; export * from './global'; export * from './group-update'; export * from './graph-update'; +export * from './hark-update'; export * from './invite-update'; export * from './launch-update'; -export * from './link-listen-update'; -export * from './link-update'; export * from './local-update'; export * from './metadata-update'; export * from './noun'; -export * from './permission-update'; -export * from './publish-response'; -export * from './publish-update'; export * from './s3-update'; export * from './workspace'; diff --git a/pkg/interface/src/types/publish-response.ts b/pkg/interface/src/types/publish-response.ts deleted file mode 100644 index addc55921..000000000 --- a/pkg/interface/src/types/publish-response.ts +++ /dev/null @@ -1,48 +0,0 @@ -import { Notebooks, Notebook, Note, BookId, NoteId } from './publish-update'; -import { Patp } from './noun'; - -export type PublishResponse = - NotebooksResponse -| NotebookResponse -| NoteResponse -| NotesPageResponse -| CommentsPageResponse; - -interface NotebooksResponse { - type: 'notebooks'; - data: Notebooks; -} - -interface NotebookResponse { - type: 'notebook'; - data: Notebook; - host: Patp; - notebook: BookId; -} - -interface NoteResponse { - type: 'note'; - data: Note; - host: Patp; - notebook: BookId; - note: NoteId; -} - -interface NotesPageResponse { - type: 'notes-page'; - data: Note[]; - host: Patp; - notebook: BookId; - startIndex: number; - length: number; -} - -interface CommentsPageResponse { - type: 'comments-page'; - data: Comment[]; - host: Patp; - notebook: BookId; - note: NoteId; - startIndex: number; - length: number; -} diff --git a/pkg/interface/src/types/publish-update.ts b/pkg/interface/src/types/publish-update.ts deleted file mode 100644 index f3f1e3d4d..000000000 --- a/pkg/interface/src/types/publish-update.ts +++ /dev/null @@ -1,158 +0,0 @@ -import { Patp, PatpNoSig, Path } from './noun'; - - -export type NoteId = string; -export type BookId = string; - - -export type PublishUpdate = - PublishUpdateAddBook -| PublishUpdateAddNote -| PublishUpdateAddComment -| PublishUpdateEditBook -| PublishUpdateEditNote -| PublishUpdateEditComment -| PublishUpdateDelBook -| PublishUpdateDelNote -| PublishUpdateDelComment; - - -type PublishUpdateBook = { - [s in Patp]: { - [b in BookId]: { - title: string; - 'date-created': number; - about: string; - 'num-notes': number; - 'num-unread': number; - comments: boolean; - 
'writers-group-path': Path; - 'subscribers-group-path': Path; - }; - }; -} - -type PublishUpdateNote = { - [s in Patp]: { - [b in BookId]: { - 'note-id': NoteId; - author: Patp; - title: string; - 'date-created': string; - snippet: string; - file: string; - 'num-comments': number; - comments: Comment[]; - read: boolean; - pending: boolean; - }; - }; -}; - -interface PublishUpdateAddBook { - 'add-book': PublishUpdateBook; -} - -interface PublishUpdateEditBook { - 'edit-book': PublishUpdateBook; -} - -interface PublishUpdateDelBook { - 'del-book': { - host: Patp; - book: string; - } -} - -interface PublishUpdateAddNote { - 'add-note': PublishUpdateNote; -} - -interface PublishUpdateEditNote { - 'edit-note': PublishUpdateNote; -} - -interface PublishUpdateDelNote { - 'del-note': { - host: Patp; - book: BookId; - note: NoteId; - } -} - -interface PublishUpdateAddComment { - 'add-comment': { - who: Patp; - host: BookId; - note: NoteId; - body: string; - } -} - -interface PublishUpdateEditComment { - 'edit-comment': { - host: Patp; - book: BookId; - note: NoteId; - body: string; - comment: Comment; - } -} - -interface PublishUpdateDelComment { - 'del-comment': { - host: Patp; - book: BookId; - note: NoteId; - comment: string; - } -} - -export type Notebooks = { - [host in Patp]: { - [book in BookId]: Notebook; - } -} - - -export interface Notebook { - about: string; - comments: boolean; - 'date-created': number; - notes: Notes; - 'notes-by-date': NoteId[]; - 'num-notes': number; - 'num-unread': number; - subscribers: PatpNoSig[]; - 'subscribers-group-path': Path; - title: string; - 'writers-group-path': Path; -} - -export type Notes = { - [id in NoteId]: Note; -}; - -export interface Note { - author: Patp; - comments: Comment[]; - 'date-created': number; - file: string; - 'next-note': NoteId | null; - 'note-id': NoteId; - 'num-comments': number; - pending: boolean; - 'prev-note': NoteId | null; - read: boolean; - snippet: string; - title: string; -} - -export interface Comment { - [date: string]: { - author: Patp; - content: string; - 'date-created': number; - pending: boolean; - }; -} diff --git a/pkg/interface/src/views/App.js b/pkg/interface/src/views/App.js index 87dc0f57a..f3315809c 100644 --- a/pkg/interface/src/views/App.js +++ b/pkg/interface/src/views/App.js @@ -43,7 +43,7 @@ const Root = styled.div` * { scrollbar-width: thin; - scrollbar-color: ${ p => p.theme.colors.gray } ${ p => p.theme.colors.white }; + scrollbar-color: ${ p => p.theme.colors.gray } transparent; } /* Works on Chrome/Edge/Safari */ @@ -125,6 +125,9 @@ class App extends React.Component { const theme = state.dark ? 
dark : light; const { background } = state; + const notificationsCount = state.notificationsCount || 0; + const doNotDisturb = state.doNotDisturb || false; + return ( @@ -143,6 +146,8 @@ class App extends React.Component { connection={this.state.connection} subscription={this.subscription} ship={this.ship} + doNotDisturb={doNotDisturb} + notificationsCount={notificationsCount} /> @@ -150,7 +155,8 @@ class App extends React.Component { associations={state.associations} apps={state.launch} api={this.api} - dark={state.dark} + notifications={state.notificationsCount} + invites={state.invites} groups={state.groups} show={state.omniboxShown} /> diff --git a/pkg/interface/src/views/apps/chat/ChatResource.tsx b/pkg/interface/src/views/apps/chat/ChatResource.tsx index 54b950480..0d8c905f7 100644 --- a/pkg/interface/src/views/apps/chat/ChatResource.tsx +++ b/pkg/interface/src/views/apps/chat/ChatResource.tsx @@ -1,17 +1,16 @@ -import React, { useRef, useCallback } from "react"; -import { RouteComponentProps } from "react-router-dom"; -import { Col } from "@tlon/indigo-react"; +import React, { useRef, useCallback, useEffect } from 'react'; +import { RouteComponentProps } from 'react-router-dom'; +import { Col } from '@tlon/indigo-react'; import _ from 'lodash'; -import { Association } from "~/types/metadata-update"; -import { StoreState } from "~/logic/store/type"; -import { useFileDrag } from "~/logic/lib/useDrag"; -import ChatWindow from "./components/ChatWindow"; -import ChatInput from "./components/ChatInput"; -import GlobalApi from "~/logic/api/global"; -import { deSig } from "~/logic/lib/util"; -import { SubmitDragger } from "~/views/components/s3-upload"; -import { useLocalStorageState } from "~/logic/lib/useLocalStorageState"; +import { Association } from '~/types/metadata-update'; +import { StoreState } from '~/logic/store/type'; +import { useFileDrag } from '~/logic/lib/useDrag'; +import ChatWindow from './components/ChatWindow'; +import ChatInput from './components/ChatInput'; +import GlobalApi from '~/logic/api/global'; +import { SubmitDragger } from '~/views/components/s3-upload'; +import { useLocalStorageState } from '~/logic/lib/useLocalStorageState'; type ChatResourceProps = StoreState & { association: Association; @@ -20,22 +19,22 @@ type ChatResourceProps = StoreState & { } & RouteComponentProps; export function ChatResource(props: ChatResourceProps) { - const station = props.association["app-path"]; + const station = props.association['app-path']; if (!props.chatInitialized) { return null; } - const { envelopes, config } = (props.inbox?.[station]) ? props.inbox[station] : {envelopes: [], config: {}}; + const { envelopes, config } = (props.inbox?.[station]) ? props.inbox[station] : { envelopes: [], config: {} }; const { read, length } = (config) ? 
config : undefined; - const groupPath = props.association["group-path"]; + const groupPath = props.association['group-path']; const group = props.groups[groupPath]; const contacts = props.contacts[groupPath] || {}; const pendingMessages = (props.pendingMessages.get(station) || []).map( - (value) => ({ + value => ({ ...value, - pending: true, + pending: true }) ); @@ -62,7 +61,7 @@ export function ChatResource(props: ChatResourceProps) { const unreadCount = length - read; const unreadMsg = unreadCount > 0 && envelopes[unreadCount - 1]; - const [, owner, name] = station.split("/"); + const [, owner, name] = station.split('/'); const ourContact = contacts?.[window.ship]; const lastMsgNum = envelopes.length || 0; @@ -81,19 +80,28 @@ export function ChatResource(props: ChatResourceProps) { const { bind, dragging } = useFileDrag(onFileDrag); const [unsent, setUnsent] = useLocalStorageState>( - "chat-unsent", + 'chat-unsent', {} ); const appendUnsent = useCallback( - (u: string) => setUnsent((s) => ({ ...s, [station]: u })), + (u: string) => setUnsent(s => ({ ...s, [station]: u })), [station] ); - const clearUnsent = useCallback(() => setUnsent((s) => _.omit(s, station)), [ - station, + const clearUnsent = useCallback(() => setUnsent(s => _.omit(s, station)), [ + station ]); + const scrollTo = new URLSearchParams(location.search).get('msg'); + useEffect(() => { + const clear = () => { + props.history.replace(location.pathname); + }; + setTimeout(clear, 10000); + return clear; + }, [station]); + return ( {dragging && } @@ -118,6 +126,7 @@ export function ChatResource(props: ChatResourceProps) { hideNicknames={props.hideNicknames} hideAvatars={props.hideAvatars} location={props.location} + scrollTo={scrollTo ? parseInt(scrollTo, 10) : undefined} /> diff --git a/pkg/interface/src/views/apps/chat/components/ChatInput.tsx b/pkg/interface/src/views/apps/chat/components/ChatInput.tsx index 7e10e3b53..09ff23904 100644 --- a/pkg/interface/src/views/apps/chat/components/ChatInput.tsx +++ b/pkg/interface/src/views/apps/chat/components/ChatInput.tsx @@ -1,13 +1,13 @@ import React, { Component } from 'react'; import ChatEditor from './chat-editor'; -import { S3Upload, SubmitDragger } from '~/views/components/s3-upload' ; +import { S3Upload } from '~/views/components/s3-upload' ; import { uxToHex } from '~/logic/lib/util'; import { Sigil } from '~/logic/lib/sigil'; import tokenizeMessage, { isUrl } from '~/logic/lib/tokenizeMessage'; import GlobalApi from '~/logic/api/global'; import { Envelope } from '~/types/chat-update'; -import { Contacts, S3Configuration } from '~/types'; -import { Row } from '@tlon/indigo-react'; +import { Contacts } from '~/types'; +import { Row, BaseImage, Box, Icon } from '@tlon/indigo-react'; interface ChatInputProps { api: GlobalApi; @@ -31,7 +31,6 @@ interface ChatInputState { uploadingPaste: boolean; } - export default class ChatInput extends Component { public s3Uploader: React.RefObject; private chatEditor: React.RefObject; @@ -42,7 +41,7 @@ export default class ChatInput extends Component this.state = { inCodeMode: false, submitFocus: false, - uploadingPaste: false, + uploadingPaste: false }; this.s3Uploader = React.createRef(); @@ -50,7 +49,6 @@ export default class ChatInput extends Component this.submit = this.submit.bind(this); this.toggleCode = this.toggleCode.bind(this); - } toggleCode() { @@ -82,8 +80,6 @@ export default class ChatInput extends Component } } - - submit(text) { const { props, state } = this; if (state.inCodeMode) { @@ -134,7 +130,6 @@ export default class 
ChatInput extends Component { url } ); } - } uploadError(error) { @@ -159,10 +154,11 @@ export default class ChatInput extends Component if (!this.readyToUpload()) { return; } - if (!this.s3Uploader.current || !this.s3Uploader.current.inputRef.current) return; + if (!this.s3Uploader.current || !this.s3Uploader.current.inputRef.current) +return; this.s3Uploader.current.inputRef.current.files = files; - const fire = document.createEvent("HTMLEvents"); - fire.initEvent("change", true, true); + const fire = document.createEvent('HTMLEvents'); + fire.initEvent('change', true, true); this.s3Uploader.current?.inputRef.current?.dispatchEvent(fire); } @@ -179,7 +175,7 @@ export default class ChatInput extends Component props.ourContact && ((props.ourContact.avatar !== null) && !props.hideAvatars) ) - ? + ? : className='cf' zIndex='0' > -
+ {avatar} -
+ onPaste={this.onPaste.bind(this)} placeholder='Message...' /> -
+ uploadError={this.uploadError.bind(this)} accept="*" > - -
-
- + + -
+ color={state.inCodeMode ? 'blue' : 'black'} + /> +
); } diff --git a/pkg/interface/src/views/apps/chat/components/ChatMessage.tsx b/pkg/interface/src/views/apps/chat/components/ChatMessage.tsx index 1cf993ddf..4299d39bc 100644 --- a/pkg/interface/src/views/apps/chat/components/ChatMessage.tsx +++ b/pkg/interface/src/views/apps/chat/components/ChatMessage.tsx @@ -18,24 +18,21 @@ export const UnreadMarker = React.forwardRef(({ dayBreak, when }, ref) => ( New messages below - {dayBreak - ? {moment(when).calendar()} - : null} )); export const DayBreak = ({ when }) => (
-

{moment(when).calendar()}

+

{moment(when).calendar(null, { sameElse: DATESTAMP_FORMAT })}

); interface ChatMessageProps { measure(element): void; msg: Envelope | IMessage; - previousMsg: Envelope | IMessage | undefined; - nextMsg: Envelope | IMessage | undefined; + previousMsg?: Envelope | IMessage; + nextMsg?: Envelope | IMessage; isLastRead: boolean; group: Group; association: Association; @@ -51,6 +48,7 @@ interface ChatMessageProps { unreadMarkerRef: React.RefObject; history: any; api: any; + highlighted?: boolean; } export default class ChatMessage extends Component { @@ -87,14 +85,16 @@ export default class ChatMessage extends Component { isLastMessage, unreadMarkerRef, history, - api + api, + highlighted, + fontSize } = this.props; const renderSigil = Boolean((nextMsg && msg.author !== nextMsg.author) || !nextMsg || msg.number === 1); const dayBreak = nextMsg && new Date(msg.when).getDate() !== new Date(nextMsg.when).getDate(); const containerClass = `${renderSigil - ? `cf pt2 pl3 lh-copy` + ? `cf pl2 lh-copy` : `items-top cf hide-child`} ${isPending ? 'o-40' : ''} ${className}` const timestamp = moment.unix(msg.when / 1000).format(renderSigil ? 'hh:mm a' : 'hh:mm'); @@ -118,7 +118,9 @@ export default class ChatMessage extends Component { isPending, history, api, - scrollWindow + scrollWindow, + highlighted, + fontSize }; const unreadContainerStyle = { @@ -127,9 +129,11 @@ export default class ChatMessage extends Component { return ( { + isDark = window.matchMedia('(prefers-color-scheme: dark)').matches; + render() { const { msg, @@ -178,17 +184,18 @@ export class MessageWithSigil extends PureComponent { hideAvatars, remoteContentPolicy, measure, - history, api, - scrollWindow + history, + scrollWindow, + fontSize } = this.props; const datestamp = moment.unix(msg.when / 1000).format(DATESTAMP_FORMAT); const contact = msg.author in contacts ? contacts[msg.author] : false; const showNickname = !hideNicknames && contact && contact.nickname; const name = showNickname ? contact.nickname : cite(msg.author); - const color = contact ? `#${uxToHex(contact.color)}` : '#000000'; - const sigilClass = contact ? '' : 'mix-blend-diff'; + const color = contact ? `#${uxToHex(contact.color)}` : this.isDark ? '#000000' :'#FFFFFF' + const sigilClass = contact ? '' : this.isDark ? 'mix-blend-diff' : 'mix-blend-darken'; let nameSpan = null; @@ -215,7 +222,7 @@ export class MessageWithSigil extends PureComponent { scrollWindow={scrollWindow} history={history} api={api} - className="fl pr3 v-top bg-white bg-gray0-d pt1" + className="fl pr3 v-top pt1" /> { fontSize={0} mr={3} mono={!showNickname} + fontWeight={(showNickname) ? '500' : '400'} className={`mw5 db truncate pointer`} ref={e => nameSpan = e} onClick={() => { @@ -240,7 +248,7 @@ export class MessageWithSigil extends PureComponent { {timestamp} {datestamp} - + ); @@ -249,19 +257,19 @@ export class MessageWithSigil extends PureComponent { export const MessageWithoutSigil = ({ timestamp, msg, remoteContentPolicy, measure }) => ( <> - {timestamp} + {timestamp} ); -export const MessageContent = ({ content, remoteContentPolicy, measure }) => { +export const MessageContent = ({ content, remoteContentPolicy, measure, fontSize }) => { if ('code' in content) { return ; } else if ('url' in content) { return ( - + { ); } else if ('me' in content) { return ( - + {content.me} ); } else if ('text' in content) { - return ; + return ; } else { return null; } }; export const MessagePlaceholder = ({ height, index, className = '', style = {}, ...props }) => ( -
-
- + + -
-
-
-

- -

-

-

-
- -
-
+ >
+
+ + + + + + + + + + + + + + + + + ); diff --git a/pkg/interface/src/views/apps/chat/components/ChatWindow.tsx b/pkg/interface/src/views/apps/chat/components/ChatWindow.tsx index 31b0ab6d0..ee56242f7 100644 --- a/pkg/interface/src/views/apps/chat/components/ChatWindow.tsx +++ b/pkg/interface/src/views/apps/chat/components/ChatWindow.tsx @@ -43,6 +43,7 @@ type ChatWindowProps = RouteComponentProps<{ hideNicknames: boolean; hideAvatars: boolean; remoteContentPolicy: LocalUpdateRemoteContentPolicy; + scrollTo?: number; } interface ChatWindowState { @@ -84,6 +85,10 @@ export default class ChatWindow extends Component { + if(this.props.scrollTo) { + this.scrollToUnread(); + } + this.setState({ initialized: true }); }, this.INITIALIZATION_MAX_TIME); } @@ -167,14 +172,16 @@ export default class ChatWindow extends Component { @@ -297,7 +304,8 @@ export default class ChatWindow extends Component { if (!props.isChatLoading) { return null; } return ( -
-
- + + -

Past messages are being restored

-
-
+ Past messages are being restored + + ); -} +}; diff --git a/pkg/interface/src/views/apps/chat/components/chat-editor.js b/pkg/interface/src/views/apps/chat/components/chat-editor.js index 936bd3989..2bf49bf7e 100644 --- a/pkg/interface/src/views/apps/chat/components/chat-editor.js +++ b/pkg/interface/src/views/apps/chat/components/chat-editor.js @@ -186,6 +186,7 @@ export default class ChatEditor extends Component { {...props} /> : this.messageChange(e, d, v)} diff --git a/pkg/interface/src/views/apps/chat/components/content/code.js b/pkg/interface/src/views/apps/chat/components/content/code.js index 3a133e06d..49808a394 100644 --- a/pkg/interface/src/views/apps/chat/components/content/code.js +++ b/pkg/interface/src/views/apps/chat/components/content/code.js @@ -15,12 +15,12 @@ export default class CodeContent extends Component { mono p='1' my='0' - fontSize='14px' + borderRadius='1' overflow='auto' maxHeight='10em' maxWidth='100%' style={{ whiteSpace: 'pre' }} - backgroundColor='scales.black10' + backgroundColor='washedGray' > {content.code.output[0].join('\n')} @@ -33,7 +33,7 @@ export default class CodeContent extends Component { mono my='0' p='1' - fontSize='14px' + borderRadius='1' overflow='auto' maxHeight='10em' maxWidth='100%' diff --git a/pkg/interface/src/views/apps/chat/components/content/text.js b/pkg/interface/src/views/apps/chat/components/content/text.js index d15b14ba9..26ec452ac 100644 --- a/pkg/interface/src/views/apps/chat/components/content/text.js +++ b/pkg/interface/src/views/apps/chat/components/content/text.js @@ -24,10 +24,30 @@ const DISABLED_INLINE_TOKENS = [ 'reference' ]; +const renderers = { + inlineCode: ({language, value}) => { + return {value} + }, + code: ({language, value}) => { + return + {value} + + } +}; + const MessageMarkdown = React.memo(props => ( { if ( node.type === 'blockquote' @@ -63,7 +83,7 @@ export default class TextContent extends Component { && (urbitOb.isValidPatp(group[2]) // valid patp? && (group[0] === content.text))) { // entire message is room name? return ( - + @@ -73,7 +93,7 @@ export default class TextContent extends Component { ); } else { return ( - + ); diff --git a/pkg/interface/src/views/apps/chat/components/overlay-sigil.js b/pkg/interface/src/views/apps/chat/components/overlay-sigil.js index ad64cdbba..6194b1631 100644 --- a/pkg/interface/src/views/apps/chat/components/overlay-sigil.js +++ b/pkg/interface/src/views/apps/chat/components/overlay-sigil.js @@ -4,6 +4,7 @@ import { ProfileOverlay, OVERLAY_HEIGHT } from './profile-overlay'; +import { Box, BaseImage } from '@tlon/indigo-react'; export class OverlaySigil extends PureComponent { constructor() { @@ -58,7 +59,7 @@ export class OverlaySigil extends PureComponent { const { hideAvatars } = props; const img = (props.contact && (props.contact.avatar !== null) && !hideAvatars) - ? + ? : ; return ( -
{state.profileClicked && ( @@ -91,7 +94,7 @@ export class OverlaySigil extends PureComponent { /> )} {img} -
+ ); } } diff --git a/pkg/interface/src/views/apps/chat/components/profile-overlay.js b/pkg/interface/src/views/apps/chat/components/profile-overlay.js index 2d9708cc8..ef6303c14 100644 --- a/pkg/interface/src/views/apps/chat/components/profile-overlay.js +++ b/pkg/interface/src/views/apps/chat/components/profile-overlay.js @@ -2,7 +2,7 @@ import React, { PureComponent } from 'react'; import { cite } from '~/logic/lib/util'; import { Sigil } from '~/logic/lib/sigil'; -import { Box, Col, Button, Text } from "@tlon/indigo-react"; +import { Box, Col, Button, Text, BaseImage } from '@tlon/indigo-react'; export const OVERLAY_HEIGHT = 250; @@ -51,8 +51,8 @@ export class ProfileOverlay extends PureComponent { const isOwn = window.ship === ship; - let img = contact?.avatar && !hideAvatars - ? + const img = contact?.avatar && !hideAvatars + ? : (isHidden) ? history.push('/~profile/identity') : history.push(`${history.location.pathname}/popover/profile`)} > Edit Identity diff --git a/pkg/interface/src/views/apps/chat/components/resubscribe-element.js b/pkg/interface/src/views/apps/chat/components/resubscribe-element.js index 6ac875705..d12e39332 100644 --- a/pkg/interface/src/views/apps/chat/components/resubscribe-element.js +++ b/pkg/interface/src/views/apps/chat/components/resubscribe-element.js @@ -1,4 +1,5 @@ import React, { Component } from 'react'; +import { Box, Text, Button } from '@tlon/indigo-react'; export class ResubscribeElement extends Component { onClickResubscribe() { @@ -9,21 +10,23 @@ export class ResubscribeElement extends Component { } render() { - const { props } = this; + const { props } = this; if (props.isChatUnsynced) { return ( -
+ + ); } else { return null; diff --git a/pkg/interface/src/views/apps/chat/css/custom.css b/pkg/interface/src/views/apps/chat/css/custom.css index bc3f69166..898422b79 100644 --- a/pkg/interface/src/views/apps/chat/css/custom.css +++ b/pkg/interface/src/views/apps/chat/css/custom.css @@ -87,24 +87,14 @@ h2 { mix-blend-mode: difference; } +.mix-blend-darken { + mix-blend-mode: darken; +} + .placeholder-inter::placeholder { font-family: "Inter", sans-serif; } -/* spinner */ - -.spin-active { - animation: spin 2s infinite; -} - -@keyframes spin { - 0% {transform: rotate(0deg);} - 25% {transform: rotate(90deg);} - 50% {transform: rotate(180deg);} - 75% {transform: rotate(270deg);} - 100% {transform: rotate(360deg);} -} - .embed-container iframe { max-width: 100%; } @@ -225,16 +215,7 @@ blockquote { font-size: 14px; } -pre, code { - background-color: var(--light-gray); -} - -pre code { - background-color: transparent; - white-space: pre-wrap; -} - -code, .code, .chat.code .react-codemirror2 .CodeMirror * { +.chat.code .react-codemirror2 .CodeMirror * { font-family: 'Source Code Pro'; } diff --git a/pkg/interface/src/views/apps/dojo/components/input.js b/pkg/interface/src/views/apps/dojo/components/input.js deleted file mode 100644 index a3a910541..000000000 --- a/pkg/interface/src/views/apps/dojo/components/input.js +++ /dev/null @@ -1,119 +0,0 @@ -import React, { Component } from 'react'; -import { cite } from '~/logic/lib/util'; -import { Spinner } from '~/views/components/Spinner'; - -export class Input extends Component { - constructor(props) { - super(props); - this.state = { - awaiting: false, - type: 'Sending to Dojo' - }; - this.keyPress = this.keyPress.bind(this); - this.inputRef = React.createRef(); - } - - componentDidUpdate() { - if ( - !document.activeElement == document.body - || document.activeElement == this.inputRef.current - ) { - this.inputRef.current.focus(); - this.inputRef.current.setSelectionRange(this.props.cursor, this.props.cursor); - } - } - - keyPress(e) { - if ((e.getModifierState('Control') || event.getModifierState('Meta')) - && e.key === 'v') { - return; - } - - e.preventDefault(); - - const allowedKeys = [ - 'Enter', 'Backspace', 'ArrowLeft', 'ArrowRight', 'Tab' - ]; - - if ((e.key.length > 1) && (!(allowedKeys.includes(e.key)))) { - return; - } - - // submit on enter - if (e.key === 'Enter') { - this.setState({ awaiting: true, type: 'Sending to Dojo' }); - this.props.api.soto('ret').then(() => { - this.setState({ awaiting: false }); - }); - } else if ((e.key === 'Backspace') && (this.props.cursor > 0)) { - this.props.store.doEdit({ del: this.props.cursor - 1 }); - return this.props.store.setState({ cursor: this.props.cursor - 1 }); - } else if (e.key === 'Backspace') { - return; - } else if (e.key.startsWith('Arrow')) { - if (e.key === 'ArrowLeft') { - if (this.props.cursor > 0) { - this.props.store.setState({ cursor: this.props.cursor - 1 }); - } - } else if (e.key === 'ArrowRight') { - if (this.props.cursor < this.props.input.length) { - this.props.store.setState({ cursor: this.props.cursor + 1 }); - } - } - } - - // tab completion - else if (e.key === 'Tab') { - this.setState({ awaiting: true, type: 'Getting suggestions' }); - this.props.api.soto({ tab: this.props.cursor }).then(() => { - this.setState({ awaiting: false }); - }); - } - - // capture and transmit most characters - else { - this.props.store.doEdit({ ins: { cha: e.key, at: this.props.cursor } }); - this.props.store.setState({ cursor: this.props.cursor + 1 }); - } - } - - render() { - return ( -
-
{cite(this.props.ship)}:dojo -
- - {this.props.prompt} - - this.props.store.setState({ cursor: e.target.selectionEnd })} - onKeyDown={this.keyPress} - onPaste={(e) => { - const clipboardData = e.clipboardData || window.clipboardData; - const paste = Array.from(clipboardData.getData('Text')); - paste.reduce(async (previous, next) => { - await previous; - this.setState({ cursor: this.props.cursor + 1 }); - return this.props.store.doEdit({ ins: { cha: next, at: this.props.cursor } }); - }, Promise.resolve()); - e.preventDefault(); - }} - ref={this.inputRef} - defaultValue={this.props.input} - /> - -
- ); - } -} - -export default Input; diff --git a/pkg/interface/src/views/apps/dojo/components/lib/sole.js b/pkg/interface/src/views/apps/dojo/components/lib/sole.js deleted file mode 100644 index 368ceae25..000000000 --- a/pkg/interface/src/views/apps/dojo/components/lib/sole.js +++ /dev/null @@ -1,157 +0,0 @@ -// See /lib/sole/hoon - -const str = JSON.stringify; - -export class Share { - constructor(buf, ven, leg) { - if (buf == null) { - buf = ''; - } - this.buf = buf; - if (ven == null) { - ven = [0, 0]; - } - this.ven = ven; - if (leg == null) { - leg = []; - } - this.leg = leg; - } - - abet() { - return { - buf: this.buf, - leg: this.leg.slice(), - ven: this.ven.slice() - }; - } - - apply(ted) { - switch (false) { - case 'nop' !== ted: return; - case !ted.map: return ted.map(this.apply, this); - default: switch (Object.keys(ted)[0]) { - case 'set': return this.buf = ted.set; - case 'del': return this.buf = this.buf.slice(0, ted.del) + this.buf.slice(ted.del + 1); - case 'ins': - var { at, cha } = ted.ins; - return this.buf = this.buf.slice(0, at) + cha + this.buf.slice(at); - default: throw `%sole-edit -lost.${str(ted)}`; - } - } - } - - transmute(sin, dex) { - switch (false) { - case (sin !== 'nop') && (dex !== 'nop'): return dex; - case !sin.reduce: - return sin.reduce(((dex, syn) => this.transmute(syn, dex)), dex); - case !dex.map: return dex.map(dax => this.transmute(sin, dax)); - case dex.set === undefined: return dex; - default: switch (Object.keys(sin)[0]) { - case 'set': return 'nop'; - case 'del': - if (sin.del === dex.del) { - return 'nop'; - } - dex = { ...dex }; - switch (Object.keys(dex)[0]) { - case 'del': if (sin.del < dex.del) { - dex.del--; - } - break; - case 'ins': if (sin.del < dex.ins.at) { - dex.ins.at--; - } - break; - } - return dex; - case 'ins': - dex = { ...dex }; - var { at, cha } = sin.ins; - switch (Object.keys(dex)[0]) { - case 'del': if (at < dex.del) { - dex.del++; - } - break; - case 'ins': if ((at < dex.ins.at) || - ((at === dex.ins.at) && !(cha <= dex.ins.cha))) { - dex.ins.at++; - } else if (at >= dex.ins.at) { - dex.ins.at = at; // NOTE possibly unpredictable behaviour - dex.ins.at++; // for sole inserts that aren't tabs - } - break; - } - return dex; - default: throw `%sole-edit -lost.${str(sin)}`; - } - } - } - - commit(ted) { - this.ven[0]++; - this.leg.push(ted); - return this.apply(ted); - } - - inverse(ted) { - switch (false) { - case 'nop' !== ted: return ted; - case !ted.map: - return ted.map((tad) => { - const res = this.inverse(tad); - this.apply(tad); - return res; - }).reverse(); - default: switch (Object.keys(ted)[0]) { - case 'set': return { set: this.buf }; - case 'ins': return { del: ted.ins }; - case 'del': return { ins: { at: ted.del, cha: this.buf[ted.del] } }; - default: throw `%sole-edit -lost.${str(ted)}`; - } - } - } - - receive({ ler, ted }) { - if (!(ler[1] === this.ven[1])) { - throw `-out-of-sync.[${str(ler)} ${str(this.ven)}]`; - } - this.leg = this.leg.slice((this.leg.length + ler[0]) - this.ven[0]); - const dat = this.transmute(this.leg, ted); - this.ven[1]++; - this.apply(dat); - return dat; - } - - remit() { - throw 'stub'; - } - - transmit(ted) { - const act = { ted, ler: [this.ven[1], this.ven[0]] }; - this.commit(ted); - return act; - } - - transceive({ ler, ted }) { - const old = new Share(this.buf); - const dat = this.receive({ ler, ted }); - return old.inverse(dat); - } - - transpose(ted, pos) { - if (pos === undefined) { - return this.transpose(this.leg, ted); - } else { - let left; - return ((left = - 
(this.transmute( - ted, { ins: { at: pos } })).ins) != null ? - left : { at: 0 } - ).at; - } - } -}; - -export default Share; diff --git a/pkg/interface/src/views/apps/dojo/css/custom.css b/pkg/interface/src/views/apps/dojo/css/custom.css deleted file mode 100644 index fd3c5b6cf..000000000 --- a/pkg/interface/src/views/apps/dojo/css/custom.css +++ /dev/null @@ -1,11 +0,0 @@ -input#dojo { - background-color: inherit; - color: inherit; -} - -/* responsive */ -@media all and (max-width: 34.375em) { - .h-100-m40-s { - height: calc(100% - 40px); - } -} diff --git a/pkg/interface/src/views/apps/dojo/store.js b/pkg/interface/src/views/apps/dojo/store.js deleted file mode 100644 index c5a755da8..000000000 --- a/pkg/interface/src/views/apps/dojo/store.js +++ /dev/null @@ -1,95 +0,0 @@ -import Share from './components/lib/sole'; -export default class Store { - constructor() { - this.state = this.initialState(); - this.sync = this.sync.bind(this); - this.print = this.print.bind(this); - this.buffer = new Share(); - } - - initialState() { - return { - txt: [], - prompt: '', - cursor: 0, - input: '' - }; - } - - clear() { - this.handleEvent({ - data: { clear: true } - }); - } - - handleEvent(data) { - // recursive handler - if (data.data) { - var dojoReply = data.data; - } else { - var dojoReply = data; - } - - if (dojoReply.clear) { - this.setState(this.initialState(), (() => { - return; - })); - } - - // %mor sole-effects are nested, so throw back to handler - if (dojoReply.map) { - return dojoReply.map(reply => this.handleEvent(reply)); - } - - switch (Object.keys(dojoReply)[0]) { - case 'txt': - return this.print(dojoReply.txt); - case 'tab': - this.print(dojoReply.tab.match + ' ' + dojoReply.tab.info); - return; - case 'tan': - return dojoReply.tan.split('\n').map(this.print); - case 'pro': - return this.setState({ prompt: dojoReply.pro.cad }); - case 'hop': - return this.setState({ cursor: dojoReply.hop }); - case 'det': - this.buffer.receive(dojoReply.det); - return this.sync(dojoReply.det.ted); - case 'act': - switch (dojoReply.act) { - case 'clr': return this.setState({ txt: [] }); - case 'nex': return this.setState({ - input: '', - cursor: 0 - }); - } - break; - default: console.log(dojoReply); - } - } - - doEdit(ted) { - const detSend = this.buffer.transmit(ted); - this.sync(ted); - return this.api.soto({ det: detSend }); - } - - print(txt) { - const textLog = this.state.txt; - textLog.push(txt); - return this.setState({ txt: textLog }); - } - - sync(ted) { - return this.setState({ - input: this.buffer.buf, - cursor: this.buffer.transpose(ted, this.state.cursor) - }); - } - - setStateHandler(setState) { - this.setState = setState; - } -} - diff --git a/pkg/interface/src/views/apps/graph/app.js b/pkg/interface/src/views/apps/graph/app.js index bfe47dc91..9e1112713 100644 --- a/pkg/interface/src/views/apps/graph/app.js +++ b/pkg/interface/src/views/apps/graph/app.js @@ -25,6 +25,10 @@ export default class GraphApp extends PureComponent { render={ (props) => { const resource = `${deSig(props.match.params.ship)}/${props.match.params.name}`; + const { ship, name } = props.match.params; + const path = `/ship/~${deSig(ship)}/${name}`; + const association = associations.graph[path]; + const autoJoin = () => { try { @@ -33,13 +37,7 @@ export default class GraphApp extends PureComponent { props.match.params.name ); - if (props.match.params.module) { - props.history.push( - `/~${props.match.params.module}/${resource}` - ); - } else { - props.history.push('/'); - } + } catch(err) { setTimeout(autoJoin, 
2000); } @@ -47,8 +45,8 @@ export default class GraphApp extends PureComponent { if(!graphKeys.has(resource)) { autoJoin(); - } else if(props.match.params.module) { - props.history.push(`/~${props.match.params.module}/${resource}`); + } else if(!!association) { + props.history.push(`/~landscape/home/resource/${association.metadata.module}${path}`); } return (
diff --git a/pkg/interface/src/views/apps/launch/app.js b/pkg/interface/src/views/apps/launch/app.js index 0d85f11f5..4d7eea7e3 100644 --- a/pkg/interface/src/views/apps/launch/app.js +++ b/pkg/interface/src/views/apps/launch/app.js @@ -1,100 +1,94 @@ -import React from 'react'; +import React, { useState } from 'react'; import Helmet from 'react-helmet'; +import styled from 'styled-components'; -import { Box, Row, Icon, Text, Center } from '@tlon/indigo-react'; -import { uxToHex, adjustHex } from '~/logic/lib/util'; +import { Box, Row, Icon, Text } from '@tlon/indigo-react'; import './css/custom.css'; -import { Sigil } from '~/logic/lib/sigil'; import Tiles from './components/tiles'; import Tile from './components/tiles/tile'; import Welcome from './components/welcome'; import Groups from './components/Groups'; +import { writeText } from '~/logic/lib/util'; -export default class LaunchApp extends React.Component { - componentDidMount() { - // preload spinner asset - new Image().src = '/~landscape/img/Spinner.png'; +const ScrollbarLessBox = styled(Box)` + scrollbar-width: none !important; + + ::-webkit-scrollbar { + display: none; } +`; - render() { - const { props } = this; - const contact = props.contacts?.['/~/default']?.[window.ship]; - const sigilColor = contact?.color - ? `#${uxToHex(contact.color)}` - : props.dark - ? '#FFFFFF' - : '#000000'; +export default function LaunchApp(props) { + const [hashText, setHashText] = useState(props.baseHash); - return ( - <> - - OS1 - Home - - - - - - - - - Home - - - - -
+ return ( + <> + + OS1 - Home + + + - {props.baseHash} + + + + + DMs + Drafts + + + + + + - - ); - } + + { + writeText(props.baseHash); + setHashText('copied'); + setTimeout(() => { + setHashText(props.baseHash); + }, 2000); + }} + > + {hashText || props.baseHash} + + + ); } - diff --git a/pkg/interface/src/views/apps/launch/components/Groups.tsx b/pkg/interface/src/views/apps/launch/components/Groups.tsx index 4f9302741..98692be60 100644 --- a/pkg/interface/src/views/apps/launch/components/Groups.tsx +++ b/pkg/interface/src/views/apps/launch/components/Groups.tsx @@ -1,11 +1,11 @@ import React from "react"; -import { Box, Text } from "@tlon/indigo-react"; -import { Link } from "react-router-dom"; +import { Box, Text, Col } from "@tlon/indigo-react"; +import f from "lodash/fp"; +import _ from "lodash"; -import { useLocalStorageState } from "~/logic/lib/useLocalStorageState"; -import { Associations, Association } from "~/types"; +import { Associations, Association, Unreads } from "~/types"; import { alphabeticalOrder } from "~/logic/lib/util"; -import Tile from '../components/tiles/tile'; +import Tile from "../components/tiles/tile"; interface GroupsProps { associations: Associations; @@ -14,72 +14,59 @@ interface GroupsProps { const sortGroupsAlph = (a: Association, b: Association) => alphabeticalOrder(a.metadata.title, b.metadata.title); -export default function Groups(props: GroupsProps & Parameters[0]) { - const { associations, invites, api, ...boxProps } = props; +const getKindUnreads = (associations: Associations) => (path: string) => ( + kind: "chat" | "graph" +): ((unreads: Unreads) => number) => + f.flow( + (x) => x[kind], + f.pickBy((_v, key) => associations[kind]?.[key]?.["group-path"] === path), + f.values, + f.reduce(f.add, 0) + ); - const incomingGroups = Object.values(invites?.['/contacts'] || {}); - const getKeyByValue = (object, value) => { - return Object.keys(object).find(key => object[key] === value); - } +export default function Groups(props: GroupsProps & Parameters[0]) { + const { associations, unreads, ...boxProps } = props; const groups = Object.values(associations?.contacts || {}) + .filter((e) => e?.["group-path"] in props.groups) .sort(sortGroupsAlph); - - const acceptInvite = (invite) => { - const [, , ship, name] = invite.path.split('/'); - const resource = { ship, name }; - return api.contacts.join(resource).then(() => { - api.invite.accept('/contacts', getKeyByValue(invites['/contacts'], invite)); - }); - }; + const getUnreads = getKindUnreads(associations || {}); return ( - - {incomingGroups.map((invite) => ( - - You have been invited to: - {invite.path.slice(6)} - - acceptInvite(invite)} - color='blue' - mr='2' - cursor='pointer'> - Accept - - api.invite.decline('/contacts', getKeyByValue(invites['/contacts'], invite))} - cursor='pointer'> - Reject - - - - ))} - {groups.map((group) => ( - - {group.metadata.title} - - ))} - + <> + {groups.map((group) => { + const path = group?.["group-path"]; + const unreadCount = (["chat", "graph"] as const) + .map(getUnreads(path)) + .map((f) => f(unreads)) + .reduce(f.add, 0); + return ( + + ); + })} + + ); +} + +interface GroupProps { + path: string; + title: string; + unreads: number; +} +function Group(props: GroupProps) { + const { path, title, unreads } = props; + return ( + + + {title} + {unreads > 0 && + ({unreads} update{unreads !== 1 && 's'} ) + } + + ); } diff --git a/pkg/interface/src/views/apps/launch/components/tiles/basic.js b/pkg/interface/src/views/apps/launch/components/tiles/basic.js index 
c511432a8..6206dd78b 100644 --- a/pkg/interface/src/views/apps/launch/components/tiles/basic.js +++ b/pkg/interface/src/views/apps/launch/components/tiles/basic.js @@ -9,11 +9,11 @@ export default class BasicTile extends React.PureComponent { return ( - - {props.title === 'Dojo' + + {props.title === 'Terminal' ? { + { const d = [ 'M', CX, CY, 'L', x1, y1, - 'A', RADIUS, RADIUS, '0', (isLarge ? '1' : '0'), '1', x2, y2, 'z' + 'A', RADIUS, RADIUS, '0', '1', '1', x2, y2, 'z' ].join(' '); return ; diff --git a/pkg/interface/src/views/apps/launch/components/tiles/tile.js b/pkg/interface/src/views/apps/launch/components/tiles/tile.js index 3d6d8d966..348234d5e 100644 --- a/pkg/interface/src/views/apps/launch/components/tiles/tile.js +++ b/pkg/interface/src/views/apps/launch/components/tiles/tile.js @@ -1,8 +1,24 @@ import React from 'react'; import { Link } from 'react-router-dom'; +import styled from 'styled-components'; + import defaultApps from '~/logic/lib/default-apps'; -import { Box, DisclosureBox } from "@tlon/indigo-react"; +import { Box } from "@tlon/indigo-react"; + +const SquareBox = styled(Box)` + &::before { + content: ""; + display: inline-block; + width: 1px; + height: 0; + padding-bottom: 100%; + } + & > * { + position: absolute; + top: 0; + } +`; const routeList = defaultApps.map(a => `/~${a}`); export default class Tile extends React.Component { @@ -26,11 +42,11 @@ export default class Tile extends React.Component { return ( - {childElement} - + ); } } diff --git a/pkg/interface/src/views/apps/launch/components/tiles/weather.js b/pkg/interface/src/views/apps/launch/components/tiles/weather.js index 64140b271..b973e3729 100644 --- a/pkg/interface/src/views/apps/launch/components/tiles/weather.js +++ b/pkg/interface/src/views/apps/launch/components/tiles/weather.js @@ -23,7 +23,7 @@ export default class WeatherTile extends React.Component { }, (err) => { console.log(err); }, { maximumAge: Infinity, timeout: 10000 }); - this.props.api.weather(latlng); + this.props.api.launch.weather(latlng); this.setState({ manualEntry: !this.state.manualEntry }); }); } diff --git a/pkg/interface/src/views/apps/links/LinkResource.tsx b/pkg/interface/src/views/apps/links/LinkResource.tsx index 571f9b2d1..e269ac012 100644 --- a/pkg/interface/src/views/apps/links/LinkResource.tsx +++ b/pkg/interface/src/views/apps/links/LinkResource.tsx @@ -1,6 +1,7 @@ import React, { useEffect } from "react"; import { Box, Row, Col, Center, LoadingSpinner } from "@tlon/indigo-react"; import { Switch, Route, Link } from "react-router-dom"; +import bigInt from 'big-integer'; import GlobalApi from "~/logic/api/global"; import { StoreState } from "~/logic/store/type"; @@ -11,8 +12,7 @@ import { RouteComponentProps } from "react-router-dom"; import { LinkItem } from "./components/link-item"; import { LinkSubmit } from "./components/link-submit"; import { LinkPreview } from "./components/link-preview"; -import { CommentSubmit } from "./components/comment-submit"; -import { Comments } from "./components/comments"; +import { Comments } from "~/views/components/comments"; import "./css/custom.css"; @@ -36,6 +36,7 @@ export function LinkResource(props: LinkResourceProps) { hideAvatars, hideNicknames, remoteContentPolicy, + history } = props; const appPath = association["app-path"]; @@ -48,6 +49,7 @@ export function LinkResource(props: LinkResourceProps) { ? 
associations.graph[appPath] : { metadata: {} }; const contactDetails = contacts[resource["group-path"]] || {}; + const group = groups[resource["group-path"]] || {}; const graph = graphs[resourcePath] || null; useEffect(() => { @@ -68,14 +70,14 @@ export function LinkResource(props: LinkResourceProps) { render={(props) => { return ( - + - + {Array.from(graph).map(([date, node]) => { const contact = contactDetails[node.post.author]; return ( ); })} @@ -91,15 +95,15 @@ export function LinkResource(props: LinkResourceProps) { }} /> { - const indexArr = props.match.params.index.split("-"); + const index = bigInt(props.match.params.index); + const editCommentId = props.match.params.commentId || null; - if (indexArr.length <= 1) { + if (!index) { return
Malformed URL
; } - const index = parseInt(indexArr[1], 10); const node = !!graph ? graph.get(index) : null; if (!node) { @@ -119,22 +123,19 @@ export function LinkResource(props: LinkResourceProps) { commentNumber={node.children.size} remoteContentPolicy={remoteContentPolicy} /> - - - ); diff --git a/pkg/interface/src/views/apps/links/components/comment-item.js b/pkg/interface/src/views/apps/links/components/comment-item.js deleted file mode 100644 index 1f37e045b..000000000 --- a/pkg/interface/src/views/apps/links/components/comment-item.js +++ /dev/null @@ -1,45 +0,0 @@ -import React, { Component } from 'react'; -import { Sigil } from '~/logic/lib/sigil'; -import { cite } from '~/logic/lib/util'; -import moment from 'moment'; -import { Box, Text, Row } from '@tlon/indigo-react'; -import RichText from '~/views/components/RichText'; - -export const CommentItem = (props) => { - const content = props.post.contents[0].text; - const timeSent = - moment.unix(props.post['time-sent'] / 1000).format('hh:mm a'); - - const showAvatar = props.avatar && !props.hideAvatars; - const showNickname = props.nickname && !props.hideNicknames; - const img = showAvatar - ? - : ; - - return ( - - - {img} - - - {showNickname ? props.nickname : cite(props.post.author)} - - {timeSent} - - - - - - {content} - - - - - ); -} - diff --git a/pkg/interface/src/views/apps/links/components/comment-submit.js b/pkg/interface/src/views/apps/links/components/comment-submit.js deleted file mode 100644 index baff2d4a8..000000000 --- a/pkg/interface/src/views/apps/links/components/comment-submit.js +++ /dev/null @@ -1,84 +0,0 @@ -import React, { Component } from 'react'; -import { Spinner } from '~/views/components/Spinner'; -import { createPost } from '~/logic/api/graph'; -import { deSig } from "~/logic/lib/util"; - - -export class CommentSubmit extends Component { - constructor(props) { - super(props); - - this.state = { - comment: '', - commentFocus: false, - disabled: false - }; - } - - onClickPost() { - const parentIndex = this.props.parentIndex || ''; - let post = createPost([ - { text: this.state.comment }, - ], parentIndex); - - this.setState({ disabled: true }, () => { - this.props.api.graph.addPost( - `~${deSig(this.props.ship)}`, - this.props.name, - post - ).then((r) => { - this.setState({ - disabled: false, - comment: '' - }); - }); - }); - } - - setComment(event) { - this.setState({ comment: event.target.value }); - } - - render() { - const { state, props } = this; - const focus = (state.commentFocus) - ? 'b--black b--white-d' - : 'b--gray4 b--gray2-d'; - - const activeClasses = state.comment - ? 'black white-d pointer' - : 'gray2 b--gray2'; - - return ( -
-
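The Groups.tsx hunk earlier in this patch derives each group tile's unread badge by folding together the chat and graph unreads whose associations point back at the group's path. The same fold in plain TypeScript, with deliberately simplified shapes for Unreads and Associations (the real store types carry more fields than shown here):

// Sum the unread counts of every chat/graph channel associated with a group.
type Kind = "chat" | "graph";
type Unreads = Record<Kind, Record<string, number>>;
type Associations = Record<Kind, Record<string, { "group-path": string }>>;

function groupUnreads(
  associations: Associations,
  unreads: Unreads,
  path: string
): number {
  return (["chat", "graph"] as Kind[])
    .flatMap((kind) =>
      Object.entries(unreads[kind])
        // Keep only channels whose association belongs to this group.
        .filter(([key]) => associations[kind]?.[key]?.["group-path"] === path)
        .map(([, count]) => count)
    )
    .reduce((a, b) => a + b, 0);
}

Keeping the aggregation in a helper like this, rather than inside the tile itself, mirrors the patch's choice to compute the unread total once per group and pass a single number down to the presentational Group tile.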