diff --git a/ci.nix b/ci.nix index f2558898..98a7d1b2 100644 --- a/ci.nix +++ b/ci.nix @@ -1,22 +1,17 @@ let b = builtins; flakeCompatSrc = b.fetchurl "https://raw.githubusercontent.com/edolstra/flake-compat/12c64ca55c1014cdc1b16ed5a804aa8576601ff2/default.nix"; - flake = (import flakeCompatSrc { src = ./.; }).defaultNix; + flake = (import flakeCompatSrc {src = ./.;}).defaultNix; pkgs = import flake.inputs.nixpkgs {}; recurseIntoAll = b.mapAttrs (name: val: pkgs.recurseIntoAttrs val); - in -# { -# inherit flake; -# } - -# // (recurseIntoAll { - -# checks = flake.checks.x86_64-linux; - -# }) - -# hercules ci's nix version cannot fetch submodules and crashes -{ - inherit (pkgs) hello; -} + # { + # inherit flake; + # } + # // (recurseIntoAll { + # checks = flake.checks.x86_64-linux; + # }) + # hercules ci's nix version cannot fetch submodules and crashes + { + inherit (pkgs) hello; + } diff --git a/flake.nix b/flake.nix index 1b35d780..bdabb711 100644 --- a/flake.nix +++ b/flake.nix @@ -14,22 +14,36 @@ flake-utils-pre-commit.url = "github:numtide/flake-utils"; pre-commit-hooks.inputs.flake-utils.follows = "flake-utils-pre-commit"; - ### framework dependencies # required for builder go/gomod2nix - gomod2nix = { url = "github:tweag/gomod2nix"; flake = false; }; + gomod2nix = { + url = "github:tweag/gomod2nix"; + flake = false; + }; # required for translator pip - mach-nix = { url = "mach-nix"; flake = false; }; + mach-nix = { + url = "mach-nix"; + flake = false; + }; # required for builder nodejs/node2nix - node2nix = { url = "github:svanderburg/node2nix"; flake = false; }; + node2nix = { + url = "github:svanderburg/node2nix"; + flake = false; + }; # required for utils.satisfiesSemver - poetry2nix = { url = "github:nix-community/poetry2nix/1.21.0"; flake = false; }; + poetry2nix = { + url = "github:nix-community/poetry2nix/1.21.0"; + flake = false; + }; # required for builder rust/crane - crane = { url = "github:ipetkov/crane"; flake = false; }; + crane = { + url = "github:ipetkov/crane"; + flake = false; + }; }; outputs = { @@ -43,84 +57,85 @@ pre-commit-hooks, crane, ... - }@inp: - let + } @ inp: let + b = builtins; + l = lib // builtins; - b = builtins; - l = lib // builtins; + lib = nixpkgs.lib; - lib = nixpkgs.lib; + # dream2nix lib (system independent utils) + dlib = import ./src/lib {inherit lib;}; - # dream2nix lib (system independent utils) - dlib = import ./src/lib { inherit lib; }; + supportedSystems = ["x86_64-linux" "x86_64-darwin" "aarch64-darwin"]; - supportedSystems = [ "x86_64-linux" "x86_64-darwin" "aarch64-darwin" ]; + forSystems = systems: f: + lib.genAttrs systems + (system: f system nixpkgs.legacyPackages.${system}); - forSystems = systems: f: lib.genAttrs systems - (system: f system nixpkgs.legacyPackages.${system}); + forAllSystems = forSystems supportedSystems; - forAllSystems = forSystems supportedSystems; + # To use dream2nix in non-flake + non-IFD enabled repos, the source code of dream2nix + # must be installed into these repos (using nix run dream2nix#install). + # The problem is, all of dream2nix' dependecies need to be installed as well. + # Therefore 'externalPaths' contains all relevant files of external projects + # which dream2nix depends on. Exactly these files will be installed. 
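As a rough sketch (not part of the diff itself), this is how the forSystems/forAllSystems helpers defined above expand; `nixpkgs` here is the flake input and `hello` is only an illustrative package:

  let
    lib = nixpkgs.lib;
    supportedSystems = ["x86_64-linux" "x86_64-darwin" "aarch64-darwin"];
    forSystems = systems: f:
      lib.genAttrs systems
      (system: f system nixpkgs.legacyPackages.${system});
    forAllSystems = forSystems supportedSystems;
  in
    # evaluates to:
    # { x86_64-linux = <drv>; x86_64-darwin = <drv>; aarch64-darwin = <drv>; }
    forAllSystems (system: pkgs: pkgs.hello)

The externalPaths attrset that follows lists exactly those files to be installed.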
+ externalPaths = { + mach-nix = [ + "lib/extractor/default.nix" + "lib/extractor/distutils.patch" + "lib/extractor/setuptools.patch" + "LICENSE" + ]; + node2nix = [ + "nix/node-env.nix" + "LICENSE" + ]; + poetry2nix = [ + "semver.nix" + "LICENSE" + ]; + crane = [ + "lib/buildDepsOnly.nix" + "lib/buildPackage.nix" + "lib/cargoBuild.nix" + "lib/cleanCargoToml.nix" + "lib/findCargoFiles.nix" + "lib/mkCargoDerivation.nix" + "lib/mkDummySrc.nix" + "lib/writeTOML.nix" + "pkgs/configureCargoCommonVarsHook.sh" + "pkgs/configureCargoVendoredDepsHook.sh" + "pkgs/installFromCargoBuildLogHook.sh" + "pkgs/inheritCargoArtifactsHook.sh" + "pkgs/installCargoArtifactsHook.sh" + "pkgs/remapSourcePathPrefixHook.sh" + "LICENSE" + ]; + }; - # To use dream2nix in non-flake + non-IFD enabled repos, the source code of dream2nix - # must be installed into these repos (using nix run dream2nix#install). - # The problem is, all of dream2nix' dependecies need to be installed as well. - # Therefore 'externalPaths' contains all relevant files of external projects - # which dream2nix depends on. Exactly these files will be installed. - externalPaths = { - mach-nix = [ - "lib/extractor/default.nix" - "lib/extractor/distutils.patch" - "lib/extractor/setuptools.patch" - "LICENSE" - ]; - node2nix = [ - "nix/node-env.nix" - "LICENSE" - ]; - poetry2nix = [ - "semver.nix" - "LICENSE" - ]; - crane = [ - "lib/buildDepsOnly.nix" - "lib/buildPackage.nix" - "lib/cargoBuild.nix" - "lib/cleanCargoToml.nix" - "lib/findCargoFiles.nix" - "lib/mkCargoDerivation.nix" - "lib/mkDummySrc.nix" - "lib/writeTOML.nix" - "pkgs/configureCargoCommonVarsHook.sh" - "pkgs/configureCargoVendoredDepsHook.sh" - "pkgs/installFromCargoBuildLogHook.sh" - "pkgs/inheritCargoArtifactsHook.sh" - "pkgs/installCargoArtifactsHook.sh" - "pkgs/remapSourcePathPrefixHook.sh" - "LICENSE" - ]; - }; + # create a directory containing the files listed in externalPaths + makeExternalDir = import ./src/utils/external-dir.nix; - # create a directory containing the files listed in externalPaths - makeExternalDir = import ./src/utils/external-dir.nix; - - externalDirFor = forAllSystems (system: pkgs: makeExternalDir { + externalDirFor = forAllSystems (system: pkgs: + makeExternalDir { inherit externalPaths externalSources pkgs; }); - # An interface to access files of external projects. - # This implementation accesses the flake inputs directly, - # but if dream2nix is used without flakes, it defaults - # to another implementation of that function which - # uses the installed external paths instead (see default.nix) - externalSources = - lib.genAttrs - (lib.attrNames externalPaths) - (inputName: inp."${inputName}"); + # An interface to access files of external projects. 
+ # This implementation accesses the flake inputs directly, + # but if dream2nix is used without flakes, it defaults + # to another implementation of that function which + # uses the installed external paths instead (see default.nix) + externalSources = + lib.genAttrs + (lib.attrNames externalPaths) + (inputName: inp."${inputName}"); - overridesDirs = [ "${./overrides}" ]; + overridesDirs = ["${./overrides}"]; - # system specific dream2nix api - dream2nixFor = forAllSystems (system: pkgs: import ./src rec { + # system specific dream2nix api + dream2nixFor = forAllSystems (system: pkgs: + import ./src rec { externalDir = externalDirFor."${system}"; inherit dlib externalPaths externalSources lib pkgs; config = { @@ -128,7 +143,8 @@ }; }); - pre-commit-check = forAllSystems (system: pkgs: + pre-commit-check = forAllSystems ( + system: pkgs: pre-commit-hooks.lib.${system}.run { src = ./.; hooks = { @@ -144,139 +160,144 @@ }; }; } - ); + ); + in { + # System independent dream2nix api. + # Similar to drem2nixFor but will require 'system(s)' or 'pkgs' as an argument. + # Produces flake-like output schema. + lib = + (import ./src/lib.nix { + inherit dlib externalPaths externalSources overridesDirs lib; + nixpkgsSrc = "${nixpkgs}"; + }) + # system specific dream2nix library + // (forAllSystems (system: pkgs: dream2nixFor."${system}")); - in - { - # System independent dream2nix api. - # Similar to drem2nixFor but will require 'system(s)' or 'pkgs' as an argument. - # Produces flake-like output schema. - lib = (import ./src/lib.nix { - inherit dlib externalPaths externalSources overridesDirs lib; - nixpkgsSrc = "${nixpkgs}"; - }) - # system specific dream2nix library - // (forAllSystems (system: pkgs: dream2nixFor."${system}")); + # with project discovery enabled + lib2 = import ./src/libV2.nix { + inherit dlib externalPaths externalSources overridesDirs lib; + nixpkgsSrc = "${nixpkgs}"; + }; - # with project discovery enabled - lib2 = (import ./src/libV2.nix { - inherit dlib externalPaths externalSources overridesDirs lib; - nixpkgsSrc = "${nixpkgs}"; - }); + # the dream2nix cli to be used with 'nix run dream2nix' + defaultApp = + forAllSystems (system: pkgs: self.apps."${system}".dream2nix); - # the dream2nix cli to be used with 'nix run dream2nix' - defaultApp = - forAllSystems (system: pkgs: self.apps."${system}".dream2nix); + # all apps including cli, install, etc. + apps = forAllSystems ( + system: pkgs: + dream2nixFor."${system}".apps.flakeApps + // { + tests-impure.type = "app"; + tests-impure.program = + b.toString + (dream2nixFor."${system}".callPackageDream ./tests/impure {}); - # all apps including cli, install, etc. 
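A rough sketch (not part of the diff) of the output shape produced by the apps block below, following the standard flake app schema; the store paths are illustrative only:

  {
    apps.x86_64-linux = {
      dream2nix = { type = "app"; program = "/nix/store/<hash>-dream2nix-cli"; };
      tests-unit = { type = "app"; program = "/nix/store/<hash>-run-tests-unit"; };
      # ... plus tests-impure, tests-all, format, install, contribute
    };
  }

which is then consumed as, for example, `nix run .#dream2nix -- <cli args>` or `nix run .#tests-unit`.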
- apps = forAllSystems (system: pkgs: - dream2nixFor."${system}".apps.flakeApps // { - tests-impure.type = "app"; - tests-impure.program = b.toString - (dream2nixFor."${system}".callPackageDream ./tests/impure {}); + tests-unit.type = "app"; + tests-unit.program = + b.toString + (dream2nixFor."${system}".callPackageDream ./tests/unit { + inherit self; + }); - tests-unit.type = "app"; - tests-unit.program = b.toString - (dream2nixFor."${system}".callPackageDream ./tests/unit { - inherit self; - }); + tests-all.type = "app"; + tests-all.program = + l.toString + (dream2nixFor.${system}.utils.writePureShellScript + [ + alejandra.defaultPackage.${system} + pkgs.coreutils + pkgs.gitMinimal + pkgs.nix + ] + '' + echo "running unit tests" + ${self.apps.${system}.tests-unit.program} - tests-all.type = "app"; - tests-all.program = l.toString - (dream2nixFor.${system}.utils.writePureShellScript - [ - alejandra.defaultPackage.${system} - pkgs.coreutils - pkgs.gitMinimal - pkgs.nix - ] - '' - echo "running unit tests" - ${self.apps.${system}.tests-unit.program} + echo "running impure CLI tests" + ${self.apps.${system}.tests-impure.program} - echo "running impure CLI tests" - ${self.apps.${system}.tests-impure.program} - - echo "running nix flake check" - cd $WORKDIR - nix flake check - ''); - - # passes through extra flags to treefmt - format.type = "app"; - format.program = l.toString - (pkgs.writeScript "format" '' - export PATH="${alejandra.defaultPackage.${system}}/bin" - ${pkgs.treefmt}/bin/treefmt "$@" + echo "running nix flake check" + cd $WORKDIR + nix flake check ''); - } - ); - # a dev shell for working on dream2nix - # use via 'nix develop . -c $SHELL' - devShell = forAllSystems (system: pkgs: pkgs.mkShell { + # passes through extra flags to treefmt + format.type = "app"; + format.program = + l.toString + (pkgs.writeScript "format" '' + export PATH="${alejandra.defaultPackage.${system}}/bin" + ${pkgs.treefmt}/bin/treefmt "$@" + ''); + } + ); - buildInputs = - (with pkgs; [ - nix - treefmt - ]) - ++ [ - alejandra.defaultPackage."${system}" - ] - # using linux is highly recommended as cntr is amazing for debugging builds - ++ lib.optionals pkgs.stdenv.isLinux [ pkgs.cntr ]; + # a dev shell for working on dream2nix + # use via 'nix develop . 
-c $SHELL' + devShell = forAllSystems (system: pkgs: + pkgs.mkShell { + buildInputs = + (with pkgs; [ + nix + treefmt + ]) + ++ [ + alejandra.defaultPackage."${system}" + ] + # using linux is highly recommended as cntr is amazing for debugging builds + ++ lib.optionals pkgs.stdenv.isLinux [pkgs.cntr]; - shellHook = - # TODO: enable this once code base is formatted - # self.checks.${system}.pre-commit-check.shellHook - '' - export NIX_PATH=nixpkgs=${nixpkgs} - export d2nExternalDir=${externalDirFor."${system}"} - export dream2nixWithExternals=${dream2nixFor."${system}".dream2nixWithExternals} - - if [ -e ./overrides ]; then - export d2nOverridesDir=$(realpath ./overrides) - else - export d2nOverridesDir=${./overrides} - echo -e "\nManually execute 'export d2nOverridesDir={path to your dream2nix overrides dir}'" - fi - - if [ -e ../dream2nix ]; then - export dream2nixWithExternals=$(realpath ./src) - else - export dream2nixWithExternals=${./src} - echo -e "\nManually execute 'export dream2nixWithExternals={path to your dream2nix checkout}'" - fi - ''; - }); - - checks = l.recursiveUpdate - (forAllSystems (system: pkgs: - (import ./tests/pure { - inherit lib pkgs; - dream2nix = dream2nixFor."${system}"; - }))) - {} + shellHook = # TODO: enable this once code base is formatted - # (forAllSystems (system: pkgs:{ - # pre-commit-check = - # pre-commit-hooks.lib.${system}.run { - # src = ./.; - # hooks = { - # treefmt = { - # enable = true; - # name = "treefmt"; - # pass_filenames = false; - # entry = l.toString (pkgs.writeScript "treefmt" '' - # #!${pkgs.bash}/bin/bash - # export PATH="$PATH:${alejandra.defaultPackage.${system}}/bin" - # ${pkgs.treefmt}/bin/treefmt --fail-on-change - # ''); - # }; - # }; - # }; - # })) - ; - }; + # self.checks.${system}.pre-commit-check.shellHook + '' + export NIX_PATH=nixpkgs=${nixpkgs} + export d2nExternalDir=${externalDirFor."${system}"} + export dream2nixWithExternals=${dream2nixFor."${system}".dream2nixWithExternals} + + if [ -e ./overrides ]; then + export d2nOverridesDir=$(realpath ./overrides) + else + export d2nOverridesDir=${./overrides} + echo -e "\nManually execute 'export d2nOverridesDir={path to your dream2nix overrides dir}'" + fi + + if [ -e ../dream2nix ]; then + export dream2nixWithExternals=$(realpath ./src) + else + export dream2nixWithExternals=${./src} + echo -e "\nManually execute 'export dream2nixWithExternals={path to your dream2nix checkout}'" + fi + ''; + }); + + checks = + l.recursiveUpdate + (forAllSystems (system: pkgs: (import ./tests/pure { + inherit lib pkgs; + dream2nix = dream2nixFor."${system}"; + }))) + {} + # TODO: enable this once code base is formatted + # (forAllSystems (system: pkgs:{ + # pre-commit-check = + # pre-commit-hooks.lib.${system}.run { + # src = ./.; + # hooks = { + # treefmt = { + # enable = true; + # name = "treefmt"; + # pass_filenames = false; + # entry = l.toString (pkgs.writeScript "treefmt" '' + # #!${pkgs.bash}/bin/bash + # export PATH="$PATH:${alejandra.defaultPackage.${system}}/bin" + # ${pkgs.treefmt}/bin/treefmt --fail-on-change + # ''); + # }; + # }; + # }; + # })) + ; + }; } diff --git a/overrides/nodejs/default.nix b/overrides/nodejs/default.nix index 9827355b..18d84016 100644 --- a/overrides/nodejs/default.nix +++ b/overrides/nodejs/default.nix @@ -1,19 +1,16 @@ { lib, pkgs, - # dream2nix satisfiesSemver, ... 
-}: - -let +}: let l = lib // builtins; # include this into an override to enable cntr debugging # (linux only) cntr = { - nativeBuildInputs = old: old ++ [ pkgs.breakpointHook ]; + nativeBuildInputs = old: old ++ [pkgs.breakpointHook]; b = "${pkgs.busybox}/bin/busybox"; }; @@ -29,168 +26,154 @@ let exit 1 fi ''; - in - -## OVERRIDES -{ - - atom = { - build = { - buildScript = '' - node script/build --no-bootstrap - ''; + ## OVERRIDES + { + atom = { + build = { + buildScript = '' + node script/build --no-bootstrap + ''; + }; }; - }; - balena-etcher = { - build = { - buildScript = '' - npm run webpack - ''; + balena-etcher = { + build = { + buildScript = '' + npm run webpack + ''; + }; }; - }; - code-oss-dev = { - build = { - buildScript = '' - npm run compile-extensions-build - ''; + code-oss-dev = { + build = { + buildScript = '' + npm run compile-extensions-build + ''; + }; }; - }; - cpu-features = { - add-inputs = { - nativeBuildInputs = old: old ++ [ - pkgs.cmake - ]; + cpu-features = { + add-inputs = { + nativeBuildInputs = old: + old + ++ [ + pkgs.cmake + ]; + }; }; - }; - css-loader = { + css-loader = { + disable-source-map-v4-v5 = { + _condition = pkg: + satisfiesSemver "^4.0.0" pkg + || satisfiesSemver "^5.0.0" pkg; - disable-source-map-v4-v5 = { - - _condition = pkg: - satisfiesSemver "^4.0.0" pkg - || satisfiesSemver "^5.0.0" pkg; - - postPatch = '' - substituteInPlace ./dist/utils.js --replace \ - "sourceMap: typeof rawOptions.sourceMap === "boolean" ? rawOptions.sourceMap : loaderContext.sourceMap," \ - "sourceMap: false," - ''; + postPatch = '' + substituteInPlace ./dist/utils.js --replace \ + "sourceMap: typeof rawOptions.sourceMap === "boolean" ? rawOptions.sourceMap : loaderContext.sourceMap," \ + "sourceMap: false," + ''; + }; }; - }; - cypress = { - - add-binary = { - - dontBuild = true; + cypress = { + add-binary = { + dontBuild = true; + }; }; - }; - "draw.io" = { - - build = { - - buildScript = '' - mkdir $out/bin - makeWrapper \ - $(realpath ./node_modules/electron/dist/electron) \ - $out/bin/drawio \ - --add-flags \ - "$(realpath ./drawio/src/main/webapp)" - ''; + "draw.io" = { + build = { + buildScript = '' + mkdir $out/bin + makeWrapper \ + $(realpath ./node_modules/electron/dist/electron) \ + $out/bin/drawio \ + --add-flags \ + "$(realpath ./drawio/src/main/webapp)" + ''; + }; }; - }; - dugite = { - - add-git = { - - buildScript = '' - ln -s ${pkgs.git} ./git - ''; + dugite = { + add-git = { + buildScript = '' + ln -s ${pkgs.git} ./git + ''; + }; }; - }; - edex-ui = { + edex-ui = { + build = { + electronAppDir = "src"; - build = { + preBuild = {outputs, ...}: '' + # link dependencies of subpackage + ln -s \ + ${outputs.subPackages.edex-ui-subpackage.packages.edex-ui-subpackage}/lib/node_modules/edex-ui-subpackage/node_modules \ + ./src/node_modules - electronAppDir = "src"; - - preBuild = { outputs, ... 
}: '' - # link dependencies of subpackage - ln -s \ - ${outputs.subPackages.edex-ui-subpackage.packages.edex-ui-subpackage}/lib/node_modules/edex-ui-subpackage/node_modules \ - ./src/node_modules - - # transform symlinked subpackage 'node-pty' to copies, - # in order to allow re-building - mv src/node_modules src/node_modules.bac - mkdir src/node_modules - cp -r src/node_modules.bac/* src/node_modules/ - symlinksToCopies ./src/node_modules/node-pty - ''; + # transform symlinked subpackage 'node-pty' to copies, + # in order to allow re-building + mv src/node_modules src/node_modules.bac + mkdir src/node_modules + cp -r src/node_modules.bac/* src/node_modules/ + symlinksToCopies ./src/node_modules/node-pty + ''; + }; }; - }; - - electron = - let + electron = let mkElectron = pkgs.callPackage - ./electron/generic.nix - {}; + ./electron/generic.nix + {}; nixpkgsElectrons = lib.mapAttrs - (version: hashes: - (mkElectron version hashes).overrideAttrs (old: { - dontStrip = true; - })) - hashes; + (version: hashes: + (mkElectron version hashes).overrideAttrs (old: { + dontStrip = true; + })) + hashes; - getElectronFor = version: - let - semVerSpec = "~${version}"; + getElectronFor = version: let + semVerSpec = "~${version}"; - filteredElectrons = - lib.filterAttrs - (electronVer: _: satisfiesSemver semVerSpec { - version = electronVer; - }) - nixpkgsElectrons; + filteredElectrons = + lib.filterAttrs + (electronVer: _: + satisfiesSemver semVerSpec { + version = electronVer; + }) + nixpkgsElectrons; - electrons = l.attrValues filteredElectrons; + electrons = l.attrValues filteredElectrons; + in + if l.length electrons == 0 + then + throw '' + Electron binary hashes are missing for required version ${version} + Please add the hashes in the override below the origin of this error. + To get the hashes, execute: + ${./.}/electron/print-hashes.sh ${version} + '' + else if l.length electrons > 1 + then let + versionsSorted = + l.sort + (v1: v2: l.compareVersions v1 v2 == 1) + (l.attrNames filteredElectrons); + versionsToRemove = l.tail versionsSorted; in - if l.length electrons == 0 then - throw '' - Electron binary hashes are missing for required version ${version} - Please add the hashes in the override below the origin of this error. - To get the hashes, execute: - ${./.}/electron/print-hashes.sh ${version} - '' - else if l.length electrons > 1 then - let - versionsSorted = - l.sort - (v1: v2: l.compareVersions v1 v2 == 1) - (l.attrNames filteredElectrons); - - versionsToRemove = l.tail versionsSorted; - in - throw '' - Multiple electron minor releases found. - Please delete the hashes for versions ${l.toString versionsToRemove} - in the override below the origin of this error. - '' - else - l.head electrons; + throw '' + Multiple electron minor releases found. + Please delete the hashes for versions ${l.toString versionsToRemove} + in the override below the origin of this error. 
+ '' + else l.head electrons; # TODO: generate more of these via the script in nixpkgs, # once we feel confident about this approach @@ -302,13 +285,8 @@ in headers = "1xnbzskvf8p5a07bha41qqnw1hb68f019qrda3z2jn96m3qnj46r"; }; }; - - in - - { - + in { add-binary = { - overrideAttrs = old: { postPatch = '' cp -r ${getElectronFor "${old.version}"}/lib/electron ./dist @@ -325,61 +303,58 @@ in }; }; - # TODO: fix electron-builder call or find alternative - element-desktop = { - build = { + # TODO: fix electron-builder call or find alternative + element-desktop = { + build = { + # TODO: build rust extensions to enable searching encrypted messages + # TODO: create lower case symlinks for all i18n strings + buildScript = {outputs, ...}: '' + npm run build:ts + npm run i18n + npm run build:res - # TODO: build rust extensions to enable searching encrypted messages - # TODO: create lower case symlinks for all i18n strings - buildScript = { outputs, ... }: '' - npm run build:ts - npm run i18n - npm run build:res + # build rust extensions + # npm run hak - # build rust extensions - # npm run hak + ln -s ${outputs.subPackages.element-web.packages.element-web}/lib/node_modules/element-web/webapp ./webapp - ln -s ${outputs.subPackages.element-web.packages.element-web}/lib/node_modules/element-web/webapp ./webapp + # ln -s ./lib/i18n/strings/en{-US,}.json + ln -s \ + $(realpath ./lib/i18n/strings/en_US.json) \ + $(realpath ./lib/i18n/strings/en-us.json) + ''; - # ln -s ./lib/i18n/strings/en{-US,}.json - ln -s \ - $(realpath ./lib/i18n/strings/en_US.json) \ - $(realpath ./lib/i18n/strings/en-us.json) - ''; - - # buildInputs = old: old ++ [ - # pkgs.rustc - # ]; + # buildInputs = old: old ++ [ + # pkgs.rustc + # ]; + }; }; - }; - element-web = { + element-web = { + build = { + installMethod = "copy"; - build = { - installMethod = "copy"; + # TODO: file upstream issue because of non-reproducible jitsi api file + buildScript = '' + # install jitsi api + mkdir webapp + cp ${./element-web/external_api.min.js} webapp/external_api.min.js - # TODO: file upstream issue because of non-reproducible jitsi api file - buildScript = '' - # install jitsi api - mkdir webapp - cp ${./element-web/external_api.min.js} webapp/external_api.min.js + # set version variables + export DIST_VERSION=$version + export VERSION=$version - # set version variables - export DIST_VERSION=$version - export VERSION=$version - - npm run reskindex - npm run build:res - npm run build:bundle - ''; + npm run reskindex + npm run build:res + npm run build:bundle + ''; + }; }; - }; - esbuild = { - "add-binary-0.12.17" = { - _condition = pkg: pkg.version == "0.12.17"; - ESBUILD_BINARY_PATH = - let + esbuild = { + "add-binary-0.12.17" = { + _condition = pkg: pkg.version == "0.12.17"; + ESBUILD_BINARY_PATH = let esbuild = pkgs.buildGoModule rec { pname = "esbuild"; version = "0.12.17"; @@ -393,379 +368,374 @@ in vendorSha256 = "sha256-2ABWPqhK2Cf4ipQH7XvRrd+ZscJhYPc3SV2cGT0apdg="; }; - in - "${esbuild}/bin/esbuild"; - }; - }; - - fontmanager-redux = { - add-inputs = { - nativeBuildInputs = old: old ++ [ - pkgs.fontconfig - ]; - }; - }; - - geckodriver = { - add-binary = { - GECKODRIVER_FILEPATH = "${pkgs.geckodriver}/bin/geckodriver"; - }; - }; - - enhanced-resolve = { - - fix-resolution-v4 = { - - _condition = satisfiesSemver "^4.0.0"; - - # respect node path - postPatch = '' - ${ensureFileModified} lib/ResolverFactory.js \ - sed -zi 's/const symlinks =.*options.symlinks : true;/const symlinks = false;/g' lib/ResolverFactory.js - - substituteInPlace 
lib/ResolverFactory.js --replace \ - 'let modules = options.modules || ["node_modules"];' \ - 'let modules = (options.modules || ["node_modules"]).concat(process.env.NODE_PATH.split( /[;:]/ ));' - ''; + in "${esbuild}/bin/esbuild"; + }; }; - fix-resolution-v5 = { - - _condition = satisfiesSemver "^5.0.0"; - - patches = [ - ./enhanced-resolve/npm-preserve-symlinks-v5.patch - ./enhanced-resolve/respect-node-path-v5.patch - ]; + fontmanager-redux = { + add-inputs = { + nativeBuildInputs = old: + old + ++ [ + pkgs.fontconfig + ]; + }; }; - }; - gifsicle = { - add-binary = { - buildScript = '' - mkdir -p ./vendor - ln -s ${pkgs.gifsicle}/bin/gifsicle ./vendor/gifsicle - npm run postinstall - ''; + geckodriver = { + add-binary = { + GECKODRIVER_FILEPATH = "${pkgs.geckodriver}/bin/geckodriver"; + }; }; - }; - keytar = { - add-pkg-config = { - nativeBuildInputs = old: old ++ [ - pkgs.libsecret - pkgs.pkg-config - ]; + enhanced-resolve = { + fix-resolution-v4 = { + _condition = satisfiesSemver "^4.0.0"; + + # respect node path + postPatch = '' + ${ensureFileModified} lib/ResolverFactory.js \ + sed -zi 's/const symlinks =.*options.symlinks : true;/const symlinks = false;/g' lib/ResolverFactory.js + + substituteInPlace lib/ResolverFactory.js --replace \ + 'let modules = options.modules || ["node_modules"];' \ + 'let modules = (options.modules || ["node_modules"]).concat(process.env.NODE_PATH.split( /[;:]/ ));' + ''; + }; + + fix-resolution-v5 = { + _condition = satisfiesSemver "^5.0.0"; + + patches = [ + ./enhanced-resolve/npm-preserve-symlinks-v5.patch + ./enhanced-resolve/respect-node-path-v5.patch + ]; + }; }; - }; - ledger-live-desktop = { - - build = { - - installMethod = "copy"; - - postPatch = '' - substituteInPlace ./tools/main.js --replace \ - "git rev-parse --short HEAD" \ - "echo unknown" - ''; + gifsicle = { + add-binary = { + buildScript = '' + mkdir -p ./vendor + ln -s ${pkgs.gifsicle}/bin/gifsicle ./vendor/gifsicle + npm run postinstall + ''; + }; }; - }; - mattermost-desktop = { - - build = { - - postPatch = '' - substituteInPlace webpack.config.base.js --replace \ - "git rev-parse --short HEAD" \ - "echo foo" - - - ${pkgs.jq}/bin/jq ".electronDist = \"$TMP/dist\"" electron-builder.json \ - | ${pkgs.moreutils}/bin/sponge electron-builder.json - - ${pkgs.jq}/bin/jq ".linux.target = [\"dir\"]" electron-builder.json \ - | ${pkgs.moreutils}/bin/sponge electron-builder.json - ''; - - # TODO: - # - figure out if building via electron-build is feasible - # (if not, remove commented out instructions) - # - app seems to logout immediately after login (token expired) - buildScript = '' - # copy over the electron dist, as write access seems required - cp -r ./node_modules/electron/dist $TMP/dist - chmod -R +w $TMP/dist - - # required if electron-builder is used - # mv $TMP/dist/electron $TMP/dist/electron-wrapper - # mv $TMP/dist/.electron-wrapped $TMP/dist/electron - - NODE_ENV=production npm-run-all check-build-config build-prod - - # skipping electron-builder, as produced executable crashes on startup - # electron-builder --linux --x64 --publish=never - - # the electron wrapper wants to read the name and version from there - cp package.json dist/package.json - - mkdir -p $out/bin - makeWrapper \ - $(realpath ./node_modules/electron/dist/electron) \ - $out/bin/mattermost-desktop \ - --add-flags \ - "$(realpath ./dist) --disable-dev-mode" - ''; + keytar = { + add-pkg-config = { + nativeBuildInputs = old: + old + ++ [ + pkgs.libsecret + pkgs.pkg-config + ]; + }; }; - }; - mozjpeg = { - 
add-binary = { - buildScript = '' - mkdir -p ./vendor - ln -s ${pkgs.mozjpeg}/bin/cjpeg ./vendor/cjpeg - npm run postinstall - ''; + ledger-live-desktop = { + build = { + installMethod = "copy"; + + postPatch = '' + substituteInPlace ./tools/main.js --replace \ + "git rev-parse --short HEAD" \ + "echo unknown" + ''; + }; }; - }; - Motrix = { - build = { - postPatch = '' - ${pkgs.jq}/bin/jq ".build.electronDist = \"$TMP/dist\"" package.json \ - | ${pkgs.moreutils}/bin/sponge package.json - ''; + mattermost-desktop = { + build = { + postPatch = '' + substituteInPlace webpack.config.base.js --replace \ + "git rev-parse --short HEAD" \ + "echo foo" + + + ${pkgs.jq}/bin/jq ".electronDist = \"$TMP/dist\"" electron-builder.json \ + | ${pkgs.moreutils}/bin/sponge electron-builder.json + + ${pkgs.jq}/bin/jq ".linux.target = [\"dir\"]" electron-builder.json \ + | ${pkgs.moreutils}/bin/sponge electron-builder.json + ''; + + # TODO: + # - figure out if building via electron-build is feasible + # (if not, remove commented out instructions) + # - app seems to logout immediately after login (token expired) + buildScript = '' + # copy over the electron dist, as write access seems required + cp -r ./node_modules/electron/dist $TMP/dist + chmod -R +w $TMP/dist + + # required if electron-builder is used + # mv $TMP/dist/electron $TMP/dist/electron-wrapper + # mv $TMP/dist/.electron-wrapped $TMP/dist/electron + + NODE_ENV=production npm-run-all check-build-config build-prod + + # skipping electron-builder, as produced executable crashes on startup + # electron-builder --linux --x64 --publish=never + + # the electron wrapper wants to read the name and version from there + cp package.json dist/package.json + + mkdir -p $out/bin + makeWrapper \ + $(realpath ./node_modules/electron/dist/electron) \ + $out/bin/mattermost-desktop \ + --add-flags \ + "$(realpath ./dist) --disable-dev-mode" + ''; + }; }; - }; - node-hid = { - - build = { - - nativeBuildInputs = old: old ++ [ - pkgs.pkg-config - pkgs.libusb - ]; + mozjpeg = { + add-binary = { + buildScript = '' + mkdir -p ./vendor + ln -s ${pkgs.mozjpeg}/bin/cjpeg ./vendor/cjpeg + npm run postinstall + ''; + }; }; - }; - npm = { - dont-install-deps = { - installDeps = ""; + Motrix = { + build = { + postPatch = '' + ${pkgs.jq}/bin/jq ".build.electronDist = \"$TMP/dist\"" package.json \ + | ${pkgs.moreutils}/bin/sponge package.json + ''; + }; }; - }; - npx = { - dont-install-deps = { - installDeps = ""; + node-hid = { + build = { + nativeBuildInputs = old: + old + ++ [ + pkgs.pkg-config + pkgs.libusb + ]; + }; }; - }; - optipng-bin = { - add-binary = { - buildScript = '' - mkdir -p ./vendor - ln -s ${pkgs.optipng}/bin/optipng ./vendor/optipng - npm run postinstall - ''; + npm = { + dont-install-deps = { + installDeps = ""; + }; }; - }; - pngquant-bin = { - add-binary = { - buildScript = '' - mkdir -p ./vendor - ln -s ${pkgs.pngquant}/bin/pngquant ./vendor/pngquant - npm run postinstall - ''; + npx = { + dont-install-deps = { + installDeps = ""; + }; }; - }; - quill = { - disable-build = { - runBuild = false; + optipng-bin = { + add-binary = { + buildScript = '' + mkdir -p ./vendor + ln -s ${pkgs.optipng}/bin/optipng ./vendor/optipng + npm run postinstall + ''; + }; }; - }; - rollup = { - preserve-symlinks = { - postPatch = '' - find -name '*.js' -exec \ - ${ensureFileModified} {} sed -i "s/preserveSymlinks: .*/preserveSymlinks: true,/g" {} \; - ''; + pngquant-bin = { + add-binary = { + buildScript = '' + mkdir -p ./vendor + ln -s ${pkgs.pngquant}/bin/pngquant 
./vendor/pngquant + npm run postinstall + ''; + }; }; - }; - simple-git-hooks = { - dont-postinstall = { - buildScript = "true"; + quill = { + disable-build = { + runBuild = false; + }; }; - }; - sodium-native = { - - build = { - - nativeBuildInputs = old: old ++ [ - pkgs.autoconf - pkgs.automake - pkgs.libtool - ]; + rollup = { + preserve-symlinks = { + postPatch = '' + find -name '*.js' -exec \ + ${ensureFileModified} {} sed -i "s/preserveSymlinks: .*/preserveSymlinks: true,/g" {} \; + ''; + }; }; - }; - tabby = { - inherit cntr; - fix-build = { + simple-git-hooks = { + dont-postinstall = { + buildScript = "true"; + }; + }; - electronAppDir = "./app"; + sodium-native = { + build = { + nativeBuildInputs = old: + old + ++ [ + pkgs.autoconf + pkgs.automake + pkgs.libtool + ]; + }; + }; - nativeBuildInputs = old: old ++ [ - pkgs.fontconfig - pkgs.libsecret - pkgs.pkg-config - ]; + tabby = { + inherit cntr; + fix-build = { + electronAppDir = "./app"; - postPatch = { outputs, ... }: '' - substituteInPlace ./scripts/vars.js --replace \ - "exports.version = childProcess.execSync('git describe --tags', { encoding:'utf-8' })" \ - "exports.version = '$version'" + nativeBuildInputs = old: + old + ++ [ + pkgs.fontconfig + pkgs.libsecret + pkgs.pkg-config + ]; - ${pkgs.jq}/bin/jq ".typeAcquisition = {}" tsconfig.json \ - | ${pkgs.moreutils}/bin/sponge tsconfig.json + postPatch = {outputs, ...}: '' + substituteInPlace ./scripts/vars.js --replace \ + "exports.version = childProcess.execSync('git describe --tags', { encoding:'utf-8' })" \ + "exports.version = '$version'" - substituteInPlace app/webpack.main.config.js --replace \ - "configFile: path.resolve(__dirname, 'tsconfig.main.json')," \ - "configFile: path.resolve(__dirname, 'tsconfig.main.json'), allowTsInNodeModules: true," + ${pkgs.jq}/bin/jq ".typeAcquisition = {}" tsconfig.json \ + | ${pkgs.moreutils}/bin/sponge tsconfig.json - substituteInPlace app/webpack.config.js --replace \ - "configFile: path.resolve(__dirname, 'tsconfig.json')," \ - "configFile: path.resolve(__dirname, 'tsconfig.json'), allowTsInNodeModules: true," + substituteInPlace app/webpack.main.config.js --replace \ + "configFile: path.resolve(__dirname, 'tsconfig.main.json')," \ + "configFile: path.resolve(__dirname, 'tsconfig.main.json'), allowTsInNodeModules: true," - substituteInPlace web/webpack.config.js --replace \ - "configFile: path.resolve(__dirname, 'tsconfig.json')," \ - "configFile: path.resolve(__dirname, 'tsconfig.json'), allowTsInNodeModules: true," + substituteInPlace app/webpack.config.js --replace \ + "configFile: path.resolve(__dirname, 'tsconfig.json')," \ + "configFile: path.resolve(__dirname, 'tsconfig.json'), allowTsInNodeModules: true," - otherModules=${pkgs.writeText "other-modules.json" (l.toJSON - (l.mapAttrs - (pname: subOutputs: - let - pkg = subOutputs.packages."${pname}".overrideAttrs (old: { - buildScript = "true"; - installMethod = "copy"; - }); - in - "${pkg}/lib/node_modules/${pname}/node_modules") - outputs.subPackages))} + substituteInPlace web/webpack.config.js --replace \ + "configFile: path.resolve(__dirname, 'tsconfig.json')," \ + "configFile: path.resolve(__dirname, 'tsconfig.json'), allowTsInNodeModules: true," - symlinksToCopies() { - local dir="$1" + otherModules=${pkgs.writeText "other-modules.json" (l.toJSON + (l.mapAttrs + (pname: subOutputs: let + pkg = subOutputs.packages."${pname}".overrideAttrs (old: { + buildScript = "true"; + installMethod = "copy"; + }); + in "${pkg}/lib/node_modules/${pname}/node_modules") + 
outputs.subPackages))} - echo "transforming symlinks to copies..." - for f in $(find -L "$dir" -xtype l); do - if [ -f $f ]; then - continue + symlinksToCopies() { + local dir="$1" + + echo "transforming symlinks to copies..." + for f in $(find -L "$dir" -xtype l); do + if [ -f $f ]; then + continue + fi + echo "copying $f" + chmod +wx $(dirname "$f") + mv "$f" "$f.bak" + mkdir "$f" + if [ -n "$(ls -A "$f.bak/")" ]; then + cp -r "$f.bak"/* "$f/" + chmod -R +w $f + fi + rm "$f.bak" + done + } + + for dir in $(ls -d */); do + if [ -f $dir/package.json ]; then + echo "installing sub-package $dir" + name=$(${pkgs.jq}/bin/jq -r '.name' $dir/package.json) + node_modules=$(${pkgs.jq}/bin/jq -r ".\"$name\"" $otherModules) + if [ "$node_modules" == "null" ]; then + node_modules=$(${pkgs.jq}/bin/jq -r ".\"''${dir%/}\"" $otherModules) + fi + cp -r $node_modules $dir/node_modules + chmod -R +w $dir fi - echo "copying $f" - chmod +wx $(dirname "$f") - mv "$f" "$f.bak" - mkdir "$f" - if [ -n "$(ls -A "$f.bak/")" ]; then - cp -r "$f.bak"/* "$f/" - chmod -R +w $f - fi - rm "$f.bak" done - } - - for dir in $(ls -d */); do - if [ -f $dir/package.json ]; then - echo "installing sub-package $dir" - name=$(${pkgs.jq}/bin/jq -r '.name' $dir/package.json) - node_modules=$(${pkgs.jq}/bin/jq -r ".\"$name\"" $otherModules) - if [ "$node_modules" == "null" ]; then - node_modules=$(${pkgs.jq}/bin/jq -r ".\"''${dir%/}\"" $otherModules) - fi - cp -r $node_modules $dir/node_modules - chmod -R +w $dir - fi - done - ''; + ''; + }; }; - }; - # TODO: confirm this is actually working - typescript = { - preserve-symlinks = { - postPatch = '' - find -name '*.js' -exec \ - ${ensureFileModified} {} sed -i "s/options.preserveSymlinks/true/g; s/compilerOptions.preserveSymlinks/true/g" {} \; - ''; + # TODO: confirm this is actually working + typescript = { + preserve-symlinks = { + postPatch = '' + find -name '*.js' -exec \ + ${ensureFileModified} {} sed -i "s/options.preserveSymlinks/true/g; s/compilerOptions.preserveSymlinks/true/g" {} \; + ''; + }; }; - }; - usb-detection = { - - build = { - - nativeBuildInputs = old: old ++ [ - pkgs.libudev - ]; + usb-detection = { + build = { + nativeBuildInputs = old: + old + ++ [ + pkgs.libudev + ]; + }; }; - }; - vscode-ripgrep = { - add-binary = { - buildScript = '' - mkdir bin - mkdir -p $out/bin - ln -s ${pkgs.ripgrep}/bin/rg bin/rg - ln -s ${pkgs.ripgrep}/bin/rg $out/bin/rg - ''; + vscode-ripgrep = { + add-binary = { + buildScript = '' + mkdir bin + mkdir -p $out/bin + ln -s ${pkgs.ripgrep}/bin/rg bin/rg + ln -s ${pkgs.ripgrep}/bin/rg $out/bin/rg + ''; + }; }; - }; - # TODO: ensure preserving symlinks on dependency resolution always works - # The patch is currently done in `enhanced-resolve` which is used - # by webpack for module resolution - webpack = { - remove-webpack-cli-check = { - _condition = satisfiesSemver "^5.0.0"; - patches = [ - ./webpack/remove-webpack-cli-check.patch - ]; + # TODO: ensure preserving symlinks on dependency resolution always works + # The patch is currently done in `enhanced-resolve` which is used + # by webpack for module resolution + webpack = { + remove-webpack-cli-check = { + _condition = satisfiesSemver "^5.0.0"; + patches = [ + ./webpack/remove-webpack-cli-check.patch + ]; + }; }; - }; - # TODO: Maybe should replace binaries with the ones from nixpkgs - "7zip-bin" = { + # TODO: Maybe should replace binaries with the ones from nixpkgs + "7zip-bin" = { + patch-binaries = { + nativeBuildInputs = old: + old + ++ [ + pkgs.autoPatchelfHook + ]; - 
patch-binaries = { - - nativeBuildInputs = old: old ++ [ - pkgs.autoPatchelfHook - ]; - - buildInputs = old: old ++ [ - pkgs.gcc-unwrapped.lib - ]; + buildInputs = old: + old + ++ [ + pkgs.gcc-unwrapped.lib + ]; + }; }; - }; - "@alicloud/fun" = { - build = { - buildScript = '' - tsc -p ./ - ''; + "@alicloud/fun" = { + build = { + buildScript = '' + tsc -p ./ + ''; + }; }; - }; - "@ledgerhq/ledger-core" = { - - build = - let + "@ledgerhq/ledger-core" = { + build = let ledger-core-version = "4.2.0"; ledger-core = pkgs.stdenv.mkDerivation { @@ -787,86 +757,81 @@ in url = "https://github.com/chfast/secp256k1/archive/ac8ccf29b8c6b2b793bc734661ce43d1f952977a.tar.gz"; hash = "sha256-7i61CGd+xFvPQkyN7CI7eEoTtko0S77eY+DXEbd3BE8="; }; + in { + buildInputs = [ + ledger-core + ]; - in - { - buildInputs = [ - ledger-core - ]; + # TODO: patch core/lib/cmake/ProjectSecp256k1.cmake + # to use this secp256k1 instead of downloading from github + postPatch = '' + cp -r ${secp256k1-src} ./secp256k1 + ''; - # TODO: patch core/lib/cmake/ProjectSecp256k1.cmake - # to use this secp256k1 instead of downloading from github - postPatch = '' - cp -r ${secp256k1-src} ./secp256k1 - ''; - - preBuild = '' - # npm --nodedir=$nodeSources run install - npm --nodedir=$nodeSources run gypconfig - npm --nodedir=$nodeSources run gypinstall - ''; - }; - }; - - "@mattermost/webapp" = { - - run-webpack = { - - # custom webpack config - postPatch = '' - substituteInPlace webpack.config.js --replace \ - "crypto: require.resolve('crypto-browserify')," \ - "crypto: 'node_modules/crypto-browserify'," - - substituteInPlace webpack.config.js --replace \ - "stream: require.resolve('stream-browserify')," \ - "stream: 'node_modules/stream-browserify'," - - substituteInPlace webpack.config.js --replace \ - "DEV ? 'style-loader' : MiniCssExtractPlugin.loader," \ - "" - ''; - - # there seems to be a memory leak in some module - # -> incleasing max memory - buildScript = '' - NODE_ENV=production node --max-old-space-size=8192 ./node_modules/webpack/bin/webpack.js - ''; + preBuild = '' + # npm --nodedir=$nodeSources run install + npm --nodedir=$nodeSources run gypconfig + npm --nodedir=$nodeSources run gypinstall + ''; + }; }; - }; - # This should not be necessary, as this plugin claims to - # respect the `preserveSymlinks` option of rollup. - # Adding the NODE_PATH to the module directories fixes it for now. - "@rollup/plugin-node-resolve" = { - respect-node-path = { - postPatch = '' - for f in $(find -name '*.js'); do - substituteInPlace $f --replace \ - "moduleDirectories: ['node_modules']," \ - "moduleDirectories: ['node_modules'].concat(process.env.NODE_PATH.split( /[;:]/ ))," - done - ''; + "@mattermost/webapp" = { + run-webpack = { + # custom webpack config + postPatch = '' + substituteInPlace webpack.config.js --replace \ + "crypto: require.resolve('crypto-browserify')," \ + "crypto: 'node_modules/crypto-browserify'," + + substituteInPlace webpack.config.js --replace \ + "stream: require.resolve('stream-browserify')," \ + "stream: 'node_modules/stream-browserify'," + + substituteInPlace webpack.config.js --replace \ + "DEV ? 
'style-loader' : MiniCssExtractPlugin.loader," \ + "" + ''; + + # there seems to be a memory leak in some module + # -> incleasing max memory + buildScript = '' + NODE_ENV=production node --max-old-space-size=8192 ./node_modules/webpack/bin/webpack.js + ''; + }; }; - }; - "@sentry/cli" = { - add-binary = { - buildScript = '' - ln -s ${pkgs.sentry-cli}/bin $out/bin - exit - ''; + # This should not be necessary, as this plugin claims to + # respect the `preserveSymlinks` option of rollup. + # Adding the NODE_PATH to the module directories fixes it for now. + "@rollup/plugin-node-resolve" = { + respect-node-path = { + postPatch = '' + for f in $(find -name '*.js'); do + substituteInPlace $f --replace \ + "moduleDirectories: ['node_modules']," \ + "moduleDirectories: ['node_modules'].concat(process.env.NODE_PATH.split( /[;:]/ ))," + done + ''; + }; }; - }; - "strapi" = { - build = { - buildScript = '' - yarn(){ - npm "$@" - } - ''; + "@sentry/cli" = { + add-binary = { + buildScript = '' + ln -s ${pkgs.sentry-cli}/bin $out/bin + exit + ''; + }; }; - }; -} + "strapi" = { + build = { + buildScript = '' + yarn(){ + npm "$@" + } + ''; + }; + }; + } diff --git a/src/apps/cli/default.nix b/src/apps/cli/default.nix index 21b35376..00c2e8bf 100644 --- a/src/apps/cli/default.nix +++ b/src/apps/cli/default.nix @@ -6,68 +6,59 @@ nix, translators, utils, - # from nixpkgs gitMinimal, lib, python3, ... -}: - -let - +}: let b = builtins; - cliPython = python3.withPackages (ps: [ ps.networkx ps.cleo ps.jsonschema ]); - -in -{ + cliPython = python3.withPackages (ps: [ps.networkx ps.cleo ps.jsonschema]); +in { program = utils.writePureShellScript - [ - gitMinimal - nix - ] - '' - # escape the temp dir created by writePureShellScript - cd - > /dev/null - - # run the cli - dream2nixConfig=${configFile} \ - dream2nixSrc=${dream2nixWithExternals} \ - fetcherNames="${b.toString (lib.attrNames fetchers.fetchers)}" \ - ${cliPython}/bin/python ${./.}/cli.py "$@" - ''; - - templateDefaultNix = - { - dream2nixLocationRelative, - dreamLock, - sourcePathRelative, - }: - let - defaultPackage = dreamLock._generic.defaultPackage; - defaultPackageVersion = dreamLock._generic.packages."${defaultPackage}"; - in + [ + gitMinimal + nix + ] '' - { - dream2nix ? import ( - let - dream2nixWithExternals = (builtins.getEnv "dream2nixWithExternals"); - in - if dream2nixWithExternals != "" then dream2nixWithExternals else - throw ''' - This default.nix is for debugging purposes and can only be evaluated within the dream2nix devShell env. - ''') {}, - }: + # escape the temp dir created by writePureShellScript + cd - > /dev/null - dream2nix.makeOutputs { - source = ./dream-lock.json; - ${lib.optionalString (dreamLock.sources."${defaultPackage}"."${defaultPackageVersion}".type == "unknown") '' - sourceOverrides = oldSources: { - "${defaultPackage}"."${defaultPackageVersion}" = ./${sourcePathRelative}; - }; - ''} - } + # run the cli + dream2nixConfig=${configFile} \ + dream2nixSrc=${dream2nixWithExternals} \ + fetcherNames="${b.toString (lib.attrNames fetchers.fetchers)}" \ + ${cliPython}/bin/python ${./.}/cli.py "$@" + ''; + + templateDefaultNix = { + dream2nixLocationRelative, + dreamLock, + sourcePathRelative, + }: let + defaultPackage = dreamLock._generic.defaultPackage; + defaultPackageVersion = dreamLock._generic.packages."${defaultPackage}"; + in '' + { + dream2nix ? 
import ( + let + dream2nixWithExternals = (builtins.getEnv "dream2nixWithExternals"); + in + if dream2nixWithExternals != "" then dream2nixWithExternals else + throw ''' + This default.nix is for debugging purposes and can only be evaluated within the dream2nix devShell env. + ''') {}, + }: + + dream2nix.makeOutputs { + source = ./dream-lock.json; + ${lib.optionalString (dreamLock.sources."${defaultPackage}"."${defaultPackageVersion}".type == "unknown") '' + sourceOverrides = oldSources: { + "${defaultPackage}"."${defaultPackageVersion}" = ./${sourcePathRelative}; + }; + ''} + } ''; } diff --git a/src/apps/cli2/default.nix b/src/apps/cli2/default.nix index 21b35376..00c2e8bf 100644 --- a/src/apps/cli2/default.nix +++ b/src/apps/cli2/default.nix @@ -6,68 +6,59 @@ nix, translators, utils, - # from nixpkgs gitMinimal, lib, python3, ... -}: - -let - +}: let b = builtins; - cliPython = python3.withPackages (ps: [ ps.networkx ps.cleo ps.jsonschema ]); - -in -{ + cliPython = python3.withPackages (ps: [ps.networkx ps.cleo ps.jsonschema]); +in { program = utils.writePureShellScript - [ - gitMinimal - nix - ] - '' - # escape the temp dir created by writePureShellScript - cd - > /dev/null - - # run the cli - dream2nixConfig=${configFile} \ - dream2nixSrc=${dream2nixWithExternals} \ - fetcherNames="${b.toString (lib.attrNames fetchers.fetchers)}" \ - ${cliPython}/bin/python ${./.}/cli.py "$@" - ''; - - templateDefaultNix = - { - dream2nixLocationRelative, - dreamLock, - sourcePathRelative, - }: - let - defaultPackage = dreamLock._generic.defaultPackage; - defaultPackageVersion = dreamLock._generic.packages."${defaultPackage}"; - in + [ + gitMinimal + nix + ] '' - { - dream2nix ? import ( - let - dream2nixWithExternals = (builtins.getEnv "dream2nixWithExternals"); - in - if dream2nixWithExternals != "" then dream2nixWithExternals else - throw ''' - This default.nix is for debugging purposes and can only be evaluated within the dream2nix devShell env. - ''') {}, - }: + # escape the temp dir created by writePureShellScript + cd - > /dev/null - dream2nix.makeOutputs { - source = ./dream-lock.json; - ${lib.optionalString (dreamLock.sources."${defaultPackage}"."${defaultPackageVersion}".type == "unknown") '' - sourceOverrides = oldSources: { - "${defaultPackage}"."${defaultPackageVersion}" = ./${sourcePathRelative}; - }; - ''} - } + # run the cli + dream2nixConfig=${configFile} \ + dream2nixSrc=${dream2nixWithExternals} \ + fetcherNames="${b.toString (lib.attrNames fetchers.fetchers)}" \ + ${cliPython}/bin/python ${./.}/cli.py "$@" + ''; + + templateDefaultNix = { + dream2nixLocationRelative, + dreamLock, + sourcePathRelative, + }: let + defaultPackage = dreamLock._generic.defaultPackage; + defaultPackageVersion = dreamLock._generic.packages."${defaultPackage}"; + in '' + { + dream2nix ? import ( + let + dream2nixWithExternals = (builtins.getEnv "dream2nixWithExternals"); + in + if dream2nixWithExternals != "" then dream2nixWithExternals else + throw ''' + This default.nix is for debugging purposes and can only be evaluated within the dream2nix devShell env. 
+ ''') {}, + }: + + dream2nix.makeOutputs { + source = ./dream-lock.json; + ${lib.optionalString (dreamLock.sources."${defaultPackage}"."${defaultPackageVersion}".type == "unknown") '' + sourceOverrides = oldSources: { + "${defaultPackage}"."${defaultPackageVersion}" = ./${sourcePathRelative}; + }; + ''} + } ''; } diff --git a/src/apps/contribute/default.nix b/src/apps/contribute/default.nix index da4f1871..d790d17f 100644 --- a/src/apps/contribute/default.nix +++ b/src/apps/contribute/default.nix @@ -3,12 +3,9 @@ python3, writeScript, ... -}: - -let - cliPython = python3.withPackages (ps: [ ps.cleo ]); -in -{ +}: let + cliPython = python3.withPackages (ps: [ps.cleo]); +in { program = writeScript "contribute" '' dream2nixSrc=${../../.} \ ${cliPython}/bin/python ${./contribute.py} contribute "$@" diff --git a/src/apps/default.nix b/src/apps/default.nix index b7cf84bc..261d6248 100644 --- a/src/apps/default.nix +++ b/src/apps/default.nix @@ -1,30 +1,25 @@ { lib, pkgs, - # dream2nix callPackageDream, translators, ... -}: - -let +}: let b = builtins; -in - -rec { +in rec { apps = { inherit cli cli2 contribute install; dream2nix = cli; }; - flakeApps = - lib.mapAttrs (appName: app: - { - type = "app"; - program = b.toString app.program; - } - ) apps; + flakeApps = lib.mapAttrs ( + appName: app: { + type = "app"; + program = b.toString app.program; + } + ) + apps; # the dream2nix cli cli = callPackageDream (import ./cli) {}; diff --git a/src/apps/install/default.nix b/src/apps/install/default.nix index 79b41cde..4e3d44f1 100644 --- a/src/apps/install/default.nix +++ b/src/apps/install/default.nix @@ -1,14 +1,12 @@ { runCommand, writeScript, - # dream2nix inputs dream2nixWithExternals, ... -}: - -{ - program = writeScript "install" +}: { + program = + writeScript "install" '' target="$1" if [[ "$target" == "" ]]; then @@ -28,4 +26,4 @@ echo "Installed dream2nix successfully to '$target'." echo "Please check/modify settings in '$target/config.json'" ''; -} \ No newline at end of file +} diff --git a/src/builders/default.nix b/src/builders/default.nix index 0fb46f82..6bfb02a2 100644 --- a/src/builders/default.nix +++ b/src/builders/default.nix @@ -2,24 +2,21 @@ builders, callPackageDream, ... -}: -{ - python = rec { - +}: { + python = rec { default = simpleBuilder; simpleBuilder = callPackageDream ./python/simple-builder {}; }; - nodejs = rec { - + nodejs = rec { default = granular; node2nix = callPackageDream ./nodejs/node2nix {}; - granular = callPackageDream ./nodejs/granular { inherit builders; }; + granular = callPackageDream ./nodejs/granular {inherit builders;}; }; - + rust = rec { default = buildRustPackage; diff --git a/src/builders/go/gomod2nix/default.nix b/src/builders/go/gomod2nix/default.nix index 3a833261..f4ec4761 100644 --- a/src/builders/go/gomod2nix/default.nix +++ b/src/builders/go/gomod2nix/default.nix @@ -3,19 +3,17 @@ pkgs, externals, ... 
-}: - -{ +}: { fetchedSources, dreamLock, -}: -let - gomod2nixTOML = fetchedSources.mapAttrs +}: let + gomod2nixTOML = + fetchedSources.mapAttrs dependencyObject.goName; in -externals.gomod2nixBuilder rec { - pname = dreamLock.generic.mainPackage; - version = dreamLock.sources."${pname}".version; - src = fetchedSources."${pname}"; - modules = ./gomod2nix.toml; -} + externals.gomod2nixBuilder rec { + pname = dreamLock.generic.mainPackage; + version = dreamLock.sources."${pname}".version; + src = fetchedSources."${pname}"; + modules = ./gomod2nix.toml; + } diff --git a/src/builders/nodejs/granular/default.nix b/src/builders/nodejs/granular/default.nix index 165fe1c5..0a1e7de2 100644 --- a/src/builders/nodejs/granular/default.nix +++ b/src/builders/nodejs/granular/default.nix @@ -7,50 +7,38 @@ runCommand, stdenv, writeText, - # dream2nix inputs builders, externals, utils, ... -}: - -{ +}: { # Funcs - # AttrSet -> Bool) -> AttrSet -> [x] - getCyclicDependencies, # name: version: -> [ {name=; version=; } ] - getDependencies, # name: version: -> [ {name=; version=; } ] - getSource, # name: version: -> store-path + getCyclicDependencies, # name: version: -> [ {name=; version=; } ] + getDependencies, # name: version: -> [ {name=; version=; } ] + getSource, # name: version: -> store-path buildPackageWithOtherBuilder, # { builder, name, version }: -> drv - # Attributes - subsystemAttrs, # attrset - defaultPackageName, # string - defaultPackageVersion, # string - packages, # list - + subsystemAttrs, # attrset + defaultPackageName, # string + defaultPackageVersion, # string + packages, # list # attrset of pname -> versions, # where versions is a list of version strings packageVersions, - # function which applies overrides to a package # It must be applied by the builder to each individual derivation # Example: # produceDerivation name (mkDerivation {...}) produceDerivation, - # Custom Options: (parametrize builder behavior) # These can be passed by the user via `builderArgs`. # All options must provide default standalonePackageNames ? [], - nodejs ? null, ... -}@args: - -let - +} @ args: let b = builtins; nodejsVersion = subsystemAttrs.nodejsVersion; @@ -59,8 +47,8 @@ let (args.packages."${name}" or null) == version; nodejs = - if args ? nodejs then - args.nodejs + if args ? nodejs + then args.nodejs else pkgs."nodejs-${builtins.toString nodejsVersion}_x" or (throw "Could not find nodejs version '${nodejsVersion}' in pkgs"); @@ -74,12 +62,12 @@ let allPackages = lib.mapAttrs - (name: versions: - lib.genAttrs - versions - (version: - makePackage name version)) - packageVersions; + (name: versions: + lib.genAttrs + versions + (version: + makePackage name version)) + packageVersions; outputs = { inherit defaultPackage; @@ -87,13 +75,13 @@ let # select only the packages listed in dreamLock as main packages packages = b.foldl' - (ps: p: ps // p) - {} - (lib.mapAttrsToList - (name: version:{ - "${name}"."${version}" = allPackages."${name}"."${version}"; - }) - args.packages); + (ps: p: ps // p) + {} + (lib.mapAttrsToList + (name: version: { + "${name}"."${version}" = allPackages."${name}"."${version}"; + }) + args.packages); }; # This is only executed for electron based packages. 
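A rough sketch (not part of the diff) of the nested attrset built by the allPackages mapping in the granular nodejs builder above — one derivation per name/version pair. The packageVersions values and the stub makePackage are illustrative, and lib is assumed to be nixpkgs' lib:

  let
    packageVersions = { "left-pad" = ["1.3.0"]; "react" = ["17.0.2" "18.2.0"]; };
    makePackage = name: version: "<drv ${name}-${version}>";  # stands in for the real builder
    allPackages =
      lib.mapAttrs
      (name: versions: lib.genAttrs versions (version: makePackage name version))
      packageVersions;
  in
    allPackages
    # => { left-pad = { "1.3.0" = "<drv left-pad-1.3.0>"; };
    #      react = { "17.0.2" = "<drv react-17.0.2>"; "18.2.0" = "<drv react-18.2.0>"; }; }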
@@ -159,302 +147,294 @@ let ''; # Generates a derivation for a specific package name + version - makePackage = name: version: - let + makePackage = name: version: let + deps = getDependencies name version; - deps = getDependencies name version; + nodeDeps = + lib.forEach + deps + (dep: allPackages."${dep.name}"."${dep.version}"); - nodeDeps = - lib.forEach - deps - (dep: allPackages."${dep.name}"."${dep.version}" ); + dependenciesJson = + b.toJSON + (lib.listToAttrs + (b.map + (dep: lib.nameValuePair dep.name dep.version) + deps)); - dependenciesJson = b.toJSON - (lib.listToAttrs - (b.map - (dep: lib.nameValuePair dep.name dep.version) - deps)); + electronDep = + if ! isMainPackage name version + then null + else + lib.findFirst + (dep: dep.name == "electron") + null + deps; - electronDep = - if ! isMainPackage name version then - null + electronVersionMajor = + lib.versions.major electronDep.version; + + electronHeaders = + if electronDep == null + then null + else pkgs."electron_${electronVersionMajor}".headers; + + pkg = produceDerivation name (stdenv.mkDerivation rec { + inherit + dependenciesJson + electronHeaders + nodeDeps + nodeSources + version + ; + + packageName = name; + + pname = utils.sanitizeDerivationName name; + + installMethod = "symlink"; + + electronAppDir = "."; + + # only run build on the main package + runBuild = isMainPackage name version; + + src = getSource name version; + + nativeBuildInputs = [makeWrapper]; + + buildInputs = [jq nodejs python3]; + + # prevents running into ulimits + passAsFile = ["dependenciesJson" "nodeDeps"]; + + preConfigurePhases = ["d2nLoadFuncsPhase" "d2nPatchPhase"]; + + # can be overridden to define alternative install command + # (defaults to 'npm run postinstall') + buildScript = null; + + # python script to modify some metadata to support installation + # (see comments below on d2nPatchPhase) + fixPackage = "${./fix-package.py}"; + + # script to install (symlink or copy) dependencies. + installDeps = "${./install-deps.py}"; + + # costs performance and doesn't seem beneficial in most scenarios + dontStrip = true; + + # declare some useful shell functions + d2nLoadFuncsPhase = '' + # function to resolve symlinks to copies + symlinksToCopies() { + local dir="$1" + + echo "transforming symlinks to copies..." + for f in $(find -L "$dir" -xtype l); do + if [ -f $f ]; then + continue + fi + echo "copying $f" + chmod +wx $(dirname "$f") + mv "$f" "$f.bak" + mkdir "$f" + if [ -n "$(ls -A "$f.bak/")" ]; then + cp -r "$f.bak"/* "$f/" + chmod -R +w $f + fi + rm "$f.bak" + done + } + ''; + + # TODO: upstream fix to nixpkgs + # example which requires this: + # https://registry.npmjs.org/react-window-infinite-loader/-/react-window-infinite-loader-1.0.7.tgz + unpackCmd = + if lib.hasSuffix ".tgz" src + then "tar --delay-directory-restore -xf $src" + else null; + + unpackPhase = '' + runHook preUnpack + + nodeModules=$out/lib/node_modules + + export sourceRoot="$nodeModules/$packageName" + + # sometimes tarballs do not end with .tar.?? + unpackFallback(){ + local fn="$1" + tar xf "$fn" + } + + unpackCmdHooks+=(unpackFallback) + + unpackFile $src + + # Make the base dir in which the target dependency resides in first + mkdir -p "$(dirname "$sourceRoot")" + + # install source + if [ -f "$src" ] + then + # Figure out what directory has been unpacked + export packageDir="$(find . 
-maxdepth 1 -type d | tail -1)" + + # Restore write permissions + find "$packageDir" -type d -exec chmod u+x {} \; + chmod -R u+w "$packageDir" + + # Move the extracted tarball into the output folder + mv "$packageDir" "$sourceRoot" + elif [ -d "$src" ] + then + export strippedName="$(stripHash $src)" + + # Restore write permissions + chmod -R u+w "$strippedName" + + # Move the extracted directory into the output folder + mv "$strippedName" "$sourceRoot" + fi + + runHook postUnpack + ''; + + # The python script wich is executed in this phase: + # - ensures that the package is compatible to the current system + # - ensures the main version in package.json matches the expected + # - pins dependency versions in package.json + # (some npm commands might otherwise trigger networking) + # - creates symlinks for executables declared in package.json + # Apart from that: + # - Any usage of 'link:' in package.json is replaced with 'file:' + # - If package-lock.json exists, it is deleted, as it might conflict + # with the parent package-lock.json. + d2nPatchPhase = '' + # delete package-lock.json as it can lead to conflicts + rm -f package-lock.json + + # repair 'link:' -> 'file:' + mv $nodeModules/$packageName/package.json $nodeModules/$packageName/package.json.old + cat $nodeModules/$packageName/package.json.old | sed 's!link:!file\:!g' > $nodeModules/$packageName/package.json + rm $nodeModules/$packageName/package.json.old + + # run python script (see comment above): + cp package.json package.json.bak + python $fixPackage \ + || \ + # exit code 3 -> the package is incompatible to the current platform + # -> Let the build succeed, but don't create lib/node_packages + if [ "$?" == "3" ]; then + rm -r $out/* + echo "Not compatible with system $system" > $out/error + exit 0 else - lib.findFirst - (dep: dep.name == "electron") - null - deps; + exit 1 + fi - electronVersionMajor = - lib.versions.major electronDep.version; + # configure typescript + if [ -f ./tsconfig.json ] \ + && node -e 'require("typescript")' &>/dev/null; then + node ${./tsconfig-to-json.js} + ${pkgs.jq}/bin/jq ".compilerOptions.preserveSymlinks = true" tsconfig.json \ + | ${pkgs.moreutils}/bin/sponge tsconfig.json + fi + ''; - electronHeaders = - if electronDep == null then - null + # - installs dependencies into the node_modules directory + # - adds executables of direct node module dependencies to PATH + # - adds the current node module to NODE_PATH + # - sets HOME=$TMPDIR, as this is required by some npm scripts + # TODO: don't install dev dependencies. Load into NODE_PATH instead + configurePhase = '' + runHook preConfigure + + # symlink sub dependencies as well as this imitates npm better + python $installDeps + + # add bin path entries collected by python script + if [ -e $TMP/ADD_BIN_PATH ]; then + export PATH="$PATH:$(cat $TMP/ADD_BIN_PATH)" + fi + + # add dependencies to NODE_PATH + export NODE_PATH="$NODE_PATH:$nodeModules/$packageName/node_modules" + + export HOME=$TMPDIR + + runHook postConfigure + ''; + + # Runs the install command which defaults to 'npm run postinstall'. + # Allows using custom install command by overriding 'buildScript'. 
+ buildPhase = '' + runHook preBuild + + # execute electron-rebuild + if [ -n "$electronHeaders" ]; then + ${electron-rebuild} + fi + + # execute install command + if [ -n "$buildScript" ]; then + if [ -f "$buildScript" ]; then + $buildScript + else + eval "$buildScript" + fi + # by default, only for top level packages, `npm run build` is executed + elif [ -n "$runBuild" ] && [ "$(jq '.scripts.build' ./package.json)" != "null" ]; then + npm run build else - pkgs."electron_${electronVersionMajor}".headers; + if [ "$(jq '.scripts.install' ./package.json)" != "null" ]; then + npm --production --offline --nodedir=$nodeSources run install + fi + if [ "$(jq '.scripts.postinstall' ./package.json)" != "null" ]; then + npm --production --offline --nodedir=$nodeSources run postinstall + fi + fi + runHook postBuild + ''; - pkg = - produceDerivation name (stdenv.mkDerivation rec { + # Symlinks executables and manual pages to correct directories + installPhase = '' + runHook preInstall - inherit - dependenciesJson - electronHeaders - nodeDeps - nodeSources - version - ; + echo "Symlinking exectuables to /bin" + if [ -d "$nodeModules/.bin" ] + then + chmod +x $nodeModules/.bin/* + ln -s $nodeModules/.bin $out/bin + fi - packageName = name; + echo "Symlinking manual pages" + if [ -d "$nodeModules/$packageName/man" ] + then + mkdir -p $out/share + for dir in "$nodeModules/$packageName/man/"* + do + mkdir -p $out/share/man/$(basename "$dir") + for page in "$dir"/* + do + ln -s $page $out/share/man/$(basename "$dir") + done + done + fi - pname = utils.sanitizeDerivationName name; - - installMethod = "symlink"; - - electronAppDir = "."; - - # only run build on the main package - runBuild = isMainPackage name version; - - src = getSource name version; - - nativeBuildInputs = [ makeWrapper ]; - - buildInputs = [ jq nodejs python3 ]; - - # prevents running into ulimits - passAsFile = [ "dependenciesJson" "nodeDeps" ]; - - preConfigurePhases = [ "d2nLoadFuncsPhase" "d2nPatchPhase" ]; - - # can be overridden to define alternative install command - # (defaults to 'npm run postinstall') - buildScript = null; - - # python script to modify some metadata to support installation - # (see comments below on d2nPatchPhase) - fixPackage = "${./fix-package.py}"; - - # script to install (symlink or copy) dependencies. - installDeps = "${./install-deps.py}"; - - # costs performance and doesn't seem beneficial in most scenarios - dontStrip = true; - - # declare some useful shell functions - d2nLoadFuncsPhase = '' - # function to resolve symlinks to copies - symlinksToCopies() { - local dir="$1" - - echo "transforming symlinks to copies..." - for f in $(find -L "$dir" -xtype l); do - if [ -f $f ]; then - continue - fi - echo "copying $f" - chmod +wx $(dirname "$f") - mv "$f" "$f.bak" - mkdir "$f" - if [ -n "$(ls -A "$f.bak/")" ]; then - cp -r "$f.bak"/* "$f/" - chmod -R +w $f - fi - rm "$f.bak" - done - } - ''; - - # TODO: upstream fix to nixpkgs - # example which requires this: - # https://registry.npmjs.org/react-window-infinite-loader/-/react-window-infinite-loader-1.0.7.tgz - unpackCmd = - if lib.hasSuffix ".tgz" src then - "tar --delay-directory-restore -xf $src" - else - null; - - unpackPhase = '' - runHook preUnpack - - nodeModules=$out/lib/node_modules - - export sourceRoot="$nodeModules/$packageName" - - # sometimes tarballs do not end with .tar.?? 
- unpackFallback(){ - local fn="$1" - tar xf "$fn" - } - - unpackCmdHooks+=(unpackFallback) - - unpackFile $src - - # Make the base dir in which the target dependency resides in first - mkdir -p "$(dirname "$sourceRoot")" - - # install source - if [ -f "$src" ] - then - # Figure out what directory has been unpacked - export packageDir="$(find . -maxdepth 1 -type d | tail -1)" - - # Restore write permissions - find "$packageDir" -type d -exec chmod u+x {} \; - chmod -R u+w "$packageDir" - - # Move the extracted tarball into the output folder - mv "$packageDir" "$sourceRoot" - elif [ -d "$src" ] - then - export strippedName="$(stripHash $src)" - - # Restore write permissions - chmod -R u+w "$strippedName" - - # Move the extracted directory into the output folder - mv "$strippedName" "$sourceRoot" - fi - - runHook postUnpack - ''; - - # The python script wich is executed in this phase: - # - ensures that the package is compatible to the current system - # - ensures the main version in package.json matches the expected - # - pins dependency versions in package.json - # (some npm commands might otherwise trigger networking) - # - creates symlinks for executables declared in package.json - # Apart from that: - # - Any usage of 'link:' in package.json is replaced with 'file:' - # - If package-lock.json exists, it is deleted, as it might conflict - # with the parent package-lock.json. - d2nPatchPhase = '' - # delete package-lock.json as it can lead to conflicts - rm -f package-lock.json - - # repair 'link:' -> 'file:' - mv $nodeModules/$packageName/package.json $nodeModules/$packageName/package.json.old - cat $nodeModules/$packageName/package.json.old | sed 's!link:!file\:!g' > $nodeModules/$packageName/package.json - rm $nodeModules/$packageName/package.json.old - - # run python script (see comment above): - cp package.json package.json.bak - python $fixPackage \ - || \ - # exit code 3 -> the package is incompatible to the current platform - # -> Let the build succeed, but don't create lib/node_packages - if [ "$?" == "3" ]; then - rm -r $out/* - echo "Not compatible with system $system" > $out/error - exit 0 - else - exit 1 - fi - - # configure typescript - if [ -f ./tsconfig.json ] \ - && node -e 'require("typescript")' &>/dev/null; then - node ${./tsconfig-to-json.js} - ${pkgs.jq}/bin/jq ".compilerOptions.preserveSymlinks = true" tsconfig.json \ - | ${pkgs.moreutils}/bin/sponge tsconfig.json - fi - ''; - - # - installs dependencies into the node_modules directory - # - adds executables of direct node module dependencies to PATH - # - adds the current node module to NODE_PATH - # - sets HOME=$TMPDIR, as this is required by some npm scripts - # TODO: don't install dev dependencies. Load into NODE_PATH instead - configurePhase = '' - runHook preConfigure - - # symlink sub dependencies as well as this imitates npm better - python $installDeps - - # add bin path entries collected by python script - if [ -e $TMP/ADD_BIN_PATH ]; then - export PATH="$PATH:$(cat $TMP/ADD_BIN_PATH)" - fi - - # add dependencies to NODE_PATH - export NODE_PATH="$NODE_PATH:$nodeModules/$packageName/node_modules" - - export HOME=$TMPDIR - - runHook postConfigure - ''; - - # Runs the install command which defaults to 'npm run postinstall'. - # Allows using custom install command by overriding 'buildScript'. 
- buildPhase = '' - runHook preBuild - - # execute electron-rebuild - if [ -n "$electronHeaders" ]; then - ${electron-rebuild} - fi - - # execute install command - if [ -n "$buildScript" ]; then - if [ -f "$buildScript" ]; then - $buildScript - else - eval "$buildScript" - fi - # by default, only for top level packages, `npm run build` is executed - elif [ -n "$runBuild" ] && [ "$(jq '.scripts.build' ./package.json)" != "null" ]; then - npm run build - else - if [ "$(jq '.scripts.install' ./package.json)" != "null" ]; then - npm --production --offline --nodedir=$nodeSources run install - fi - if [ "$(jq '.scripts.postinstall' ./package.json)" != "null" ]; then - npm --production --offline --nodedir=$nodeSources run postinstall - fi - fi - - runHook postBuild - ''; - - # Symlinks executables and manual pages to correct directories - installPhase = '' - runHook preInstall - - echo "Symlinking exectuables to /bin" - if [ -d "$nodeModules/.bin" ] - then - chmod +x $nodeModules/.bin/* - ln -s $nodeModules/.bin $out/bin - fi - - echo "Symlinking manual pages" - if [ -d "$nodeModules/$packageName/man" ] - then - mkdir -p $out/share - for dir in "$nodeModules/$packageName/man/"* - do - mkdir -p $out/share/man/$(basename "$dir") - for page in "$dir"/* - do - ln -s $page $out/share/man/$(basename "$dir") - done - done - fi - - # wrap electron app - # execute electron-rebuild - if [ -n "$electronHeaders" ]; then - ${electron-wrap} - fi - - runHook postInstall - ''; - }); - in - pkg; + # wrap electron app + # execute electron-rebuild + if [ -n "$electronHeaders" ]; then + ${electron-wrap} + fi + runHook postInstall + ''; + }); + in + pkg; in -outputs - + outputs diff --git a/src/builders/nodejs/node2nix/default.nix b/src/builders/nodejs/node2nix/default.nix index a6569aef..444a83b3 100644 --- a/src/builders/nodejs/node2nix/default.nix +++ b/src/builders/nodejs/node2nix/default.nix @@ -1,17 +1,13 @@ # builder imported from node2nix - { lib, pkgs, - # dream2nix inputs externals, node2nix ? externals.node2nix, utils, ... -}: - -{ +}: { subsystemAttrs, defaultPackageName, defaultPackageVersion, @@ -19,12 +15,10 @@ getDependencies, getSource, packageVersions, - # overrides packageOverrides ? {}, ... -}@args: -let +} @ args: let b = builtins; getAllDependencies = name: version: @@ -43,56 +37,49 @@ let node2nixEnv = node2nix nodejs; - makeSource = packageName: version: prevDeps: - let - depsFiltered = - (lib.filter - (dep: - ! b.elem dep prevDeps) - (getAllDependencies packageName version)); - parentDeps = - prevDeps ++ depsFiltered; - in - rec { - inherit packageName version; - name = utils.sanitizeDerivationName packageName; - src = getSource packageName version; - dependencies = - lib.forEach - depsFiltered - (dep: makeSource dep.name dep.version parentDeps); - }; + makeSource = packageName: version: prevDeps: let + depsFiltered = + lib.filter + (dep: + ! 
b.elem dep prevDeps) + (getAllDependencies packageName version); + parentDeps = + prevDeps ++ depsFiltered; + in rec { + inherit packageName version; + name = utils.sanitizeDerivationName packageName; + src = getSource packageName version; + dependencies = + lib.forEach + depsFiltered + (dep: makeSource dep.name dep.version parentDeps); + }; node2nixDependencies = lib.forEach - mainPackageDependencies - (dep: makeSource dep.name dep.version mainPackageDependencies); - # (dep: allSources."${dep.name}"."${dep.version}"); + mainPackageDependencies + (dep: makeSource dep.name dep.version mainPackageDependencies); + # (dep: allSources."${dep.name}"."${dep.version}"); callNode2Nix = funcName: args: node2nixEnv."${funcName}" (rec { - name = utils.sanitizeDerivationName packageName; - packageName = defaultPackageName; - version = defaultPackageVersion; - dependencies = node2nixDependencies; - production = true; - bypassCache = true; - reconstructLock = true; - src = getSource defaultPackageName defaultPackageVersion; - } - // args); - -in -rec { - + name = utils.sanitizeDerivationName packageName; + packageName = defaultPackageName; + version = defaultPackageVersion; + dependencies = node2nixDependencies; + production = true; + bypassCache = true; + reconstructLock = true; + src = getSource defaultPackageName defaultPackageVersion; + } + // args); +in rec { packages."${defaultPackageName}"."${defaultPackageVersion}" = defaultPackage; - defaultPackage = - let - pkg = callNode2Nix "buildNodePackage" {}; - in - utils.applyOverridesToPackage packageOverrides pkg defaultPackageName; + defaultPackage = let + pkg = callNode2Nix "buildNodePackage" {}; + in + utils.applyOverridesToPackage packageOverrides pkg defaultPackageName; devShell = callNode2Nix "buildNodeShell" {}; - } diff --git a/src/builders/python/simple-builder/default.nix b/src/builders/python/simple-builder/default.nix index 745818a6..41617b36 100644 --- a/src/builders/python/simple-builder/default.nix +++ b/src/builders/python/simple-builder/default.nix @@ -1,41 +1,34 @@ # A very simple single derivation python builder - { lib, pkgs, ... 
-}: - -{ +}: { fetchedSources, dreamLock, -}: - -let +}: let python = pkgs."${dreamLock._subsystem.pythonAttr}"; buildFunc = - if dreamLock._subsystem.application then - python.pkgs.buildPythonApplication - else - python.pkgs.buildPythonPackage; + if dreamLock._subsystem.application + then python.pkgs.buildPythonApplication + else python.pkgs.buildPythonPackage; defaultPackage = dreamLock._generic.defaultPackage; packageName = - if defaultPackage == null then - if dreamLock._subsystem.application then - "application" - else - "environment" - else - defaultPackage; + if defaultPackage == null + then + if dreamLock._subsystem.application + then "application" + else "environment" + else defaultPackage; defaultPackage = buildFunc { name = packageName; format = ""; buildInputs = pkgs.pythonManylinuxPackages.manylinux1; - nativeBuildInputs = [ pkgs.autoPatchelfHook python.pkgs.wheelUnpackHook ]; + nativeBuildInputs = [pkgs.autoPatchelfHook python.pkgs.wheelUnpackHook]; unpackPhase = '' mkdir dist for file in ${builtins.toString (lib.attrValues fetchedSources)}; do @@ -57,8 +50,7 @@ let --ignore-installed runHook postInstall ''; -}; - + }; in { inherit defaultPackage; } diff --git a/src/builders/rust/build-rust-package/default.nix b/src/builders/rust/build-rust-package/default.nix index fc93013f..71a89882 100644 --- a/src/builders/rust/build-rust-package/default.nix +++ b/src/builders/rust/build-rust-package/default.nix @@ -1,11 +1,8 @@ { lib, pkgs, - ... -}: - -{ +}: { subsystemAttrs, defaultPackageName, defaultPackageVersion, @@ -15,23 +12,19 @@ getSourceSpec, packages, produceDerivation, - ... -}@args: - -let +} @ args: let l = lib // builtins; utils = import ../utils.nix args; - vendoring = import ../vendor.nix (args // { inherit lib pkgs utils; }); + vendoring = import ../vendor.nix (args // {inherit lib pkgs utils;}); - buildPackage = pname: version: - let - src = utils.getRootSource pname version; - vendorDir = vendoring.vendorDependencies pname version; + buildPackage = pname: version: let + src = utils.getRootSource pname version; + vendorDir = vendoring.vendorDependencies pname version; - cargoBuildFlags = "--package ${pname}"; - in + cargoBuildFlags = "--package ${pname}"; + in produceDerivation pname (pkgs.rustPlatform.buildRustPackage { inherit pname version src; @@ -51,13 +44,11 @@ let ${vendoring.writeGitVendorEntries "vendored-sources"} ''; }); -in -rec { +in rec { packages = l.mapAttrs - (name: version: - { "${version}" = buildPackage name version; }) - args.packages; + (name: version: {"${version}" = buildPackage name version;}) + args.packages; defaultPackage = packages."${defaultPackageName}"."${defaultPackageVersion}"; } diff --git a/src/builders/rust/crane/default.nix b/src/builders/rust/crane/default.nix index 4a0cee26..acf4b757 100644 --- a/src/builders/rust/crane/default.nix +++ b/src/builders/rust/crane/default.nix @@ -1,12 +1,9 @@ { lib, pkgs, - externals, ... -}: - -{ +}: { subsystemAttrs, defaultPackageName, defaultPackageVersion, @@ -16,51 +13,47 @@ getSourceSpec, packages, produceDerivation, - ... 
-}@args: - -let +} @ args: let l = lib // builtins; utils = import ../utils.nix args; - vendoring = import ../vendor.nix (args // { inherit lib pkgs utils; }); + vendoring = import ../vendor.nix (args // {inherit lib pkgs utils;}); crane = externals.crane; - buildPackage = pname: version: - let - src = utils.getRootSource pname version; - cargoVendorDir = vendoring.vendorDependencies pname version; - postUnpack = '' - export CARGO_HOME=$(pwd)/.cargo_home - ''; - preConfigure = '' - ${vendoring.writeGitVendorEntries "nix-sources"} - ''; - # The deps-only derivation will use this as a prefix to the `pname` - depsNameSuffix = "-deps"; + buildPackage = pname: version: let + src = utils.getRootSource pname version; + cargoVendorDir = vendoring.vendorDependencies pname version; + postUnpack = '' + export CARGO_HOME=$(pwd)/.cargo_home + ''; + preConfigure = '' + ${vendoring.writeGitVendorEntries "nix-sources"} + ''; + # The deps-only derivation will use this as a prefix to the `pname` + depsNameSuffix = "-deps"; - common = {inherit pname version src cargoVendorDir preConfigure postUnpack;}; + common = {inherit pname version src cargoVendorDir preConfigure postUnpack;}; - depsArgs = common // { pnameSuffix = depsNameSuffix; }; - deps = produceDerivation "${pname}${depsNameSuffix}" (crane.buildDepsOnly depsArgs); - - buildArgs = common // { + depsArgs = common // {pnameSuffix = depsNameSuffix;}; + deps = produceDerivation "${pname}${depsNameSuffix}" (crane.buildDepsOnly depsArgs); + + buildArgs = + common + // { cargoArtifacts = deps; # Make sure cargo only builds & tests the package we want cargoBuildCommand = "cargo build --release --package ${pname}"; cargoTestCommand = "cargo test --release --package ${pname}"; }; - in + in produceDerivation pname (crane.buildPackage buildArgs); -in -rec { +in rec { packages = l.mapAttrs - (name: version: - { "${version}" = buildPackage name version; }) - args.packages; + (name: version: {"${version}" = buildPackage name version;}) + args.packages; defaultPackage = packages."${defaultPackageName}"."${defaultPackageVersion}"; } diff --git a/src/builders/rust/utils.nix b/src/builders/rust/utils.nix index 01c1b199..e3da54a9 100644 --- a/src/builders/rust/utils.nix +++ b/src/builders/rust/utils.nix @@ -2,11 +2,10 @@ getSourceSpec, getSource, getRoot, - ... -}: -rec { - getRootSource = pname: version: - let root = getRoot pname version; in +}: rec { + getRootSource = pname: version: let + root = getRoot pname version; + in getSource root.pname root.version; -} \ No newline at end of file +} diff --git a/src/builders/rust/vendor.nix b/src/builders/rust/vendor.nix index 74ebf811..a6442632 100644 --- a/src/builders/rust/vendor.nix +++ b/src/builders/rust/vendor.nix @@ -1,17 +1,14 @@ { lib, pkgs, - getRoot, getSource, getSourceSpec, getDependencies, getCyclicDependencies, subsystemAttrs, - ... -}: -let +}: let l = lib // builtins; isCyclic = cyclic: dep: @@ -44,78 +41,72 @@ let in rec { # Generates a shell script that writes git vendor entries to .cargo/config. # `replaceWith` is the name of the vendored source(s) to use. - writeGitVendorEntries = replaceWith: - let - makeEntry = source: - '' - [source."${source.url}${l.optionalString (source ? type) "?${source.type}=${source.value}"}"] - replace-with = "${replaceWith}" - git = "${source.url}" - ${l.optionalString (source ? 
type) "${source.type} = \"${source.value}\""} - ''; - entries = l.map makeEntry subsystemAttrs.gitSources; - in '' - mkdir -p $CARGO_HOME && touch $CARGO_HOME/config.toml - cat >> $CARGO_HOME/config.toml <> $CARGO_HOME/config.toml <&2 echo "Cannot find path for crate '${pkg.name}-${pkg.version}' in the tree in: $tree" - exit 1 - fi + findCrateSource = source: let + inherit (pkgs) cargo jq; + pkg = source.dep; + in '' + # If the target package is in a workspace, or if it's the top-level + # crate, we should find the crate path using `cargo metadata`. + crateCargoTOML=$(${cargo}/bin/cargo metadata --format-version 1 --no-deps --manifest-path $tree/Cargo.toml | \ + ${jq}/bin/jq -r '.packages[] | select(.name == "${pkg.name}") | .manifest_path') + # If the repository is not a workspace the package might be in a subdirectory. + if [[ -z $crateCargoTOML ]]; then + for manifest in $(find $tree -name "Cargo.toml"); do + echo Looking at $manifest + crateCargoTOML=$(${cargo}/bin/cargo metadata --format-version 1 --no-deps --manifest-path "$manifest" | ${jq}/bin/jq -r '.packages[] | select(.name == "${pkg.name}") | .manifest_path' || :) + if [[ ! -z $crateCargoTOML ]]; then + break fi - echo Found crate ${pkg.name} at $crateCargoTOML - tree="$(dirname $crateCargoTOML)" - ''; - makeScript = source: - let - isGit = source.spec.type == "git"; - isPath = source.spec.type == "path"; - in - l.optionalString (!isPath) '' - tree="${source.path}" - ${l.optionalString isGit (findCrateSource source)} - cp -prvd "$tree" $out/${source.name} - chmod u+w $out/${source.name} - ${l.optionalString isGit "printf '{\"files\":{},\"package\":null}' > \"$out/${source.name}/.cargo-checksum.json\""} - ''; + done + if [[ -z $crateCargoTOML ]]; then + >&2 echo "Cannot find path for crate '${pkg.name}-${pkg.version}' in the tree in: $tree" + exit 1 + fi + fi + echo Found crate ${pkg.name} at $crateCargoTOML + tree="$(dirname $crateCargoTOML)" + ''; + makeScript = source: let + isGit = source.spec.type == "git"; + isPath = source.spec.type == "path"; in + l.optionalString (!isPath) '' + tree="${source.path}" + ${l.optionalString isGit (findCrateSource source)} + cp -prvd "$tree" $out/${source.name} + chmod u+w $out/${source.name} + ${l.optionalString isGit "printf '{\"files\":{},\"package\":null}' > \"$out/${source.name}/.cargo-checksum.json\""} + ''; + in pkgs.runCommand "vendor-${pname}-${version}" {} '' mkdir -p $out @@ -123,11 +114,12 @@ in rec { l.concatMapStringsSep "\n" makeScript sources - } + } ''; # Vendors a package's roots dependencies. - vendorDependencies = pname: version: - let root = getRoot pname version; in + vendorDependencies = pname: version: let + root = getRoot pname version; + in vendorPackageDependencies root.pname root.version; } diff --git a/src/default.nix b/src/default.nix index 84ff1838..2b68bbff 100644 --- a/src/default.nix +++ b/src/default.nix @@ -2,49 +2,39 @@ # It requires passing one specific pkgs. # If the intention is to generate output for several systems, # use ./lib.nix instead. - { pkgs ? import {}, - dlib ? import ./lib { inherit config lib; }, + dlib ? import ./lib {inherit config lib;}, lib ? pkgs.lib, nix ? pkgs.nix, - # default to empty dream2nix config config ? - # if called via CLI, load config via env - if builtins ? getEnv && builtins.getEnv "dream2nixConfig" != "" then - builtins.toPath (builtins.getEnv "dream2nixConfig") - # load from default directory - else - {}, - + # if called via CLI, load config via env + if builtins ? 
getEnv && builtins.getEnv "dream2nixConfig" != "" + then builtins.toPath (builtins.getEnv "dream2nixConfig") + # load from default directory + else {}, # dependencies of dream2nix externalSources ? lib.genAttrs - (lib.attrNames (builtins.readDir externalDir)) - (inputName: "${/. + externalDir}/${inputName}"), - + (lib.attrNames (builtins.readDir externalDir)) + (inputName: "${/. + externalDir}/${inputName}"), # will be defined if called via flake externalPaths ? null, - # required for non-flake mode externalDir ? - # if flake is used, construct external dir from flake inputs - if externalPaths != null then - (import ./utils/external-dir.nix { - inherit externalPaths externalSources pkgs; - }) - # if called via CLI, load externals via env - else if builtins ? getEnv && builtins.getEnv "d2nExternalDir" != "" then - /. + (builtins.getEnv "d2nExternalDir") - # load from default directory - else - ./external, - -}@args: - -let - + # if flake is used, construct external dir from flake inputs + if externalPaths != null + then + (import ./utils/external-dir.nix { + inherit externalPaths externalSources pkgs; + }) + # if called via CLI, load externals via env + else if builtins ? getEnv && builtins.getEnv "d2nExternalDir" != "" + then /. + (builtins.getEnv "d2nExternalDir") + # load from default directory + else ./external, +} @ args: let b = builtins; l = lib // builtins; @@ -54,22 +44,23 @@ let configFile = pkgs.writeText "dream2nix-config.json" (b.toJSON config); # like pkgs.callPackage, but includes all the dream2nix modules - callPackageDream = f: args: pkgs.callPackage f (args // { - inherit apps; - inherit builders; - inherit callPackageDream; - inherit config; - inherit configFile; - inherit dlib; - inherit externals; - inherit externalSources; - inherit fetchers; - inherit dream2nixWithExternals; - inherit translators; - inherit utils; - inherit nix; - }); - + callPackageDream = f: args: + pkgs.callPackage f (args + // { + inherit apps; + inherit builders; + inherit callPackageDream; + inherit config; + inherit configFile; + inherit dlib; + inherit externals; + inherit externalSources; + inherit fetchers; + inherit dream2nixWithExternals; + inherit translators; + inherit utils; + inherit nix; + }); utils = callPackageDream ./utils {}; @@ -93,94 +84,97 @@ let pkgs.callPackage "${externalSources.node2nix}/nix/node-env.nix" { inherit nodejs; }; - crane = - let - importLibFile = name: import "${externalSources.crane}/lib/${name}.nix"; + crane = let + importLibFile = name: import "${externalSources.crane}/lib/${name}.nix"; - makeHook = attrs: name: - pkgs.makeSetupHook - ({ inherit name; } // attrs) - "${externalSources.crane}/pkgs/${name}.sh"; - genHooks = names: attrs: lib.genAttrs names (makeHook attrs); + makeHook = attrs: name: + pkgs.makeSetupHook + ({inherit name;} // attrs) + "${externalSources.crane}/pkgs/${name}.sh"; + genHooks = names: attrs: lib.genAttrs names (makeHook attrs); - otherHooks = - genHooks [ - "configureCargoCommonVarsHook" - "configureCargoVendoredDepsHook" - "remapSourcePathPrefixHook" - ] { }; - installHooks = - genHooks [ - "inheritCargoArtifactsHook" - "installCargoArtifactsHook" - ] { - substitutions = { - zstd = "${pkgs.pkgsBuildBuild.zstd}/bin/zstd"; - }; - }; - installLogHook = genHooks ["installFromCargoBuildLogHook"] { + otherHooks = genHooks [ + "configureCargoCommonVarsHook" + "configureCargoVendoredDepsHook" + "remapSourcePathPrefixHook" + ] {}; + installHooks = + genHooks [ + "inheritCargoArtifactsHook" + "installCargoArtifactsHook" + ] { substitutions = 
{ - cargo = "${pkgs.pkgsBuildBuild.cargo}/bin/cargo"; - jq = "${pkgs.pkgsBuildBuild.jq}/bin/jq"; + zstd = "${pkgs.pkgsBuildBuild.zstd}/bin/zstd"; }; }; - in rec { - # These aren't used by dream2nix - crateNameFromCargoToml = null; - vendorCargoDeps = null; - - writeTOML = importLibFile "writeTOML" { - inherit (pkgs) writeText; - inherit (utils) toTOML; - }; - cleanCargoToml = importLibFile "cleanCargoToml" { - inherit (builtins) fromTOML; - }; - findCargoFiles = importLibFile "findCargoFiles" { - inherit (pkgs) lib; - }; - mkDummySrc = importLibFile "mkDummySrc" { - inherit (pkgs) writeText runCommandLocal lib; - inherit writeTOML cleanCargoToml findCargoFiles; - }; - - mkCargoDerivation = importLibFile "mkCargoDerivation" ({ - inherit (pkgs) cargo stdenv lib; - } // installHooks // otherHooks); - buildDepsOnly = importLibFile "buildDepsOnly" { - inherit - mkCargoDerivation crateNameFromCargoToml - vendorCargoDeps mkDummySrc; - }; - cargoBuild = importLibFile "cargoBuild" { - inherit - mkCargoDerivation buildDepsOnly - crateNameFromCargoToml vendorCargoDeps; - }; - buildPackage = importLibFile "buildPackage" { - inherit (pkgs) lib; - inherit (installLogHook) installFromCargoBuildLogHook; - inherit cargoBuild; + installLogHook = genHooks ["installFromCargoBuildLogHook"] { + substitutions = { + cargo = "${pkgs.pkgsBuildBuild.cargo}/bin/cargo"; + jq = "${pkgs.pkgsBuildBuild.jq}/bin/jq"; }; }; + in rec { + # These aren't used by dream2nix + crateNameFromCargoToml = null; + vendorCargoDeps = null; + + writeTOML = importLibFile "writeTOML" { + inherit (pkgs) writeText; + inherit (utils) toTOML; + }; + cleanCargoToml = importLibFile "cleanCargoToml" { + inherit (builtins) fromTOML; + }; + findCargoFiles = importLibFile "findCargoFiles" { + inherit (pkgs) lib; + }; + mkDummySrc = importLibFile "mkDummySrc" { + inherit (pkgs) writeText runCommandLocal lib; + inherit writeTOML cleanCargoToml findCargoFiles; + }; + + mkCargoDerivation = importLibFile "mkCargoDerivation" ({ + inherit (pkgs) cargo stdenv lib; + } + // installHooks + // otherHooks); + buildDepsOnly = importLibFile "buildDepsOnly" { + inherit + mkCargoDerivation + crateNameFromCargoToml + vendorCargoDeps + mkDummySrc + ; + }; + cargoBuild = importLibFile "cargoBuild" { + inherit + mkCargoDerivation + buildDepsOnly + crateNameFromCargoToml + vendorCargoDeps + ; + }; + buildPackage = importLibFile "buildPackage" { + inherit (pkgs) lib; + inherit (installLogHook) installFromCargoBuildLogHook; + inherit cargoBuild; + }; + }; }; - dreamOverrides = - let - overridesDirs = - config.overridesDirs - ++ - (lib.optionals (b ? getEnv && b.getEnv "d2nOverridesDir" != "") [ - (b.getEnv "d2nOverridesDir") - ]); - - in - utils.loadOverridesDirs overridesDirs pkgs; + dreamOverrides = let + overridesDirs = + config.overridesDirs + ++ (lib.optionals (b ? getEnv && b.getEnv "d2nOverridesDir" != "") [ + (b.getEnv "d2nOverridesDir") + ]); + in + utils.loadOverridesDirs overridesDirs pkgs; # the location of the dream2nix framework for self references (update scripts, etc.) dream2nixWithExternals = - if b.pathExists (./. + "/external") then - ./. + if b.pathExists (./. + "/external") + then ./. else pkgs.runCommand "dream2nix-full-src" {} '' cp -r ${./.} $out @@ -191,563 +185,530 @@ let ''; # automatically find a suitable builder for a given dream lock - findBuilder = dreamLock: - let - subsystem = dreamLock._generic.subsystem; - in - if ! builders ? 
"${subsystem}" then - throw "Could not find any builder for subsystem '${subsystem}'" - else - builders."${subsystem}".default; - + findBuilder = dreamLock: let + subsystem = dreamLock._generic.subsystem; + in + if ! builders ? "${subsystem}" + then throw "Could not find any builder for subsystem '${subsystem}'" + else builders."${subsystem}".default; # detect if granular or combined fetching must be used findFetcher = dreamLock: - if null != dreamLock._generic.sourcesAggregatedHash then - fetchers.combinedFetcher - else - fetchers.defaultFetcher; + if null != dreamLock._generic.sourcesAggregatedHash + then fetchers.combinedFetcher + else fetchers.defaultFetcher; # fetch only sources and do not build - fetchSources = - { - dreamLock, - fetcher ? null, - extract ? false, - sourceOverrides ? oldSources: {}, - }@args: - let - # if dream lock is a file, read and parse it - dreamLock' = (utils.readDreamLock { inherit dreamLock; }).lock; + fetchSources = { + dreamLock, + fetcher ? null, + extract ? false, + sourceOverrides ? oldSources: {}, + } @ args: let + # if dream lock is a file, read and parse it + dreamLock' = (utils.readDreamLock {inherit dreamLock;}).lock; - fetcher = - if args.fetcher or null == null then - findFetcher dreamLock' - else - args.fetcher; + fetcher = + if args.fetcher or null == null + then findFetcher dreamLock' + else args.fetcher; - fetched = fetcher rec { - inherit sourceOverrides; - defaultPackage = dreamLock._generic.defaultPackage; - defaultPackageVersion = dreamLock._generic.packages."${defaultPackage}"; - sources = dreamLock'.sources; - sourcesAggregatedHash = dreamLock'._generic.sourcesAggregatedHash; + fetched = fetcher rec { + inherit sourceOverrides; + defaultPackage = dreamLock._generic.defaultPackage; + defaultPackageVersion = dreamLock._generic.packages."${defaultPackage}"; + sources = dreamLock'.sources; + sourcesAggregatedHash = dreamLock'._generic.sourcesAggregatedHash; + }; + + fetchedSources = fetched.fetchedSources; + in + fetched + // { + fetchedSources = + if extract + then + lib.mapAttrs + (key: source: utils.extractSource {inherit source;}) + fetchedSources + else fetchedSources; + }; + + makeDreamLockForSource = { + source, + translator ? null, + translatorArgs ? {}, + } @ args: let + sourceSpec = + if b.isString args.source && ! lib.isStorePath args.source + then fetchers.translateShortcut {shortcut = args.source;} + else { + type = "path"; + path = args.source; }; - fetchedSources = fetched.fetchedSources; + source = fetchers.fetchSource {source = sourceSpec;}; + t = let + translator = translators.findOneTranslator { + inherit source; + translatorName = args.translator or null; + }; in - fetched // { - fetchedSources = - if extract then - lib.mapAttrs - (key: source: utils.extractSource { inherit source; }) - fetchedSources - else - fetchedSources; - }; + if b.elem translator.type ["pure" "ifd"] + then translator + else + throw '' + All comaptible translators are impure and therefore require + pre-processing the input before evaluation. + Use the CLI to add this package: + nix run .# -- add ... + ''; - - makeDreamLockForSource = - { - source, - translator ? null, - translatorArgs ? {}, - }@args: - let - - sourceSpec = - if b.isString args.source && ! 
lib.isStorePath args.source then - fetchers.translateShortcut { shortcut = args.source; } - else - { - type = "path"; - path = args.source; - }; - - source = fetchers.fetchSource { source = sourceSpec; }; - - t = - let - translator = translators.findOneTranslator { - inherit source; - translatorName = args.translator or null; - }; - - in - if b.elem translator.type [ "pure" "ifd" ] then - translator - else - throw '' - All comaptible translators are impure and therefore require - pre-processing the input before evaluation. - Use the CLI to add this package: - nix run .# -- add ... - ''; - - dreamLock' = translators.translators."${t.subsystem}"."${t.type}"."${t.name}".translate - (translatorArgs // { + dreamLock' = + translators.translators."${t.subsystem}"."${t.type}"."${t.name}".translate + (translatorArgs + // { inherit source; }); - dreamLock = - let - defaultPackage = dreamLock'._generic.defaultPackage; - defaultPackageVersion = dreamLock'._generic.packages."${defaultPackage}"; - in - lib.recursiveUpdate dreamLock' { - sources."${defaultPackage}"."${defaultPackageVersion}" = { - type = "path"; - path = "${source}"; - }; - }; - + dreamLock = let + defaultPackage = dreamLock'._generic.defaultPackage; + defaultPackageVersion = dreamLock'._generic.packages."${defaultPackage}"; in - dreamLock; - + lib.recursiveUpdate dreamLock' { + sources."${defaultPackage}"."${defaultPackageVersion}" = { + type = "path"; + path = "${source}"; + }; + }; + in + dreamLock; # build a dream lock via a specific builder - callBuilder = - { + callBuilder = { + builder, + builderArgs, + fetchedSources, + dreamLock, + inject, + sourceOverrides, + packageOverrides, + allOutputs, + } @ args: let + # inject dependencies + dreamLock = utils.dreamLock.injectDependencies args.dreamLock inject; + + dreamLockInterface = (utils.readDreamLock {inherit dreamLock;}).interface; + + produceDerivation = name: pkg: + utils.applyOverridesToPackage { + inherit pkg; + outputs = allOutputs; + pname = name; + conditionalOverrides = packageOverrides; + }; + + buildPackageWithOtherBuilder = { builder, - builderArgs, - fetchedSources, - dreamLock, - inject, - sourceOverrides, - packageOverrides, - allOutputs, - }@args: - let - - # inject dependencies - dreamLock = utils.dreamLock.injectDependencies args.dreamLock inject; - - dreamLockInterface = (utils.readDreamLock { inherit dreamLock; }).interface; - - produceDerivation = name: pkg: - utils.applyOverridesToPackage { - inherit pkg; - outputs = allOutputs; - pname = name; - conditionalOverrides = packageOverrides; - }; - - buildPackageWithOtherBuilder = - { - builder, - name, - version, - inject ? {}, - }: - let - subDreamLockLoaded = - utils.readDreamLock { - dreamLock = - utils.dreamLock.getSubDreamLock dreamLock name version; - }; - - in - callBuilder { - inherit - builder - builderArgs - fetchedSources - inject - sourceOverrides - packageOverrides - ; - - dreamLock = - subDreamLockLoaded.lock; - - outputs = allOutputs; - }; - - outputs = builder ( builderArgs // { - - inherit - buildPackageWithOtherBuilder - produceDerivation + name, + version, + inject ? 
{}, + }: let + subDreamLockLoaded = utils.readDreamLock { + dreamLock = + utils.dreamLock.getSubDreamLock dreamLock name version; + }; + in + callBuilder { + inherit + builder + builderArgs + fetchedSources + inject + sourceOverrides + packageOverrides ; - inherit (dreamLockInterface) - subsystemAttrs - getSourceSpec - getRoot - getDependencies - getCyclicDependencies - defaultPackageName - defaultPackageVersion - packages - packageVersions + dreamLock = + subDreamLockLoaded.lock; + + outputs = allOutputs; + }; + + outputs = builder (builderArgs + // { + inherit + buildPackageWithOtherBuilder + produceDerivation ; - getSource = utils.dreamLock.getSource fetchedSources; + inherit + (dreamLockInterface) + subsystemAttrs + getSourceSpec + getRoot + getDependencies + getCyclicDependencies + defaultPackageName + defaultPackageVersion + packages + packageVersions + ; - }); + getSource = utils.dreamLock.getSource fetchedSources; + }); - # Makes the packages tree compatible with flakes schema. - # For each package the attr `{pname}` will link to the latest release. - # Other package versions will be inside: `{pname}.versions` - formattedOutputs = outputs // { - packages = - let - allPackages = outputs.packages or {}; - - latestPackages = - lib.mapAttrs' - (pname: releases: - let - latest = - releases."${utils.latestVersion (b.attrNames releases)}"; - in - (lib.nameValuePair - "${pname}" - (latest // { - versions = releases; - }))) - allPackages; - in - latestPackages; - }; - - in - formattedOutputs; + # Makes the packages tree compatible with flakes schema. + # For each package the attr `{pname}` will link to the latest release. + # Other package versions will be inside: `{pname}.versions` + formattedOutputs = + outputs + // { + packages = let + allPackages = outputs.packages or {}; + latestPackages = + lib.mapAttrs' + (pname: releases: let + latest = + releases."${utils.latestVersion (b.attrNames releases)}"; + in (lib.nameValuePair + "${pname}" + (latest + // { + versions = releases; + }))) + allPackages; + in + latestPackages; + }; + in + formattedOutputs; riseAndShine = throw '' Use makeOutputs instead of riseAndShine. ''; - makeOutputsForDreamLock = - { - dreamLock, - builder ? null, - fetcher ? null, - inject ? {}, - sourceOverrides ? oldSources: {}, - packageOverrides ? {}, - builderArgs ? {}, - translator ? null, - translatorArgs ? {}, - }@args: - let - # parse dreamLock - dreamLockLoaded = utils.readDreamLock { inherit (args) dreamLock; }; - dreamLock = dreamLockLoaded.lock; - dreamLockInterface = dreamLockLoaded.interface; + makeOutputsForDreamLock = { + dreamLock, + builder ? null, + fetcher ? null, + inject ? {}, + sourceOverrides ? oldSources: {}, + packageOverrides ? {}, + builderArgs ? {}, + translator ? null, + translatorArgs ? 
{}, + } @ args: let + # parse dreamLock + dreamLockLoaded = utils.readDreamLock {inherit (args) dreamLock;}; + dreamLock = dreamLockLoaded.lock; + dreamLockInterface = dreamLockLoaded.interface; - builder' = - if builder == null then - findBuilder dreamLock - else - builder; + builder' = + if builder == null + then findBuilder dreamLock + else builder; - fetcher' = - if fetcher == null then - findFetcher dreamLock - else - fetcher; + fetcher' = + if fetcher == null + then findFetcher dreamLock + else fetcher; - fetchedSources = (fetchSources { + fetchedSources = + (fetchSources { inherit dreamLock sourceOverrides; fetcher = fetcher'; - }).fetchedSources; + }) + .fetchedSources; - builderOutputs = callBuilder { - - inherit - dreamLock - fetchedSources - allOutputs - sourceOverrides + builderOutputs = callBuilder { + inherit + dreamLock + fetchedSources + allOutputs + sourceOverrides ; - builder = builder'; + builder = builder'; - inherit builderArgs; + inherit builderArgs; - packageOverrides = - lib.recursiveUpdate - (dreamOverrides."${dreamLock._generic.subsystem}" or {}) - (args.packageOverrides or {}); + packageOverrides = + lib.recursiveUpdate + (dreamOverrides."${dreamLock._generic.subsystem}" or {}) + (args.packageOverrides or {}); - inject = - utils.dreamLock.decompressDependencyGraph args.inject or {}; + inject = + utils.dreamLock.decompressDependencyGraph args.inject or {}; + }; + + allOutputs = builderOutputs; + in + allOutputs; + + translateProjects = { + discoveredProjects ? + dlib.discoverers.discoverProjects + {inherit settings tree;}, + source ? throw "Pass either `source` or `tree` to translateProjects", + tree ? dlib.prepareSourceTree {inherit source;}, + pname, + settings ? [], + } @ args: let + # This influences error messages only + flakeMode = ! builtins ? currentSystem; + + getTranslator = subsystem: translatorName: + translators.translatorsV2."${subsystem}".all."${translatorName}"; + + isImpure = project: translatorName: + (getTranslator project.subsystem translatorName).type == "impure"; + + getInvalidationHash = project: + dlib.calcInvalidationHash { + inherit source; + # TODO: add translatorArgs + translatorArgs = {}; + translator = project.translator; }; - allOutputs = builderOutputs; + isResolved = project: let + dreamLockExists = + l.pathExists "${config.projectRoot}/${project.dreamLockPath}"; + dreamLockValid = + project.dreamLock._generic.invalidationHash + or "" + == project.invalidationHash; in - allOutputs; + dreamLockExists && dreamLockValid; - translateProjects = - { - discoveredProjects ? dlib.discoverers.discoverProjects - {inherit settings tree;}, - source ? - throw "Pass either `source` or `tree` to translateProjects", - tree ? dlib.prepareSourceTree { inherit source; }, - pname, - settings ? [], - }@args: + getProjectKey = project: "${project.name}_|_${project.subsystem}_|_${project.relPath}"; - let - - # This influences error messages only - flakeMode = ! builtins ? 
currentSystem; - - getTranslator = subsystem: translatorName: - translators.translatorsV2."${subsystem}".all."${translatorName}"; - - isImpure = project: translatorName: - (getTranslator project.subsystem translatorName).type == "impure"; - - getInvalidationHash = project: - dlib.calcInvalidationHash { - inherit source; - # TODO: add translatorArgs - translatorArgs = {}; - translator = project.translator; - }; - - isResolved = project: - let - dreamLockExists = - l.pathExists "${config.projectRoot}/${project.dreamLockPath}"; - - dreamLockValid = - project.dreamLock._generic.invalidationHash or "" - == project.invalidationHash; - in - dreamLockExists && dreamLockValid; - - getProjectKey = project: - "${project.name}_|_${project.subsystem}_|_${project.relPath}"; - - # list of projects extended with some information requried for processing - projectsList = - l.map - (project: (let self = project // rec { - dreamLock = (utils.readDreamLock { - dreamLock = "${config.projectRoot}/${project.dreamLockPath}"; - }).lock; + # list of projects extended with some information requried for processing + projectsList = + l.map + (project: (let + self = + project + // rec { + dreamLock = + (utils.readDreamLock { + dreamLock = "${config.projectRoot}/${project.dreamLockPath}"; + }) + .lock; impure = isImpure project translator; invalidationHash = getInvalidationHash project; key = getProjectKey project; resolved = isResolved self; translator = project.translator or (l.head project.translators); - }; in self)) - discoveredProjects; - - # attrset of projects by key - projects = - l.listToAttrs - (l.map - (proj: l.nameValuePair proj.key proj) - projectsList); - - # unresolved impure projects cannot be resolved on the fly - projectsImpureUnresolved = - l.filter (project: project.impure && ! project.resolved) projectsList; - - # for printing the paths inside the error message - projectsImpureUnresolvedInfo = - l.map - (project: "${project.name}: ${project.relPath}") - projectsImpureUnresolved; - - # projects without existing valid dream-lock.json - projectsPureUnresolved = - l.filter - (project: ! project.resolved && ! project.impure) - projectsList; - - # already resolved projects - projectsResolved = - l.filter - (project: project.resolved) - projectsList; - - # list of pure projects extended with 'dreamLock' attribute - projectsResolvedOnTheFly = - l.forEach projectsPureUnresolved - (proj: - let - translator = getTranslator proj.subsystem proj.translator; - dreamLock' = translator.translate { - inherit source tree; - project = proj; - }; - dreamLock = dreamLock' // { - _generic = dreamLock'._generic // { - invalidationHash = proj.invalidationHash; - }; - }; - in - proj // { - inherit dreamLock; - }); - - resolvedProjects = projectsResolved ++ projectsResolvedOnTheFly; - - in - if projectsImpureUnresolved != [] then - if flakeMode then - l.trace '' - ${"\n"} - Run `nix run .#resolveImpure` once to resolve impure projects. - The following projects cannot be resolved on the fly and are therefore excluded: - ${l.concatStringsSep "\n " projectsImpureUnresolvedInfo} - '' - resolvedProjects - else - l.trace '' - ${"\n"} - The following projects cannot be resolved on the fly and are therefore excluded: - ${l.concatStringsSep "\n " projectsImpureUnresolvedInfo} - '' - resolvedProjects - else if projectsPureUnresolved != [] then - resolvedProjects - else - resolvedProjects; - - # transform a list of resolved projects to buildable outputs - realizeProjects = - { - translatedProjects ? 
translateProjects { inherit pname settings source; }, - - # alternative way of calling (for debugging) - pname ? null, - source ? null, - - packageOverrides ? {}, - settings ? [], - }: - let - - dreamLocks = l.forEach translatedProjects (proj: proj.dreamLock); - - defaultSourceOverride = dreamLock: - if source == null then - {} - else - let - defaultPackage = dreamLock._generic.defaultPackage; - defaultPackageVersion = - dreamLock._generic.packages."${defaultPackage}"; - in - { - "${defaultPackage}"."${defaultPackageVersion}" = - "${source}/${dreamLock._generic.location}"; - }; - - # extends each package with a `.resolve` attribute - outputsForProject = proj: - let - outputs = makeOutputsForDreamLock rec { - inherit packageOverrides; - dreamLock = proj.dreamLock; - sourceOverrides = oldSources: - (defaultSourceOverride proj.dreamLock); }; - in - outputs + in + self)) + discoveredProjects; + + # attrset of projects by key + projects = + l.listToAttrs + (l.map + (proj: l.nameValuePair proj.key proj) + projectsList); + + # unresolved impure projects cannot be resolved on the fly + projectsImpureUnresolved = + l.filter (project: project.impure && ! project.resolved) projectsList; + + # for printing the paths inside the error message + projectsImpureUnresolvedInfo = + l.map + (project: "${project.name}: ${project.relPath}") + projectsImpureUnresolved; + + # projects without existing valid dream-lock.json + projectsPureUnresolved = + l.filter + (project: ! project.resolved && ! project.impure) + projectsList; + + # already resolved projects + projectsResolved = + l.filter + (project: project.resolved) + projectsList; + + # list of pure projects extended with 'dreamLock' attribute + projectsResolvedOnTheFly = + l.forEach projectsPureUnresolved + (proj: let + translator = getTranslator proj.subsystem proj.translator; + dreamLock' = translator.translate { + inherit source tree; + project = proj; + }; + dreamLock = + dreamLock' // { - packages = - l.mapAttrs - (pname: pkg: pkg.overrideAttrs (old: { - passthru = old.passthru or {} // { - resolve = utils.makeTranslateScript { - inherit source; - invalidationHash = proj.invalidationHash; - project = proj; - }; - }; - })) - (outputs.packages or {}); + _generic = + dreamLock'._generic + // { + invalidationHash = proj.invalidationHash; + }; }; - - projectOutputs = - l.map - (proj: outputsForProject proj) - translatedProjects; - - mergedOutputs = - l.foldl' - (all: outputs: all // { - packages = all.packages or {} // outputs.packages; - }) - {} - projectOutputs; - - in - mergedOutputs; - - - - # produce outputs for a dream-lock or a source - makeOutputs = - { - source, # source tree or dream-lock - builder ? null, - fetcher ? null, - inject ? {}, - sourceOverrides ? oldSources: {}, - packageOverrides ? {}, - builderArgs ? {}, - translator ? null, - translatorArgs ? {}, - }@args: - - let - - dreamLock' = - # in case of a dream-lock.json file or dream-lock attributes - if ( lib.isAttrs args.source && args.source ? _generic && args.source ? 
_subsytem ) - || lib.hasSuffix "dream-lock.json" source then - args.source - # input is a source tree -> generate the dream-lock - else - makeDreamLockForSource { inherit source translator translatorArgs; }; - - # parse dreamLock - dreamLockLoaded = utils.readDreamLock { dreamLock = dreamLock'; }; - dreamLock = dreamLockLoaded.lock; - dreamLockInterface = dreamLockLoaded.interface; - - # sub packages - builderOutputsSub = - b.mapAttrs - (dirName: dreamLock: - makeOutputs - (args // {source = dreamLock.lock; })) - dreamLockInterface.subDreamLocks; - - builderOutputs = makeOutputsForDreamLock - ((b.removeAttrs args ["source"]) // { + in + proj + // { inherit dreamLock; }); - allOutputs = - { subPackages = builderOutputsSub; } - // - # merge with sub package outputs - b.foldl' - (old: new: old // { - packages = new.packages or {} // old.packages; - }) - builderOutputs - (b.attrValues builderOutputsSub); + resolvedProjects = projectsResolved ++ projectsResolvedOnTheFly; + in + if projectsImpureUnresolved != [] + then + if flakeMode + then + l.trace '' + ${"\n"} + Run `nix run .#resolveImpure` once to resolve impure projects. + The following projects cannot be resolved on the fly and are therefore excluded: + ${l.concatStringsSep "\n " projectsImpureUnresolvedInfo} + '' + resolvedProjects + else + l.trace '' + ${"\n"} + The following projects cannot be resolved on the fly and are therefore excluded: + ${l.concatStringsSep "\n " projectsImpureUnresolvedInfo} + '' + resolvedProjects + else if projectsPureUnresolved != [] + then resolvedProjects + else resolvedProjects; + # transform a list of resolved projects to buildable outputs + realizeProjects = { + translatedProjects ? translateProjects {inherit pname settings source;}, + # alternative way of calling (for debugging) + pname ? null, + source ? null, + packageOverrides ? {}, + settings ? [], + }: let + dreamLocks = l.forEach translatedProjects (proj: proj.dreamLock); + + defaultSourceOverride = dreamLock: + if source == null + then {} + else let + defaultPackage = dreamLock._generic.defaultPackage; + defaultPackageVersion = + dreamLock._generic.packages."${defaultPackage}"; + in { + "${defaultPackage}"."${defaultPackageVersion}" = "${source}/${dreamLock._generic.location}"; + }; + + # extends each package with a `.resolve` attribute + outputsForProject = proj: let + outputs = makeOutputsForDreamLock rec { + inherit packageOverrides; + dreamLock = proj.dreamLock; + sourceOverrides = oldSources: (defaultSourceOverride proj.dreamLock); + }; in - allOutputs; + outputs + // { + packages = + l.mapAttrs + (pname: pkg: + pkg.overrideAttrs (old: { + passthru = + old.passthru + or {} + // { + resolve = utils.makeTranslateScript { + inherit source; + invalidationHash = proj.invalidationHash; + project = proj; + }; + }; + })) + (outputs.packages or {}); + }; + projectOutputs = + l.map + (proj: outputsForProject proj) + translatedProjects; -in -{ + mergedOutputs = + l.foldl' + (all: outputs: + all + // { + packages = all.packages or {} // outputs.packages; + }) + {} + projectOutputs; + in + mergedOutputs; + + # produce outputs for a dream-lock or a source + makeOutputs = { + source, # source tree or dream-lock + builder ? null, + fetcher ? null, + inject ? {}, + sourceOverrides ? oldSources: {}, + packageOverrides ? {}, + builderArgs ? {}, + translator ? null, + translatorArgs ? {}, + } @ args: let + dreamLock' = + # in case of a dream-lock.json file or dream-lock attributes + if + (lib.isAttrs args.source && args.source ? _generic && args.source ? 
_subsytem) + || lib.hasSuffix "dream-lock.json" source + then args.source + # input is a source tree -> generate the dream-lock + else makeDreamLockForSource {inherit source translator translatorArgs;}; + + # parse dreamLock + dreamLockLoaded = utils.readDreamLock {dreamLock = dreamLock';}; + dreamLock = dreamLockLoaded.lock; + dreamLockInterface = dreamLockLoaded.interface; + + # sub packages + builderOutputsSub = + b.mapAttrs + (dirName: dreamLock: + makeOutputs + (args // {source = dreamLock.lock;})) + dreamLockInterface.subDreamLocks; + + builderOutputs = + makeOutputsForDreamLock + ((b.removeAttrs args ["source"]) + // { + inherit dreamLock; + }); + + allOutputs = + {subPackages = builderOutputsSub;} + // + # merge with sub package outputs + b.foldl' + (old: new: + old + // { + packages = new.packages or {} // old.packages; + }) + builderOutputs + (b.attrValues builderOutputsSub); + in + allOutputs; +in { inherit apps builders @@ -762,9 +723,10 @@ in translators updaters utils - ; + ; - inherit (dlib) + inherit + (dlib) discoverers - ; + ; } diff --git a/src/discoverers/default.nix b/src/discoverers/default.nix index 9614b1f1..c5c8deff 100644 --- a/src/discoverers/default.nix +++ b/src/discoverers/default.nix @@ -2,86 +2,80 @@ config, dlib, lib, -}: - -let +}: let l = lib // builtins; subsystems = dlib.dirNames ./.; allDiscoverers = l.collect - (v: v ? discover) - discoverers; + (v: v ? discover) + discoverers; - discoverProjects = - { - source ? throw "Pass either `source` or `tree` to discoverProjects", - tree ? dlib.prepareSourceTree { inherit source; }, - settings ? [], - }: let - discoveredProjects = - l.flatten - (l.map - (discoverer: discoverer.discover { inherit tree; }) - allDiscoverers); + discoverProjects = { + source ? throw "Pass either `source` or `tree` to discoverProjects", + tree ? dlib.prepareSourceTree {inherit source;}, + settings ? [], + }: let + discoveredProjects = + l.flatten + (l.map + (discoverer: discoverer.discover {inherit tree;}) + allDiscoverers); - rootProjectName = l.head discoveredProjects; + rootProjectName = l.head discoveredProjects; - projectsExtended = l.forEach discoveredProjects - (proj: proj - // { - translator = l.head proj.translators; - dreamLockPath = getDreamLockPath proj rootProjectName; - }); - in - applyProjectSettings projectsExtended settings; + projectsExtended = + l.forEach discoveredProjects + (proj: + proj + // { + translator = l.head proj.translators; + dreamLockPath = getDreamLockPath proj rootProjectName; + }); + in + applyProjectSettings projectsExtended settings; - getDreamLockPath = project: rootProject: - let - root = - if config.projectRoot == null then - "." - else - config.projectRoot; - in - dlib.sanitizeRelativePath - "${config.packagesDir}/${rootProject.name}/${project.relPath}/dream-lock.json"; + getDreamLockPath = project: rootProject: let + root = + if config.projectRoot == null + then "." + else config.projectRoot; + in + dlib.sanitizeRelativePath + "${config.packagesDir}/${rootProject.name}/${project.relPath}/dream-lock.json"; - applyProjectSettings = projects: settingsList: - let - settingsListForProject = project: - l.filter - (settings: - if ! settings ? filter then true - else settings.filter project) - settingsList; + applyProjectSettings = projects: settingsList: let + settingsListForProject = project: + l.filter + (settings: + if ! settings ? 
filter + then true + else settings.filter project) + settingsList; - applySettings = project: settings: - l.recursiveUpdate project settings; + applySettings = project: settings: + l.recursiveUpdate project settings; - applyAllSettings = project: - l.foldl' - (proj: settings: applySettings proj settings) - project - (settingsListForProject project); + applyAllSettings = project: + l.foldl' + (proj: settings: applySettings proj settings) + project + (settingsListForProject project); - settingsApplied = - l.forEach projects - (proj: applyAllSettings proj); + settingsApplied = + l.forEach projects + (proj: applyAllSettings proj); + in + settingsApplied; - in settingsApplied; - - - discoverers = l.genAttrs subsystems (subsystem: - (import (./. + "/${subsystem}") { inherit dlib lib subsystem; }) + discoverers = l.genAttrs subsystems ( + subsystem: (import (./. + "/${subsystem}") {inherit dlib lib subsystem;}) ); -in - -{ +in { inherit applyProjectSettings discoverProjects discoverers - ; + ; } diff --git a/src/discoverers/nodejs/default.nix b/src/discoverers/nodejs/default.nix index 942a3e37..7af0987f 100644 --- a/src/discoverers/nodejs/default.nix +++ b/src/discoverers/nodejs/default.nix @@ -1,65 +1,53 @@ { dlib, lib, - subsystem, -}: - -let +}: let l = lib // builtins; - discover = - { - tree, - }: - let - projects = discoverInternal { - inherit tree; - }; - in - filterProjects projects; + discover = {tree}: let + projects = discoverInternal { + inherit tree; + }; + in + filterProjects projects; # One translator call can process a whole workspace containing all # sub-packages of that workspace. # Therefore we can filter out projects which are children of a workspace. - filterProjects = projects: - let - workspaceRoots = - l.filter - (proj: proj.subsystemInfo.workspaces or [] != []) - projects; + filterProjects = projects: let + workspaceRoots = + l.filter + (proj: proj.subsystemInfo.workspaces or [] != []) + projects; - allWorkspaceChildren = - l.flatten - (l.map - (root: root.subsystemInfo.workspaces) - workspaceRoots); + allWorkspaceChildren = + l.flatten + (l.map + (root: root.subsystemInfo.workspaces) + workspaceRoots); - childrenRemoved = - l.filter - (proj: - (! l.elem proj.relPath allWorkspaceChildren)) - projects; + childrenRemoved = + l.filter + (proj: (! l.elem proj.relPath allWorkspaceChildren)) + projects; + in + childrenRemoved; - in - childrenRemoved; - - getTranslatorNames = path: - let - nodes = l.readDir path; - packageJson = l.fromJSON (l.readFile "${path}/package.json"); - translators = - # if the package has no dependencies we use the - # package-lock translator with `packageLock = null` - if ! packageJson ? dependencies && ! packageJson ? devDependencies - then [ "package-lock" ] - - else - l.optionals (nodes ? "package-lock.json") [ "package-lock" ] - ++ l.optionals (nodes ? "yarn.lock") [ "yarn-lock" ] - ++ [ "package-json" ]; - in - translators; + getTranslatorNames = path: let + nodes = l.readDir path; + packageJson = l.fromJSON (l.readFile "${path}/package.json"); + translators = + # if the package has no dependencies we use the + # package-lock translator with `packageLock = null` + if ! packageJson ? dependencies && ! packageJson ? devDependencies + then ["package-lock"] + else + l.optionals (nodes ? "package-lock.json") ["package-lock"] + ++ l.optionals (nodes ? 
"yarn.lock") ["yarn-lock"] + ++ ["package-json"]; + in + translators; # returns the parsed package.json of a given directory getPackageJson = dirPath: @@ -67,166 +55,146 @@ let # returns all relative paths to workspaces defined by a glob getWorkspacePaths = glob: tree: - if l.hasSuffix "*" glob then - let - prefix = l.removeSuffix "*" glob; - path = "${tree.fullPath}/${prefix}"; + if l.hasSuffix "*" glob + then let + prefix = l.removeSuffix "*" glob; + path = "${tree.fullPath}/${prefix}"; - dirNames = - if l.pathExists path - then dlib.listDirs path - else - l.trace - "WARNING: Detected workspace ${glob} does not exist." - []; - - existingWsPaths = - l.filter - (wsPath: - if l.pathExists "${path}/${wsPath}/package.json" - then true - else - let - notExistingPath = - dlib.sanitizeRelativePath "${prefix}/${wsPath}"; - in - l.trace - "WARNING: Detected workspace ${notExistingPath} does not exist." - false) - dirNames; - in - l.map (dname: "${prefix}/${dname}") existingWsPaths - - else - if l.pathExists "${tree.fullPath}/${glob}/package.json" - then [ glob ] - else - l.trace + dirNames = + if l.pathExists path + then dlib.listDirs path + else + l.trace "WARNING: Detected workspace ${glob} does not exist." []; + existingWsPaths = + l.filter + (wsPath: + if l.pathExists "${path}/${wsPath}/package.json" + then true + else let + notExistingPath = + dlib.sanitizeRelativePath "${prefix}/${wsPath}"; + in + l.trace + "WARNING: Detected workspace ${notExistingPath} does not exist." + false) + dirNames; + in + l.map (dname: "${prefix}/${dname}") existingWsPaths + else if l.pathExists "${tree.fullPath}/${glob}/package.json" + then [glob] + else + l.trace + "WARNING: Detected workspace ${glob} does not exist." + []; + # collect project info for workspaces defined by current package.json - getWorkspaces = tree: parentInfo: - let - packageJson = tree.files."package.json".jsonContent; - workspacesRaw = packageJson.workspaces or []; + getWorkspaces = tree: parentInfo: let + packageJson = tree.files."package.json".jsonContent; + workspacesRaw = packageJson.workspaces or []; - workspacesFlattened = - if l.isAttrs workspacesRaw - then - l.flatten - (l.mapAttrsToList - (category: workspaces: workspaces) - workspacesRaw) - - else if l.isList workspacesRaw - then workspacesRaw - - else throw "Error parsing workspaces in ${tree.files."package.json".relPath}"; - - in - l.flatten - (l.forEach workspacesFlattened - (glob: - let - workspacePaths = getWorkspacePaths glob tree; - in - l.forEach workspacePaths - (wPath: makeWorkspaceProjectInfo tree wPath parentInfo))); - - makeWorkspaceProjectInfo = tree: wsRelPath: parentInfo: - { - inherit subsystem; - name = - (getPackageJson "${tree.fullPath}/${wsRelPath}").name - or "${parentInfo.name}/${wsRelPath}"; - relPath = dlib.sanitizeRelativePath "${tree.relPath}/${wsRelPath}"; - translators = - l.unique - ( - (lib.filter (trans: l.elem trans ["package-lock" "yarn-lock"]) parentInfo.translators) - ++ (getTranslatorNames "${tree.fullPath}/${wsRelPath}") - ); - subsystemInfo = { - workspaceParent = tree.relPath; - }; - }; - - discoverInternal = - { - tree, - - # Internal parameter preventing workspace projects from being discovered - # twice. - alreadyDiscovered ? 
{}, - }: - let - foundSubProjects = alreadyDiscovered: + workspacesFlattened = + if l.isAttrs workspacesRaw + then l.flatten - ((l.mapAttrsToList - (dname: dir: discoverInternal { - inherit alreadyDiscovered; - tree = dir; - }) - (tree.directories or {}))); - in + (l.mapAttrsToList + (category: workspaces: workspaces) + workspacesRaw) + else if l.isList workspacesRaw + then workspacesRaw + else throw "Error parsing workspaces in ${tree.files."package.json".relPath}"; + in + l.flatten + (l.forEach workspacesFlattened + (glob: let + workspacePaths = getWorkspacePaths glob tree; + in + l.forEach workspacePaths + (wPath: makeWorkspaceProjectInfo tree wPath parentInfo))); + + makeWorkspaceProjectInfo = tree: wsRelPath: parentInfo: { + inherit subsystem; + name = + (getPackageJson "${tree.fullPath}/${wsRelPath}").name + or "${parentInfo.name}/${wsRelPath}"; + relPath = dlib.sanitizeRelativePath "${tree.relPath}/${wsRelPath}"; + translators = + l.unique + ( + (lib.filter (trans: l.elem trans ["package-lock" "yarn-lock"]) parentInfo.translators) + ++ (getTranslatorNames "${tree.fullPath}/${wsRelPath}") + ); + subsystemInfo = { + workspaceParent = tree.relPath; + }; + }; + + discoverInternal = { + tree, + # Internal parameter preventing workspace projects from being discovered + # twice. + alreadyDiscovered ? {}, + }: let + foundSubProjects = alreadyDiscovered: + l.flatten + (l.mapAttrsToList + (dname: dir: + discoverInternal { + inherit alreadyDiscovered; + tree = dir; + }) + (tree.directories or {})); + in # skip if not a nodajs project - if alreadyDiscovered ? "${tree.relPath}" - || ! tree ? files."package.json" then + if + alreadyDiscovered + ? "${tree.relPath}" + || ! tree ? files."package.json" + then # this will be cleaned by `flatten` for sub-directories foundSubProjects alreadyDiscovered - else - let + else let + # project info of current directory + currentProjectInfo = { + inherit subsystem; + inherit (tree) relPath; + name = tree.files."package.json".jsonContent.name or tree.relPath; + translators = getTranslatorNames tree.fullPath; + subsystemInfo = l.optionalAttrs (workspaces != []) { + workspaces = + l.map + (w: l.removePrefix tree.relPath w.relPath) + workspaces; + }; + }; - # project info of current directory - currentProjectInfo = - { - inherit subsystem; - inherit (tree) relPath; - name = tree.files."package.json".jsonContent.name or tree.relPath; - translators = getTranslatorNames tree.fullPath; - subsystemInfo = - l.optionalAttrs (workspaces != []) { - workspaces = - l.map - (w: l.removePrefix tree.relPath w.relPath) - workspaces; - }; - }; + workspaces = getWorkspaces tree currentProjectInfo; - workspaces = getWorkspaces tree currentProjectInfo; + # list of all projects infos found by the current iteration + foundProjects = + # current directories project info + [currentProjectInfo] + # workspaces defined by the current directory + ++ workspaces; - - # list of all projects infos found by the current iteration - foundProjects = - # current directories project info - [ currentProjectInfo ] - - # workspaces defined by the current directory - ++ - workspaces; - - # index of already found projects - # This is needed, because sub-projects also contain a `package.json`, - # and would otherwise be discovered again as an independent project. 
- alreadyDiscovered' = - alreadyDiscovered - // - (l.genAttrs - (l.map (p: p.relPath) foundProjects) - (relPath: null)); - in - # l.trace tree.directories - # the current directory - foundProjects - - # sub-directories - # Thanks to `alreadyDiscovered`, workspace projects won't be discovered - # a second time. - ++ - (foundSubProjects alreadyDiscovered'); -in - -{ + # index of already found projects + # This is needed, because sub-projects also contain a `package.json`, + # and would otherwise be discovered again as an independent project. + alreadyDiscovered' = + alreadyDiscovered + // (l.genAttrs + (l.map (p: p.relPath) foundProjects) + (relPath: null)); + in + # l.trace tree.directories + # the current directory + foundProjects + # sub-directories + # Thanks to `alreadyDiscovered`, workspace projects won't be discovered + # a second time. + ++ (foundSubProjects alreadyDiscovered'); +in { inherit discover; } diff --git a/src/fetchers/combined-fetcher.nix b/src/fetchers/combined-fetcher.nix index 8681b481..49c5ad35 100644 --- a/src/fetchers/combined-fetcher.nix +++ b/src/fetchers/combined-fetcher.nix @@ -9,108 +9,102 @@ nix, stdenv, writeScript, - # dream2nix defaultFetcher, utils, ... -}: -{ +}: { # sources attrset from dream lock sources, sourcesAggregatedHash, ... -}@args: -let - +} @ args: let b = builtins; # resolve to individual fetcher calls defaultFetched = (defaultFetcher args).fetchedSources; # extract the arguments from the individual fetcher calls - FODArgsAll = - let - FODArgsAll' = - lib.mapAttrs - (name: versions: - lib.mapAttrs - (version: fetched: - # handle FOD sources - if lib.all - (attr: fetched ? "${attr}") - [ "outputHash" "outputHashAlgo" "outputHashMode" ] then + FODArgsAll = let + FODArgsAll' = + lib.mapAttrs + ( + name: versions: + lib.mapAttrs + (version: fetched: + # handle FOD sources + if + lib.all + (attr: fetched ? "${attr}") + ["outputHash" "outputHashAlgo" "outputHashMode"] + then + (fetched.overrideAttrs (args: { + passthru.originalArgs = args; + })) + .originalArgs + // { + outPath = let + sanitizedName = utils.sanitizeDerivationName name; + in "${sanitizedName}/${version}/${fetched.name}"; + } + # handle path sources + else if lib.isString fetched + then "ignore" + # handle store path sources + else if lib.isStorePath fetched + then "ignore" + # handle unknown sources + else if fetched == "unknown" + then "ignore" + # error out on unknown source types + else + throw '' + Error while generating FOD fetcher for combined sources. + Cannot classify source of ${name}#${version}. + '') + versions + ) + defaultFetched; + in + lib.filterAttrs + (name: versions: versions != {}) + (lib.mapAttrs + (name: versions: + lib.filterAttrs + (version: fetcherArgs: fetcherArgs != "ignore") + versions) + FODArgsAll'); - (fetched.overrideAttrs (args: { - passthru.originalArgs = args; - })).originalArgs // { - outPath = - let - sanitizedName = utils.sanitizeDerivationName name; - in - "${sanitizedName}/${version}/${fetched.name}"; - } - - # handle path sources - else if lib.isString fetched then - "ignore" - - # handle store path sources - else if lib.isStorePath fetched then - "ignore" - - # handle unknown sources - else if fetched == "unknown" then - "ignore" - - # error out on unknown source types - else - throw '' - Error while generating FOD fetcher for combined sources. - Cannot classify source of ${name}#${version}. 
- '') - versions - ) - defaultFetched; - in - lib.filterAttrs - (name: versions: versions != {}) - (lib.mapAttrs - (name: versions: - lib.filterAttrs - (version: fetcherArgs: fetcherArgs != "ignore") - versions) - FODArgsAll'); - - FODArgsAllList = - lib.flatten - (lib.mapAttrsToList - (name: versions: - b.attrValues versions) - FODArgsAll); + FODArgsAllList = + lib.flatten + (lib.mapAttrsToList + (name: versions: + b.attrValues versions) + FODArgsAll); # convert arbitrary types to string, like nix does with derivation arguments toString' = x: - if lib.isBool x then - if x then - "1" - else - "" - else if lib.isList x then - ''"${lib.concatStringsSep " " (lib.forEach x (y: toString' y))}"'' - else if x == null then - "" - else - b.toJSON x; + if lib.isBool x + then + if x + then "1" + else "" + else if lib.isList x + then ''"${lib.concatStringsSep " " (lib.forEach x (y: toString' y))}"'' + else if x == null + then "" + else b.toJSON x; # set up nix build env for signle item setupEnvForItem = fetcherArgs: '' # export arguments for builder ${lib.concatStringsSep "\n" (lib.mapAttrsToList (argName: argVal: '' - export ${argName}=${ - lib.replaceStrings [ "$" ''\n'' ] [ ''\$'' "\n" ] (toString' argVal)} - '') fetcherArgs)} + export ${argName}=${ + lib.replaceStrings ["$" ''\n''] [''\$'' "\n"] (toString' argVal) + } + '') + fetcherArgs)} # run builder bash ${fetcherArgs.builder} @@ -159,40 +153,37 @@ let echo "FOD_HASH=$(${nix}/bin/nix hash path $out)" ''; - FODAllSources = - let - nativeBuildInputs' = - lib.unique - (lib.foldl (a: b: a ++ b) [] - (b.map - (fetcherArgs: (fetcherArgs.nativeBuildInputs or [])) - FODArgsAllList)); - in - stdenv.mkDerivation rec { - name = "sources-combined"; - inherit builder; - nativeBuildInputs = nativeBuildInputs' ++ [ + FODAllSources = let + nativeBuildInputs' = + lib.unique + (lib.foldl (a: b: a ++ b) [] + (b.map + (fetcherArgs: (fetcherArgs.nativeBuildInputs or [])) + FODArgsAllList)); + in + stdenv.mkDerivation rec { + name = "sources-combined"; + inherit builder; + nativeBuildInputs = + nativeBuildInputs' + ++ [ coreutils ]; - outputHashAlgo = "sha256"; - outputHashMode = "recursive"; - outputHash = sourcesAggregatedHash; - }; - -in - -{ + outputHashAlgo = "sha256"; + outputHashMode = "recursive"; + outputHash = sourcesAggregatedHash; + }; +in { FOD = FODAllSources; fetchedSources = lib.mapAttrs - (name: versions: - lib.mapAttrs - (version: source: - if FODArgsAll ? "${name}"."${version}" then - "${FODAllSources}/${FODArgsAll."${name}"."${version}".outPath}" - else - defaultFetched."${name}"."${version}") - versions) - sources; + (name: versions: + lib.mapAttrs + (version: source: + if FODArgsAll ? "${name}"."${version}" + then "${FODAllSources}/${FODArgsAll."${name}"."${version}".outPath}" + else defaultFetched."${name}"."${version}") + versions) + sources; } diff --git a/src/fetchers/crates-io/default.nix b/src/fetchers/crates-io/default.nix index 5b96d041..2f553272 100644 --- a/src/fetchers/crates-io/default.nix +++ b/src/fetchers/crates-io/default.nix @@ -2,40 +2,39 @@ lib, fetchurl, runCommand, - utils, ... -}: -{ - - inputs = [ "pname" "version" ]; +}: { + inputs = ["pname" "version"]; versionField = "version"; - outputs = { pname, version, ... }@inp: - let - b = builtins; - # See https://github.com/rust-lang/crates.io-index/blob/master/config.json#L2 - url = "https://crates.io/api/v1/crates/${pname}/${version}/download"; - in - { - calcHash = algo: utils.hashFile algo (b.fetchurl { + outputs = { + pname, + version, + ... 
+ } @ inp: let + b = builtins; + # See https://github.com/rust-lang/crates.io-index/blob/master/config.json#L2 + url = "https://crates.io/api/v1/crates/${pname}/${version}/download"; + in { + calcHash = algo: + utils.hashFile algo (b.fetchurl { inherit url; }); - fetched = hash: - let - fetched = fetchurl { - inherit url; - sha256 = hash; - name = "download-${pname}-${version}"; - }; - in - runCommand "unpack-${pname}-${version}" {} - '' - mkdir -p $out - tar --strip-components 1 -xzf ${fetched} -C $out - echo '{"package":"${hash}","files":{}}' > $out/.cargo-checksum.json - ''; - }; + fetched = hash: let + fetched = fetchurl { + inherit url; + sha256 = hash; + name = "download-${pname}-${version}"; + }; + in + runCommand "unpack-${pname}-${version}" {} + '' + mkdir -p $out + tar --strip-components 1 -xzf ${fetched} -C $out + echo '{"package":"${hash}","files":{}}' > $out/.cargo-checksum.json + ''; + }; } diff --git a/src/fetchers/default-fetcher.nix b/src/fetchers/default-fetcher.nix index be9f0737..ad4b8dbc 100644 --- a/src/fetchers/default-fetcher.nix +++ b/src/fetchers/default-fetcher.nix @@ -1,56 +1,51 @@ { lib, - # dream2nix attributes fetchSource, fetchers, ... -}: -{ +}: { # sources attrset from dream lock defaultPackage, defaultPackageVersion, sourceOverrides, sources, ... -}: - -let - +}: let b = builtins; fetchedSources = - lib.mapAttrs - (name: versions: - lib.mapAttrs - (version: source: - if source.type == "unknown" then - "unknown" - else if source.type == "path" then - if lib.isStorePath source.path then - source.path - # assume path relative to main package source - else - "${overriddenSources."${defaultPackage}"."${defaultPackageVersion}"}/${source.path}" - else if fetchers.fetchers ? "${source.type}" then - fetchSource { - source = source // { - pname = name; - inherit version; - }; - } - else throw "unsupported source type '${source.type}'") - versions) - sources; + (name: versions: + lib.mapAttrs + (version: source: + if source.type == "unknown" + then "unknown" + else if source.type == "path" + then + if lib.isStorePath source.path + then source.path + # assume path relative to main package source + else "${overriddenSources."${defaultPackage}"."${defaultPackageVersion}"}/${source.path}" + else if fetchers.fetchers ? "${source.type}" + then + fetchSource { + source = + source + // { + pname = name; + inherit version; + }; + } + else throw "unsupported source type '${source.type}'") + versions) + sources; overriddenSources = lib.recursiveUpdate - fetchedSources - (sourceOverrides fetchedSources); - -in -{ + fetchedSources + (sourceOverrides fetchedSources); +in { # attrset: pname -> path of downloaded source fetchedSources = overriddenSources; } diff --git a/src/fetchers/default.nix b/src/fetchers/default.nix index 1f795989..3529d333 100644 --- a/src/fetchers/default.nix +++ b/src/fetchers/default.nix @@ -1,258 +1,241 @@ { lib, - # dream2nix callPackageDream, utils, ... -}: - -let +}: let b = builtins; callFetcher = file: args: callPackageDream file args; -in - -rec { - - fetchers = lib.genAttrs (utils.dirNames ./.) (name: - callFetcher (./. + "/${name}") {} +in rec { + fetchers = lib.genAttrs (utils.dirNames ./.) ( + name: + callFetcher (./. 
+ "/${name}") {} ); - defaultFetcher = callPackageDream ./default-fetcher.nix { inherit fetchers fetchSource; }; + defaultFetcher = callPackageDream ./default-fetcher.nix {inherit fetchers fetchSource;}; - combinedFetcher = callPackageDream ./combined-fetcher.nix { inherit defaultFetcher; }; + combinedFetcher = callPackageDream ./combined-fetcher.nix {inherit defaultFetcher;}; - constructSource = - { - type, - reComputeHash ? false, - ... - }@args: - let - fetcher = fetchers."${type}"; - argsKeep = b.removeAttrs args [ "reComputeHash" ]; - fetcherOutputs = - fetcher.outputs - (b.removeAttrs argsKeep [ "dir" "hash" "type" ]); - in - argsKeep - # if the hash was not provided, calculate hash on the fly (impure) - // (lib.optionalAttrs reComputeHash { - hash = fetcherOutputs.calcHash "sha256"; - }); - - # update source spec to different version - updateSource = - { - source, - newVersion, - ... - }: - constructSource (source // { - reComputeHash = true; - } // { - "${fetchers."${source.type}".versionField}" = newVersion; + constructSource = { + type, + reComputeHash ? false, + ... + } @ args: let + fetcher = fetchers."${type}"; + argsKeep = b.removeAttrs args ["reComputeHash"]; + fetcherOutputs = + fetcher.outputs + (b.removeAttrs argsKeep ["dir" "hash" "type"]); + in + argsKeep + # if the hash was not provided, calculate hash on the fly (impure) + // (lib.optionalAttrs reComputeHash { + hash = fetcherOutputs.calcHash "sha256"; }); + # update source spec to different version + updateSource = { + source, + newVersion, + ... + }: + constructSource (source + // { + reComputeHash = true; + } + // { + "${fetchers."${source.type}".versionField}" = newVersion; + }); + # fetch a source defined via a dream lock source spec - fetchSource = { source, extract ? false }: - let - fetcher = fetchers."${source.type}"; - fetcherArgs = b.removeAttrs source [ "dir" "hash" "type" ]; - fetcherOutputs = fetcher.outputs fetcherArgs; - maybeArchive = fetcherOutputs.fetched (source.hash or null); - in - if source ? dir then - "${maybeArchive}/${source.dir}" - else - maybeArchive; + fetchSource = { + source, + extract ? false, + }: let + fetcher = fetchers."${source.type}"; + fetcherArgs = b.removeAttrs source ["dir" "hash" "type"]; + fetcherOutputs = fetcher.outputs fetcherArgs; + maybeArchive = fetcherOutputs.fetched (source.hash or null); + in + if source ? dir + then "${maybeArchive}/${source.dir}" + else maybeArchive; # fetch a source defined by a shortcut - fetchShortcut = { shortcut, extract ? false, }: + fetchShortcut = { + shortcut, + extract ? 
false, + }: fetchSource { - source = translateShortcut { inherit shortcut; }; + source = translateShortcut {inherit shortcut;}; inherit extract; }; - parseShortcut = shortcut: - let - # in: "git+https://foo.com/bar?kwarg1=lol&kwarg2=hello" - # out: [ "git+" "git" "https" "//" "foo.com/bar" "?kwarg1=lol&kwarg2=hello" "kwarg1=lol&kwarg2=hello" ] - split = - b.match - ''(([[:alnum:]]+)\+)?([[:alnum:]-]+):(//)?([^\?]*)(\?(.*))?'' - shortcut; + parseShortcut = shortcut: let + # in: "git+https://foo.com/bar?kwarg1=lol&kwarg2=hello" + # out: [ "git+" "git" "https" "//" "foo.com/bar" "?kwarg1=lol&kwarg2=hello" "kwarg1=lol&kwarg2=hello" ] + split = + b.match + ''(([[:alnum:]]+)\+)?([[:alnum:]-]+):(//)?([^\?]*)(\?(.*))?'' + shortcut; - parsed = { - proto1 = b.elemAt split 1; - proto2 = b.elemAt split 2; - path = b.elemAt split 4; - allArgs = b.elemAt split 6; - kwargs = b.removeAttrs kwargs_ [ "dir" ]; - dir = kwargs_.dir or null; - }; + parsed = { + proto1 = b.elemAt split 1; + proto2 = b.elemAt split 2; + path = b.elemAt split 4; + allArgs = b.elemAt split 6; + kwargs = b.removeAttrs kwargs_ ["dir"]; + dir = kwargs_.dir or null; + }; - kwargs_ = - if parsed.allArgs == null then - {} - else - lib.listToAttrs - (map - (kwarg: - let - split = lib.splitString "=" kwarg; - in - lib.nameValuePair - (b.elemAt split 0) - (b.elemAt split 1)) - (lib.splitString "&" parsed.allArgs)); - - in - if split == null then - throw "Unable to parse shortcut: ${shortcut}" + kwargs_ = + if parsed.allArgs == null + then {} else - parsed; - - renderUrlArgs = kwargs: - let - asStr = - (lib.concatStringsSep - "&" - (lib.mapAttrsToList - (name: val: "${name}=${val}") - kwargs)); - - in - if asStr == "" then - "" - else - "?" + asStr; + lib.listToAttrs + (map + (kwarg: let + split = lib.splitString "=" kwarg; + in + lib.nameValuePair + (b.elemAt split 0) + (b.elemAt split 1)) + (lib.splitString "&" parsed.allArgs)); + in + if split == null + then throw "Unable to parse shortcut: ${shortcut}" + else parsed; + renderUrlArgs = kwargs: let + asStr = + lib.concatStringsSep + "&" + (lib.mapAttrsToList + (name: val: "${name}=${val}") + kwargs); + in + if asStr == "" + then "" + else "?" + asStr; # translate shortcut to dream lock source spec - translateShortcut = { shortcut, computeHash ? true, }: - let - - parsed = parseShortcut shortcut; - - checkArgs = fetcherName: args: - let - fetcher = fetchers."${fetcherName}"; - unknownArgNames = lib.filter (argName: ! lib.elem argName fetcher.inputs) (lib.attrNames args); - missingArgNames = lib.filter (inputName: ! args ? 
"${inputName}") fetcher.inputs; - in - if lib.length unknownArgNames > 0 then - throw "Received unknown arguments for fetcher '${fetcherName}': ${b.toString unknownArgNames}" - else if lib.length missingArgNames > 0 then - throw "Missing arguments for fetcher '${fetcherName}': ${b.toString missingArgNames}" - else - args; - - translateHttpUrl = - let - fetcher = fetchers.http; - - urlArgsFinal = renderUrlArgs parsed.kwargs; - - url = with parsed; "${proto2}://${path}${urlArgsFinal}"; - - fetcherOutputs = fetchers.http.outputs { - inherit url; - }; - - in - constructSource - { - inherit url; - type = "http"; - } - // (lib.optionalAttrs (parsed.dir != null) { - dir = parsed.dir; - }) - // (lib.optionalAttrs computeHash { - hash = fetcherOutputs.calcHash "sha256"; - }); - - translateProtoShortcut = - let - - kwargsUrl = b.removeAttrs parsed.kwargs fetcher.inputs; - - urlArgs = renderUrlArgs kwargsUrl; - - url = with parsed; "${proto2}://${path}${urlArgs}"; - - fetcherName = parsed.proto1; - - fetcher = fetchers."${fetcherName}"; - - args = parsed.kwargs // { inherit url; }; - - fetcherOutputs = fetcher.outputs (checkArgs fetcherName args); - - in - constructSource - (parsed.kwargs // { - type = fetcherName; - inherit url; - } - // (lib.optionalAttrs (parsed.dir != null) { - dir = parsed.dir; - }) - // (lib.optionalAttrs computeHash { - hash = fetcherOutputs.calcHash "sha256"; - })); - - translateRegularShortcut = - let - fetcherName = parsed.proto2; - - path = lib.removeSuffix "/" parsed.path; - - params = lib.splitString "/" path; - - fetcher = fetchers."${fetcherName}"; - - args = - if fetcher ? parseParams then - fetcher.parseParams params - - else if b.length params != b.length fetcher.inputs then - throw '' - Wrong number of arguments provided in shortcut for fetcher '${fetcherName}' - Should be ${fetcherName}:${lib.concatStringsSep "/" fetcher.inputs} - '' - - else - lib.listToAttrs - (lib.forEach - (lib.range 0 ((lib.length fetcher.inputs) - 1)) - (idx: - lib.nameValuePair - (lib.elemAt fetcher.inputs idx) - (lib.elemAt params idx) - )); - - fetcherOutputs = fetcher.outputs (args // parsed.kwargs); - - in - constructSource (args // parsed.kwargs // { - type = fetcherName; - } - // (lib.optionalAttrs (parsed.dir != null) { - dir = parsed.dir; - }) - // (lib.optionalAttrs computeHash { - hash = fetcherOutputs.calcHash "sha256"; - })); + translateShortcut = { + shortcut, + computeHash ? true, + }: let + parsed = parseShortcut shortcut; + checkArgs = fetcherName: args: let + fetcher = fetchers."${fetcherName}"; + unknownArgNames = lib.filter (argName: ! lib.elem argName fetcher.inputs) (lib.attrNames args); + missingArgNames = lib.filter (inputName: ! args ? 
"${inputName}") fetcher.inputs; in - if parsed.proto1 != null then - translateProtoShortcut - else if lib.hasPrefix "http://" shortcut - || lib.hasPrefix "https://" shortcut then - translateHttpUrl - else - translateRegularShortcut; + if lib.length unknownArgNames > 0 + then throw "Received unknown arguments for fetcher '${fetcherName}': ${b.toString unknownArgNames}" + else if lib.length missingArgNames > 0 + then throw "Missing arguments for fetcher '${fetcherName}': ${b.toString missingArgNames}" + else args; + translateHttpUrl = let + fetcher = fetchers.http; + + urlArgsFinal = renderUrlArgs parsed.kwargs; + + url = with parsed; "${proto2}://${path}${urlArgsFinal}"; + + fetcherOutputs = fetchers.http.outputs { + inherit url; + }; + in + constructSource + { + inherit url; + type = "http"; + } + // (lib.optionalAttrs (parsed.dir != null) { + dir = parsed.dir; + }) + // (lib.optionalAttrs computeHash { + hash = fetcherOutputs.calcHash "sha256"; + }); + + translateProtoShortcut = let + kwargsUrl = b.removeAttrs parsed.kwargs fetcher.inputs; + + urlArgs = renderUrlArgs kwargsUrl; + + url = with parsed; "${proto2}://${path}${urlArgs}"; + + fetcherName = parsed.proto1; + + fetcher = fetchers."${fetcherName}"; + + args = parsed.kwargs // {inherit url;}; + + fetcherOutputs = fetcher.outputs (checkArgs fetcherName args); + in + constructSource + (parsed.kwargs + // { + type = fetcherName; + inherit url; + } + // (lib.optionalAttrs (parsed.dir != null) { + dir = parsed.dir; + }) + // (lib.optionalAttrs computeHash { + hash = fetcherOutputs.calcHash "sha256"; + })); + + translateRegularShortcut = let + fetcherName = parsed.proto2; + + path = lib.removeSuffix "/" parsed.path; + + params = lib.splitString "/" path; + + fetcher = fetchers."${fetcherName}"; + + args = + if fetcher ? parseParams + then fetcher.parseParams params + else if b.length params != b.length fetcher.inputs + then + throw '' + Wrong number of arguments provided in shortcut for fetcher '${fetcherName}' + Should be ${fetcherName}:${lib.concatStringsSep "/" fetcher.inputs} + '' + else + lib.listToAttrs + (lib.forEach + (lib.range 0 ((lib.length fetcher.inputs) - 1)) + ( + idx: + lib.nameValuePair + (lib.elemAt fetcher.inputs idx) + (lib.elemAt params idx) + )); + + fetcherOutputs = fetcher.outputs (args // parsed.kwargs); + in + constructSource (args + // parsed.kwargs + // { + type = fetcherName; + } + // (lib.optionalAttrs (parsed.dir != null) { + dir = parsed.dir; + }) + // (lib.optionalAttrs computeHash { + hash = fetcherOutputs.calcHash "sha256"; + })); + in + if parsed.proto1 != null + then translateProtoShortcut + else if + lib.hasPrefix "http://" shortcut + || lib.hasPrefix "https://" shortcut + then translateHttpUrl + else translateRegularShortcut; } diff --git a/src/fetchers/git/default.nix b/src/fetchers/git/default.nix index 256cfce4..a6c42d70 100644 --- a/src/fetchers/git/default.nix +++ b/src/fetchers/git/default.nix @@ -1,15 +1,11 @@ { fetchgit, lib, - utils, ... -}: -let +}: let b = builtins; -in -{ - +in { inputs = [ "url" "rev" @@ -17,47 +13,50 @@ in versionField = "rev"; - outputs = { url, rev, ... }@inp: - if b.match "refs/(heads|tags)/.*" rev == null && builtins.match "[a-f0-9]*" rev == null then - throw ''rev must either be a sha1 revision or "refs/heads/branch-name" or "refs/tags/tag-name"'' - else - let - + outputs = { + url, + rev, + ... 
+ } @ inp: + if b.match "refs/(heads|tags)/.*" rev == null && builtins.match "[a-f0-9]*" rev == null + then throw ''rev must either be a sha1 revision or "refs/heads/branch-name" or "refs/tags/tag-name"'' + else let b = builtins; refAndRev = - if b.match "refs/(heads|tags)/.*" inp.rev != null then - { ref = inp.rev; } - else - { rev = inp.rev; }; - - in - { - - calcHash = algo: utils.hashPath algo + if b.match "refs/(heads|tags)/.*" inp.rev != null + then {ref = inp.rev;} + else {rev = inp.rev;}; + in { + calcHash = algo: + utils.hashPath algo (b.fetchGit - (refAndRev // { - inherit url; - allRefs = true; - submodules = true; - })); + (refAndRev + // { + inherit url; + allRefs = true; + submodules = true; + })); # git can either be verified via revision or hash. # In case revision is used for verification, `hash` will be null. fetched = hash: - if hash == null then - if ! refAndRev ? rev then - throw "Cannot fetch git repo without integrity. Specify at least 'rev' or 'sha256'" + if hash == null + then + if ! refAndRev ? rev + then throw "Cannot fetch git repo without integrity. Specify at least 'rev' or 'sha256'" else b.fetchGit - (refAndRev // { + (refAndRev + // { inherit url; allRefs = true; submodules = true; }) else fetchgit - (refAndRev // { + (refAndRev + // { inherit url; fetchSubmodules = true; sha256 = hash; diff --git a/src/fetchers/github/default.nix b/src/fetchers/github/default.nix index 10686639..4b4ddc09 100644 --- a/src/fetchers/github/default.nix +++ b/src/fetchers/github/default.nix @@ -3,12 +3,9 @@ lib, nix, runCommand, - utils, ... -}: -{ - +}: { inputs = [ "owner" "repo" @@ -19,20 +16,22 @@ defaultUpdater = "githubNewestReleaseTag"; - outputs = { owner, repo, rev, ... }@inp: - let - b = builtins; - in - { - - calcHash = algo: utils.hashPath algo (b.fetchTarball { + outputs = { + owner, + repo, + rev, + ... + } @ inp: let + b = builtins; + in { + calcHash = algo: + utils.hashPath algo (b.fetchTarball { url = "https://github.com/${owner}/${repo}/tarball/${rev}"; }); - fetched = hash: - fetchFromGitHub { - inherit owner repo rev hash; - }; - - }; + fetched = hash: + fetchFromGitHub { + inherit owner repo rev hash; + }; + }; } diff --git a/src/fetchers/gitlab/default.nix b/src/fetchers/gitlab/default.nix index 5cacc334..c05b2919 100644 --- a/src/fetchers/gitlab/default.nix +++ b/src/fetchers/gitlab/default.nix @@ -1,11 +1,8 @@ { fetchFromGitLab, - utils, ... -}: -{ - +}: { inputs = [ "owner" "repo" @@ -14,20 +11,22 @@ versionField = "rev"; - outputs = { owner, repo, rev, ... }@inp: - let - b = builtins; - in - { - - calcHash = algo: utils.hashPath algo (b.fetchTarball { + outputs = { + owner, + repo, + rev, + ... + } @ inp: let + b = builtins; + in { + calcHash = algo: + utils.hashPath algo (b.fetchTarball { url = "https://gitlab.com/${owner}/${repo}/-/archive/${rev}/${repo}-${rev}.tar.gz"; }); - fetched = hash: - fetchFromGitLab { - inherit owner repo rev hash; - }; - - }; + fetched = hash: + fetchFromGitLab { + inherit owner repo rev hash; + }; + }; } diff --git a/src/fetchers/http/default.nix b/src/fetchers/http/default.nix index b82975ee..75dfa700 100644 --- a/src/fetchers/http/default.nix +++ b/src/fetchers/http/default.nix @@ -1,51 +1,42 @@ { lib, fetchurl, - utils, ... -}: -{ - +}: { inputs = [ "url" ]; - outputs = { url, ... 
}@inp: - let - b = builtins; - in - { - - calcHash = algo: utils.hashFile algo (b.fetchurl { + outputs = {url, ...} @ inp: let + b = builtins; + in { + calcHash = algo: + utils.hashFile algo (b.fetchurl { inherit url; }); - fetched = hash: - let - drv = - if hash != null && lib.stringLength hash == 40 then - fetchurl { - inherit url; - sha1 = hash; - } - else - fetchurl { - inherit url hash; - }; + fetched = hash: let + drv = + if hash != null && lib.stringLength hash == 40 + then + fetchurl { + inherit url; + sha1 = hash; + } + else + fetchurl { + inherit url hash; + }; - drvSanitized = - drv.overrideAttrs (old: { - name = lib.strings.sanitizeDerivationName old.name; - }); + drvSanitized = drv.overrideAttrs (old: { + name = lib.strings.sanitizeDerivationName old.name; + }); - extracted = - utils.extractSource { - source = drvSanitized; - }; - - in - extracted; - - }; + extracted = utils.extractSource { + source = drvSanitized; + }; + in + extracted; + }; } diff --git a/src/fetchers/npm/default.nix b/src/fetchers/npm/default.nix index c2a37206..d8a31f33 100644 --- a/src/fetchers/npm/default.nix +++ b/src/fetchers/npm/default.nix @@ -2,17 +2,12 @@ fetchurl, lib, python3, - utils, ... -}: - -let +}: let b = builtins; -in - -rec { - inputs = [ "pname" "version" ]; +in rec { + inputs = ["pname" "version"]; versionField = "version"; @@ -21,55 +16,57 @@ rec { # becuase some node packages contain submodules like `@hhhtj/draw.io` # the amount of arguments can vary and a custom parser is needed parseParams = params: - if b.length params == b.length inputs then + if b.length params == b.length inputs + then lib.listToAttrs - (lib.forEach - (lib.range 0 ((lib.length inputs) - 1)) - (idx: + (lib.forEach + (lib.range 0 ((lib.length inputs) - 1)) + ( + idx: lib.nameValuePair - (lib.elemAt inputs idx) - (lib.elemAt params idx) - )) - else if b.length params == (b.length inputs) + 1 then - parseParams [ - "${b.elemAt params 0}/${b.elemAt params 1}" - (b.elemAt params 2) - ] - else - throw '' - Wrong number of arguments provided in shortcut for fetcher 'npm' - Should be npm:${lib.concatStringsSep "/" inputs} - ''; - + (lib.elemAt inputs idx) + (lib.elemAt params idx) + )) + else if b.length params == (b.length inputs) + 1 + then + parseParams [ + "${b.elemAt params 0}/${b.elemAt params 1}" + (b.elemAt params 2) + ] + else + throw '' + Wrong number of arguments provided in shortcut for fetcher 'npm' + Should be npm:${lib.concatStringsSep "/" inputs} + ''; # defaultUpdater = ""; - outputs = { pname, version, }@inp: - let - b = builtins; + outputs = { + pname, + version, + } @ inp: let + b = builtins; - submodule = lib.last (lib.splitString "/" pname); - url = "https://registry.npmjs.org/${pname}/-/${submodule}-${version}.tgz"; - in - { - - calcHash = algo: utils.hashPath algo ( - b.fetchurl { inherit url; } + submodule = lib.last (lib.splitString "/" pname); + url = "https://registry.npmjs.org/${pname}/-/${submodule}-${version}.tgz"; + in { + calcHash = algo: + utils.hashPath algo ( + b.fetchurl {inherit url;} ); - fetched = hash: - let - source = - (fetchurl { - inherit url; - sha256 = hash; - }).overrideAttrs (old: { - outputHashMode = "recursive"; - }); - in - utils.extractSource { - inherit source; - }; - - }; + fetched = hash: let + source = + (fetchurl { + inherit url; + sha256 = hash; + }) + .overrideAttrs (old: { + outputHashMode = "recursive"; + }); + in + utils.extractSource { + inherit source; + }; + }; } diff --git a/src/fetchers/path/default.nix b/src/fetchers/path/default.nix index 
b879f8cf..372150de 100644 --- a/src/fetchers/path/default.nix +++ b/src/fetchers/path/default.nix @@ -1,22 +1,13 @@ -{ - utils, - ... -}: -{ - +{utils, ...}: { inputs = [ "path" ]; - outputs = { path, ... }@inp: - let - b = builtins; - in - { + outputs = {path, ...} @ inp: let + b = builtins; + in { + calcHash = algo: utils.hashPath "${path}"; - calcHash = algo: utils.hashPath "${path}"; - - fetched = hash: "${path}"; - - }; + fetched = hash: "${path}"; + }; } diff --git a/src/fetchers/pypi-sdist/default.nix b/src/fetchers/pypi-sdist/default.nix index 5c14b109..2e95c224 100644 --- a/src/fetchers/pypi-sdist/default.nix +++ b/src/fetchers/pypi-sdist/default.nix @@ -1,45 +1,44 @@ { fetchurl, python3, - utils, ... -}: -{ - - inputs = [ "pname" "version" ]; +}: { + inputs = ["pname" "version"]; versionField = "version"; defaultUpdater = "pypiNewestReleaseVersion"; - outputs = { pname, version, extension ? "tar.gz", }@inp: - let - b = builtins; + outputs = { + pname, + version, + extension ? "tar.gz", + } @ inp: let + b = builtins; - firstChar = builtins.substring 0 1 pname; - url = - "https://files.pythonhosted.org/packages/source/" - + "${firstChar}/${pname}/${pname}-${version}.${extension}"; - in - { - - calcHash = algo: utils.hashPath algo ( - b.fetchurl { inherit url; } + firstChar = builtins.substring 0 1 pname; + url = + "https://files.pythonhosted.org/packages/source/" + + "${firstChar}/${pname}/${pname}-${version}.${extension}"; + in { + calcHash = algo: + utils.hashPath algo ( + b.fetchurl {inherit url;} ); - fetched = hash: - let - source = - (fetchurl { - inherit url; - sha256 = hash; - }).overrideAttrs (old: { - outputHashMode = "recursive"; - }); - in - utils.extractSource { - inherit source; - }; - }; + fetched = hash: let + source = + (fetchurl { + inherit url; + sha256 = hash; + }) + .overrideAttrs (old: { + outputHashMode = "recursive"; + }); + in + utils.extractSource { + inherit source; + }; + }; } diff --git a/src/lib.nix b/src/lib.nix index 7ba6a2f3..d6fa0e29 100644 --- a/src/lib.nix +++ b/src/lib.nix @@ -1,7 +1,6 @@ # like ./default.nix but system intependent # (allows to generate outputs for several systems) # follows flake output schema - { dlib, nixpkgsSrc, @@ -9,272 +8,254 @@ overridesDirs, externalSources, externalPaths, - -}@args: - -let - +} @ args: let b = builtins; l = lib // builtins; dream2nixForSystem = config: system: pkgs: import ./default.nix - { inherit config externalPaths externalSources pkgs; }; - + {inherit config externalPaths externalSources pkgs;}; # TODO: design output schema for cross compiled packages - makePkgsKey = pkgs: - let - build = pkgs.buildPlatform.system; - host = pkgs.hostPlatform.system; - in - if build == host then build - else throw "cross compiling currently not supported"; + makePkgsKey = pkgs: let + build = pkgs.buildPlatform.system; + host = pkgs.hostPlatform.system; + in + if build == host + then build + else throw "cross compiling currently not supported"; makeNixpkgs = pkgsList: systems: - - # fail if neither pkgs nor systems are defined - if pkgsList == null && systems == [] then - throw "Either `systems` or `pkgs` must be defined" - + # fail if neither pkgs nor systems are defined + if pkgsList == null && systems == [] + then throw "Either `systems` or `pkgs` must be defined" # fail if pkgs and systems are both defined - else if pkgsList != null && systems != [] then - throw "Define either `systems` or `pkgs`, not both" - + else if pkgsList != null && systems != [] + then throw "Define either `systems` or `pkgs`, not both" # 
only pkgs is specified - else if pkgsList != null then - if b.isList pkgsList then + else if pkgsList != null + then + if b.isList pkgsList + then lib.listToAttrs - (pkgs: lib.nameValuePair (makePkgsKey pkgs) pkgs) - pkgsList - else - { "${makePkgsKey pkgsList}" = pkgsList; } - + (pkgs: lib.nameValuePair (makePkgsKey pkgs) pkgs) + pkgsList + else {"${makePkgsKey pkgsList}" = pkgsList;} # only systems is specified else lib.genAttrs systems - (system: import nixpkgsSrc { inherit system; }); + (system: import nixpkgsSrc {inherit system;}); + flakifyBuilderOutputs = system: outputs: + (lib.optionalAttrs (outputs ? "defaultPackage") { + defaultPackage."${system}" = outputs.defaultPackage; + }) + // (lib.optionalAttrs (outputs ? "packages") { + packages."${system}" = outputs.packages; + }) + // (lib.optionalAttrs (outputs ? "devShell") { + devShell."${system}" = outputs.devShell; + }); - flakifyBuilderOutputs = system: outputs: - (lib.optionalAttrs (outputs ? "defaultPackage") { - defaultPackage."${system}" = outputs.defaultPackage; - }) - // - (lib.optionalAttrs (outputs ? "packages") { - packages."${system}" = outputs.packages; - }) - // - (lib.optionalAttrs (outputs ? "devShell") { - devShell."${system}" = outputs.devShell; - }); + init = { + pkgs ? null, + systems ? [], + config ? {}, + } @ argsInit: let + config' = (import ./utils/config.nix).loadConfig argsInit.config or {}; - init = - { - pkgs ? null, - systems ? [], - config ? {}, - }@argsInit: - let - - config' = (import ./utils/config.nix).loadConfig argsInit.config or {}; - - config = config' // { + config = + config' + // { overridesDirs = args.overridesDirs ++ config'.overridesDirs; }; - allPkgs = makeNixpkgs pkgs systems; + allPkgs = makeNixpkgs pkgs systems; - forAllSystems = f: lib.mapAttrs f allPkgs; + forAllSystems = f: lib.mapAttrs f allPkgs; - dream2nixFor = forAllSystems (dream2nixForSystem config); - in - { + dream2nixFor = forAllSystems (dream2nixForSystem config); + in { + riseAndShine = throw "Use makeFlakeOutputs instead of riseAndShine."; - riseAndShine = throw "Use makeFlakeOutputs instead of riseAndShine."; - - makeFlakeOutputs = mArgs: makeFlakeOutputsFunc - ( - { inherit config pkgs systems; } - // mArgs - ); - - apps = - forAllSystems - (system: pkgs: - dream2nixFor."${system}".apps.flakeApps); - - defaultApp = - forAllSystems - (system: pkgs: - dream2nixFor."${system}".apps.flakeApps.dream2nix); - - builders = - forAllSystems - (system: pkgs: - dream2nixFor."${system}".builders); - }; - - makeFlakeOutputsFunc = - { - builder ? null, - pname ? null, - pkgs ? null, - source, - systems ? [], - translator ? null, - translatorArgs ? {}, - ... 
- }@args: - let - - config = args.config or ((import ./utils/config.nix).loadConfig {}); - - argsForward = b.removeAttrs args [ "config" "pname" "pkgs" "systems" ]; - - allPkgs = makeNixpkgs pkgs systems; - - forAllSystems = f: b.mapAttrs f allPkgs; - - dream2nixFor = forAllSystems (dream2nixForSystem config); - - translatorFound = dlib.translators.findOneTranslator { - inherit source; - translatorName = args.translator or null; - }; - - translatorFoundFor = forAllSystems (system: pkgs: - with translatorFound; - dream2nixFor."${system}".translators.translators - ."${subsystem}"."${type}"."${name}" + makeFlakeOutputs = mArgs: + makeFlakeOutputsFunc + ( + {inherit config pkgs systems;} + // mArgs ); - invalidationHash = dlib.calcInvalidationHash { - inherit source translatorArgs; - translator = translatorFound.name; - }; + apps = + forAllSystems + (system: pkgs: + dream2nixFor."${system}".apps.flakeApps); - specifyPnameError = throw '' - Translator `${translatorFound.name}` could not automatically determine `pname`. - Please specify `pname` when calling `makeFlakeOutputs` - ''; + defaultApp = + forAllSystems + (system: pkgs: + dream2nixFor."${system}".apps.flakeApps.dream2nix); - detectedName = translatorFound.projectName; + builders = + forAllSystems + (system: pkgs: + dream2nixFor."${system}".builders); + }; - pname = - if args.pname or null != null then - args.pname - else if detectedName != null then - detectedName - else - specifyPnameError; + makeFlakeOutputsFunc = { + builder ? null, + pname ? null, + pkgs ? null, + source, + systems ? [], + translator ? null, + translatorArgs ? {}, + ... + } @ args: let + config = args.config or ((import ./utils/config.nix).loadConfig {}); - allBuilderOutputs = - lib.mapAttrs - (system: pkgs: - let - dream2nix = dream2nixFor."${system}"; + argsForward = b.removeAttrs args ["config" "pname" "pkgs" "systems"]; - dreamLockJsonPath = with config; - "${projectRoot}/${packagesDir}/${pname}/dream-lock.json"; + allPkgs = makeNixpkgs pkgs systems; - dreamLock = dream2nix.utils.readDreamLock { - dreamLock = dreamLockJsonPath; - }; + forAllSystems = f: b.mapAttrs f allPkgs; - dreamLockExistsAndValid = - b.pathExists dreamLockJsonPath - && dreamLock.lock._generic.invalidationHash or "" == invalidationHash; + dream2nixFor = forAllSystems (dream2nixForSystem config); - result = translator: args: - dream2nix.makeOutputs (argsForward // { - # TODO: this triggers the translator finding routine a second time - translator = translatorFound.name; - }); - in + translatorFound = dlib.translators.findOneTranslator { + inherit source; + translatorName = args.translator or null; + }; - if dreamLockExistsAndValid then - # we need to override the source here as it is coming from - # a flake input - let - defaultPackage = dreamLock.lock._generic.defaultPackage; - defaultPackageVersion = - dreamLock.lock._generic.packages."${defaultPackage}"; - in - result translatorFound { - source = dreamLockJsonPath; - sourceOverrides = oldSources: { - "${defaultPackage}"."${defaultPackageVersion}" = - args.source; - }; - } + translatorFoundFor = forAllSystems ( + system: pkgs: + with translatorFound; + dream2nixFor + ."${system}" + .translators + .translators + ."${subsystem}" + ."${type}" + ."${name}" + ); - else if b.elem translatorFound.type [ "pure" "ifd" ] then - # warn the user about potentially slow on-the-fly evaluation - b.trace '' - ${"\n"} - The dream-lock.json for input '${pname}' doesn't exist or is outdated. - ...Falling back to on-the-fly evaluation (possibly slow). 
- To speed up future evalutations run once: - nix run .#resolve - '' - result translatorFound {} + invalidationHash = dlib.calcInvalidationHash { + inherit source translatorArgs; + translator = translatorFound.name; + }; - else - # print error because impure translation is required first. - # continue the evaluation anyways, as otherwise we won't have - # the `resolve` app - b.trace '' - ${"\n"} - ERROR: - Some information is missing to build this project reproducibly. - Please execute nix run .#resolve to resolve all impurities. - '' - {}) + specifyPnameError = throw '' + Translator `${translatorFound.name}` could not automatically determine `pname`. + Please specify `pname` when calling `makeFlakeOutputs` + ''; - allPkgs; + detectedName = translatorFound.projectName; - flakifiedOutputsList = - lib.mapAttrsToList - (system: outputs: flakifyBuilderOutputs system outputs) - allBuilderOutputs; + pname = + if args.pname or null != null + then args.pname + else if detectedName != null + then detectedName + else specifyPnameError; - flakeOutputs = - b.foldl' - (allOutputs: output: lib.recursiveUpdate allOutputs output) - {} - flakifiedOutputsList; + allBuilderOutputs = + lib.mapAttrs + (system: pkgs: let + dream2nix = dream2nixFor."${system}"; - in - lib.recursiveUpdate - flakeOutputs - { - apps = forAllSystems (system: pkgs: { - resolve.type = "app"; - resolve.program = - let - utils = (dream2nixFor."${system}".utils); + dreamLockJsonPath = with config; "${projectRoot}/${packagesDir}/${pname}/dream-lock.json"; - # TODO: Too many calls to findOneTranslator. - # -> make findOneTranslator system independent - translatorFound = - dream2nixFor."${system}".translators.findOneTranslator { - inherit source; - translatorName = args.translator or null; - }; - in - b.toString - (utils.makePackageLockScript { - inherit source translatorArgs; - packagesDir = config.packagesDir; - translator = translatorFound.name; - }); - }); + dreamLock = dream2nix.utils.readDreamLock { + dreamLock = dreamLockJsonPath; }; -in -{ + dreamLockExistsAndValid = + b.pathExists dreamLockJsonPath + && dreamLock.lock._generic.invalidationHash or "" == invalidationHash; + + result = translator: args: + dream2nix.makeOutputs (argsForward + // { + # TODO: this triggers the translator finding routine a second time + translator = translatorFound.name; + }); + in + if dreamLockExistsAndValid + then + # we need to override the source here as it is coming from + # a flake input + let + defaultPackage = dreamLock.lock._generic.defaultPackage; + defaultPackageVersion = + dreamLock.lock._generic.packages."${defaultPackage}"; + in + result translatorFound { + source = dreamLockJsonPath; + sourceOverrides = oldSources: { + "${defaultPackage}"."${defaultPackageVersion}" = + args.source; + }; + } + else if b.elem translatorFound.type ["pure" "ifd"] + then + # warn the user about potentially slow on-the-fly evaluation + b.trace '' + ${"\n"} + The dream-lock.json for input '${pname}' doesn't exist or is outdated. + ...Falling back to on-the-fly evaluation (possibly slow). + To speed up future evalutations run once: + nix run .#resolve + '' + result + translatorFound {} + else + # print error because impure translation is required first. + # continue the evaluation anyways, as otherwise we won't have + # the `resolve` app + b.trace '' + ${"\n"} + ERROR: + Some information is missing to build this project reproducibly. + Please execute nix run .#resolve to resolve all impurities. 
+ '' + {}) + allPkgs; + + flakifiedOutputsList = + lib.mapAttrsToList + (system: outputs: flakifyBuilderOutputs system outputs) + allBuilderOutputs; + + flakeOutputs = + b.foldl' + (allOutputs: output: lib.recursiveUpdate allOutputs output) + {} + flakifiedOutputsList; + in + lib.recursiveUpdate + flakeOutputs + { + apps = forAllSystems (system: pkgs: { + resolve.type = "app"; + resolve.program = let + utils = dream2nixFor."${system}".utils; + + # TODO: Too many calls to findOneTranslator. + # -> make findOneTranslator system independent + translatorFound = dream2nixFor."${system}".translators.findOneTranslator { + inherit source; + translatorName = args.translator or null; + }; + in + b.toString + (utils.makePackageLockScript { + inherit source translatorArgs; + packagesDir = config.packagesDir; + translator = translatorFound.name; + }); + }); + }; +in { inherit dlib init; riseAndShine = throw "Use makeFlakeOutputs instead of riseAndShine."; makeFlakeOutpus = makeFlakeOutputsFunc; diff --git a/src/lib/default.nix b/src/lib/default.nix index a6503157..efaeabc7 100644 --- a/src/lib/default.nix +++ b/src/lib/default.nix @@ -2,12 +2,9 @@ lib, config ? (import ../utils/config.nix).loadConfig {}, ... -}: -let - +}: let l = lib // builtins; - # exported attributes dlib = { inherit @@ -27,20 +24,20 @@ let sanitizeDerivationName sanitizeRelativePath traceJ - ; + ; - inherit (parseUtils) + inherit + (parseUtils) identifyGitUrl parseGitUrl - ; + ; }; # other libs - translators = import ./translators.nix { inherit dlib lib; }; - discoverers = import ../discoverers { inherit config dlib lib; }; - - parseUtils = import ./parsing.nix { inherit lib; }; + translators = import ./translators.nix {inherit dlib lib;}; + discoverers = import ../discoverers {inherit config dlib lib;}; + parseUtils = import ./parsing.nix {inherit lib;}; # INTERNAL @@ -49,21 +46,18 @@ let # recursively applied as parameters. # For this to work, the function parameters defined by the called function # must always be ordered alphabetically. 
- callWithAttrArgs = func: args: - let - applyParamsRec = func: params: - if l.length params == 1 then - func (l.head params) - else - applyParamsRec - (func (l.head params)) - (l.tail params); - - in - if lib.functionArgs func == {} then - applyParamsRec func (l.attrValues args) + callWithAttrArgs = func: args: let + applyParamsRec = func: params: + if l.length params == 1 + then func (l.head params) else - func args; + applyParamsRec + (func (l.head params)) + (l.tail params); + in + if lib.functionArgs func == {} + then applyParamsRec func (l.attrValues args) + else func args; # prepare source tree for executing discovery phase # produces this structure: @@ -90,107 +84,98 @@ let # }; # }; # } - prepareSourceTreeInternal = sourceRoot: relPath: name: depth: - let - relPath' = relPath; - fullPath' = "${sourceRoot}/${relPath}"; - current = l.readDir fullPath'; + prepareSourceTreeInternal = sourceRoot: relPath: name: depth: let + relPath' = relPath; + fullPath' = "${sourceRoot}/${relPath}"; + current = l.readDir fullPath'; - fileNames = - l.filterAttrs (n: v: v == "regular") current; + fileNames = + l.filterAttrs (n: v: v == "regular") current; - directoryNames = - l.filterAttrs (n: v: v == "directory") current; + directoryNames = + l.filterAttrs (n: v: v == "directory") current; - makeNewPath = prefix: name: - if prefix == "" then - name - else - "${prefix}/${name}"; + makeNewPath = prefix: name: + if prefix == "" + then name + else "${prefix}/${name}"; - directories = - l.mapAttrs - (dname: _: - prepareSourceTreeInternal - sourceRoot - (makeNewPath relPath dname) - dname - (depth - 1)) - directoryNames; + directories = + l.mapAttrs + (dname: _: + prepareSourceTreeInternal + sourceRoot + (makeNewPath relPath dname) + dname + (depth - 1)) + directoryNames; - files = - l.mapAttrs - (fname: _: rec { - name = fname; - fullPath = "${fullPath'}/${fname}"; - relPath = makeNewPath relPath' fname; - content = readTextFile fullPath; - jsonContent = l.fromJSON content; - tomlContent = l.fromTOML content; - }) - fileNames; + files = + l.mapAttrs + (fname: _: rec { + name = fname; + fullPath = "${fullPath'}/${fname}"; + relPath = makeNewPath relPath' fname; + content = readTextFile fullPath; + jsonContent = l.fromJSON content; + tomlContent = l.fromTOML content; + }) + fileNames; - getNodeFromPath = path: - let - cleanPath = l.removePrefix "/" path; - pathSplit = l.splitString "/" cleanPath; - dirSplit = l.init pathSplit; - leaf = l.last pathSplit; - error = throw '' - Failed while trying to navigate to ${path} from ${fullPath'} - ''; + getNodeFromPath = path: let + cleanPath = l.removePrefix "/" path; + pathSplit = l.splitString "/" cleanPath; + dirSplit = l.init pathSplit; + leaf = l.last pathSplit; + error = throw '' + Failed while trying to navigate to ${path} from ${fullPath'} + ''; - dirAttrPath = - l.init - (l.concatMap - (x: [x] ++ ["directories"]) - dirSplit); - - dir = - if (l.length dirSplit == 0) || dirAttrPath == [ "" ] then - self - else if ! l.hasAttrByPath dirAttrPath directories then - error - else - l.getAttrFromPath dirAttrPath directories; - - in - if path == "" then - self - else if dir ? directories."${leaf}" then - dir.directories."${leaf}" - else if dir ? 
files."${leaf}" then - dir.files."${leaf}" - else - error; - - self = - { - inherit files getNodeFromPath name relPath; - - fullPath = fullPath'; - } - # stop recursion if depth is reached - // (l.optionalAttrs (depth > 0) { - inherit directories; - }); + dirAttrPath = + l.init + (l.concatMap + (x: [x] ++ ["directories"]) + dirSplit); + dir = + if (l.length dirSplit == 0) || dirAttrPath == [""] + then self + else if ! l.hasAttrByPath dirAttrPath directories + then error + else l.getAttrFromPath dirAttrPath directories; in - self; + if path == "" + then self + else if dir ? directories."${leaf}" + then dir.directories."${leaf}" + else if dir ? files."${leaf}" + then dir.files."${leaf}" + else error; + + self = + { + inherit files getNodeFromPath name relPath; + + fullPath = fullPath'; + } + # stop recursion if depth is reached + // (l.optionalAttrs (depth > 0) { + inherit directories; + }); + in + self; # determines if version v1 is greater than version v2 versionGreater = v1: v2: l.compareVersions v1 v2 == 1; - # EXPORTED # calculate an invalidation hash for given source translation inputs - calcInvalidationHash = - { - source, - translator, - translatorArgs, - }: + calcInvalidationHash = { + source, + translator, + translatorArgs, + }: l.hashString "sha256" '' ${source} ${translator} @@ -199,19 +184,18 @@ let ''; # call a function using arguments defined by the env var FUNC_ARGS - callViaEnv = func: - let - funcArgs = l.fromJSON (l.readFile (l.getEnv "FUNC_ARGS")); - in - callWithAttrArgs func funcArgs; + callViaEnv = func: let + funcArgs = l.fromJSON (l.readFile (l.getEnv "FUNC_ARGS")); + in + callWithAttrArgs func funcArgs; # Returns true if every given pattern is satisfied by at least one file name # inside the given directory. # Sub-directories are not recursed. containsMatchingFile = patterns: dir: l.all - (pattern: l.any (file: l.match pattern file != null) (listFiles dir)) - patterns; + (pattern: l.any (file: l.match pattern file != null) (listFiles dir)) + patterns; # directory names of a given directory dirNames = dir: l.attrNames (l.filterAttrs (name: type: type == "directory") (builtins.readDir dir)); @@ -219,36 +203,32 @@ let # picks the latest version from a list of version strings latestVersion = versions: l.head - (lib.sort versionGreater versions); + (lib.sort versionGreater versions); listDirs = path: l.attrNames (l.filterAttrs (n: v: v == "directory") (builtins.readDir path)); listFiles = path: l.attrNames (l.filterAttrs (n: v: v == "regular") (builtins.readDir path)); - nameVersionPair = name: version: - { inherit name version; }; + nameVersionPair = name: version: {inherit name version;}; - prepareSourceTree = - { - source, - depth ? 10, - }: + prepareSourceTree = { + source, + depth ? 
10, + }: prepareSourceTreeInternal source "" "" depth; - readTextFile = file: l.replaceStrings [ "\r\n" ] [ "\n" ] (l.readFile file); + readTextFile = file: l.replaceStrings ["\r\n"] ["\n"] (l.readFile file); # like nixpkgs recursiveUpdateUntil, but with the depth as a stop condition recursiveUpdateUntilDepth = depth: lhs: rhs: lib.recursiveUpdateUntil (path: _: _: (l.length path) > depth) lhs rhs; sanitizeDerivationName = name: - lib.replaceStrings [ "@" "/" ] [ "__at__" "__slash__" ] name; + lib.replaceStrings ["@" "/"] ["__at__" "__slash__"] name; sanitizeRelativePath = path: l.removePrefix "/" (l.toString (l.toPath "/${path}")); traceJ = toTrace: eval: l.trace (l.toJSON toTrace) eval; - in - -dlib + dlib diff --git a/src/lib/parsing.nix b/src/lib/parsing.nix index 05fb4bca..e7812b84 100644 --- a/src/lib/parsing.nix +++ b/src/lib/parsing.nix @@ -1,50 +1,38 @@ -{ - lib, - ... -}: -let - +{lib, ...}: let b = builtins; identifyGitUrl = url: lib.hasPrefix "git+" url || b.match ''^github:.*/.*#.*'' url != null; - parseGitUrl = url: - let - githubMatch = b.match ''^github:(.*)/(.*)#(.*)$'' url; + parseGitUrl = url: let + githubMatch = b.match ''^github:(.*)/(.*)#(.*)$'' url; + in + if githubMatch != null + then let + owner = b.elemAt githubMatch 0; + repo = b.elemAt githubMatch 1; + rev = b.elemAt githubMatch 2; + in { + url = "https://github.com/${owner}/${repo}"; + inherit rev; + } + else let + splitUrlRev = lib.splitString "#" url; + rev = lib.last splitUrlRev; + urlOnly = lib.head splitUrlRev; in - if githubMatch != null then - let - owner = b.elemAt githubMatch 0; - repo = b.elemAt githubMatch 1; - rev = b.elemAt githubMatch 2; - in - { - url = "https://github.com/${owner}/${repo}"; - inherit rev; - } - else - let - splitUrlRev = lib.splitString "#" url; - rev = lib.last splitUrlRev; - urlOnly = lib.head splitUrlRev; - in - if lib.hasPrefix "git+ssh://" urlOnly then - { - inherit rev; - url = "https://${(lib.last (lib.splitString "@" url))}"; - } - else if lib.hasPrefix "git+https://" urlOnly then - { - inherit rev; - url = lib.removePrefix "git+" urlOnly; - } - else - throw "Cannot parse git url: ${url}"; - - -in -{ + if lib.hasPrefix "git+ssh://" urlOnly + then { + inherit rev; + url = "https://${(lib.last (lib.splitString "@" url))}"; + } + else if lib.hasPrefix "git+https://" urlOnly + then { + inherit rev; + url = lib.removePrefix "git+" urlOnly; + } + else throw "Cannot parse git url: ${url}"; +in { inherit identifyGitUrl parseGitUrl; } diff --git a/src/lib/translators.nix b/src/lib/translators.nix index 19e0f3e4..98e9150a 100644 --- a/src/lib/translators.nix +++ b/src/lib/translators.nix @@ -1,223 +1,199 @@ { dlib, lib, -}: -let +}: let l = lib // builtins; # INTERNAL subsystems = dlib.dirNames ../translators; - translatorTypes = [ "impure" "ifd" "pure" ]; + translatorTypes = ["impure" "ifd" "pure"]; # attrset of: subsystem -> translator-type -> (function subsystem translator-type) mkTranslatorsSet = function: l.genAttrs - (dlib.dirNames ../translators) - (subsystem: - let - availableTypes = - l.filter - (type: l.pathExists (../translators + "/${subsystem}/${type}")) - translatorTypes; + (dlib.dirNames ../translators) + (subsystem: let + availableTypes = + l.filter + (type: l.pathExists (../translators + "/${subsystem}/${type}")) + translatorTypes; - translatorsForTypes = - l.genAttrs - availableTypes - (transType: function subsystem transType); - - in - translatorsForTypes // { - all = - l.foldl' - (a: b: a // b) - {} - (l.attrValues translatorsForTypes); - }); + 
translatorsForTypes = + l.genAttrs + availableTypes + (transType: function subsystem transType); + in + translatorsForTypes + // { + all = + l.foldl' + (a: b: a // b) + {} + (l.attrValues translatorsForTypes); + }); # flat list of all translators sorted by priority (pure translators first) - translatorsList = - let - list = l.collect (v: v ? subsystem) translators; - prio = translator: - if translator.type == "pure" then - 0 - else if translator.type == "ifd" then - 1 - else if translator.type == "impure" then - 2 - else - 3; - in - l.sort - (a: b: (prio a) < (prio b)) - list; - - callTranslator = subsystem: type: name: file: args: - let - translatorModule = import file { - inherit dlib lib; - }; - - in - translatorModule // { - inherit name subsystem type; - projectName = - if translatorModule ? projectName then - translatorModule.projectName - else - { ... }: null; - }; + translatorsList = let + list = l.collect (v: v ? subsystem) translators; + prio = translator: + if translator.type == "pure" + then 0 + else if translator.type == "ifd" + then 1 + else if translator.type == "impure" + then 2 + else 3; + in + l.sort + (a: b: (prio a) < (prio b)) + list; + callTranslator = subsystem: type: name: file: args: let + translatorModule = import file { + inherit dlib lib; + }; + in + translatorModule + // { + inherit name subsystem type; + projectName = + if translatorModule ? projectName + then translatorModule.projectName + else {...}: null; + }; # EXPORTED # attrset of: subsystem -> translator-type -> translator - translators = mkTranslatorsSet (subsystem: type: - let + translators = mkTranslatorsSet ( + subsystem: type: let translatorNames = dlib.dirNames (../translators + "/${subsystem}/${type}"); translatorsLoaded = l.genAttrs - translatorNames - (translatorName: - callTranslator - subsystem - type - translatorName - (../translators + "/${subsystem}/${type}/${translatorName}") - {}); + translatorNames + (translatorName: + callTranslator + subsystem + type + translatorName + (../translators + "/${subsystem}/${type}/${translatorName}") + {}); in translatorsLoaded ); mapTranslators = f: l.mapAttrs - (subsystem: types: + (subsystem: types: + l.mapAttrs + (type: names: l.mapAttrs - (type: names: - l.mapAttrs - (name: translator: f translator) - names) - types) - translators; + (name: translator: f translator) + names) + types) + translators; # returns the list of translators including their special args # and adds a flag `compatible` to each translator indicating # if the translator is compatible to all given paths - translatorsForInput = - { - source, - }: + translatorsForInput = {source}: l.forEach translatorsList - (t: rec { - inherit (t) - extraArgs - name - subsystem - type + (t: rec { + inherit + (t) + extraArgs + name + subsystem + type ; - compatible = t.compatible { inherit source; }; - projectName = t.projectName { inherit source; }; - }); + compatible = t.compatible {inherit source;}; + projectName = t.projectName {inherit source;}; + }); # also includes subdirectories of the given paths up to a certain depth # to check for translator compatibility - translatorsForInputRecursive = - { - source, - depth ? 
2, - }: - let - listDirsRec = dir: depth: - let - subDirs = - l.map - (subdir: "${dir}/${subdir}") - (dlib.listDirs dir); - in - if depth == 0 then - subDirs - else - subDirs - ++ - (l.flatten - (map - (subDir: listDirsRec subDir (depth -1)) - subDirs)); - - dirsToCheck = - [ source ] - ++ - (l.flatten - (map - (inputDir: listDirsRec inputDir depth) - [ source ])); - + translatorsForInputRecursive = { + source, + depth ? 2, + }: let + listDirsRec = dir: depth: let + subDirs = + l.map + (subdir: "${dir}/${subdir}") + (dlib.listDirs dir); in - l.genAttrs - dirsToCheck - (dir: - translatorsForInput { - source = dir; - } - ); + if depth == 0 + then subDirs + else + subDirs + ++ (l.flatten + (map + (subDir: listDirsRec subDir (depth - 1)) + subDirs)); + dirsToCheck = + [source] + ++ (l.flatten + (map + (inputDir: listDirsRec inputDir depth) + [source])); + in + l.genAttrs + dirsToCheck + ( + dir: + translatorsForInput { + source = dir; + } + ); # pupulates a translators special args with defaults getextraArgsDefaults = extraArgsDef: l.mapAttrs - (name: def: - if def.type == "flag" then - false - else - def.default or null - ) - extraArgsDef; - + ( + name: def: + if def.type == "flag" + then false + else def.default or null + ) + extraArgsDef; # return one compatible translator or throw error - findOneTranslator = - { - source, - translatorName ? null, - }@args: - let - translatorsForSource = translatorsForInput { - inherit source; - }; + findOneTranslator = { + source, + translatorName ? null, + } @ args: let + translatorsForSource = translatorsForInput { + inherit source; + }; - nameFilter = - if translatorName != null then - (translator: translator.name == translatorName) - else - (translator: true); - - compatibleTranslators = - let - result = - l.filter - (t: t.compatible) - translatorsForSource; - in - if result == [] then - throw "Could not find a compatible translator for input" - else - result; - - translator = - l.findFirst - nameFilter - (throw ''Specified translator ${translatorName} not found or incompatible'') - compatibleTranslators; + nameFilter = + if translatorName != null + then (translator: translator.name == translatorName) + else (translator: true); + compatibleTranslators = let + result = + l.filter + (t: t.compatible) + translatorsForSource; in - translator; + if result == [] + then throw "Could not find a compatible translator for input" + else result; -in -{ + translator = + l.findFirst + nameFilter + (throw ''Specified translator ${translatorName} not found or incompatible'') + compatibleTranslators; + in + translator; +in { inherit findOneTranslator getextraArgsDefaults @@ -225,5 +201,5 @@ in translators translatorsForInput translatorsForInputRecursive - ; + ; } diff --git a/src/libV2.nix b/src/libV2.nix index 4ee28402..76d853b0 100644 --- a/src/libV2.nix +++ b/src/libV2.nix @@ -1,7 +1,6 @@ # like ./default.nix but system intependent # (allows to generate outputs for several systems) # follows flake output schema - { dlib, nixpkgsSrc, @@ -9,236 +8,220 @@ overridesDirs, externalSources, externalPaths, - -}@args: - -let - +} @ args: let b = builtins; l = lib // builtins; dream2nixForSystem = config: system: pkgs: import ./default.nix - { inherit config externalPaths externalSources pkgs; }; - + {inherit config externalPaths externalSources pkgs;}; # TODO: design output schema for cross compiled packages - makePkgsKey = pkgs: - let - build = pkgs.buildPlatform.system; - host = pkgs.hostPlatform.system; - in - if build == host then build - else throw "cross compiling 
currently not supported"; + makePkgsKey = pkgs: let + build = pkgs.buildPlatform.system; + host = pkgs.hostPlatform.system; + in + if build == host + then build + else throw "cross compiling currently not supported"; makeNixpkgs = pkgsList: systems: - - # fail if neither pkgs nor systems are defined - if pkgsList == null && systems == [] then - throw "Either `systems` or `pkgs` must be defined" - + # fail if neither pkgs nor systems are defined + if pkgsList == null && systems == [] + then throw "Either `systems` or `pkgs` must be defined" # fail if pkgs and systems are both defined - else if pkgsList != null && systems != [] then - throw "Define either `systems` or `pkgs`, not both" - + else if pkgsList != null && systems != [] + then throw "Define either `systems` or `pkgs`, not both" # only pkgs is specified - else if pkgsList != null then - if b.isList pkgsList then + else if pkgsList != null + then + if b.isList pkgsList + then lib.listToAttrs - (pkgs: lib.nameValuePair (makePkgsKey pkgs) pkgs) - pkgsList - else - { "${makePkgsKey pkgsList}" = pkgsList; } - + (pkgs: lib.nameValuePair (makePkgsKey pkgs) pkgs) + pkgsList + else {"${makePkgsKey pkgsList}" = pkgsList;} # only systems is specified else lib.genAttrs systems - (system: import nixpkgsSrc { inherit system; }); + (system: import nixpkgsSrc {inherit system;}); + flakifyBuilderOutputs = system: outputs: + l.mapAttrs + (ouputType: outputValue: {"${system}" = outputValue;}) + outputs; - flakifyBuilderOutputs = system: outputs: - l.mapAttrs - (ouputType: outputValue: { "${system}" = outputValue; }) - outputs; + init = { + pkgs ? null, + systems ? [], + config ? {}, + } @ argsInit: let + config' = (import ./utils/config.nix).loadConfig argsInit.config or {}; - init = - { - pkgs ? null, - systems ? [], - config ? {}, - }@argsInit: - let - - config' = (import ./utils/config.nix).loadConfig argsInit.config or {}; - - config = config' // { + config = + config' + // { overridesDirs = args.overridesDirs ++ config'.overridesDirs; }; - allPkgs = makeNixpkgs pkgs systems; + allPkgs = makeNixpkgs pkgs systems; - forAllSystems = f: lib.mapAttrs f allPkgs; + forAllSystems = f: lib.mapAttrs f allPkgs; - dream2nixFor = forAllSystems (dream2nixForSystem config); - in - { + dream2nixFor = forAllSystems (dream2nixForSystem config); + in { + riseAndShine = throw "Use makeFlakeOutputs instead of riseAndShine."; - riseAndShine = throw "Use makeFlakeOutputs instead of riseAndShine."; + makeFlakeOutputs = mArgs: + makeFlakeOutputsFunc + ( + {inherit config pkgs systems;} + // mArgs + ); - makeFlakeOutputs = mArgs: makeFlakeOutputsFunc - ( - { inherit config pkgs systems; } - // mArgs - ); + apps = + forAllSystems + (system: pkgs: + dream2nixFor."${system}".apps.flakeApps); - apps = - forAllSystems - (system: pkgs: - dream2nixFor."${system}".apps.flakeApps); + defaultApp = + forAllSystems + (system: pkgs: + dream2nixFor."${system}".apps.flakeApps.dream2nix); + }; - defaultApp = - forAllSystems - (system: pkgs: - dream2nixFor."${system}".apps.flakeApps.dream2nix); + makeFlakeOutputsFunc = { + pname ? throw "Please pass `pname` to makeFlakeOutputs", + pkgs ? null, + packageOverrides ? {}, + settings ? [], + source, + systems ? [], + translator ? null, + translatorArgs ? {}, + ... 
+ } @ args: let + config = args.config or ((import ./utils/config.nix).loadConfig {}); + allPkgs = makeNixpkgs pkgs systems; + forAllSystems = f: b.mapAttrs f allPkgs; + dream2nixFor = forAllSystems (dream2nixForSystem config); + getInvalidationHash = project: + dlib.calcInvalidationHash { + inherit source; + # TODO: add translatorArgs + translatorArgs = {}; + translator = project.translator; }; - makeFlakeOutputsFunc = + discoveredProjects = dlib.discoverers.discoverProjects { + inherit settings; + tree = dlib.prepareSourceTree {inherit source;}; + }; + + allBuilderOutputs = + lib.mapAttrs + (system: pkgs: let + dream2nix = dream2nixFor."${system}"; + + impureDiscoveredProjects = + l.filter + (proj: + dream2nix + .translators + .translatorsV2 + ."${proj.subsystem}" + .all + ."${proj.translator}" + .type + == "impure") + discoveredProjects; + + impureResolveScriptsList = + l.listToAttrs + (l.forEach impureDiscoveredProjects + (project: + l.nameValuePair + "Name: ${project.name}; Subsystem: ${project.subsystem}; relPath: ${project.relPath}" + (dream2nix.utils.makeTranslateScript { + inherit project source; + invalidationHash = getInvalidationHash project; + }))); + + resolveImpureScript = + dream2nix.utils.writePureShellScript + [] + '' + cd $WORKDIR + ${l.concatStringsSep "\n" + (l.mapAttrsToList + (title: script: '' + echo "Resolving:: ${title}" + ${script}/bin/resolve + '') + impureResolveScriptsList)} + ''; + + translatedProjects = dream2nix.translateProjects { + inherit pname settings source; + }; + + realizedProjects = dream2nix.realizeProjects { + inherit packageOverrides translatedProjects source; + }; + + allOutputs = + realizedProjects + // { + apps.resolveImpure = { + type = "app"; + program = l.toString resolveImpureScript; + }; + }; + in + allOutputs) + allPkgs; + + flakifiedOutputsList = + lib.mapAttrsToList + (system: outputs: flakifyBuilderOutputs system outputs) + allBuilderOutputs; + + flakeOutputsBuilders = + b.foldl' + (allOutputs: output: lib.recursiveUpdate allOutputs output) + {} + flakifiedOutputsList; + + flakeOutputs = + {projectsJson = l.toJSON discoveredProjects;} + // flakeOutputsBuilders; + in + lib.recursiveUpdate + flakeOutputs { - pname ? throw "Please pass `pname` to makeFlakeOutputs", - pkgs ? null, - packageOverrides ? {}, - settings ? [], - source, - systems ? [], - translator ? null, - translatorArgs ? {}, - ... 
- }@args: - let + apps = forAllSystems (system: pkgs: { + resolve.type = "app"; + resolve.program = let + utils = dream2nixFor."${system}".utils; - config = args.config or ((import ./utils/config.nix).loadConfig {}); - allPkgs = makeNixpkgs pkgs systems; - forAllSystems = f: b.mapAttrs f allPkgs; - dream2nixFor = forAllSystems (dream2nixForSystem config); - - getInvalidationHash = project: - dlib.calcInvalidationHash { - inherit source; - # TODO: add translatorArgs - translatorArgs = {}; - translator = project.translator; - }; - - discoveredProjects = dlib.discoverers.discoverProjects { - inherit settings; - tree = dlib.prepareSourceTree { inherit source; }; - }; - - allBuilderOutputs = - lib.mapAttrs - (system: pkgs: - let - dream2nix = dream2nixFor."${system}"; - - impureDiscoveredProjects = - l.filter - (proj: - dream2nix.translators.translatorsV2."${proj.subsystem}".all - ."${proj.translator}".type == "impure") - discoveredProjects; - - impureResolveScriptsList = - l.listToAttrs - (l.forEach impureDiscoveredProjects - (project: - l.nameValuePair - "Name: ${project.name}; Subsystem: ${project.subsystem}; relPath: ${project.relPath}" - (dream2nix.utils.makeTranslateScript { - inherit project source; - invalidationHash = getInvalidationHash project; - }))); - - resolveImpureScript = - dream2nix.utils.writePureShellScript - [] - '' - cd $WORKDIR - ${l.concatStringsSep "\n" - (l.mapAttrsToList - (title: script: '' - echo "Resolving:: ${title}" - ${script}/bin/resolve - '') - impureResolveScriptsList)} - ''; - - translatedProjects = dream2nix.translateProjects { - inherit pname settings source; - }; - - realizedProjects = - dream2nix.realizeProjects { - inherit packageOverrides translatedProjects source; - }; - - allOutputs = - realizedProjects - // { - apps.resolveImpure = { - type = "app"; - program = l.toString resolveImpureScript; - }; - }; - in - allOutputs) - allPkgs; - - flakifiedOutputsList = - lib.mapAttrsToList - (system: outputs: flakifyBuilderOutputs system outputs) - allBuilderOutputs; - - flakeOutputsBuilders = - b.foldl' - (allOutputs: output: lib.recursiveUpdate allOutputs output) - {} - flakifiedOutputsList; - - flakeOutputs = - { projectsJson = l.toJSON (discoveredProjects); } - // flakeOutputsBuilders; - - in - lib.recursiveUpdate - flakeOutputs - { - apps = forAllSystems (system: pkgs: { - resolve.type = "app"; - resolve.program = - let - utils = (dream2nixFor."${system}".utils); - - # TODO: Too many calls to findOneTranslator. - # -> make findOneTranslator system independent - translatorFound = - dream2nixFor."${system}".translators.findOneTranslator { - inherit source; - translatorName = args.translator or null; - }; - in - b.toString - (utils.makePackageLockScript { - inherit source translatorArgs; - packagesDir = config.packagesDir; - translator = translatorFound.name; - }); + # TODO: Too many calls to findOneTranslator. 
+ # -> make findOneTranslator system independent + translatorFound = dream2nixFor."${system}".translators.findOneTranslator { + inherit source; + translatorName = args.translator or null; + }; + in + b.toString + (utils.makePackageLockScript { + inherit source translatorArgs; + packagesDir = config.packagesDir; + translator = translatorFound.name; }); - }; - -in -{ + }); + }; +in { inherit dlib init; riseAndShine = throw "Use makeFlakeOutputs instead of riseAndShine."; makeFlakeOutpus = makeFlakeOutputsFunc; diff --git a/src/templates/builders/default.nix b/src/templates/builders/default.nix index df1597b7..66abbcb0 100644 --- a/src/templates/builders/default.nix +++ b/src/templates/builders/default.nix @@ -2,47 +2,36 @@ lib, pkgs, stdenv, - # dream2nix inputs builders, externals, utils, ... -}: - -{ +}: { # Funcs - # AttrSet -> Bool) -> AttrSet -> [x] - getCyclicDependencies, # name: version: -> [ {name=; version=; } ] - getDependencies, # name: version: -> [ {name=; version=; } ] - getSource, # name: version: -> store-path + getCyclicDependencies, # name: version: -> [ {name=; version=; } ] + getDependencies, # name: version: -> [ {name=; version=; } ] + getSource, # name: version: -> store-path buildPackageWithOtherBuilder, # { builder, name, version }: -> drv - # Attributes - subsystemAttrs, # attrset - defaultPackageName, # string - defaultPackageVersion, # string - + subsystemAttrs, # attrset + defaultPackageName, # string + defaultPackageVersion, # string # attrset of pname -> versions, # where versions is a list of version strings packageVersions, - # function which applies overrides to a package # It must be applied by the builder to each individual derivation # Example: # produceDerivation name (mkDerivation {...}) produceDerivation, - # Custom Options: (parametrize builder behavior) # These can be passed by the user via `builderArgs`. # All options must provide default standalonePackageNames ? [], ... 
-}@args: - -let - +} @ args: let b = builtins; # the main package @@ -51,37 +40,30 @@ let # manage pakcages in attrset to prevent duplicated evaluation packages = lib.mapAttrs - (name: versions: - lib.genAttrs - versions - (version: makeOnePackage name version)) - packageVersions; + (name: versions: + lib.genAttrs + versions + (version: makeOnePackage name version)) + packageVersions; # Generates a derivation for a specific package name + version - makeOnePackage = name: version: - let - pkg = - stdenv.mkDerivation rec { + makeOnePackage = name: version: let + pkg = stdenv.mkDerivation rec { + pname = utils.sanitizeDerivationName name; + inherit version; - pname = utils.sanitizeDerivationName name; - inherit version; + src = getSource name version; - src = getSource name version; + buildInputs = + map + (dep: packages."${dep.name}"."${dep.version}") + (getDependencies name version); - buildInputs = - map - (dep: packages."${dep.name}"."${dep.version}") - (getDependencies name version); - - # Implement build phases - - }; - in - # apply packageOverrides to current derivation - (utils.applyOverridesToPackage packageOverrides pkg name); - - -in -{ + # Implement build phases + }; + in + # apply packageOverrides to current derivation + (utils.applyOverridesToPackage packageOverrides pkg name); +in { inherit defaultPackage packages; } diff --git a/src/templates/translators/impure.nix b/src/templates/translators/impure.nix index 135307d8..5352602c 100644 --- a/src/templates/translators/impure.nix +++ b/src/templates/translators/impure.nix @@ -1,10 +1,7 @@ { dlib, lib, -}: - -{ - +}: { # A derivation which outputs a single executable at `$out`. # The executable will be called by dream2nix for translation # The input format is specified in /specifications/translator-call-example.json. @@ -15,66 +12,55 @@ # The program is expected to create a file at the location specified # by the input parameter `outFile`. # The output file must contain the dream lock data encoded as json. - translateBin = - { - # dream2nix utils - utils, - - # nixpkgs dependenies - bash, - jq, - writeScriptBin, - ... - }: + translateBin = { + # dream2nix utils + utils, + # nixpkgs dependenies + bash, + jq, + writeScriptBin, + ... + }: utils.writePureShellScript - [ - bash - coreutils - jq - nix - ] - '' - # accroding to the spec, the translator reads the input from a json file - jsonInput=$1 + [ + bash + coreutils + jq + nix + ] + '' + # accroding to the spec, the translator reads the input from a json file + jsonInput=$1 - # read the json input - outputFile=$(${jq}/bin/jq '.outputFile' -c -r $jsonInput) - source=$(${jq}/bin/jq '.source' -c -r $jsonInput) - inputFiles=$(${jq}/bin/jq '.inputFiles | .[]' -c -r $jsonInput) - - # TODO: - # read input files/dirs and produce a json file at $outputFile - # containing the dream lock similar to /specifications/dream-lock-example.json - ''; + # read the json input + outputFile=$(${jq}/bin/jq '.outputFile' -c -r $jsonInput) + source=$(${jq}/bin/jq '.source' -c -r $jsonInput) + inputFiles=$(${jq}/bin/jq '.inputFiles | .[]' -c -r $jsonInput) + # TODO: + # read input files/dirs and produce a json file at $outputFile + # containing the dream lock similar to /specifications/dream-lock-example.json + ''; # This function should return the projects name. # The computational complexity of this should be kept as lightweight # as possible, as this migth be executed on a large amount of inputs at once. 
- projectName = - { - source, - }: + projectName = {source}: null; - # This allows the framework to detect if the translator is compatible with the given input # to automatically select the right translator. - compatible = - { - source, - }: - # TODO: insert regex here that matches valid input file names - # examples: - # - ''.*requirements.*\.txt'' - # - ''.*package-lock\.json'' + compatible = {source}: + # TODO: insert regex here that matches valid input file names + # examples: + # - ''.*requirements.*\.txt'' + # - ''.*package-lock\.json'' dlib.containsMatchingFile - [ - ''TODO: regex1'' - ''TODO: regex2'' - ] - source; - + [ + ''TODO: regex1'' + ''TODO: regex2'' + ] + source; # If the translator requires additional arguments, specify them here. # There are only two types of arguments: @@ -82,7 +68,6 @@ # - boolean flag (type = "flag") # String arguments contain a default value and examples. Flags do not. extraArgs = { - # Example: boolean option # Flags always default to 'false' if not specified by the user noDev = { @@ -100,6 +85,5 @@ ]; type = "argument"; }; - }; } diff --git a/src/translators/default.nix b/src/translators/default.nix index 5c02d338..8cc375d7 100644 --- a/src/translators/default.nix +++ b/src/translators/default.nix @@ -6,92 +6,88 @@ nix, pkgs, python3, - callPackageDream, externals, dream2nixWithExternals, utils, ... -}: - -let - +}: let b = builtins; l = lib // builtins; # transforms V1 translators to V2 translators - ensureTranslatorV2 = translator: - let - version = translator.version or 1; - cleanedArgs = args: l.removeAttrs args [ "project" "tree" ]; + ensureTranslatorV2 = translator: let + version = translator.version or 1; + cleanedArgs = args: l.removeAttrs args ["project" "tree"]; - upgradedTranslator = - translator - // (lib.optionalAttrs (translator ? translate) { - translate = args: - let - dreamLock = - translator.translate - ((cleanedArgs args) // { - source = "${args.source}/${args.project.relPath}"; - name = args.project.name; - }); - in - dreamLock // { - _generic = dreamLock._generic // { - location = args.project.relPath; - }; - }; - }); - - finalTranslator = - if version == 2 then - translator - else - upgradedTranslator; - in - finalTranslator - // (lib.optionalAttrs (finalTranslator ? translate) { - translateBin = - wrapPureTranslator2 - (with translator; [ subsystem type name ]); - # ensure `tree` is passed - translate = args: - finalTranslator.translate (args // { - tree = - args.tree or (dlib.prepareSourceTree { inherit (args) source; }); - }); + upgradedTranslator = + translator + // (lib.optionalAttrs (translator ? translate) { + translate = args: let + dreamLock = + translator.translate + ((cleanedArgs args) + // { + source = "${args.source}/${args.project.relPath}"; + name = args.project.name; + }); + in + dreamLock + // { + _generic = + dreamLock._generic + // { + location = args.project.relPath; + }; + }; }); - # transforms V2 translators to V1 translators - ensureTranslatorV1 = translator: - let - version = translator.version or 1; + finalTranslator = + if version == 2 + then translator + else upgradedTranslator; + in + finalTranslator + // (lib.optionalAttrs (finalTranslator ? translate) { + translateBin = + wrapPureTranslator2 + (with translator; [subsystem type name]); + # ensure `tree` is passed + translate = args: + finalTranslator.translate (args + // { + tree = + args.tree or (dlib.prepareSourceTree {inherit (args) source;}); + }); + }); - downgradeTranslator = - translator - // (lib.optionalAttrs (translator ? 
translate) { - translate = args: - translator.translate (args // { + # transforms V2 translators to V1 translators + ensureTranslatorV1 = translator: let + version = translator.version or 1; + + downgradeTranslator = + translator + // (lib.optionalAttrs (translator ? translate) { + translate = args: + translator.translate (args + // { inherit (args) source; - tree = dlib.prepareSourceTree { inherit (args) source; }; + tree = dlib.prepareSourceTree {inherit (args) source;}; project = { - name = translator.projectName { inherit (args) source; }; + name = translator.projectName {inherit (args) source;}; relPath = ""; subsystem = translator.subsystem; }; }); - }); - - finalTranslator = - if version == 1 then - translator - else - downgradeTranslator; - in - finalTranslator; + }); + finalTranslator = + if version == 1 + then translator + else downgradeTranslator; + in + finalTranslator; makeTranslatorV2 = translatorModule: ensureTranslatorV2 (makeTranslator translatorModule); @@ -99,153 +95,144 @@ let makeTranslatorV1 = translatorModule: ensureTranslatorV1 (makeTranslator translatorModule); - - makeTranslator = - translatorModule: - let - translator = - translatorModule - - # for pure translators - # - import the `translate` function - # - generate `translateBin` - // (lib.optionalAttrs (translatorModule ? translate) { - translate = - let - translateOriginal = callPackageDream translatorModule.translate { - translatorName = translatorModule.name; - }; - in - args: translateOriginal - ( - (dlib.translators.getextraArgsDefaults - (translatorModule.extraArgs or {})) - // args - ); - translateBin = - wrapPureTranslator - (with translatorModule; [ subsystem type name ]); - }) - - # for impure translators: - # - import the `translateBin` function - // (lib.optionalAttrs (translatorModule ? translateBin) { - translateBin = callPackageDream translatorModule.translateBin - { - translatorName = translatorModule.name; - }; - }); - - in - translator; - + makeTranslator = translatorModule: let + translator = + translatorModule + # for pure translators + # - import the `translate` function + # - generate `translateBin` + // (lib.optionalAttrs (translatorModule ? translate) { + translate = let + translateOriginal = callPackageDream translatorModule.translate { + translatorName = translatorModule.name; + }; + in + args: + translateOriginal + ( + (dlib.translators.getextraArgsDefaults + (translatorModule.extraArgs or {})) + // args + ); + translateBin = + wrapPureTranslator + (with translatorModule; [subsystem type name]); + }) + # for impure translators: + # - import the `translateBin` function + // (lib.optionalAttrs (translatorModule ? 
translateBin) { + translateBin = + callPackageDream translatorModule.translateBin + { + translatorName = translatorModule.name; + }; + }); + in + translator; translators = dlib.translators.mapTranslators makeTranslatorV1; translatorsV2 = dlib.translators.mapTranslators makeTranslatorV2; # adds a translateBin to a pure translator - wrapPureTranslator2 = translatorAttrPath: - let - bin = utils.writePureShellScript - [ - coreutils - jq - nix - python3 - ] - '' - jsonInputFile=$(realpath $1) - outputFile=$WORKDIR/$(jq '.outputFile' -c -r $jsonInputFile) + wrapPureTranslator2 = translatorAttrPath: let + bin = + utils.writePureShellScript + [ + coreutils + jq + nix + python3 + ] + '' + jsonInputFile=$(realpath $1) + outputFile=$WORKDIR/$(jq '.outputFile' -c -r $jsonInputFile) - cd $WORKDIR - mkdir -p $(dirname $outputFile) + cd $WORKDIR + mkdir -p $(dirname $outputFile) - nix eval \ - --option experimental-features "nix-command flakes"\ - --show-trace --impure --raw --expr " - let - dream2nix = import ${dream2nixWithExternals} {}; + nix eval \ + --option experimental-features "nix-command flakes"\ + --show-trace --impure --raw --expr " + let + dream2nix = import ${dream2nixWithExternals} {}; - translatorArgs = - (builtins.fromJSON - (builtins.unsafeDiscardStringContext (builtins.readFile '''$1'''))); + translatorArgs = + (builtins.fromJSON + (builtins.unsafeDiscardStringContext (builtins.readFile '''$1'''))); - dreamLock = - dream2nix.translators.translatorsV2.${ - lib.concatStringsSep "." translatorAttrPath - }.translate - translatorArgs; - in - dream2nix.utils.dreamLock.toJSON - # don't use nix to detect cycles, this will be more efficient in python - (dreamLock // { - _generic = builtins.removeAttrs dreamLock._generic [ \"cyclicDependencies\" ]; - }) - " | python3 ${../apps/cli2/format-dream-lock.py} > $outputFile - ''; - in - bin.overrideAttrs (old: { - name = "translator-${lib.concatStringsSep "-" translatorAttrPath}"; - }); + dreamLock = + dream2nix.translators.translatorsV2.${ + lib.concatStringsSep "." 
translatorAttrPath + }.translate + translatorArgs; + in + dream2nix.utils.dreamLock.toJSON + # don't use nix to detect cycles, this will be more efficient in python + (dreamLock // { + _generic = builtins.removeAttrs dreamLock._generic [ \"cyclicDependencies\" ]; + }) + " | python3 ${../apps/cli2/format-dream-lock.py} > $outputFile + ''; + in + bin.overrideAttrs (old: { + name = "translator-${lib.concatStringsSep "-" translatorAttrPath}"; + }); # adds a translateBin to a pure translator - wrapPureTranslator = translatorAttrPath: - let - bin = utils.writePureShellScript - [ - coreutils - jq - nix - python3 - ] - '' - jsonInputFile=$(realpath $1) - outputFile=$(jq '.outputFile' -c -r $jsonInputFile) + wrapPureTranslator = translatorAttrPath: let + bin = + utils.writePureShellScript + [ + coreutils + jq + nix + python3 + ] + '' + jsonInputFile=$(realpath $1) + outputFile=$(jq '.outputFile' -c -r $jsonInputFile) - cd $WORKDIR - mkdir -p $(dirname $outputFile) + cd $WORKDIR + mkdir -p $(dirname $outputFile) - nix eval \ - --option experimental-features "nix-command flakes"\ - --show-trace --impure --raw --expr " - let - dream2nix = import ${dream2nixWithExternals} {}; + nix eval \ + --option experimental-features "nix-command flakes"\ + --show-trace --impure --raw --expr " + let + dream2nix = import ${dream2nixWithExternals} {}; - translatorArgs = - (builtins.fromJSON - (builtins.unsafeDiscardStringContext (builtins.readFile '''$1'''))); + translatorArgs = + (builtins.fromJSON + (builtins.unsafeDiscardStringContext (builtins.readFile '''$1'''))); - dreamLock = - dream2nix.translators.translators.${ - lib.concatStringsSep "." translatorAttrPath - }.translate - translatorArgs; - in - dream2nix.utils.dreamLock.toJSON - # don't use nix to detect cycles, this will be more efficient in python - (dreamLock // { - _generic = builtins.removeAttrs dreamLock._generic [ \"cyclicDependencies\" ]; - }) - " | python3 ${../apps/cli2/format-dream-lock.py} > $outputFile - ''; - in - bin.overrideAttrs (old: { - name = "translator-${lib.concatStringsSep "-" translatorAttrPath}"; - }); - - - -in -{ + dreamLock = + dream2nix.translators.translators.${ + lib.concatStringsSep "." translatorAttrPath + }.translate + translatorArgs; + in + dream2nix.utils.dreamLock.toJSON + # don't use nix to detect cycles, this will be more efficient in python + (dreamLock // { + _generic = builtins.removeAttrs dreamLock._generic [ \"cyclicDependencies\" ]; + }) + " | python3 ${../apps/cli2/format-dream-lock.py} > $outputFile + ''; + in + bin.overrideAttrs (old: { + name = "translator-${lib.concatStringsSep "-" translatorAttrPath}"; + }); +in { inherit translators translatorsV2 - ; + ; - inherit (dlib.translators) + inherit + (dlib.translators) findOneTranslator translatorsForInput translatorsForInputRecursive - ; + ; } diff --git a/src/translators/go/impure/gomod2nix/default.nix b/src/translators/go/impure/gomod2nix/default.nix index 97e2e651..9acb0c59 100644 --- a/src/translators/go/impure/gomod2nix/default.nix +++ b/src/translators/go/impure/gomod2nix/default.nix @@ -1,74 +1,59 @@ { dlib, lib, -}: -let +}: let l = lib // builtins; -in -{ - +in { # the input format is specified in /specifications/translator-call-example.json # this script receives a json file including the input paths and specialArgs - translateBin = - { - # dream2nix utils - utils, - dream2nixWithExternals, - - bash, - coreutils, - jq, - nix, - writeScriptBin, - ... 
- }: + translateBin = { + # dream2nix utils + utils, + dream2nixWithExternals, + bash, + coreutils, + jq, + nix, + writeScriptBin, + ... + }: utils.writePureShellScript - [ - bash - coreutils - jq - nix - ] - '' - # accroding to the spec, the translator reads the input from a json file - jsonInput=$1 + [ + bash + coreutils + jq + nix + ] + '' + # accroding to the spec, the translator reads the input from a json file + jsonInput=$1 - # read the json input - outputFile=$(${jq}/bin/jq '.outputFile' -c -r $jsonInput) - source=$(${jq}/bin/jq '.source' -c -r $jsonInput) + # read the json input + outputFile=$(${jq}/bin/jq '.outputFile' -c -r $jsonInput) + source=$(${jq}/bin/jq '.source' -c -r $jsonInput) - tmpBuild=$(mktemp -d) - cd $tmpBuild - cp -r $source/* . - chmod -R +w . - # This should be in sync with gomod2nix version in flake.lock - nix run github:tweag/gomod2nix/67f22dd738d092c6ba88e420350ada0ed4992ae8 + tmpBuild=$(mktemp -d) + cd $tmpBuild + cp -r $source/* . + chmod -R +w . + # This should be in sync with gomod2nix version in flake.lock + nix run github:tweag/gomod2nix/67f22dd738d092c6ba88e420350ada0ed4992ae8 - nix eval --show-trace --impure --raw --expr "import ${./translate.nix} ${dream2nixWithExternals} ./." > $outputFile - ''; - - projectName = - { - source, - }: - let - goModFile = "${source}/go.mod"; - firstLine = (l.elemAt (l.splitString "\n" (l.readFile goModFile)) 0); - in - if l.pathExists goModFile then - l.last (l.splitString "/" (l.elemAt (l.splitString " " firstLine) 1)) - else - null; + nix eval --show-trace --impure --raw --expr "import ${./translate.nix} ${dream2nixWithExternals} ./." > $outputFile + ''; + projectName = {source}: let + goModFile = "${source}/go.mod"; + firstLine = l.elemAt (l.splitString "\n" (l.readFile goModFile)) 0; + in + if l.pathExists goModFile + then l.last (l.splitString "/" (l.elemAt (l.splitString " " firstLine) 1)) + else null; # This allows the framework to detect if the translator is compatible with the given input # to automatically select the right translator. - compatible = - { - source, - }: - dlib.containsMatchingFile [ ''go\.sum'' ''go\.mod'' ] source; - + compatible = {source}: + dlib.containsMatchingFile [''go\.sum'' ''go\.mod''] source; # If the translator requires additional arguments, specify them here. # There are only two types of arguments: diff --git a/src/translators/go/impure/gomod2nix/translate.nix b/src/translators/go/impure/gomod2nix/translate.nix index 0ad7c8a5..e31310c5 100644 --- a/src/translators/go/impure/gomod2nix/translate.nix +++ b/src/translators/go/impure/gomod2nix/translate.nix @@ -1,75 +1,66 @@ -dream2nixWithExternals: -cwd: -let - dream2nix = import dream2nixWithExternals { }; +dream2nixWithExternals: cwd: let + dream2nix = import dream2nixWithExternals {}; b = builtins; parsed = b.fromTOML (builtins.readFile "${cwd}/gomod2nix.toml"); - pkgs = import { }; + pkgs = import {}; lib = pkgs.lib; serializePackages = inputData: lib.mapAttrsToList - (goName: depAttrs: depAttrs // { inherit goName; }) - parsed; + (goName: depAttrs: depAttrs // {inherit goName;}) + parsed; translated = dream2nix.utils.simpleTranslate - ({ - getDepByNameVer, - dependenciesByOriginalID, - ... - }: + ({ + getDepByNameVer, + dependenciesByOriginalID, + ... 
+ }: rec { + translatorName = "gomod2nix"; - rec { + inputData = parsed; - translatorName = "gomod2nix"; + defaultPackage = let + firstLine = b.elemAt (lib.splitString "\n" (b.readFile "${cwd}/go.mod")) 0; + in + lib.last (lib.splitString "/" (b.elemAt (lib.splitString " " firstLine) 1)); - inputData = parsed; + packages."${defaultPackage}" = "unknown"; - defaultPackage = - let - firstLine = (b.elemAt (lib.splitString "\n" (b.readFile "${cwd}/go.mod")) 0); - in - lib.last (lib.splitString "/" (b.elemAt (lib.splitString " " firstLine) 1)); + subsystemName = "go"; - packages."${defaultPackage}" = "unknown"; + subsystemAttrs = {}; - subsystemName = "go"; + inherit serializePackages; - subsystemAttrs = { }; + mainPackageDependencies = + lib.forEach + (serializePackages parsed) + (dep: { + name = getName dep; + version = getVersion dep; + }); - inherit serializePackages; + getOriginalID = dependencyObject: + null; - mainPackageDependencies = - lib.forEach - (serializePackages parsed) - (dep: { - name = getName dep; - version = getVersion dep; - }); + getName = dependencyObject: + dependencyObject.goName; - getOriginalID = dependencyObject: - null; + getVersion = dependencyObject: + lib.removePrefix "v" dependencyObject.sumVersion; - getName = dependencyObject: - dependencyObject.goName; + getDependencies = dependencyObject: []; - getVersion = dependencyObject: - lib.removePrefix "v" dependencyObject.sumVersion; + getSourceType = dependencyObject: "git"; - getDependencies = dependencyObject: - []; - - getSourceType = dependencyObject: "git"; - - sourceConstructors = { - git = dependencyObject: - { - type = "git"; - hash = dependencyObject.fetch.sha256; - url = dependencyObject.fetch.url; - rev = dependencyObject.fetch.rev; - }; + sourceConstructors = { + git = dependencyObject: { + type = "git"; + hash = dependencyObject.fetch.sha256; + url = dependencyObject.fetch.url; + rev = dependencyObject.fetch.rev; }; - - }); + }; + }); in dream2nix.utils.dreamLock.toJSON translated diff --git a/src/translators/nodejs/impure/package-json/default.nix b/src/translators/nodejs/impure/package-json/default.nix index 622f1a2f..a8a47253 100644 --- a/src/translators/nodejs/impure/package-json/default.nix +++ b/src/translators/nodejs/impure/package-json/default.nix @@ -1,78 +1,68 @@ { dlib, lib, -}: - -{ - +}: { # the input format is specified in /specifications/translator-call-example.json # this script receives a json file including the input paths and specialArgs - translateBin = - { - # dream2nix utils - translators, - utils, - - # nixpkgs dependenies - bash, - coreutils, - jq, - nodePackages, - writeScriptBin, - ... - }: - + translateBin = { + # dream2nix utils + translators, + utils, + # nixpkgs dependenies + bash, + coreutils, + jq, + nodePackages, + writeScriptBin, + ... 
+ }: utils.writePureShellScript - [ - bash - coreutils - jq - nodePackages.npm - ] - '' - # accroding to the spec, the translator reads the input from a json file - jsonInput=$1 + [ + bash + coreutils + jq + nodePackages.npm + ] + '' + # accroding to the spec, the translator reads the input from a json file + jsonInput=$1 - # read the json input - outputFile=$(jq '.outputFile' -c -r $jsonInput) - source=$(jq '.source' -c -r $jsonInput) - npmArgs=$(jq '.npmArgs' -c -r $jsonInput) + # read the json input + outputFile=$(jq '.outputFile' -c -r $jsonInput) + source=$(jq '.source' -c -r $jsonInput) + npmArgs=$(jq '.npmArgs' -c -r $jsonInput) - cp -r $source/* ./ - chmod -R +w ./ - rm -rf package-lock.json + cp -r $source/* ./ + chmod -R +w ./ + rm -rf package-lock.json - if [ "$(jq '.noDev' -c -r $jsonInput)" == "true" ]; then - echo "excluding dev dependencies" - jq '.devDependencies = {}' ./package.json > package.json.mod - mv package.json.mod package.json - npm install --package-lock-only --production $npmArgs - else - npm install --package-lock-only $npmArgs - fi + if [ "$(jq '.noDev' -c -r $jsonInput)" == "true" ]; then + echo "excluding dev dependencies" + jq '.devDependencies = {}' ./package.json > package.json.mod + mv package.json.mod package.json + npm install --package-lock-only --production $npmArgs + else + npm install --package-lock-only $npmArgs + fi - jq ".source = \"$(pwd)\"" -c -r $jsonInput > $TMPDIR/newJsonInput - - cd $WORKDIR - ${translators.translators.nodejs.pure.package-lock.translateBin} $TMPDIR/newJsonInput - ''; + jq ".source = \"$(pwd)\"" -c -r $jsonInput > $TMPDIR/newJsonInput + cd $WORKDIR + ${translators.translators.nodejs.pure.package-lock.translateBin} $TMPDIR/newJsonInput + ''; # inherit projectName function from package-lock translator projectName = dlib.translators.translators.nodejs.pure.package-lock.projectName; - # This allows the framework to detect if the translator is compatible with the given input # to automatically select the right translator. - compatible = - { - source, - }: - dlib.containsMatchingFile [ ''.*package.json'' ] source; + compatible = {source}: + dlib.containsMatchingFile [''.*package.json''] source; # inherit options from package-lock translator extraArgs = - dlib.translators.translators.nodejs.pure.package-lock.extraArgs // { + dlib.translators.translators.nodejs.pure.package-lock.extraArgs + // { npmArgs = { description = "Additional arguments for npm"; type = "argument"; diff --git a/src/translators/nodejs/pure/package-lock/default.nix b/src/translators/nodejs/pure/package-lock/default.nix index 049f813a..373f3c21 100644 --- a/src/translators/nodejs/pure/package-lock/default.nix +++ b/src/translators/nodejs/pure/package-lock/default.nix @@ -1,275 +1,248 @@ { dlib, lib, -}: - -let +}: let b = builtins; l = lib // builtins; - nodejsUtils = import ../../utils.nix { inherit lib; }; + nodejsUtils = import ../../utils.nix {inherit lib;}; getPackageLock = tree: project: nodejsUtils.getWorkspaceLockFile tree project "package-lock.json"; - translate = - { - translatorName, - utils, - ... - }: - { - project, - source, - tree, + translate = { + translatorName, + utils, + ... + }: { + project, + source, + tree, + # translator args + noDev, + nodejs, + ... + } @ args: let + b = builtins; - # translator args - noDev, - nodejs, - ... - }@args: - let + dev = ! 
noDev; + name = project.name; + tree = args.tree.getNodeFromPath project.relPath; + relPath = project.relPath; + source = "${args.source}/${relPath}"; + workspaces = project.subsystemInfo.workspaces or []; - b = builtins; + packageLock = (getPackageLock args.tree project).jsonContent or null; - dev = ! noDev; - name = project.name; - tree = args.tree.getNodeFromPath project.relPath; - relPath = project.relPath; - source = "${args.source}/${relPath}"; - workspaces = project.subsystemInfo.workspaces or []; + packageJson = + (tree.getNodeFromPath "package.json").jsonContent; - packageLock = (getPackageLock args.tree project).jsonContent or null; + packageLockDeps = + if packageLock == null + then {} + else packageLock.dependencies or {}; - packageJson = - (tree.getNodeFromPath "package.json").jsonContent; + rootDependencies = packageLockDeps; - packageLockDeps = - if packageLock == null then - {} - else - packageLock.dependencies or {}; + packageJsonDeps = nodejsUtils.getPackageJsonDeps packageJson noDev; - rootDependencies = packageLockDeps; + parsedDependencies = + l.filterAttrs + (name: dep: packageJsonDeps ? "${name}") + packageLockDeps; - packageJsonDeps = nodejsUtils.getPackageJsonDeps packageJson noDev; - - parsedDependencies = - l.filterAttrs - (name: dep: packageJsonDeps ? "${name}") - packageLockDeps; - - identifyGitSource = dependencyObject: - # TODO: when integrity is there, and git url is github then use tarball instead - # ! (dependencyObject ? integrity) && - dlib.identifyGitUrl dependencyObject.version; - - getVersion = dependencyObject: - let - # example: "version": "npm:@tailwindcss/postcss7-compat@2.2.4", - npmMatch = b.match ''^npm:.*@(.*)$'' dependencyObject.version; - - in - if npmMatch != null then - b.elemAt npmMatch 0 - else if identifyGitSource dependencyObject then - "0.0.0-rc.${b.substring 0 8 (dlib.parseGitUrl dependencyObject.version).rev}" - else if lib.hasPrefix "file:" dependencyObject.version then - let - path = getPath dependencyObject; - in - (b.fromJSON - (b.readFile "${source}/${path}/package.json") - ).version - else if lib.hasPrefix "https://" dependencyObject.version then - "unknown" - else - dependencyObject.version; - - getPath = dependencyObject: - lib.removePrefix "file:" dependencyObject.version; - - pinVersions = dependencies: parentScopeDeps: - lib.mapAttrs - (pname: pdata: - let - selfScopeDeps = parentScopeDeps // dependencies; - requires = pdata.requires or {}; - dependencies = pdata.dependencies or {}; - in - pdata // { - depsExact = - lib.forEach - (lib.attrNames requires) - (reqName: { - name = reqName; - version = getVersion selfScopeDeps."${reqName}"; - }); - dependencies = pinVersions dependencies selfScopeDeps; - } - ) - dependencies; - - pinnedRootDeps = - pinVersions rootDependencies rootDependencies; - - createMissingSource = name: version: - { - type = "http"; - url = "https://registry.npmjs.org/${name}/-/${name}-${version}.tgz"; - }; + identifyGitSource = dependencyObject: + # TODO: when integrity is there, and git url is github then use tarball instead + # ! (dependencyObject ? 
integrity) && + dlib.identifyGitUrl dependencyObject.version; + getVersion = dependencyObject: let + # example: "version": "npm:@tailwindcss/postcss7-compat@2.2.4", + npmMatch = b.match ''^npm:.*@(.*)$'' dependencyObject.version; in + if npmMatch != null + then b.elemAt npmMatch 0 + else if identifyGitSource dependencyObject + then "0.0.0-rc.${b.substring 0 8 (dlib.parseGitUrl dependencyObject.version).rev}" + else if lib.hasPrefix "file:" dependencyObject.version + then let + path = getPath dependencyObject; + in + ( + b.fromJSON + (b.readFile "${source}/${path}/package.json") + ) + .version + else if lib.hasPrefix "https://" dependencyObject.version + then "unknown" + else dependencyObject.version; - utils.simpleTranslate - ({ - getDepByNameVer, - dependenciesByOriginalID, - ... - }: + getPath = dependencyObject: + lib.removePrefix "file:" dependencyObject.version; - rec { + pinVersions = dependencies: parentScopeDeps: + lib.mapAttrs + ( + pname: pdata: let + selfScopeDeps = parentScopeDeps // dependencies; + requires = pdata.requires or {}; + dependencies = pdata.dependencies or {}; + in + pdata + // { + depsExact = + lib.forEach + (lib.attrNames requires) + (reqName: { + name = reqName; + version = getVersion selfScopeDeps."${reqName}"; + }); + dependencies = pinVersions dependencies selfScopeDeps; + } + ) + dependencies; - inherit translatorName; - location = relPath; + pinnedRootDeps = + pinVersions rootDependencies rootDependencies; - # values - inputData = pinnedRootDeps; + createMissingSource = name: version: { + type = "http"; + url = "https://registry.npmjs.org/${name}/-/${name}-${version}.tgz"; + }; + in + utils.simpleTranslate + ({ + getDepByNameVer, + dependenciesByOriginalID, + ... + }: rec { + inherit translatorName; + location = relPath; - defaultPackage = - if name != "{automatic}" then - name - else - packageJson.name or (throw ( - "Could not identify package name. " - + "Please specify extra argument 'name'" - )); + # values + inputData = pinnedRootDeps; - packages = - { "${defaultPackage}" = packageJson.version or "unknown"; } - // (nodejsUtils.getWorkspacePackages tree workspaces); + defaultPackage = + if name != "{automatic}" + then name + else + packageJson.name + or (throw ( + "Could not identify package name. " + + "Please specify extra argument 'name'" + )); - mainPackageDependencies = - lib.mapAttrsToList - (pname: pdata: - { name = pname; version = getVersion pdata; }) - (lib.filterAttrs - (pname: pdata: ! (pdata.dev or false) || dev) - parsedDependencies); + packages = + {"${defaultPackage}" = packageJson.version or "unknown";} + // (nodejsUtils.getWorkspacePackages tree workspaces); - subsystemName = "nodejs"; + mainPackageDependencies = + lib.mapAttrsToList + (pname: pdata: { + name = pname; + version = getVersion pdata; + }) + (lib.filterAttrs + (pname: pdata: ! (pdata.dev or false) || dev) + parsedDependencies); - subsystemAttrs = { nodejsVersion = args.nodejs; }; + subsystemName = "nodejs"; - # functions - serializePackages = inputData: - let - serialize = inputData: - lib.mapAttrsToList # returns list of lists - (pname: pdata: - [ (pdata // { - inherit pname; - depsExact = - lib.filter - (req: - (! (pdata.dependencies."${req.name}".bundled or false))) - pdata.depsExact or {}; - }) ] - ++ - (lib.optionals (pdata ? dependencies) - (lib.flatten - (serialize - (lib.filterAttrs - (pname: data: ! data.bundled or false) - pdata.dependencies))))) - inputData; - in - lib.filter - (pdata: - dev || ! 
(pdata.dev or false)) - (lib.flatten (serialize inputData)); + subsystemAttrs = {nodejsVersion = args.nodejs;}; - getName = dependencyObject: dependencyObject.pname; + # functions + serializePackages = inputData: let + serialize = inputData: + lib.mapAttrsToList # returns list of lists + + (pname: pdata: + [ + (pdata + // { + inherit pname; + depsExact = + lib.filter + (req: (! (pdata.dependencies."${req.name}".bundled or false))) + pdata.depsExact or {}; + }) + ] + ++ (lib.optionals (pdata ? dependencies) + (lib.flatten + (serialize + (lib.filterAttrs + (pname: data: ! data.bundled or false) + pdata.dependencies))))) + inputData; + in + lib.filter + (pdata: + dev || ! (pdata.dev or false)) + (lib.flatten (serialize inputData)); - inherit getVersion; + getName = dependencyObject: dependencyObject.pname; - getSourceType = dependencyObject: - if identifyGitSource dependencyObject then - "git" - else if lib.hasPrefix "file:" dependencyObject.version then - "path" - else - "http"; + inherit getVersion; - sourceConstructors = { + getSourceType = dependencyObject: + if identifyGitSource dependencyObject + then "git" + else if lib.hasPrefix "file:" dependencyObject.version + then "path" + else "http"; - git = dependencyObject: - dlib.parseGitUrl dependencyObject.version; + sourceConstructors = { + git = dependencyObject: + dlib.parseGitUrl dependencyObject.version; - http = dependencyObject: - if lib.hasPrefix "https://" dependencyObject.version then - rec { - version = getVersion dependencyObject; - url = dependencyObject.version; - hash = dependencyObject.integrity; - } - else if dependencyObject.resolved == false then - (createMissingSource - (getName dependencyObject) - (getVersion dependencyObject)) - // { - hash = dependencyObject.integrity; - } - else - rec { - url = dependencyObject.resolved; - hash = dependencyObject.integrity; - }; + http = dependencyObject: + if lib.hasPrefix "https://" dependencyObject.version + then rec { + version = getVersion dependencyObject; + url = dependencyObject.version; + hash = dependencyObject.integrity; + } + else if dependencyObject.resolved == false + then + (createMissingSource + (getName dependencyObject) + (getVersion dependencyObject)) + // { + hash = dependencyObject.integrity; + } + else rec { + url = dependencyObject.resolved; + hash = dependencyObject.integrity; + }; - path = dependencyObject: - rec { - path = getPath dependencyObject; - }; + path = dependencyObject: rec { + path = getPath dependencyObject; }; + }; - getDependencies = dependencyObject: - dependencyObject.depsExact; - }); -in - -rec { - + getDependencies = dependencyObject: + dependencyObject.depsExact; + }); +in rec { version = 2; inherit translate; + projectName = {source}: let + packageJson = "${source}/package.json"; + parsed = b.fromJSON (b.readFile packageJson); + in + if b.pathExists packageJson && parsed ? name + then parsed.name + else null; - projectName = - { - source, - }: - let - packageJson = "${source}/package.json"; - parsed = b.fromJSON (b.readFile packageJson); - in - if b.pathExists packageJson && parsed ? 
name then - parsed.name - else - null; - - - compatible = - { - source, - }: + compatible = {source}: dlib.containsMatchingFile - [ - ''.*package-lock\.json'' - ''.*package.json'' - ] - source; + [ + ''.*package-lock\.json'' + ''.*package.json'' + ] + source; extraArgs = { - name = { description = "The name of the main package"; examples = [ @@ -296,6 +269,5 @@ rec { ]; type = "argument"; }; - }; } diff --git a/src/translators/nodejs/pure/yarn-lock/default.nix b/src/translators/nodejs/pure/yarn-lock/default.nix index 81591a48..de623bbd 100644 --- a/src/translators/nodejs/pure/yarn-lock/default.nix +++ b/src/translators/nodejs/pure/yarn-lock/default.nix @@ -1,327 +1,301 @@ { dlib, lib, -}: - -let +}: let l = lib // builtins; - nodejsUtils = import ../../utils.nix { inherit lib; }; - parser = import ./parser.nix { inherit lib; }; + nodejsUtils = import ../../utils.nix {inherit lib;}; + parser = import ./parser.nix {inherit lib;}; getYarnLock = tree: proj: tree.getNodeFromPath "${proj.relPath}/yarn.lock"; - translate = - { - translatorName, - utils, - ... - }: - { - project, - source, - tree, + translate = { + translatorName, + utils, + ... + }: { + project, + source, + tree, + # extraArgs + nodejs, + noDev, + ... + } @ args: let + b = builtins; + dev = ! noDev; + name = project.name; + relPath = project.relPath; + tree = args.tree.getNodeFromPath project.relPath; + workspaces = project.subsystemInfo.workspaces or []; + yarnLock = parser.parse (tree.getNodeFromPath "yarn.lock").content; - # extraArgs - nodejs, - noDev, - ... - }@args: + defaultPackage = + if name != "{automatic}" + then name + else + packageJson.name + or (throw ( + "Could not identify package name. " + + "Please specify extra argument 'name'" + )); - let - b = builtins; - dev = ! noDev; - name = project.name; - relPath = project.relPath; - tree = args.tree.getNodeFromPath project.relPath; - workspaces = project.subsystemInfo.workspaces or []; - yarnLock = parser.parse (tree.getNodeFromPath "yarn.lock").content; + packageJson = + (tree.getNodeFromPath "package.json").jsonContent; - defaultPackage = - if name != "{automatic}" then - name - else - packageJson.name or (throw ( - "Could not identify package name. " - + "Please specify extra argument 'name'" - )); - - packageJson = - (tree.getNodeFromPath "package.json").jsonContent; - - packageJsonDeps = nodejsUtils.getPackageJsonDeps packageJson noDev; - - workspacesPackageJson = nodejsUtils.getWorkspacePackageJson tree workspaces; - - in + packageJsonDeps = nodejsUtils.getPackageJsonDeps packageJson noDev; + workspacesPackageJson = nodejsUtils.getWorkspacePackageJson tree workspaces; + in utils.simpleTranslate2 - ({ - objectsByKey, - ... - }: let + ({objectsByKey, ...}: let + makeWorkspaceExtraObject = workspace: let + json = workspacesPackageJson."${workspace}"; + name = json.name or workspace; + version = json.version or "unknown"; + in { + inherit name version; - makeWorkspaceExtraObject = workspace: let - json = workspacesPackageJson."${workspace}"; - name = json.name or workspace; - version = json.version or "unknown"; - in - { - inherit name version; - - dependencies = - l.mapAttrsToList - (depName: semVer: let - yarnName = "${depName}@${semVer}"; - depObject = objectsByKey.yarnName."${yarnName}"; - in - if exportedWorkspacePackages ? 
"${depName}" - then - { - name = depName; - version = exportedWorkspacePackages."${depName}"; - } - else {name = depName; version = depObject.version;}) - (nodejsUtils.getPackageJsonDeps json noDev); - - sourceSpec = { - type = "path"; - path = workspace; - rootName = defaultPackage; - rootVersion = packageJson.version or "unknown"; - }; - }; - - extraObjects = l.map makeWorkspaceExtraObject workspaces; - - exportedWorkspacePackages = - l.listToAttrs - (l.map - (wsObject: - l.nameValuePair - wsObject.name - wsObject.version) - extraObjects); - - getSourceType = rawObj: finalObj: let - dObj = rawObj; - in - if - lib.hasInfix "@github:" dObj.yarnName - - || (dObj ? resolved - && lib.hasInfix "codeload.github.com/" dObj.resolved) - - || lib.hasInfix "@git+" dObj.yarnName - - # example: - # "jest-image-snapshot@https://github.com/machard/jest-image-snapshot#machard-patch-1": - # version "4.2.0" - # resolved "https://github.com/machard/jest-image-snapshot#d087e8683859dba2964b5866a4d1eb02ba64e7b9" - || (lib.hasInfix "@https://github.com" dObj.yarnName - && lib.hasPrefix "https://github.com" dObj.resolved) - then - if dObj ? integrity - then - b.trace ( - "Warning: Using git despite integrity exists for" - + "${finalObj.name}" - ) - "git" - else "git" - - else if - lib.hasInfix "@link:" dObj.yarnName - || lib.hasInfix "@file:" dObj.yarnName - then "path" - - else "http"; - - in rec { - - inherit defaultPackage extraObjects translatorName; - - exportedPackages = - { "${defaultPackage}" = packageJson.version or "unknown"; } - // exportedWorkspacePackages; - - subsystemName = "nodejs"; - - subsystemAttrs = { nodejsVersion = args.nodejs; }; - - keys = { - yarnName = rawObj: finalObj: - rawObj.yarnName; - }; - - extractors = { - name = rawObj: finalObj: - if lib.hasInfix "@git+" rawObj.yarnName then - lib.head (lib.splitString "@git+" rawObj.yarnName) - # Example: - # @matrix-org/olm@https://gitlab.matrix.org/api/v4/projects/27/packages/npm/@matrix-org/olm/-/@matrix-org/olm-3.2.3.tgz - else if lib.hasInfix "@https://" rawObj.yarnName then - lib.head (lib.splitString "@https://" rawObj.yarnName) - else - let - split = lib.splitString "@" rawObj.yarnName; - version = lib.last split; - in - if lib.hasPrefix "@" rawObj.yarnName then - lib.removeSuffix "@${version}" rawObj.yarnName - else - lib.head split; - - version = rawObj: finalObj: - if l.hasInfix "@git+" rawObj.yarnName - then - let - split = l.splitString "@git+" rawObj.yarnName; - gitUrl = l.last split; - in - # l.strings.sanitizeDerivationName - "${rawObj.version}@git+${gitUrl}" - - else rawObj.version; - - dependencies = rawObj: finalObj: let - dependencies = let - deps = - rawObj.dependencies or {} - // rawObj.optionalDependencies or {}; - in - lib.mapAttrsToList - (name: version: { "${name}" = version; }) - deps; - in - lib.forEach - dependencies - (dependency: - builtins.head ( - lib.mapAttrsToList - (name: versionSpec: let - yarnName = "${name}@${versionSpec}"; - depObject = objectsByKey.yarnName."${yarnName}"; - version = depObject.version; - in - if ! objectsByKey.yarnName ? 
${yarnName} then - # handle missing lock file entry - let - versionMatch = - b.match ''.*\^([[:digit:]|\.]+)'' versionSpec; - in - { - inherit name; - version = b.elemAt versionMatch 0; - } - else - { inherit name version; } - ) - dependency - ) - ); - - sourceSpec = rawObj: finalObj: let - type = getSourceType rawObj finalObj; - in - { inherit type; } - // - (if type == "git" - then - if utils.identifyGitUrl rawObj.resolved then - (utils.parseGitUrl rawObj.resolved) // { - version = rawObj.version; - } - else - let - githubUrlInfos = lib.splitString "/" rawObj.resolved; - owner = lib.elemAt githubUrlInfos 3; - repo = lib.elemAt githubUrlInfos 4; - in - if b.length githubUrlInfos == 7 then - let - rev = lib.elemAt githubUrlInfos 6; - in - { - url = "https://github.com/${owner}/${repo}"; - inherit rev; - } - else if b.length githubUrlInfos == 5 then - let - urlAndRev = lib.splitString "#" rawObj.resolved; - in - { - url = lib.head urlAndRev; - rev = lib.last urlAndRev; - } - else - throw ( - "Unable to parse git dependency for: " - + "${finalObj.name}#${finalObj.version}" - ) - - else if type == "path" - then - if lib.hasInfix "@link:" rawObj.yarnName then - { - path = - lib.last (lib.splitString "@link:" rawObj.yarnName); - } - else if lib.hasInfix "@file:" rawObj.yarnName then - { - path = - lib.last (lib.splitString "@file:" rawObj.yarnName); - } - else - throw "unknown path format ${b.toJSON rawObj}" - - else # type == "http" - { - type = "http"; - hash = - if rawObj ? integrity then - rawObj.integrity - else - let - hash = - lib.last (lib.splitString "#" rawObj.resolved); - in - if lib.stringLength hash == 40 then - hash - else - throw "Missing integrity for ${rawObj.yarnName}"; - url = lib.head (lib.splitString "#" rawObj.resolved); - }); - }; - - extraDependencies = + dependencies = l.mapAttrsToList - (name: semVer: let - depYarnKey = "${name}@${semVer}"; - dependencyAttrs = - if ! yarnLock ? "${depYarnKey}" then - throw "Cannot find entry for top level dependency: '${depYarnKey}'" + (depName: semVer: let + yarnName = "${depName}@${semVer}"; + depObject = objectsByKey.yarnName."${yarnName}"; + in + if exportedWorkspacePackages ? "${depName}" + then { + name = depName; + version = exportedWorkspacePackages."${depName}"; + } + else { + name = depName; + version = depObject.version; + }) + (nodejsUtils.getPackageJsonDeps json noDev); + + sourceSpec = { + type = "path"; + path = workspace; + rootName = defaultPackage; + rootVersion = packageJson.version or "unknown"; + }; + }; + + extraObjects = l.map makeWorkspaceExtraObject workspaces; + + exportedWorkspacePackages = + l.listToAttrs + (l.map + (wsObject: + l.nameValuePair + wsObject.name + wsObject.version) + extraObjects); + + getSourceType = rawObj: finalObj: let + dObj = rawObj; + in + if + lib.hasInfix "@github:" dObj.yarnName + || (dObj + ? resolved + && lib.hasInfix "codeload.github.com/" dObj.resolved) + || lib.hasInfix "@git+" dObj.yarnName + # example: + # "jest-image-snapshot@https://github.com/machard/jest-image-snapshot#machard-patch-1": + # version "4.2.0" + # resolved "https://github.com/machard/jest-image-snapshot#d087e8683859dba2964b5866a4d1eb02ba64e7b9" + || (lib.hasInfix "@https://github.com" dObj.yarnName + && lib.hasPrefix "https://github.com" dObj.resolved) + then + if dObj ? 
integrity + then + b.trace ( + "Warning: Using git despite integrity exists for" + + "${finalObj.name}" + ) + "git" + else "git" + else if + lib.hasInfix "@link:" dObj.yarnName + || lib.hasInfix "@file:" dObj.yarnName + then "path" + else "http"; + in rec { + inherit defaultPackage extraObjects translatorName; + + exportedPackages = + {"${defaultPackage}" = packageJson.version or "unknown";} + // exportedWorkspacePackages; + + subsystemName = "nodejs"; + + subsystemAttrs = {nodejsVersion = args.nodejs;}; + + keys = { + yarnName = rawObj: finalObj: + rawObj.yarnName; + }; + + extractors = { + name = rawObj: finalObj: + if lib.hasInfix "@git+" rawObj.yarnName + then lib.head (lib.splitString "@git+" rawObj.yarnName) + # Example: + # @matrix-org/olm@https://gitlab.matrix.org/api/v4/projects/27/packages/npm/@matrix-org/olm/-/@matrix-org/olm-3.2.3.tgz + else if lib.hasInfix "@https://" rawObj.yarnName + then lib.head (lib.splitString "@https://" rawObj.yarnName) + else let + split = lib.splitString "@" rawObj.yarnName; + version = lib.last split; + in + if lib.hasPrefix "@" rawObj.yarnName + then lib.removeSuffix "@${version}" rawObj.yarnName + else lib.head split; + + version = rawObj: finalObj: + if l.hasInfix "@git+" rawObj.yarnName + then let + split = l.splitString "@git+" rawObj.yarnName; + gitUrl = l.last split; + in + # l.strings.sanitizeDerivationName + "${rawObj.version}@git+${gitUrl}" + else rawObj.version; + + dependencies = rawObj: finalObj: let + dependencies = let + deps = + rawObj.dependencies + or {} + // rawObj.optionalDependencies or {}; + in + lib.mapAttrsToList + (name: version: {"${name}" = version;}) + deps; + in + lib.forEach + dependencies + ( + dependency: + builtins.head ( + lib.mapAttrsToList + ( + name: versionSpec: let + yarnName = "${name}@${versionSpec}"; + depObject = objectsByKey.yarnName."${yarnName}"; + version = depObject.version; + in + if ! objectsByKey.yarnName ? 
${yarnName} + then + # handle missing lock file entry + let + versionMatch = + b.match ''.*\^([[:digit:]|\.]+)'' versionSpec; + in { + inherit name; + version = b.elemAt versionMatch 0; + } + else {inherit name version;} + ) + dependency + ) + ); + + sourceSpec = rawObj: finalObj: let + type = getSourceType rawObj finalObj; + in + {inherit type;} + // ( + if type == "git" + then + if utils.identifyGitUrl rawObj.resolved + then + (utils.parseGitUrl rawObj.resolved) + // { + version = rawObj.version; + } + else let + githubUrlInfos = lib.splitString "/" rawObj.resolved; + owner = lib.elemAt githubUrlInfos 3; + repo = lib.elemAt githubUrlInfos 4; + in + if b.length githubUrlInfos == 7 + then let + rev = lib.elemAt githubUrlInfos 6; + in { + url = "https://github.com/${owner}/${repo}"; + inherit rev; + } + else if b.length githubUrlInfos == 5 + then let + urlAndRev = lib.splitString "#" rawObj.resolved; + in { + url = lib.head urlAndRev; + rev = lib.last urlAndRev; + } else - yarnLock."${depYarnKey}"; - in + throw ( + "Unable to parse git dependency for: " + + "${finalObj.name}#${finalObj.version}" + ) + else if type == "path" + then + if lib.hasInfix "@link:" rawObj.yarnName + then { + path = + lib.last (lib.splitString "@link:" rawObj.yarnName); + } + else if lib.hasInfix "@file:" rawObj.yarnName + then { + path = + lib.last (lib.splitString "@file:" rawObj.yarnName); + } + else throw "unknown path format ${b.toJSON rawObj}" + else # type == "http" { - name = defaultPackage; - version = packageJson.version or "unknown"; - dependencies = [ - {inherit name; version = dependencyAttrs.version;} - ]; - }) - packageJsonDeps; + type = "http"; + hash = + if rawObj ? integrity + then rawObj.integrity + else let + hash = + lib.last (lib.splitString "#" rawObj.resolved); + in + if lib.stringLength hash == 40 + then hash + else throw "Missing integrity for ${rawObj.yarnName}"; + url = lib.head (lib.splitString "#" rawObj.resolved); + } + ); + }; - serializedRawObjects = - lib.mapAttrsToList - (yarnName: depAttrs: depAttrs // { inherit yarnName; }) - yarnLock; - - }); + extraDependencies = + l.mapAttrsToList + (name: semVer: let + depYarnKey = "${name}@${semVer}"; + dependencyAttrs = + if ! yarnLock ? "${depYarnKey}" + then throw "Cannot find entry for top level dependency: '${depYarnKey}'" + else yarnLock."${depYarnKey}"; + in { + name = defaultPackage; + version = packageJson.version or "unknown"; + dependencies = [ + { + inherit name; + version = dependencyAttrs.version; + } + ]; + }) + packageJsonDeps; + serializedRawObjects = + lib.mapAttrsToList + (yarnName: depAttrs: depAttrs // {inherit yarnName;}) + yarnLock; + }); in { - version = 2; inherit translate; @@ -329,15 +303,10 @@ in { # inherit projectName function from package-lock translator projectName = dlib.translators.translators.nodejs.pure.package-lock.projectName; - # This allows the framework to detect if the translator is compatible with the given input # to automatically select the right translator. - compatible = - { - source, - }: - dlib.containsMatchingFile [ ''.*yarn\.lock'' ''.*package.json'' ] source; - + compatible = {source}: + dlib.containsMatchingFile [''.*yarn\.lock'' ''.*package.json''] source; # If the translator requires additional arguments, specify them here. # There are only two types of arguments: @@ -345,7 +314,6 @@ in { # - boolean flag (type = "flag") # String arguments contain a default value and examples. Flags do not. 
extraArgs = { - name = { description = "The name of the main package"; examples = [ @@ -370,6 +338,5 @@ in { ]; type = "argument"; }; - }; } diff --git a/src/translators/nodejs/pure/yarn-lock/parser.nix b/src/translators/nodejs/pure/yarn-lock/parser.nix index 75ccbc9f..9c76d111 100644 --- a/src/translators/nodejs/pure/yarn-lock/parser.nix +++ b/src/translators/nodejs/pure/yarn-lock/parser.nix @@ -1,139 +1,119 @@ - -{ - lib ? (import {}).lib, - ... -}: - -let +{lib ? (import {}).lib, ...}: let l = lib // builtins; - parse = text: - let - lines = l.splitString "\n" text; + parse = text: let + lines = l.splitString "\n" text; - findStartLineNum = num: - let - line = l.elemAt lines num; - in - if ! l.hasPrefix "#" line - && ! l.hasPrefix " " line - && ! l.hasPrefix "_" line then - num - else - findStartLineNum (num + 1); + findStartLineNum = num: let + line = l.elemAt lines num; + in + if + ! l.hasPrefix "#" line + && ! l.hasPrefix " " line + && ! l.hasPrefix "_" line + then num + else findStartLineNum (num + 1); - contentLines = - l.sublist - (findStartLineNum 0) - ((l.length lines) - 1) - lines; + contentLines = + l.sublist + (findStartLineNum 0) + ((l.length lines) - 1) + lines; - matchLine = line: - let - # yarn v2 - m1 = l.match ''( *)(.*): (.*)'' line; - m2 = l.match ''( *)(.*):$'' line; + matchLine = line: let + # yarn v2 + m1 = l.match ''( *)(.*): (.*)'' line; + m2 = l.match ''( *)(.*):$'' line; - # yarn v1 - m3 = l.match ''( *)(.*) "(.*)"'' line; - m4 = l.match ''( *)(.*) (.*)'' line; - in - if m1 != null then - { - indent = (l.stringLength (l.elemAt m1 0)) / 2; - key = l.elemAt m1 1; - value = l.elemAt m1 2; - } - else if m2 != null then - { - indent = (l.stringLength (l.elemAt m2 0)) / 2; - # transform yarn 1 to yarn 2 tyle - key = - l.replaceStrings [ '', "'' ] [ '', '' ] - (l.replaceStrings [ ''", '' ] [ '', '' ] (l.elemAt m2 1)); - value = null; - } - else if m3 != null then - { - indent = (l.stringLength (l.elemAt m3 0)) / 2; - key = l.elemAt m3 1; - value = l.elemAt m3 2; - } - else if m4 != null then - { - indent = (l.stringLength (l.elemAt m4 0)) / 2; - key = l.elemAt m4 1; - value = l.elemAt m4 2; - } - else - null; + # yarn v1 + m3 = l.match ''( *)(.*) "(.*)"'' line; + m4 = l.match ''( *)(.*) (.*)'' line; + in + if m1 != null + then { + indent = (l.stringLength (l.elemAt m1 0)) / 2; + key = l.elemAt m1 1; + value = l.elemAt m1 2; + } + else if m2 != null + then { + indent = (l.stringLength (l.elemAt m2 0)) / 2; + # transform yarn 1 to yarn 2 tyle + key = + l.replaceStrings ['', "''] ['', ''] + (l.replaceStrings [''", ''] ['', ''] (l.elemAt m2 1)); + value = null; + } + else if m3 != null + then { + indent = (l.stringLength (l.elemAt m3 0)) / 2; + key = l.elemAt m3 1; + value = l.elemAt m3 2; + } + else if m4 != null + then { + indent = (l.stringLength (l.elemAt m4 0)) / 2; + key = l.elemAt m4 1; + value = l.elemAt m4 2; + } + else null; - closingParenthesis = num: - if num == 1 then "}" else "}" + (closingParenthesis (num - 1)); - - jsonLines = lines: - let - filtered = l.filter (line: l.match ''[[:space:]]*'' line == null) lines; - matched = l.map (line: matchLine line) filtered; - in - l.imap0 - (i: line: - let - mNext = l.elemAt matched (i + 1); - m = l.elemAt matched i; - keyParenthesis = - let - beginOK = l.hasPrefix ''"'' m.key; - endOK = l.hasSuffix ''"'' m.key; - begin = l.optionalString (! beginOK) ''"''; - end = l.optionalString (! 
endOK) ''"''; - in - ''${begin}${m.key}${end}''; - valParenthesis = - if l.hasPrefix ''"'' m.value then - m.value - else - ''"${m.value}"''; - in - if l.length filtered == i + 1 then - let - end = closingParenthesis m.indent; - in - ''${keyParenthesis}: ${valParenthesis}${end}}'' - else if m.value == null then - ''${keyParenthesis}: {'' - # if indent of next line is smaller, close the object - else if mNext.indent < m.indent then - let - end = closingParenthesis (m.indent - mNext.indent); - in - ''${keyParenthesis}: ${valParenthesis}${end},'' - else - ''${keyParenthesis}: ${valParenthesis},'') - filtered; - - json = "{${l.concatStringsSep "\n" (jsonLines contentLines)}"; - - dataRaw = l.fromJSON json; - - # transform key collections like: - # "@babel/code-frame@^7.0.0, @babel/code-frame@^7.10.4" - # ... to individual entries - data = - l.listToAttrs - (l.flatten - (l.mapAttrsToList - (n: v: - let - keys = l.splitString ", " n; - in - l.map (k: l.nameValuePair k v) keys) - dataRaw)); + closingParenthesis = num: + if num == 1 + then "}" + else "}" + (closingParenthesis (num - 1)); + jsonLines = lines: let + filtered = l.filter (line: l.match ''[[:space:]]*'' line == null) lines; + matched = l.map (line: matchLine line) filtered; + in + l.imap0 + (i: line: let + mNext = l.elemAt matched (i + 1); + m = l.elemAt matched i; + keyParenthesis = let + beginOK = l.hasPrefix ''"'' m.key; + endOK = l.hasSuffix ''"'' m.key; + begin = l.optionalString (! beginOK) ''"''; + end = l.optionalString (! endOK) ''"''; + in ''${begin}${m.key}${end}''; + valParenthesis = + if l.hasPrefix ''"'' m.value + then m.value + else ''"${m.value}"''; in - data; + if l.length filtered == i + 1 + then let + end = closingParenthesis m.indent; + in ''${keyParenthesis}: ${valParenthesis}${end}}'' + else if m.value == null + then ''${keyParenthesis}: {'' + # if indent of next line is smaller, close the object + else if mNext.indent < m.indent + then let + end = closingParenthesis (m.indent - mNext.indent); + in ''${keyParenthesis}: ${valParenthesis}${end},'' + else ''${keyParenthesis}: ${valParenthesis},'') + filtered; -in -{ + json = "{${l.concatStringsSep "\n" (jsonLines contentLines)}"; + + dataRaw = l.fromJSON json; + + # transform key collections like: + # "@babel/code-frame@^7.0.0, @babel/code-frame@^7.10.4" + # ... to individual entries + data = + l.listToAttrs + (l.flatten + (l.mapAttrsToList + (n: v: let + keys = l.splitString ", " n; + in + l.map (k: l.nameValuePair k v) keys) + dataRaw)); + in + data; +in { inherit parse; } diff --git a/src/translators/nodejs/utils.nix b/src/translators/nodejs/utils.nix index 31500e89..44d39951 100644 --- a/src/translators/nodejs/utils.nix +++ b/src/translators/nodejs/utils.nix @@ -1,49 +1,39 @@ -{ - lib, -}: let - -l = lib // builtins; - +{lib}: let + l = lib // builtins; in rec { - getPackageJsonDeps = packageJson: noDev: - packageJson.dependencies or {} + packageJson.dependencies + or {} // (lib.optionalAttrs (! noDev) (packageJson.devDependencies or {})); getWorkspaceLockFile = tree: project: fname: let # returns the parsed package-lock.json for a given project dirRelPath = - if project ? subsystemInfo.workspaceParent then - "${project.subsystemInfo.workspaceParent}" - else - "${project.relPath}"; + if project ? subsystemInfo.workspaceParent + then "${project.subsystemInfo.workspaceParent}" + else "${project.relPath}"; packageJson = (tree.getNodeFromPath "${dirRelPath}/package.json").jsonContent; hasNoDependencies = ! packageJson ? dependencies && ! packageJson ? 
devDependencies; - in - if hasNoDependencies then - null - else - tree.getNodeFromPath "${dirRelPath}/${fname}"; - + if hasNoDependencies + then null + else tree.getNodeFromPath "${dirRelPath}/${fname}"; getWorkspacePackageJson = tree: workspaces: l.genAttrs - workspaces - (wsRelPath: - (tree.getNodeFromPath "${wsRelPath}/package.json").jsonContent); + workspaces + (wsRelPath: + (tree.getNodeFromPath "${wsRelPath}/package.json").jsonContent); getWorkspacePackages = tree: workspaces: lib.mapAttrs' - (wsRelPath: json: - l.nameValuePair - json.name - json.version) - (getWorkspacePackageJson tree workspaces); - - + (wsRelPath: json: + l.nameValuePair + json.name + json.version) + (getWorkspacePackageJson tree workspaces); } diff --git a/src/translators/python/impure/pip/default.nix b/src/translators/python/impure/pip/default.nix index 70ac8092..29afdccf 100644 --- a/src/translators/python/impure/pip/default.nix +++ b/src/translators/python/impure/pip/default.nix @@ -1,125 +1,111 @@ { dlib, lib, -}: - -let +}: let b = builtins; -in - -{ - +in { # the input format is specified in /specifications/translator-call-example.json # this script receives a json file including the input paths and extraArgs - translateBin = - { - # dream2nix - externalSources, - utils, - - bash, - coreutils, - jq, - nix, - python3, - writeScriptBin, - ... - }: - let - machNixExtractor = "${externalSources.mach-nix}/lib/default.nix"; - - setuptools_shim = '' - import sys, setuptools, tokenize, os; sys.argv[0] = 'setup.py'; __file__='setup.py'; - f=getattr(tokenize, 'open', open)(__file__); - code=f.read().replace('\r\n', '\n'); - f.close(); - exec(compile(code, __file__, 'exec')) - ''; - in + translateBin = { + # dream2nix + externalSources, + utils, + bash, + coreutils, + jq, + nix, + python3, + writeScriptBin, + ... 
+ }: let + machNixExtractor = "${externalSources.mach-nix}/lib/default.nix"; + setuptools_shim = '' + import sys, setuptools, tokenize, os; sys.argv[0] = 'setup.py'; __file__='setup.py'; + f=getattr(tokenize, 'open', open)(__file__); + code=f.read().replace('\r\n', '\n'); + f.close(); + exec(compile(code, __file__, 'exec')) + ''; + in utils.writePureShellScript - [ - bash - coreutils - jq - nix - ] - '' - # accroding to the spec, the translator reads the input from a json file - jsonInput=$1 + [ + bash + coreutils + jq + nix + ] + '' + # accroding to the spec, the translator reads the input from a json file + jsonInput=$1 - # read the json input - outputFile=$(${jq}/bin/jq '.outputFile' -c -r $jsonInput) - source=$(${jq}/bin/jq '.source' -c -r $jsonInput) - pythonAttr=$(${jq}/bin/jq '.pythonAttr' -c -r $jsonInput) - application=$(${jq}/bin/jq '.application' -c -r $jsonInput) + # read the json input + outputFile=$(${jq}/bin/jq '.outputFile' -c -r $jsonInput) + source=$(${jq}/bin/jq '.source' -c -r $jsonInput) + pythonAttr=$(${jq}/bin/jq '.pythonAttr' -c -r $jsonInput) + application=$(${jq}/bin/jq '.application' -c -r $jsonInput) - # build python and pip executables - tmpBuild=$(mktemp -d) - nix build --show-trace --impure --expr \ - " - (import ${machNixExtractor} {}).mkPy - (import {}).$pythonAttr - " \ - -o $tmpBuild/python - nix build --impure --expr "(import {}).$pythonAttr.pkgs.pip" -o $tmpBuild/pip - python=$tmpBuild/python/bin/python - pip=$tmpBuild/pip/bin/pip + # build python and pip executables + tmpBuild=$(mktemp -d) + nix build --show-trace --impure --expr \ + " + (import ${machNixExtractor} {}).mkPy + (import {}).$pythonAttr + " \ + -o $tmpBuild/python + nix build --impure --expr "(import {}).$pythonAttr.pkgs.pip" -o $tmpBuild/pip + python=$tmpBuild/python/bin/python + pip=$tmpBuild/pip/bin/pip - # prepare temporary directory - tmp=$(mktemp -d) + # prepare temporary directory + tmp=$(mktemp -d) - # extract python requirements from setup.py - cp -r $source $tmpBuild/src - chmod -R +w $tmpBuild/src - cd $tmpBuild/src - chmod +x setup.py || true - echo "extracting dependencies" - out_file=$tmpBuild/python.json \ - dump_setup_attrs=y \ - PYTHONIOENCODING=utf8 \ - LANG=C.utf8 \ - $python -c "${setuptools_shim}" install &> $tmpBuild/python.log || true + # extract python requirements from setup.py + cp -r $source $tmpBuild/src + chmod -R +w $tmpBuild/src + cd $tmpBuild/src + chmod +x setup.py || true + echo "extracting dependencies" + out_file=$tmpBuild/python.json \ + dump_setup_attrs=y \ + PYTHONIOENCODING=utf8 \ + LANG=C.utf8 \ + $python -c "${setuptools_shim}" install &> $tmpBuild/python.log || true - # extract requirements from json result - $python -c " - import json - result = json.load(open('$tmpBuild/python.json')) - for key in ('install_requires', 'setup_requires'): - if key in result: - print('\n'.join(result[key])) - " > $tmpBuild/computed_requirements + # extract requirements from json result + $python -c " + import json + result = json.load(open('$tmpBuild/python.json')) + for key in ('install_requires', 'setup_requires'): + if key in result: + print('\n'.join(result[key])) + " > $tmpBuild/computed_requirements - # download files according to requirements - $tmpBuild/pip/bin/pip download \ - --no-cache \ - --dest $tmp \ - --progress-bar off \ - -r $tmpBuild/computed_requirements - # -r ''${inputFiles/$'\n'/$' -r '} + # download files according to requirements + $tmpBuild/pip/bin/pip download \ + --no-cache \ + --dest $tmp \ + --progress-bar off \ + -r 
$tmpBuild/computed_requirements + # -r ''${inputFiles/$'\n'/$' -r '} - # generate the dream lock from the downloaded list of files - NAME=$(${jq}/bin/jq '.name' -c -r $tmpBuild/python.json) \ - VERSION=$(${jq}/bin/jq '.version' -c -r $tmpBuild/python.json) \ - $tmpBuild/python/bin/python ${./generate-dream-lock.py} $tmp $jsonInput + # generate the dream lock from the downloaded list of files + NAME=$(${jq}/bin/jq '.name' -c -r $tmpBuild/python.json) \ + VERSION=$(${jq}/bin/jq '.version' -c -r $tmpBuild/python.json) \ + $tmpBuild/python/bin/python ${./generate-dream-lock.py} $tmp $jsonInput - rm -rf $tmp $tmpBuild - ''; + rm -rf $tmp $tmpBuild + ''; - - compatible = - { - source, - }: + compatible = {source}: dlib.containsMatchingFile - [ - ''.*requirements.*\.txt'' - ] - source; + [ + ''.*requirements.*\.txt'' + ] + source; # define special args and provide defaults extraArgs = { - # the python attribute pythonAttr = { default = "python3"; @@ -136,6 +122,5 @@ in description = "build application instead of package"; type = "flag"; }; - }; } diff --git a/src/translators/rust/pure/cargo-lock/default.nix b/src/translators/rust/pure/cargo-lock/default.nix index abf9268f..6425fdf6 100644 --- a/src/translators/rust/pure/cargo-lock/default.nix +++ b/src/translators/rust/pure/cargo-lock/default.nix @@ -1,271 +1,248 @@ { dlib, lib, -}: - -let +}: let l = lib // builtins; -in +in { + translate = { + externals, + translatorName, + utils, + ... + }: { + source, + packageName, + ... + } @ args: let + inputDir = source; -{ - translate = - { - externals, - translatorName, - utils, - ... - }: - { - source, - packageName, - ... - }@args: - let - inputDir = source; + recurseFiles = path: + l.flatten ( + l.mapAttrsToList + (n: v: + if v == "directory" + then recurseFiles "${path}/${n}" + else "${path}/${n}") + (l.readDir path) + ); - recurseFiles = path: - l.flatten ( - l.mapAttrsToList - (n: v: - if v == "directory" then - recurseFiles "${path}/${n}" - else - "${path}/${n}") - (l.readDir path) - ); + # Find all Cargo.toml files and parse them + allFiles = l.flatten (l.map recurseFiles [inputDir]); + cargoTomlPaths = l.filter (path: l.baseNameOf path == "Cargo.toml") allFiles; + cargoTomls = + l.map + (path: { + inherit path; + value = l.fromTOML (l.readFile path); + }) + cargoTomlPaths; - # Find all Cargo.toml files and parse them - allFiles = l.flatten (l.map recurseFiles [ inputDir ]); - cargoTomlPaths = l.filter (path: l.baseNameOf path == "Cargo.toml") allFiles; - cargoTomls = - l.map - (path: { - inherit path; - value = l.fromTOML (l.readFile path); - }) - cargoTomlPaths; + # Filter cargo-tomls to for files that actually contain packages + cargoPackages = + l.filter + (toml: l.hasAttrByPath ["package" "name"] toml.value) + cargoTomls; - # Filter cargo-tomls to for files that actually contain packages - cargoPackages = - l.filter - (toml: l.hasAttrByPath [ "package" "name" ] toml.value) - cargoTomls; + packageName = + if args.packageName == "{automatic}" + then let + # Small function to check if a given package path has a package + # that has binaries + hasBinaries = toml: + l.hasAttr "bin" toml.value + || l.pathExists "${l.dirOf toml.path}/src/main.rs" + || l.pathExists "${l.dirOf toml.path}/src/bin"; - packageName = - if args.packageName == "{automatic}" - then - let - # Small function to check if a given package path has a package - # that has binaries - hasBinaries = toml: - l.hasAttr "bin" toml.value - || l.pathExists "${l.dirOf toml.path}/src/main.rs" - || l.pathExists "${l.dirOf 
toml.path}/src/bin"; + # Try to find a package with a binary + pkg = l.findFirst hasBinaries (l.elemAt cargoPackages 0) cargoPackages; + in + pkg.value.package.name + else args.packageName; - # Try to find a package with a binary - pkg = l.findFirst hasBinaries (l.elemAt cargoPackages 0) cargoPackages; + # Find the Cargo.toml matching the package name + checkForPackageName = cargoToml: (cargoToml.value.package.name or null) == packageName; + packageToml = + l.findFirst + checkForPackageName + (throw "no Cargo.toml found with the package name passed: ${packageName}") + cargoTomls; - in pkg.value.package.name - else args.packageName; + # Parse Cargo.lock and extract dependencies + parsedLock = l.fromTOML (l.readFile "${inputDir}/Cargo.lock"); + parsedDeps = parsedLock.package; + # This parses a "package-name version" entry in the "dependencies" + # field of a dependency in Cargo.lock + makeDepNameVersion = entry: let + parsed = l.splitString " " entry; + name = l.head parsed; + maybeVersion = + if l.length parsed > 1 + then l.last parsed + else null; + in { + inherit name; + version = + # If there is no version, search through the lockfile to + # find the dependency's version + if maybeVersion != null + then maybeVersion + else + ( + l.findFirst + (dep: dep.name == name) + (throw "no dependency found with name ${name} in Cargo.lock") + parsedDeps + ) + .version; + }; - # Find the Cargo.toml matching the package name - checkForPackageName = cargoToml: (cargoToml.value.package.name or null) == packageName; - packageToml = - l.findFirst - checkForPackageName - (throw "no Cargo.toml found with the package name passed: ${packageName}") - cargoTomls; + package = rec { + toml = packageToml.value; + tomlPath = packageToml.path; - # Parse Cargo.lock and extract dependencies - parsedLock = l.fromTOML (l.readFile "${inputDir}/Cargo.lock"); - parsedDeps = parsedLock.package; - # This parses a "package-name version" entry in the "dependencies" - # field of a dependency in Cargo.lock - makeDepNameVersion = entry: - let - parsed = l.splitString " " entry; - name = l.head parsed; - maybeVersion = if l.length parsed > 1 then l.last parsed else null; - in + name = toml.package.name; + version = toml.package.version or (l.warn "no version found in Cargo.toml for ${name}, defaulting to unknown" "unknown"); + }; + + # Parses a git source, taken straight from nixpkgs. + parseGitSource = src: let + parts = builtins.match ''git\+([^?]+)(\?(rev|tag|branch)=(.*))?#(.*)'' src; + type = builtins.elemAt parts 2; # rev, tag or branch + value = builtins.elemAt parts 3; + in + if parts == null + then null + else { - inherit name; - version = - # If there is no version, search through the lockfile to - # find the dependency's version - if maybeVersion != null - then maybeVersion - else ( - l.findFirst - (dep: dep.name == name) - (throw "no dependency found with name ${name} in Cargo.lock") - parsedDeps - ).version; - }; + url = builtins.elemAt parts 0; + sha = builtins.elemAt parts 4; + } + // lib.optionalAttrs (type != null) {inherit type value;}; - package = rec { - toml = packageToml.value; - tomlPath = packageToml.path; + # Extracts a source type from a dependency. 
+ getSourceTypeFrom = dependencyObject: let + checkType = type: l.hasPrefix "${type}+" dependencyObject.source; + in + if !(l.hasAttr "source" dependencyObject) + then "path" + else if checkType "git" + then "git" + else if checkType "registry" + then + if dependencyObject.source == "registry+https://github.com/rust-lang/crates.io-index" + then "crates-io" + else throw "registries other than crates.io are not supported yet" + else throw "unknown or unsupported source type: ${dependencyObject.source}"; + in + utils.simpleTranslate + ({ + getDepByNameVer, + dependenciesByOriginalID, + ... + }: rec { + # VALUES - name = toml.package.name; - version = toml.package.version or (l.warn "no version found in Cargo.toml for ${name}, defaulting to unknown" "unknown"); + inherit translatorName; + + # The raw input data as an attribute set. + # This will then be processed by `serializePackages` (see below) and + # transformed into a flat list. + inputData = parsedDeps; + + defaultPackage = package.name; + + packages = + (l.listToAttrs + (l.map + (toml: + l.nameValuePair + toml.value.package.name + toml.value.package.version) + cargoPackages)) + // {"${defaultPackage}" = package.version;}; + + mainPackageDependencies = let + mainPackage = + l.findFirst + (dep: dep.name == package.name) + (throw "could not find main package in Cargo.lock") + parsedDeps; + in + l.map makeDepNameVersion (mainPackage.dependencies or []); + + # the name of the subsystem + subsystemName = "rust"; + + # Extract subsystem specific attributes. + # The structure of this should be defined in: + # ./src/specifications/{subsystem} + subsystemAttrs = rec { + gitSources = let + gitDeps = l.filter (dep: (getSourceTypeFrom dep) == "git") parsedDeps; + in + l.unique (l.map (dep: parseGitSource dep.source) gitDeps); }; - # Parses a git source, taken straight from nixpkgs. - parseGitSource = src: - let - parts = builtins.match ''git\+([^?]+)(\?(rev|tag|branch)=(.*))?#(.*)'' src; - type = builtins.elemAt parts 2; # rev, tag or branch - value = builtins.elemAt parts 3; - in - if parts == null then null - else { - url = builtins.elemAt parts 0; - sha = builtins.elemAt parts 4; - } // lib.optionalAttrs (type != null) { inherit type value; }; + # FUNCTIONS - # Extracts a source type from a dependency. - getSourceTypeFrom = dependencyObject: - let checkType = type: l.hasPrefix "${type}+" dependencyObject.source; in - if !(l.hasAttr "source" dependencyObject) - then "path" - else if checkType "git" then - "git" - else if checkType "registry" then - if dependencyObject.source == "registry+https://github.com/rust-lang/crates.io-index" - then "crates-io" - else throw "registries other than crates.io are not supported yet" - else - throw "unknown or unsupported source type: ${dependencyObject.source}"; - in + # return a list of package objects of arbitrary structure + serializePackages = inputData: inputData; - utils.simpleTranslate - ({ - getDepByNameVer, - dependenciesByOriginalID, - ... - }: + # return the name for a package object + getName = dependencyObject: dependencyObject.name; - rec { - # VALUES + # return the version for a package object + getVersion = dependencyObject: dependencyObject.version; - inherit translatorName; + # get dependencies of a dependency object + getDependencies = dependencyObject: + l.map makeDepNameVersion (dependencyObject.dependencies or []); - # The raw input data as an attribute set. - # This will then be processed by `serializePackages` (see below) and - # transformed into a flat list. 
- inputData = parsedDeps; + # return the source type of a package object + getSourceType = getSourceTypeFrom; - defaultPackage = package.name; + # An attrset of constructor functions. + # Given a dependency object and a source type, construct the + # source definition containing url, hash, etc. + sourceConstructors = { + path = dependencyObject: let + toml = ( + l.findFirst + (toml: toml.value.package.name == dependencyObject.name) + (throw "could not find crate ${dependencyObject.name}") + cargoPackages + ); + relDir = lib.removePrefix "${inputDir}/" (l.dirOf toml.path); + in { + path = relDir; + rootName = package.name; + rootVersion = package.version; + }; - packages = - (l.listToAttrs - (l.map - (toml: - l.nameValuePair - toml.value.package.name - toml.value.package.version) - cargoPackages)) - // - { "${defaultPackage}" = package.version; }; + git = dependencyObject: let + parsed = parseGitSource dependencyObject.source; + in { + url = parsed.url; + rev = parsed.sha; + }; - mainPackageDependencies = - let - mainPackage = - l.findFirst - (dep: dep.name == package.name) - (throw "could not find main package in Cargo.lock") - parsedDeps; - in - l.map makeDepNameVersion (mainPackage.dependencies or [ ]); - - # the name of the subsystem - subsystemName = "rust"; - - # Extract subsystem specific attributes. - # The structure of this should be defined in: - # ./src/specifications/{subsystem} - subsystemAttrs = rec { - gitSources = let - gitDeps = l.filter (dep: (getSourceTypeFrom dep) == "git") parsedDeps; - in l.unique (l.map (dep: parseGitSource dep.source) gitDeps); - }; - - # FUNCTIONS - - # return a list of package objects of arbitrary structure - serializePackages = inputData: inputData; - - # return the name for a package object - getName = dependencyObject: dependencyObject.name; - - # return the version for a package object - getVersion = dependencyObject: dependencyObject.version; - - # get dependencies of a dependency object - getDependencies = dependencyObject: - l.map makeDepNameVersion (dependencyObject.dependencies or [ ]); - - # return the source type of a package object - getSourceType = getSourceTypeFrom; - - # An attrset of constructor functions. - # Given a dependency object and a source type, construct the - # source definition containing url, hash, etc. - sourceConstructors = { - path = dependencyObject: - let - toml = - (l.findFirst - (toml: toml.value.package.name == dependencyObject.name) - (throw "could not find crate ${dependencyObject.name}") - cargoPackages - ); - relDir = lib.removePrefix "${inputDir}/" (l.dirOf toml.path); - in - { - path = relDir; - rootName = package.name; - rootVersion = package.version; - }; - - git = dependencyObject: - let - parsed = parseGitSource dependencyObject.source; - in - { - url = parsed.url; - rev = parsed.sha; - }; - - crates-io = dependencyObject: - { - hash = dependencyObject.checksum; - }; - }; - }); - - - projectName = - { - source, - }: - let - cargoToml = "${source}/Cargo.toml"; - in - if l.pathExists cargoToml then - (l.fromTOML (l.readFile cargoToml)).package.name or null - else - null; + crates-io = dependencyObject: { + hash = dependencyObject.checksum; + }; + }; + }); + projectName = {source}: let + cargoToml = "${source}/Cargo.toml"; + in + if l.pathExists cargoToml + then (l.fromTOML (l.readFile cargoToml)).package.name or null + else null; # This allows the framework to detect if the translator is compatible with the given input # to automatically select the right translator. 
- compatible = - { - source, - }: - dlib.containsMatchingFile [ ''.*Cargo\.lock'' ] source; - + compatible = {source}: + dlib.containsMatchingFile [''.*Cargo\.lock''] source; # If the translator requires additional arguments, specify them here. # When users run the CLI, they will be asked to specify these arguments. diff --git a/src/updaters/default.nix b/src/updaters/default.nix index 303536f7..625e08b6 100644 --- a/src/updaters/default.nix +++ b/src/updaters/default.nix @@ -5,45 +5,33 @@ lib, python3, writeText, - # dream2nix inputs callPackageDream, fetchers, utils, ... -}: -let - +}: let lockUtils = utils.dreamLock; updaters = callPackageDream ./updaters.nix {}; - getUpdaterName = - { - dreamLock, - }: - let - lock = (utils.readDreamLock { inherit dreamLock; }).lock; - source = lockUtils.getMainPackageSource lock; - in - lock.updater - or fetchers.fetchers."${source.type}".defaultUpdater - or null; + getUpdaterName = {dreamLock}: let + lock = (utils.readDreamLock {inherit dreamLock;}).lock; + source = lockUtils.getMainPackageSource lock; + in + lock.updater + or fetchers.fetchers."${source.type}".defaultUpdater + or null; - makeUpdateScript = - { - dreamLock, - updater ? getUpdaterName { inherit dreamLock; }, - }: - let - lock = (utils.readDreamLock { inherit dreamLock; }).lock; - source = lockUtils.getMainPackageSource lock; - updater' = updaters."${updater}"; - in - updater' source; - -in - -{ + makeUpdateScript = { + dreamLock, + updater ? getUpdaterName {inherit dreamLock;}, + }: let + lock = (utils.readDreamLock {inherit dreamLock;}).lock; + source = lockUtils.getMainPackageSource lock; + updater' = updaters."${updater}"; + in + updater' source; +in { inherit getUpdaterName makeUpdateScript updaters; } diff --git a/src/updaters/updaters.nix b/src/updaters/updaters.nix index 3025ca57..bf446230 100644 --- a/src/updaters/updaters.nix +++ b/src/updaters/updaters.nix @@ -5,42 +5,31 @@ lib, python3, writeText, - # dream2nix inputs utils, ... -}: -{ - githubNewestReleaseTag = - { - owner, - repo, - ... - }: - utils.writePureShellScript [ curl jq ] '' +}: { + githubNewestReleaseTag = { + owner, + repo, + ... + }: + utils.writePureShellScript [curl jq] '' curl -s "https://api.github.com/repos/${owner}/${repo}/releases?per_page=1" | jq -r '.[0].tag_name' ''; - - pypiNewestReleaseVersion = - { - pname, - ... - }: - utils.writePureShellScript [ curl jq ] '' + + pypiNewestReleaseVersion = {pname, ...}: + utils.writePureShellScript [curl jq] '' curl -s https://pypi.org/pypi/${pname}/json | jq -r '.info.version' ''; - npmNewestReleaseVersion = - { - pname, - ... - }: - # api docs: https://github.com/npm/registry/blob/master/docs/REGISTRY-API.md#get - utils.writePureShellScript [ curl jq ] '' + npmNewestReleaseVersion = {pname, ...}: + # api docs: https://github.com/npm/registry/blob/master/docs/REGISTRY-API.md#get + utils.writePureShellScript [curl jq] '' curl -s https://registry.npmjs.com/${pname} | jq -r '."dist-tags".latest' ''; - urlRegexPython = + urlRegexPython = # Don't forget to use double quoted strings # or double escape ('\\' instead of '\'). # Expects named group 'rev' to be defined. @@ -50,14 +39,12 @@ url, regex, ... 
- }: - let + }: let reFile = writeText "regex" regex; in - utils.writePureShellScript [ curl gnugrep python3 ] '' + utils.writePureShellScript [curl gnugrep python3] '' curl -s ${url} \ | python3 -c \ 'import re, sys; print(re.search(open("${reFile}").read(), sys.stdin.read()).group("ver"), end="")' ''; - } diff --git a/src/utils/config.nix b/src/utils/config.nix index 05ed20eb..73ea6705 100644 --- a/src/utils/config.nix +++ b/src/utils/config.nix @@ -1,5 +1,4 @@ let - b = builtins; # loads attrs either from s: @@ -7,29 +6,25 @@ let # - json string # - attrset (no changes) loadAttrs = input: - if b.isPath input then - b.fromJSON (b.readFile input) - else if b.isString input then - b.fromJSON input - else if b.isAttrs input then - input - else - throw "input for loadAttrs must be json file or string or attrs"; + if b.isPath input + then b.fromJSON (b.readFile input) + else if b.isString input + then b.fromJSON input + else if b.isAttrs input + then input + else throw "input for loadAttrs must be json file or string or attrs"; # load dream2nix config extending with defaults - loadConfig = configInput: - let - config = loadAttrs configInput; - defaults = { - overridesDirs = []; - packagesDir = "./packages"; - projectRoot = null; - repoName = null; - }; - in - defaults // config; - -in -{ + loadConfig = configInput: let + config = loadAttrs configInput; + defaults = { + overridesDirs = []; + packagesDir = "./packages"; + projectRoot = null; + repoName = null; + }; + in + defaults // config; +in { inherit loadConfig; } diff --git a/src/utils/default.nix b/src/utils/default.nix index 7e06f284..27bbe559 100644 --- a/src/utils/default.nix +++ b/src/utils/default.nix @@ -13,16 +13,13 @@ stdenv, writeScript, writeScriptBin, - # dream2nix inputs apps, callPackageDream, externalSources, translators, ... 
-}: -let - +}: let b = builtins; l = lib // builtins; @@ -38,153 +35,150 @@ let # copied from poetry2nix ireplace = idx: value: list: ( lib.genList - (i: if i == idx then value else (b.elemAt list i)) - (b.length list) + (i: + if i == idx + then value + else (b.elemAt list i)) + (b.length list) ); }; - in + overrideUtils + // translatorUtils + // translatorUtils2 + // rec { + inherit + (dlib) + dirNames + callViaEnv + identifyGitUrl + latestVersion + listDirs + listFiles + nameVersionPair + parseGitUrl + readTextFile + recursiveUpdateUntilDepth + sanitizeDerivationName + traceJ + ; -overrideUtils -// translatorUtils -// translatorUtils2 -// rec { + dreamLock = dreamLockUtils; - inherit (dlib) - dirNames - callViaEnv - identifyGitUrl - latestVersion - listDirs - listFiles - nameVersionPair - parseGitUrl - readTextFile - recursiveUpdateUntilDepth - sanitizeDerivationName - traceJ - ; + inherit (dreamLockUtils) readDreamLock; - dreamLock = dreamLockUtils; + toDrv = path: runCommand "some-drv" {} "cp -r ${path} $out"; - inherit (dreamLockUtils) readDreamLock; + toTOML = import ./toTOML.nix {inherit lib;}; - toDrv = path: runCommand "some-drv" {} "cp -r ${path} $out"; - - toTOML = import ./toTOML.nix { inherit lib; }; - - # hash the contents of a path via `nix hash path` - hashPath = algo: path: - let + # hash the contents of a path via `nix hash path` + hashPath = algo: path: let hashPath = runCommand "hash-${algo}" {} '' ${nix}/bin/nix --option experimental-features nix-command hash path ${path} | tr --delete '\n' > $out ''; in b.readFile hashPath; - # hash a file via `nix hash file` - hashFile = algo: path: - let + # hash a file via `nix hash file` + hashFile = algo: path: let hashFile = runCommand "hash-${algo}" {} '' ${nix}/bin/nix --option experimental-features nix-command hash file ${path} | tr --delete '\n' > $out ''; in b.readFile hashFile; - # builder to create a shell script that has it's own PATH - writePureShellScript = availablePrograms: script: writeScript "script.sh" '' - #!${bash}/bin/bash - set -Eeuo pipefail + # builder to create a shell script that has it's own PATH + writePureShellScript = availablePrograms: script: + writeScript "script.sh" '' + #!${bash}/bin/bash + set -Eeuo pipefail - export PATH="${lib.makeBinPath availablePrograms}" - export NIX_PATH=nixpkgs=${pkgs.path} - export WORKDIR="$PWD" + export PATH="${lib.makeBinPath availablePrograms}" + export NIX_PATH=nixpkgs=${pkgs.path} + export WORKDIR="$PWD" - TMPDIR=$(${coreutils}/bin/mktemp -d) - cd $TMPDIR + TMPDIR=$(${coreutils}/bin/mktemp -d) + cd $TMPDIR - ${script} + ${script} - cd - ${coreutils}/bin/rm -rf $TMPDIR - ''; + cd + ${coreutils}/bin/rm -rf $TMPDIR + ''; - # builder to create a shell script that has it's own PATH - writePureShellScriptBin = binName: availablePrograms: script: - writeScriptBin binName '' - #!${bash}/bin/bash - set -Eeuo pipefail + # builder to create a shell script that has it's own PATH + writePureShellScriptBin = binName: availablePrograms: script: + writeScriptBin binName '' + #!${bash}/bin/bash + set -Eeuo pipefail - export PATH="${lib.makeBinPath availablePrograms}" - export NIX_PATH=nixpkgs=${pkgs.path} - export WORKDIR="$PWD" + export PATH="${lib.makeBinPath availablePrograms}" + export NIX_PATH=nixpkgs=${pkgs.path} + export WORKDIR="$PWD" - TMPDIR=$(${coreutils}/bin/mktemp -d) - cd $TMPDIR + TMPDIR=$(${coreutils}/bin/mktemp -d) + cd $TMPDIR - ${script} + ${script} - cd - ${coreutils}/bin/rm -rf $TMPDIR - ''; + cd + ${coreutils}/bin/rm -rf $TMPDIR + ''; - extractSource = - { + 
extractSource = { source, dir ? "", }: - stdenv.mkDerivation { - name = "${(source.name or "")}-extracted"; - src = source; - inherit dir; - phases = [ "unpackPhase" ]; - dontInstall = true; - dontFixup = true; - unpackCmd = - if lib.hasSuffix ".tgz" source.name then - '' + stdenv.mkDerivation { + name = "${(source.name or "")}-extracted"; + src = source; + inherit dir; + phases = ["unpackPhase"]; + dontInstall = true; + dontFixup = true; + unpackCmd = + if lib.hasSuffix ".tgz" source.name + then '' tar --delay-directory-restore -xf $src # set executable flag only on directories chmod -R +X . '' - else - null; - # sometimes tarballs do not end with .tar.?? - preUnpack = '' - unpackFallback(){ - local fn="$1" - tar xf "$fn" - } + else null; + # sometimes tarballs do not end with .tar.?? + preUnpack = '' + unpackFallback(){ + local fn="$1" + tar xf "$fn" + } - unpackCmdHooks+=(unpackFallback) - ''; - postUnpack = '' - echo postUnpack - mv "$sourceRoot/$dir" $out - exit - ''; - }; + unpackCmdHooks+=(unpackFallback) + ''; + postUnpack = '' + echo postUnpack + mv "$sourceRoot/$dir" $out + exit + ''; + }; - satisfiesSemver = poetry2nixSemver.satisfiesSemver; + satisfiesSemver = poetry2nixSemver.satisfiesSemver; - makeTranslateScript = - { + makeTranslateScript = { invalidationHash, source, project, - }@args: - let + } @ args: let translator = translators.translatorsV2."${project.subsystem}".all."${project.translator}"; - argsJsonFile = pkgs.writeText "translator-args.json" + argsJsonFile = + pkgs.writeText "translator-args.json" (l.toJSON (args - // { - project = l.removeAttrs args.project ["dreamLock"]; - outputFile = project.dreamLockPath; - })); + // { + project = l.removeAttrs args.project ["dreamLock"]; + outputFile = project.dreamLockPath; + })); in writePureShellScriptBin "resolve" [ @@ -219,15 +213,14 @@ overrideUtils fi ''; - # a script that produces and dumps the dream-lock json for a given source - makePackageLockScript = - { + # a script that produces and dumps the dream-lock json for a given source + makePackageLockScript = { packagesDir, source, translator, translatorArgs, }: - writePureShellScript + writePureShellScript [] '' cd $WORKDIR @@ -236,12 +229,12 @@ overrideUtils --no-default-nix \ --translator ${translator} \ --invalidation-hash ${dlib.calcInvalidationHash { - inherit source translator translatorArgs; - }} \ + inherit source translator translatorArgs; + }} \ --packages-root $WORKDIR/${packagesDir} \ ${lib.concatStringsSep " \\\n" - (lib.mapAttrsToList - (key: val: "--arg ${key}=${b.toString val}") - translatorArgs)} + (lib.mapAttrsToList + (key: val: "--arg ${key}=${b.toString val}") + translatorArgs)} ''; -} + } diff --git a/src/utils/dream-lock.nix b/src/utils/dream-lock.nix index 33a0c792..8ef74cbe 100644 --- a/src/utils/dream-lock.nix +++ b/src/utils/dream-lock.nix @@ -1,139 +1,126 @@ { lib, - # dream2nix utils, ... 
-}: -let - +}: let b = builtins; - subDreamLockNames = dreamLockFile: - let - dir = b.dirOf dreamLockFile; + subDreamLockNames = dreamLockFile: let + dir = b.dirOf dreamLockFile; - directories = utils.listDirs dir; + directories = utils.listDirs dir; - dreamLockDirs = - lib.filter - (d: b.pathExists ("${dir}/${d}/dream-lock.json")) - directories; + dreamLockDirs = + lib.filter + (d: b.pathExists "${dir}/${d}/dream-lock.json") + directories; + in + dreamLockDirs; + readDreamLock = {dreamLock} @ args: let + isFile = + b.isPath dreamLock + || b.isString dreamLock + || lib.isDerivation dreamLock; + + lockMaybeCompressed = + if isFile + then b.fromJSON (b.readFile dreamLock) + else dreamLock; + + lock = + if lockMaybeCompressed.decompressed or false + then lockMaybeCompressed + else decompressDreamLock lockMaybeCompressed; + + subDreamLocks = + if ! isFile + then {} + else let + dir = b.dirOf dreamLock; + in + lib.genAttrs + (subDreamLockNames dreamLock) + (d: + readDreamLock + {dreamLock = "${dir}/${d}/dream-lock.json";}); + + packages = lock._generic.packages; + + defaultPackageName = lock._generic.defaultPackage; + defaultPackageVersion = packages."${defaultPackageName}"; + + subsystemAttrs = lock._subsystem; + + sources = lock.sources; + + dependencyGraph = lock.dependencies; + + packageVersions = + lib.mapAttrs + (name: versions: lib.attrNames versions) + dependencyGraph; + + cyclicDependencies = lock.cyclicDependencies; + + getSourceSpec = pname: version: + sources."${pname}"."${version}" + or ( + throw "The source spec for ${pname}#${version} is not defined in lockfile." + ); + + getDependencies = pname: version: + b.filter + (dep: ! b.elem dep cyclicDependencies."${pname}"."${version}" or []) + dependencyGraph."${pname}"."${version}" or []; + + getCyclicDependencies = pname: version: + cyclicDependencies."${pname}"."${version}" or []; + + getRoot = pname: version: let + spec = getSourceSpec pname version; in - dreamLockDirs; - - - readDreamLock = - { - dreamLock, - }@args: - let - - isFile = - b.isPath dreamLock - || b.isString dreamLock - || lib.isDerivation dreamLock; - - lockMaybeCompressed = - if isFile then - b.fromJSON (b.readFile dreamLock) - else - dreamLock; - - lock = - if lockMaybeCompressed.decompressed or false then - lockMaybeCompressed - else - decompressDreamLock lockMaybeCompressed; - - subDreamLocks = - if ! isFile then - {} - else - let - dir = b.dirOf dreamLock; - in - lib.genAttrs - (subDreamLockNames dreamLock) - (d: - readDreamLock - { dreamLock = "${dir}/${d}/dream-lock.json"; }); - - packages = lock._generic.packages; - - defaultPackageName = lock._generic.defaultPackage; - defaultPackageVersion = packages."${defaultPackageName}"; - - subsystemAttrs = lock._subsystem; - - sources = lock.sources; - - dependencyGraph = lock.dependencies; - - packageVersions = - lib.mapAttrs - (name: versions: lib.attrNames versions) - dependencyGraph; - - cyclicDependencies = lock.cyclicDependencies; - - getSourceSpec = pname: version: - sources."${pname}"."${version}" or ( - throw "The source spec for ${pname}#${version} is not defined in lockfile." - ); - - getDependencies = pname: version: - b.filter - (dep: ! 
b.elem dep cyclicDependencies."${pname}"."${version}" or []) - dependencyGraph."${pname}"."${version}" or []; - - getCyclicDependencies = pname: version: - cyclicDependencies."${pname}"."${version}" or []; - - getRoot = pname: version: - let spec = getSourceSpec pname version; in - if spec.type == "path" then - { - pname = spec.rootName; - version = spec.rootVersion; - } - else - { inherit pname version; }; - - in - { - inherit lock; - interface = { - - inherit - defaultPackageName - defaultPackageVersion - subsystemAttrs - getCyclicDependencies - getDependencies - getSourceSpec - getRoot - packages - packageVersions - subDreamLocks - ; - }; - }; + if spec.type == "path" + then { + pname = spec.rootName; + version = spec.rootVersion; + } + else {inherit pname version;}; + in { + inherit lock; + interface = { + inherit + defaultPackageName + defaultPackageVersion + subsystemAttrs + getCyclicDependencies + getDependencies + getSourceSpec + getRoot + packages + packageVersions + subDreamLocks + ; + }; + }; getMainPackageSource = dreamLock: - dreamLock.sources - ."${dreamLock._generic.defaultPackage}" - ."${dreamLock._generic.packages."${dreamLock._generic.defaultPackage}"}" + dreamLock + .sources + ."${dreamLock._generic.defaultPackage}" + ."${dreamLock._generic.packages."${dreamLock._generic.defaultPackage}"}" // rec { pname = dreamLock._generic.defaultPackage; - version = dreamLock._generic.packages."${pname}" ; + version = dreamLock._generic.packages."${pname}"; }; getSource = fetchedSources: pname: version: - if fetchedSources ? "${pname}"."${version}" - && fetchedSources."${pname}"."${version}" != "unknown" then - fetchedSources."${pname}"."${version}" + if + fetchedSources + ? "${pname}"."${version}" + && fetchedSources."${pname}"."${version}" != "unknown" + then fetchedSources."${pname}"."${version}" else throw '' The source for ${pname}#${version} is not defined. 
@@ -149,143 +136,140 @@ let ``` ''; - # generate standalone dreamLock for a depenndency of an existing dreamLock - getSubDreamLock = dreamLock: name: version: - let - lock = (readDreamLock { inherit dreamLock; }).lock; - - in - lock // { - _generic = lock._generic // { - defaultPackage = name; - packages = lock._generic.packages // { + # generate standalone dreamLock for a depenndency of an existing dreamLock + getSubDreamLock = dreamLock: name: version: let + lock = (readDreamLock {inherit dreamLock;}).lock; + in + lock + // { + _generic = + lock._generic + // { + defaultPackage = name; + packages = + lock._generic.packages + // { "${name}" = version; }; - }; }; + }; - injectDependencies = dreamLock: inject: - if inject == {} then dreamLock else - let - lock = (readDreamLock { inherit dreamLock; }).lock; + injectDependencies = dreamLock: inject: + if inject == {} + then dreamLock + else let + lock = (readDreamLock {inherit dreamLock;}).lock; - oldDependencyGraph = lock.dependencies; + oldDependencyGraph = lock.dependencies; - newDependencyGraph = + newDependencyGraph = + lib.zipAttrsWith + (name: versions: lib.zipAttrsWith - (name: versions: - lib.zipAttrsWith - (version: deps: lib.unique (lib.flatten deps)) - versions) - [ - oldDependencyGraph - inject - ]; - - in - lib.recursiveUpdate lock { - dependencies = newDependencyGraph; - }; - - decompressDependencyGraph = compGraph: - lib.mapAttrs - (name: versions: - lib.mapAttrs - (version: deps: - map - (dep: { - name = b.elemAt dep 0; - version = b.elemAt dep 1; - }) - deps) - versions) - compGraph; - - compressDependencyGraph = decompGraph: - lib.mapAttrs - (name: versions: - lib.mapAttrs - (version: deps: map ( dep: [ dep.name dep.version ]) deps) - versions) - decompGraph; - - decompressDreamLock = comp: - let - dependencyGraphDecomp = - decompressDependencyGraph (comp.dependencies or {}); - - cyclicDependencies = - decompressDependencyGraph (comp.cyclicDependencies or {}); - - emptyDependencyGraph = - lib.mapAttrs - (name: versions: - lib.mapAttrs - (version: source: []) - versions) - comp.sources; - - dependencyGraph = - lib.recursiveUpdate - emptyDependencyGraph - dependencyGraphDecomp; - - in - comp // { - decompressed = true; - cyclicDependencies = cyclicDependencies; - dependencies = dependencyGraph; - }; - - compressDreamLock = uncomp: - let - dependencyGraphComp = - compressDependencyGraph - uncomp.dependencies; - - cyclicDependencies = - compressDependencyGraph - uncomp.cyclicDependencies; - - dependencyGraph = - lib.filterAttrs - (name: versions: versions != {}) - (lib.mapAttrs - (name: versions: - lib.filterAttrs - (version: deps: deps != []) - versions) - dependencyGraphComp); - in - (b.removeAttrs uncomp [ "decompressed" ]) // { - inherit cyclicDependencies; - dependencies = dependencyGraph; + (version: deps: lib.unique (lib.flatten deps)) + versions) + [ + oldDependencyGraph + inject + ]; + in + lib.recursiveUpdate lock { + dependencies = newDependencyGraph; }; - toJSON = dreamLock: - let - lock = - if dreamLock.decompressed or false then - compressDreamLock dreamLock - else - dreamLock; + decompressDependencyGraph = compGraph: + lib.mapAttrs + (name: versions: + lib.mapAttrs + (version: deps: + map + (dep: { + name = b.elemAt dep 0; + version = b.elemAt dep 1; + }) + deps) + versions) + compGraph; - json = b.toJSON lock; + compressDependencyGraph = decompGraph: + lib.mapAttrs + (name: versions: + lib.mapAttrs + (version: deps: map (dep: [dep.name dep.version]) deps) + versions) + decompGraph; - in - json; + 
decompressDreamLock = comp: let + dependencyGraphDecomp = + decompressDependencyGraph (comp.dependencies or {}); -in - { - inherit - compressDreamLock - decompressDreamLock - decompressDependencyGraph - getMainPackageSource - getSource - getSubDreamLock - readDreamLock - injectDependencies - toJSON + cyclicDependencies = + decompressDependencyGraph (comp.cyclicDependencies or {}); + + emptyDependencyGraph = + lib.mapAttrs + (name: versions: + lib.mapAttrs + (version: source: []) + versions) + comp.sources; + + dependencyGraph = + lib.recursiveUpdate + emptyDependencyGraph + dependencyGraphDecomp; + in + comp + // { + decompressed = true; + cyclicDependencies = cyclicDependencies; + dependencies = dependencyGraph; + }; + + compressDreamLock = uncomp: let + dependencyGraphComp = + compressDependencyGraph + uncomp.dependencies; + + cyclicDependencies = + compressDependencyGraph + uncomp.cyclicDependencies; + + dependencyGraph = + lib.filterAttrs + (name: versions: versions != {}) + (lib.mapAttrs + (name: versions: + lib.filterAttrs + (version: deps: deps != []) + versions) + dependencyGraphComp); + in + (b.removeAttrs uncomp ["decompressed"]) + // { + inherit cyclicDependencies; + dependencies = dependencyGraph; + }; + + toJSON = dreamLock: let + lock = + if dreamLock.decompressed or false + then compressDreamLock dreamLock + else dreamLock; + + json = b.toJSON lock; + in + json; +in { + inherit + compressDreamLock + decompressDreamLock + decompressDependencyGraph + getMainPackageSource + getSource + getSubDreamLock + readDreamLock + injectDependencies + toJSON ; - } +} diff --git a/src/utils/external-dir.nix b/src/utils/external-dir.nix index 08acc601..24f9dc9f 100644 --- a/src/utils/external-dir.nix +++ b/src/utils/external-dir.nix @@ -4,16 +4,15 @@ externalSources, externalPaths, }: - pkgs.runCommand "dream2nix-external-dir" {} - (lib.concatStringsSep "\n" - (lib.mapAttrsToList - (inputName: paths: - lib.concatStringsSep "\n" - (lib.forEach - paths - (path: '' - mkdir -p $out/${inputName}/$(dirname ${path}) - cp ${externalSources."${inputName}"}/${path} $out/${inputName}/${path} - ''))) - externalPaths)) +(lib.concatStringsSep "\n" + (lib.mapAttrsToList + (inputName: paths: + lib.concatStringsSep "\n" + (lib.forEach + paths + (path: '' + mkdir -p $out/${inputName}/$(dirname ${path}) + cp ${externalSources."${inputName}"}/${path} $out/${inputName}/${path} + ''))) + externalPaths)) diff --git a/src/utils/override.nix b/src/utils/override.nix index 5109f9a2..42299e38 100644 --- a/src/utils/override.nix +++ b/src/utils/override.nix @@ -1,29 +1,25 @@ { lib, - # dream2nix utils, ... 
-}: -let - +}: let b = builtins; - loadOverridesDirs = overridesDirs: pkgs: - let - loadOverrides = dir: - lib.genAttrs (utils.dirNames dir) (name: - import (dir + "/${name}") { - inherit lib pkgs; - satisfiesSemver = constraint: pkg: - utils.satisfiesSemver pkg.version constraint; - }); - in - b.foldl' - (loaded: nextDir: - utils.recursiveUpdateUntilDepth 3 loaded (loadOverrides nextDir)) - {} - overridesDirs; + loadOverridesDirs = overridesDirs: pkgs: let + loadOverrides = dir: + lib.genAttrs (utils.dirNames dir) (name: + import (dir + "/${name}") { + inherit lib pkgs; + satisfiesSemver = constraint: pkg: + utils.satisfiesSemver pkg.version constraint; + }); + in + b.foldl' + (loaded: nextDir: + utils.recursiveUpdateUntilDepth 3 loaded (loadOverrides nextDir)) + {} + overridesDirs; throwErrorUnclearAttributeOverride = pname: overrideName: attrName: throw '' @@ -54,156 +50,142 @@ let ``` ''; - getOverrideFunctionArgs = function: - let - funcArgs = lib.functionArgs function; - in - if funcArgs != {} then - b.attrNames funcArgs - else + getOverrideFunctionArgs = function: let + funcArgs = lib.functionArgs function; + in + if funcArgs != {} + then b.attrNames funcArgs + else + ( + function (old: {passthru.funcArgs = lib.attrNames old;}) + ) + .funcArgs; + + applyOverridesToPackage = { + conditionalOverrides, + pkg, + pname, + outputs, + }: let + # if condition is unset, it will be assumed true + evalCondition = condOverride: pkg: + if condOverride ? _condition + then condOverride._condition pkg + else true; + + # filter the overrides by the package name and conditions + overridesToApply = let + # TODO: figure out if regex names will be useful + regexOverrides = {}; + # lib.filterAttrs + # (name: data: + # lib.hasPrefix "^" name + # && + # b.match name pname != null) + # conditionalOverrides; + + overridesForPackage = + b.foldl' + (overrides: new: overrides // new) + conditionalOverrides."${pname}" or {} + (lib.attrValues regexOverrides); + + overridesListForPackage = + lib.mapAttrsToList ( - function (old: {passthru.funcArgs = lib.attrNames old;}) - ).funcArgs; + _name: data: + data // {inherit _name;} + ) + overridesForPackage; + in (lib.filter + (condOverride: evalCondition condOverride pkg) + overridesListForPackage); - applyOverridesToPackage = - { - conditionalOverrides, - pkg, - pname, - outputs, - }: - let + # apply single attribute override + applySingleAttributeOverride = oldVal: functionOrValue: + if b.isFunction functionOrValue + then + if lib.functionArgs functionOrValue == {} + then functionOrValue oldVal + else + functionOrValue { + old = oldVal; + inherit outputs; + } + else functionOrValue; - # if condition is unset, it will be assumed true - evalCondition = condOverride: pkg: - if condOverride ? _condition then - condOverride._condition pkg + # helper to apply one conditional override + # the condition is not evaluated anymore here + applyOneOverride = pkg: condOverride: let + base_derivation = + if condOverride ? 
_replace + then + if lib.isFunction condOverride._replace + then condOverride._replace pkg + else if lib.isDerivation condOverride._replace + then condOverride._replace else - true; + throw + ("override attr ${pname}.${condOverride._name}._replace" + + " must either be a derivation or a function") + else pkg; - # filter the overrides by the package name and conditions - overridesToApply = - let - # TODO: figure out if regex names will be useful - regexOverrides = {}; - # lib.filterAttrs - # (name: data: - # lib.hasPrefix "^" name - # && - # b.match name pname != null) - # conditionalOverrides; + overrideFuncs = + lib.mapAttrsToList + (funcName: func: {inherit funcName func;}) + (lib.filterAttrs (n: v: lib.hasPrefix "override" n) condOverride); - overridesForPackage = - b.foldl' - (overrides: new: overrides // new) - conditionalOverrides."${pname}" or {} - (lib.attrValues regexOverrides); + singleArgOverrideFuncs = let + availableFunctions = + lib.mapAttrs + (funcName: func: getOverrideFunctionArgs func) + (lib.filterAttrs + (funcName: func: lib.hasPrefix "override" funcName) + base_derivation); - overridesListForPackage = - lib.mapAttrsToList - (_name: data: - data // { inherit _name; } - ) - overridesForPackage; - in - (lib.filter - (condOverride: evalCondition condOverride pkg) - overridesListForPackage); - - # apply single attribute override - applySingleAttributeOverride = oldVal: functionOrValue: - if b.isFunction functionOrValue then - if lib.functionArgs functionOrValue == {} then - functionOrValue oldVal - else - functionOrValue { - old = oldVal; - inherit outputs; - } - else - functionOrValue; - - # helper to apply one conditional override - # the condition is not evaluated anymore here - applyOneOverride = pkg: condOverride: - let - base_derivation = - if condOverride ? _replace then - if lib.isFunction condOverride._replace then - condOverride._replace pkg - else if lib.isDerivation condOverride._replace then - condOverride._replace - else - throw - ("override attr ${pname}.${condOverride._name}._replace" - + " must either be a derivation or a function") - else - pkg; - - overrideFuncs = - lib.mapAttrsToList - (funcName: func: { inherit funcName func; }) - (lib.filterAttrs (n: v: lib.hasPrefix "override" n) condOverride); - - singleArgOverrideFuncs = - let - availableFunctions = - lib.mapAttrs - (funcName: func: getOverrideFunctionArgs func) - (lib.filterAttrs - (funcName: func: lib.hasPrefix "override" funcName) - base_derivation); - - getOverrideFuncNameForAttrName = attrName: - let - applicableFuncs = - lib.attrNames - (lib.filterAttrs - (funcName: args: b.elem attrName args) - availableFunctions); - in - if b.length applicableFuncs == 0 then - "overrideAttrs" - else if b.length applicableFuncs > 1 then - throwErrorUnclearAttributeOverride pname condOverride._name attrName - else - b.elemAt applicableFuncs 0; - - attributeOverrides = - lib.filterAttrs - (n: v: ! lib.hasPrefix "override" n && ! 
lib.hasPrefix "_" n) - condOverride; - - in - lib.mapAttrsToList - (attrName: funcOrValue: { - funcName = getOverrideFuncNameForAttrName attrName; - func = oldAttrs: { "${attrName}" = funcOrValue; }; - }) - attributeOverrides; - - in - b.foldl' - (pkg: overrideFunc: - pkg."${overrideFunc.funcName}" - (old: - let - updateAttrsFuncs = overrideFunc.func old; - in - lib.mapAttrs - (attrName: functionOrValue: - applySingleAttributeOverride old."${attrName}" functionOrValue) - updateAttrsFuncs)) - base_derivation - (overrideFuncs ++ singleArgOverrideFuncs); + getOverrideFuncNameForAttrName = attrName: let + applicableFuncs = + lib.attrNames + (lib.filterAttrs + (funcName: args: b.elem attrName args) + availableFunctions); in - # apply the overrides to the given pkg - (lib.foldl - (pkg: condOverride: applyOneOverride pkg condOverride) - pkg - overridesToApply); + if b.length applicableFuncs == 0 + then "overrideAttrs" + else if b.length applicableFuncs > 1 + then throwErrorUnclearAttributeOverride pname condOverride._name attrName + else b.elemAt applicableFuncs 0; -in -{ + attributeOverrides = + lib.filterAttrs + (n: v: ! lib.hasPrefix "override" n && ! lib.hasPrefix "_" n) + condOverride; + in + lib.mapAttrsToList + (attrName: funcOrValue: { + funcName = getOverrideFuncNameForAttrName attrName; + func = oldAttrs: {"${attrName}" = funcOrValue;}; + }) + attributeOverrides; + in + b.foldl' + (pkg: overrideFunc: + pkg."${overrideFunc.funcName}" + (old: let + updateAttrsFuncs = overrideFunc.func old; + in + lib.mapAttrs + (attrName: functionOrValue: + applySingleAttributeOverride old."${attrName}" functionOrValue) + updateAttrsFuncs)) + base_derivation + (overrideFuncs ++ singleArgOverrideFuncs); + in + # apply the overrides to the given pkg + (lib.foldl + (pkg: condOverride: applyOneOverride pkg condOverride) + pkg + overridesToApply); +in { inherit applyOverridesToPackage loadOverridesDirs; } diff --git a/src/utils/toTOML.nix b/src/utils/toTOML.nix index c1c93487..0e384830 100644 --- a/src/utils/toTOML.nix +++ b/src/utils/toTOML.nix @@ -1,5 +1,4 @@ -{ lib }: -let +{lib}: let inherit (lib) length @@ -31,14 +30,13 @@ let ty = tomlTy v; in if ty == "set" - then - let - vals = - mapAttrsToList - (k': v': "${quoteKey k'} = ${outputValInner v'}") - v; - valsStr = concatStringsSep ", " vals; - in "{ ${valsStr} }" + then let + vals = + mapAttrsToList + (k': v': "${quoteKey k'} = ${outputValInner v'}") + v; + valsStr = concatStringsSep ", " vals; + in "{ ${valsStr} }" else outputVal v; outputVal = v: let @@ -49,11 +47,10 @@ let else if ty == "string" then quoteString v else if ty == "list" || ty == "list_of_attrs" - then - let - vals = map quoteString v; - valsStr = concatStringsSep ", " vals; - in "[ ${valsStr} ]" + then let + vals = map quoteString v; + valsStr = concatStringsSep ", " vals; + in "[ ${valsStr} ]" else if ty == "set" then abort "unsupported set for not-inner value" else abort "Not implemented: type ${ty}"; @@ -62,14 +59,13 @@ let ty = tomlTy v; in if ty == "set" - then - let - vals = - mapAttrsToList - (k': v': "${quoteKey k'} = ${outputValInner v'}") - v; - valsStr = concatStringsSep ", " vals; - in ["${quoteKey k} = { ${valsStr} }"] + then let + vals = + mapAttrsToList + (k': v': "${quoteKey k'} = ${outputValInner v'}") + v; + valsStr = concatStringsSep ", " vals; + in ["${quoteKey k} = { ${valsStr} }"] else outputKeyVal k v; # Returns a list of strings; one string per line @@ -88,11 +84,10 @@ let ) v else if ty == "list" - then - let - vals = map quoteString v; - valsStr = 
concatStringsSep ", " vals; - in ["${quoteKey k} = [ ${valsStr} ]"] + then let + vals = map quoteString v; + valsStr = concatStringsSep ", " vals; + in ["${quoteKey k} = [ ${valsStr} ]"] else if ty == "set" then ["[${k}]"] ++ (concatLists (mapAttrsToList outputKeyValInner v)) else abort "Not implemented: type ${ty} for key ${k}"; @@ -115,26 +110,26 @@ let then if length x == 0 then "list" - else - let - ty = typeOf (elemAt x 0); - in - #assert (all (v: typeOf v == ty) x); - if ty == "set" - then "list_of_attrs" - else "list" + else let + ty = typeOf (elemAt x 0); + in + #assert (all (v: typeOf v == ty) x); + if ty == "set" + then "list_of_attrs" + else "list" else abort "Not implemented: toml type for ${typeOf x}"; toTOML = attrs: assert (typeOf attrs == "set"); let - byTy = lib.foldl - ( - acc: x: let - ty = tomlTy x.v; - in - acc // { "${ty}" = (acc.${ty} or []) ++ [x]; } - ) - {} (mapAttrsToList (k: v: { inherit k v; }) attrs); + byTy = + lib.foldl + ( + acc: x: let + ty = tomlTy x.v; + in + acc // {"${ty}" = (acc.${ty} or []) ++ [x];} + ) + {} (mapAttrsToList (k: v: {inherit k v;}) attrs); in concatMapStringsSep "\n" (kv: concatStringsSep "\n" (outputKeyVal kv.k kv.v)) diff --git a/src/utils/translator.nix b/src/utils/translator.nix index 239cf323..4137de84 100644 --- a/src/utils/translator.nix +++ b/src/utils/translator.nix @@ -1,270 +1,262 @@ { lib, - # dream2nix fetchers, dlib, ... -}: -let - +}: let b = builtins; overrideWarning = fields: args: - lib.filterAttrs (name: _: - if lib.any (field: name == field) fields - then lib.warn '' - you are trying to pass a "${name}" key from your source - constructor, this will be overrided with a value passed - by dream2nix. - '' false - else true - ) args; + lib.filterAttrs ( + name: _: + if lib.any (field: name == field) fields + then + lib.warn '' + you are trying to pass a "${name}" key from your source + constructor, this will be overrided with a value passed + by dream2nix. + '' + false + else true + ) + args; - simpleTranslate = func: - let - final = - func - { - inherit getDepByNameVer; - inherit dependenciesByOriginalID; - }; + simpleTranslate = func: let + final = + func + { + inherit getDepByNameVer; + inherit dependenciesByOriginalID; + }; - getDepByNameVer = name: version: - final.allDependencies."${name}"."${version}" or null; + getDepByNameVer = name: version: + final.allDependencies."${name}"."${version}" or null; - dependenciesByOriginalID = b.foldl' - (result: pkgData: lib.recursiveUpdate result { + dependenciesByOriginalID = + b.foldl' + (result: pkgData: + lib.recursiveUpdate result { "${final.getOriginalID pkgData}" = pkgData; }) + {} + serializedPackagesList; + + serializedPackagesList = final.serializePackages final.inputData; + + dreamLockData = magic final; + + magic = { + # values + defaultPackage, + inputData, + location ? "", + mainPackageDependencies, + packages, + subsystemName, + subsystemAttrs, + translatorName, + # functions + serializePackages, + getName, + getVersion, + getSourceType, + sourceConstructors, + createMissingSource ? (name: version: throw "Cannot find source for ${name}:${version}"), + getDependencies ? null, + getOriginalID ? null, + mainPackageSource ? 
{type = "unknown";}, + }: let + allDependencies = + b.foldl' + (result: pkgData: + lib.recursiveUpdate result { + "${getName pkgData}" = { + "${getVersion pkgData}" = pkgData; + }; + }) {} serializedPackagesList; - serializedPackagesList = final.serializePackages final.inputData; - - dreamLockData = magic final; - - magic = - { - # values - defaultPackage, - inputData, - location ? "", - mainPackageDependencies, - packages, - subsystemName, - subsystemAttrs, - translatorName, - - # functions - serializePackages, - getName, - getVersion, - getSourceType, - sourceConstructors, - createMissingSource ? - (name: version: throw "Cannot find source for ${name}:${version}"), - getDependencies ? null, - getOriginalID ? null, - mainPackageSource ? { type = "unknown"; }, - }: - let - - allDependencies = b.foldl' - (result: pkgData: lib.recursiveUpdate result { - "${getName pkgData}" = { - "${getVersion pkgData}" = pkgData; - }; - }) - {} - serializedPackagesList; - - sources = b.foldl' - (result: pkgData: - let - pkgName = getName pkgData; - pkgVersion = getVersion pkgData; - in lib.recursiveUpdate result { - "${pkgName}" = { - "${pkgVersion}" = - let - type = getSourceType pkgData; - - constructedArgs = sourceConstructors."${type}" pkgData; - - constructedArgsKeep = - overrideWarning [ "pname" "version" ] constructedArgs; - - constructedSource = - fetchers.constructSource (constructedArgsKeep // { - inherit type; - pname = pkgName; - version = pkgVersion; - }); - - in - b.removeAttrs constructedSource [ "pname" "version" ]; - }; - }) - {} - serializedPackagesList; - - dependencyGraph = - let - depGraph = - (lib.mapAttrs - (name: versions: - lib.mapAttrs - (version: pkgData: getDependencies pkgData) - versions) - allDependencies); - in - depGraph // { - "${defaultPackage}" = depGraph."${defaultPackage}" or {} // { - "${packages."${defaultPackage}"}" = mainPackageDependencies; - }; - }; - - allDependencyKeys = - let - depsWithDuplicates = - lib.flatten - (lib.flatten - (lib.mapAttrsToList - (name: versions: lib.attrValues versions) - dependencyGraph)); - in - lib.unique depsWithDuplicates; - - missingDependencies = - lib.flatten - (lib.forEach allDependencyKeys - (dep: - if sources ? "${dep.name}"."${dep.version}" then - [] - else - dep)); - - generatedSources = - if missingDependencies == [] then - {} - else - lib.listToAttrs - (b.map - (dep: lib.nameValuePair - "${dep.name}" - { - "${dep.version}" = - createMissingSource dep.name dep.version; - }) - missingDependencies); - - allSources = - lib.recursiveUpdate sources generatedSources; - - cyclicDependencies = - # TODO: inefficient! Implement some kind of early cutoff - let - findCycles = node: prevNodes: cycles: - let - - children = dependencyGraph."${node.name}"."${node.version}"; - - cyclicChildren = - lib.filter - (child: prevNodes ? "${child.name}#${child.version}") - children; - - nonCyclicChildren = - lib.filter - (child: ! prevNodes ? 
"${child.name}#${child.version}") - children; - - cycles' = - cycles - ++ - (b.map (child: { from = node; to = child; }) cyclicChildren); - - # use set for efficient lookups - prevNodes' = - prevNodes - // { "${node.name}#${node.version}" = null; }; - - in - if nonCyclicChildren == [] then - cycles' - else - lib.flatten - (b.map - (child: findCycles child prevNodes' cycles') - nonCyclicChildren); - - cyclesList = - findCycles - (dlib.nameVersionPair defaultPackage packages."${defaultPackage}") - {} - []; - in - b.foldl' - (cycles: cycle: - ( - let - existing = - cycles."${cycle.from.name}"."${cycle.from.version}" - or []; - - reverse = - cycles."${cycle.to.name}"."${cycle.to.version}" - or []; - - in - # if edge or reverse edge already in cycles, do nothing - if b.elem cycle.from reverse - || b.elem cycle.to existing then - cycles - else - lib.recursiveUpdate - cycles - { - "${cycle.from.name}"."${cycle.from.version}" = - existing ++ [ cycle.to ]; - })) - {} - cyclesList; - + sources = + b.foldl' + (result: pkgData: let + pkgName = getName pkgData; + pkgVersion = getVersion pkgData; in - { - decompressed = true; + lib.recursiveUpdate result { + "${pkgName}" = { + "${pkgVersion}" = let + type = getSourceType pkgData; - _generic = + constructedArgs = sourceConstructors."${type}" pkgData; + + constructedArgsKeep = + overrideWarning ["pname" "version"] constructedArgs; + + constructedSource = fetchers.constructSource (constructedArgsKeep + // { + inherit type; + pname = pkgName; + version = pkgVersion; + }); + in + b.removeAttrs constructedSource ["pname" "version"]; + }; + }) + {} + serializedPackagesList; + + dependencyGraph = let + depGraph = + lib.mapAttrs + (name: versions: + lib.mapAttrs + (version: pkgData: getDependencies pkgData) + versions) + allDependencies; + in + depGraph + // { + "${defaultPackage}" = + depGraph."${defaultPackage}" + or {} + // { + "${packages."${defaultPackage}"}" = mainPackageDependencies; + }; + }; + + allDependencyKeys = let + depsWithDuplicates = + lib.flatten + (lib.flatten + (lib.mapAttrsToList + (name: versions: lib.attrValues versions) + dependencyGraph)); + in + lib.unique depsWithDuplicates; + + missingDependencies = + lib.flatten + (lib.forEach allDependencyKeys + (dep: + if sources ? "${dep.name}"."${dep.version}" + then [] + else dep)); + + generatedSources = + if missingDependencies == [] + then {} + else + lib.listToAttrs + (b.map + (dep: + lib.nameValuePair + "${dep.name}" { - inherit - defaultPackage - location - packages - ; - subsystem = subsystemName; - sourcesAggregatedHash = null; - }; + "${dep.version}" = + createMissingSource dep.name dep.version; + }) + missingDependencies); - # build system specific attributes - _subsystem = subsystemAttrs; + allSources = + lib.recursiveUpdate sources generatedSources; - inherit cyclicDependencies; + cyclicDependencies = + # TODO: inefficient! Implement some kind of early cutoff + let + findCycles = node: prevNodes: cycles: let + children = dependencyGraph."${node.name}"."${node.version}"; - sources = allSources; - } - // - (lib.optionalAttrs - (getDependencies != null) - { dependencies = dependencyGraph; }); + cyclicChildren = + lib.filter + (child: prevNodes ? "${child.name}#${child.version}") + children; + nonCyclicChildren = + lib.filter + (child: ! prevNodes ? 
"${child.name}#${child.version}") + children; + + cycles' = + cycles + ++ (b.map (child: { + from = node; + to = child; + }) + cyclicChildren); + + # use set for efficient lookups + prevNodes' = + prevNodes + // {"${node.name}#${node.version}" = null;}; + in + if nonCyclicChildren == [] + then cycles' + else + lib.flatten + (b.map + (child: findCycles child prevNodes' cycles') + nonCyclicChildren); + + cyclesList = + findCycles + (dlib.nameVersionPair defaultPackage packages."${defaultPackage}") + {} + []; + in + b.foldl' + (cycles: cycle: ( + let + existing = + cycles."${cycle.from.name}"."${cycle.from.version}" + or []; + + reverse = + cycles."${cycle.to.name}"."${cycle.to.version}" + or []; + in + # if edge or reverse edge already in cycles, do nothing + if + b.elem cycle.from reverse + || b.elem cycle.to existing + then cycles + else + lib.recursiveUpdate + cycles + { + "${cycle.from.name}"."${cycle.from.version}" = + existing ++ [cycle.to]; + } + )) + {} + cyclesList; in + { + decompressed = true; - dreamLockData; + _generic = { + inherit + defaultPackage + location + packages + ; + subsystem = subsystemName; + sourcesAggregatedHash = null; + }; -in - { - inherit simpleTranslate; - } + # build system specific attributes + _subsystem = subsystemAttrs; + inherit cyclicDependencies; + + sources = allSources; + } + // (lib.optionalAttrs + (getDependencies != null) + {dependencies = dependencyGraph;}); + in + dreamLockData; +in { + inherit simpleTranslate; +} diff --git a/src/utils/translator2.nix b/src/utils/translator2.nix index d76c568f..4c401fd6 100644 --- a/src/utils/translator2.nix +++ b/src/utils/translator2.nix @@ -2,244 +2,234 @@ dlib, lib, ... -}: -let - +}: let l = lib // builtins; - simpleTranslate2 = func: - let - final = - func + simpleTranslate2 = func: let + final = + func + { + inherit objectsByKey; + }; + + rawObjects = final.serializedRawObjects; + + expectedFields = [ + "name" + "version" + "sourceSpec" + "dependencies" + ]; + + finalObjects' = + l.map + (rawObj: let + finalObj = + {inherit rawObj;} + // l.mapAttrs + (key: extractFunc: extractFunc rawObj finalObj) + final.extractors; + in + finalObj) + rawObjects; + + # checks validity of all objects by iterating over them + finalObjects = + l.map + (finalObj: + if l.any (field: ! finalObj ? "${field}") expectedFields + then + throw + '' + Translator ${final.translatorName} failed. + The following object does not contain all required fields: + Object: + ${l.toJSON finalObj} + Missing fields: + ${l.subtractLists expectedFields (l.attrNames finalObj)} + '' + # TODO: validate sourceSpec as well + else finalObj) + (finalObjects' ++ (final.extraObjects or [])); + + objectsByKey = + l.mapAttrs + (key: keyFunc: + l.foldl' + (merged: finalObj: + merged + // {"${keyFunc finalObj.rawObj finalObj}" = finalObj;}) + {} + finalObjects') + final.keys; + + dreamLockData = magic final; + + magic = { + defaultPackage, + exportedPackages, + extractors, + extraDependencies ? {}, + extraObjects ? [], + keys ? {}, + location ? "", + serializedRawObjects, + subsystemName, + subsystemAttrs ? 
{}, + translatorName, + }: let + allDependencies = + l.foldl' + (result: finalObj: + lib.recursiveUpdate + result { - inherit objectsByKey; - }; + "${finalObj.name}" = { + "${finalObj.version}" = finalObj; + }; + }) + {} + finalObjects; - rawObjects = final.serializedRawObjects; - - expectedFields = [ - "name" - "version" - "sourceSpec" - "dependencies" - ]; - - finalObjects' = - l.map - (rawObj: let - finalObj = - { inherit rawObj; } - // l.mapAttrs - (key: extractFunc: extractFunc rawObj finalObj) - final.extractors; - in - finalObj) - rawObjects; - - # checks validity of all objects by iterating over them - finalObjects = - l.map - (finalObj: - if l.any (field: ! finalObj ? "${field}") expectedFields - then throw - '' - Translator ${final.translatorName} failed. - The following object does not contain all required fields: - Object: - ${l.toJSON finalObj} - Missing fields: - ${l.subtractLists expectedFields (l.attrNames finalObj)} - '' - # TODO: validate sourceSpec as well - else finalObj) - (finalObjects' ++ (final.extraObjects or [])); - - objectsByKey = + sources = l.mapAttrs - (key: keyFunc: - l.foldl' - (merged: finalObj: - merged - // {"${keyFunc finalObj.rawObj finalObj}" = finalObj;}) - {} - finalObjects') - final.keys; + (name: versions: + l.mapAttrs + (version: finalObj: finalObj.sourceSpec) + versions) + allDependencies; - - dreamLockData = magic final; - - magic = - { - defaultPackage, - exportedPackages, - extractors, - extraDependencies ? {}, - extraObjects ? [], - keys ? {}, - location ? "", - serializedRawObjects, - subsystemName, - subsystemAttrs ? {}, - translatorName, - }: - let - - allDependencies = l.foldl' - (result: finalObj: - lib.recursiveUpdate - result - { - "${finalObj.name}" = { - "${finalObj.version}" = finalObj; - }; - }) - {} - finalObjects; - - sources = - l.mapAttrs - (name: versions: - l.mapAttrs - (version: finalObj: finalObj.sourceSpec) - versions) - allDependencies; - - dependencyGraph = - let - depGraph = - (lib.mapAttrs - (name: versions: - lib.mapAttrs - (version: finalObj: finalObj.dependencies) - versions) - allDependencies); - in - # add extraDependencies to dependency graph - l.foldl' - (all: new: - all // { - "${new.name}" = - all."${new.name}" or {} - // { - "${new.version}" = - all."${new.name}"."${new.version}" or [] - ++ new.dependencies; - }; - }) - depGraph - extraDependencies; - - cyclicDependencies = - # TODO: inefficient! Implement some kind of early cutoff - let - depGraphWithFakeRoot = - l.recursiveUpdate - dependencyGraph - { - __fake-entry.__fake-version = - l.mapAttrsToList - dlib.nameVersionPair - exportedPackages; - }; - - findCycles = node: prevNodes: cycles: - let - - children = - depGraphWithFakeRoot."${node.name}"."${node.version}"; - - cyclicChildren = - lib.filter - (child: prevNodes ? "${child.name}#${child.version}") - children; - - nonCyclicChildren = - lib.filter - (child: ! prevNodes ? 
"${child.name}#${child.version}") - children; - - cycles' = - cycles - ++ - (l.map (child: { from = node; to = child; }) cyclicChildren); - - # use set for efficient lookups - prevNodes' = - prevNodes - // { "${node.name}#${node.version}" = null; }; - - in - if nonCyclicChildren == [] then - cycles' - else - lib.flatten - (l.map - (child: findCycles child prevNodes' cycles') - nonCyclicChildren); - - cyclesList = - findCycles - (dlib.nameVersionPair - "__fake-entry" - "__fake-version") - {} - []; - in - l.foldl' - (cycles: cycle: - ( - let - existing = - cycles."${cycle.from.name}"."${cycle.from.version}" - or []; - - reverse = - cycles."${cycle.to.name}"."${cycle.to.version}" - or []; - - in - # if edge or reverse edge already in cycles, do nothing - if l.elem cycle.from reverse - || l.elem cycle.to existing then - cycles - else - lib.recursiveUpdate - cycles - { - "${cycle.from.name}"."${cycle.from.version}" = - existing ++ [ cycle.to ]; - })) - {} - cyclesList; - - in - { - decompressed = true; - - _generic = - { - inherit - defaultPackage - location - ; - packages = exportedPackages; - subsystem = subsystemName; - sourcesAggregatedHash = null; + dependencyGraph = let + depGraph = + lib.mapAttrs + (name: versions: + lib.mapAttrs + (version: finalObj: finalObj.dependencies) + versions) + allDependencies; + in + # add extraDependencies to dependency graph + l.foldl' + (all: new: + all + // { + "${new.name}" = + all."${new.name}" + or {} + // { + "${new.version}" = + all."${new.name}"."${new.version}" + or [] + ++ new.dependencies; }; + }) + depGraph + extraDependencies; - # build system specific attributes - _subsystem = subsystemAttrs; + cyclicDependencies = + # TODO: inefficient! Implement some kind of early cutoff + let + depGraphWithFakeRoot = + l.recursiveUpdate + dependencyGraph + { + __fake-entry.__fake-version = + l.mapAttrsToList + dlib.nameVersionPair + exportedPackages; + }; - inherit cyclicDependencies sources; - } - // { dependencies = dependencyGraph; }; + findCycles = node: prevNodes: cycles: let + children = + depGraphWithFakeRoot."${node.name}"."${node.version}"; + cyclicChildren = + lib.filter + (child: prevNodes ? "${child.name}#${child.version}") + children; + + nonCyclicChildren = + lib.filter + (child: ! prevNodes ? 
"${child.name}#${child.version}") + children; + + cycles' = + cycles + ++ (l.map (child: { + from = node; + to = child; + }) + cyclicChildren); + + # use set for efficient lookups + prevNodes' = + prevNodes + // {"${node.name}#${node.version}" = null;}; + in + if nonCyclicChildren == [] + then cycles' + else + lib.flatten + (l.map + (child: findCycles child prevNodes' cycles') + nonCyclicChildren); + + cyclesList = + findCycles + (dlib.nameVersionPair + "__fake-entry" + "__fake-version") + {} + []; + in + l.foldl' + (cycles: cycle: ( + let + existing = + cycles."${cycle.from.name}"."${cycle.from.version}" + or []; + + reverse = + cycles."${cycle.to.name}"."${cycle.to.version}" + or []; + in + # if edge or reverse edge already in cycles, do nothing + if + l.elem cycle.from reverse + || l.elem cycle.to existing + then cycles + else + lib.recursiveUpdate + cycles + { + "${cycle.from.name}"."${cycle.from.version}" = + existing ++ [cycle.to]; + } + )) + {} + cyclesList; in + { + decompressed = true; - dreamLockData; + _generic = { + inherit + defaultPackage + location + ; + packages = exportedPackages; + subsystem = subsystemName; + sourcesAggregatedHash = null; + }; -in - { - inherit simpleTranslate2; - } + # build system specific attributes + _subsystem = subsystemAttrs; + inherit cyclicDependencies sources; + } + // {dependencies = dependencyGraph;}; + in + dreamLockData; +in { + inherit simpleTranslate2; +} diff --git a/tests/impure/default.nix b/tests/impure/default.nix index c37d9b4f..3a722ba6 100644 --- a/tests/impure/default.nix +++ b/tests/impure/default.nix @@ -2,28 +2,26 @@ async, coreutils, lib, - # dream2nix callPackageDream, utils, ... -}: -let - +}: let l = lib // builtins; allTestFiles = l.attrNames - (l.filterAttrs - (name: type: type == "regular" && l.hasPrefix "test_" name) - (l.readDir ./.)); + (l.filterAttrs + (name: type: type == "regular" && l.hasPrefix "test_" name) + (l.readDir ./.)); allTests = l.map - (file: callPackageDream ("${./.}/${file}") {}) - allTestFiles; + (file: callPackageDream "${./.}/${file}" {}) + allTestFiles; - executeAll = utils.writePureShellScript + executeAll = + utils.writePureShellScript [ async coreutils @@ -39,7 +37,5 @@ let async -s=$S wait rm $S ''; - - in executeAll diff --git a/tests/impure/test_combined_fetching.nix b/tests/impure/test_combined_fetching.nix index e2fad9a0..3df52b56 100644 --- a/tests/impure/test_combined_fetching.nix +++ b/tests/impure/test_combined_fetching.nix @@ -1,28 +1,24 @@ { lib, - # dream2nix apps, utils, ... -}: -let - +}: let l = lib // builtins; cli = apps.cli.program; - in -utils.writePureShellScript -[] -'' - ${cli} add github:prettier/prettier/2.4.1 \ - --no-default-nix \ - --translator yarn-lock \ - --attribute-name prettier \ - --arg name="{automatic}" \ - --arg noDev=false \ - --arg nodejs=14 \ - --arg peer=false \ - --aggregate -'' + utils.writePureShellScript + [] + '' + ${cli} add github:prettier/prettier/2.4.1 \ + --no-default-nix \ + --translator yarn-lock \ + --attribute-name prettier \ + --arg name="{automatic}" \ + --arg noDev=false \ + --arg nodejs=14 \ + --arg peer=false \ + --aggregate + '' diff --git a/tests/impure/test_go.nix b/tests/impure/test_go.nix index 689250cd..5ccc053d 100644 --- a/tests/impure/test_go.nix +++ b/tests/impure/test_go.nix @@ -1,23 +1,19 @@ { lib, - # dream2nix apps, utils, ... 
-}: -let - +}: let l = lib // builtins; cli = apps.cli.program; - in -utils.writePureShellScript -[] -'' - ${cli} add github:tweag/gomod2nix/67f22dd738d092c6ba88e420350ada0ed4992ae8 \ - --no-default-nix \ - --translator gomod2nix \ - --attribute-name gomod2nix -'' + utils.writePureShellScript + [] + '' + ${cli} add github:tweag/gomod2nix/67f22dd738d092c6ba88e420350ada0ed4992ae8 \ + --no-default-nix \ + --translator gomod2nix \ + --attribute-name gomod2nix + '' diff --git a/tests/impure/test_package-json.nix b/tests/impure/test_package-json.nix index 67b02148..fb73cde7 100644 --- a/tests/impure/test_package-json.nix +++ b/tests/impure/test_package-json.nix @@ -1,29 +1,25 @@ { lib, - # dream2nix apps, utils, ... -}: -let - +}: let l = lib // builtins; cli = apps.cli.program; - in -utils.writePureShellScript -[] -'' - ${cli} add npm:eslint/8.4.0 \ - --no-default-nix \ - --translator package-json \ - --attribute-name eslint \ - --arg name="{automatic}" \ - --arg noDev=true \ - --arg nodejs=14 \ - --arg npmArgs= + utils.writePureShellScript + [] + '' + ${cli} add npm:eslint/8.4.0 \ + --no-default-nix \ + --translator package-json \ + --attribute-name eslint \ + --arg name="{automatic}" \ + --arg noDev=true \ + --arg nodejs=14 \ + --arg npmArgs= - ${cli} update eslint --to-version 8.4.1 -'' + ${cli} update eslint --to-version 8.4.1 + '' diff --git a/tests/impure/test_package-lock.nix b/tests/impure/test_package-lock.nix index 22ce7a1d..701946c1 100644 --- a/tests/impure/test_package-lock.nix +++ b/tests/impure/test_package-lock.nix @@ -1,26 +1,22 @@ { lib, - # dream2nix apps, utils, ... -}: -let - +}: let l = lib // builtins; cli = apps.cli.program; - in -utils.writePureShellScript -[] -'' - ${cli} add github:mattermost/mattermost-webapp/v6.1.0 \ - --no-default-nix \ - --translator package-lock \ - --attribute-name mattermost-webapp \ - --arg name="{automatic}" \ - --arg noDev=false \ - --arg nodejs=14 -'' + utils.writePureShellScript + [] + '' + ${cli} add github:mattermost/mattermost-webapp/v6.1.0 \ + --no-default-nix \ + --translator package-lock \ + --attribute-name mattermost-webapp \ + --arg name="{automatic}" \ + --arg noDev=false \ + --arg nodejs=14 + '' diff --git a/tests/impure/test_rust.nix b/tests/impure/test_rust.nix index 530cfc63..d1eb5b60 100644 --- a/tests/impure/test_rust.nix +++ b/tests/impure/test_rust.nix @@ -1,24 +1,20 @@ { lib, - # dream2nix apps, utils, ... -}: -let - +}: let l = lib // builtins; cli = apps.cli.program; - in -utils.writePureShellScript -[] -'' - ${cli} add github:BurntSushi/ripgrep/13.0.0 \ - --no-default-nix \ - --translator cargo-lock \ - --arg packageName="ripgrep" \ - --attribute-name ripgrep -'' + utils.writePureShellScript + [] + '' + ${cli} add github:BurntSushi/ripgrep/13.0.0 \ + --no-default-nix \ + --translator cargo-lock \ + --arg packageName="ripgrep" \ + --attribute-name ripgrep + '' diff --git a/tests/impure/test_yarn-lock-prettier.nix b/tests/impure/test_yarn-lock-prettier.nix index e23d9d65..c725b1e3 100644 --- a/tests/impure/test_yarn-lock-prettier.nix +++ b/tests/impure/test_yarn-lock-prettier.nix @@ -1,26 +1,22 @@ { lib, - # dream2nix apps, utils, ... 
-}: -let - +}: let l = lib // builtins; cli = apps.cli.program; - in -utils.writePureShellScript -[] -'' - ${cli} add github:prettier/prettier/2.4.1 \ - --no-default-nix \ - --translator yarn-lock \ - --attribute-name prettier \ - --arg name="{automatic}" \ - --arg noDev=false \ - --arg nodejs=14 -'' + utils.writePureShellScript + [] + '' + ${cli} add github:prettier/prettier/2.4.1 \ + --no-default-nix \ + --translator yarn-lock \ + --attribute-name prettier \ + --arg name="{automatic}" \ + --arg noDev=false \ + --arg nodejs=14 + '' diff --git a/tests/pure/default.nix b/tests/pure/default.nix index 23846c5a..a86c14ae 100644 --- a/tests/pure/default.nix +++ b/tests/pure/default.nix @@ -1,10 +1,8 @@ { lib ? pkgs.lib, pkgs ? import {}, - dream2nix ? import ./src { inherit pkgs; }, -}: - -let + dream2nix ? import ./src {inherit pkgs;}, +}: let l = pkgs.lib // builtins; buildProjectsTests = import ./projects.nix { @@ -14,8 +12,6 @@ let otherTests = import ./other { inherit lib pkgs dream2nix; }; - in -buildProjectsTests -// -otherTests + buildProjectsTests + // otherTests diff --git a/tests/pure/other/default.nix b/tests/pure/other/default.nix index c0608f50..1fac4152 100644 --- a/tests/pure/other/default.nix +++ b/tests/pure/other/default.nix @@ -1,21 +1,18 @@ { lib ? pkgs.lib, pkgs ? import {}, - dream2nix ? import ./src { inherit pkgs; }, -}: - -let - + dream2nix ? import ./src {inherit pkgs;}, +}: let l = pkgs.lib // builtins; fetchAggrgatedGithub = dream2nix.utils.toDrv - (dream2nix.fetchSources { - dreamLock = ./prettier-github-aggregated.json; - }).fetchedSources.prettier."2.4.1"; - -in -{ + (dream2nix.fetchSources { + dreamLock = ./prettier-github-aggregated.json; + }) + .fetchedSources + .prettier + ."2.4.1"; +in { inherit fetchAggrgatedGithub; } - diff --git a/tests/pure/projects.nix b/tests/pure/projects.nix index a8d61c4c..20a635e8 100644 --- a/tests/pure/projects.nix +++ b/tests/pure/projects.nix @@ -1,26 +1,22 @@ { lib ? pkgs.lib, pkgs ? import {}, - dream2nix ? import ./src { inherit pkgs; }, -}: - -let + dream2nix ? 
import ./src {inherit pkgs;}, +}: let lib = pkgs.lib // builtins; - makeTest = - { - name, - source, - cmds, - }: - let - outputs = dream2nix.makeOutputs { - inherit source; - }; - commandsToRun = cmds outputs; - in - pkgs.runCommand "test-${name}" {} - (lib.concatStringsSep "\n" commandsToRun); + makeTest = { + name, + source, + cmds, + }: let + outputs = dream2nix.makeOutputs { + inherit source; + }; + commandsToRun = cmds outputs; + in + pkgs.runCommand "test-${name}" {} + (lib.concatStringsSep "\n" commandsToRun); projects = { prettier = { @@ -28,22 +24,19 @@ let url = "https://github.com/prettier/prettier/tarball/2.4.1"; sha256 = "19b37qakhlsnr2n5bgv83aih5npgzbad1d2p2rs3zbq5syqbxdyi"; }; - cmds = outputs: - let - prettier = outputs.defaultPackage.overrideAttrs (old: { - dontBuild = true; - }); - in - [ - "${prettier}/bin/prettier --version | grep -q 2.4.1 && mkdir $out" - ]; + cmds = outputs: let + prettier = outputs.defaultPackage.overrideAttrs (old: { + dontBuild = true; + }); + in [ + "${prettier}/bin/prettier --version | grep -q 2.4.1 && mkdir $out" + ]; }; }; allTests = lib.mapAttrs - (name: args: makeTest (args // { inherit name; })) - projects; - + (name: args: makeTest (args // {inherit name;})) + projects; in -allTests + allTests diff --git a/tests/unit/default.nix b/tests/unit/default.nix index 4254243e..7e732647 100644 --- a/tests/unit/default.nix +++ b/tests/unit/default.nix @@ -1,23 +1,19 @@ { self, - lib, nix, python3, - utils, dream2nixWithExternals, ... -}: -let - l = lib // builtins; - +}: let + l = lib // builtins; in utils.writePureShellScript - [ - nix - ] - '' - export dream2nixSrc=${dream2nixWithExternals} - ${python3.pkgs.pytest}/bin/pytest ${self}/tests/unit "$@" - '' + [ + nix + ] + '' + export dream2nixSrc=${dream2nixWithExternals} + ${python3.pkgs.pytest}/bin/pytest ${self}/tests/unit "$@" + ''