WIP stack-lock translator & haskell builder

DavHau 2022-05-31 14:38:28 +02:00
parent d6d94d88db
commit 8836a3f422
5 changed files with 832 additions and 0 deletions


@@ -0,0 +1,227 @@
{
"nodes": {
"alejandra": {
"inputs": {
"flakeCompat": "flakeCompat",
"nixpkgs": [
"dream2nix",
"nixpkgs"
]
},
"locked": {
"lastModified": 1652972885,
"narHash": "sha256-OKTV5Mi0WyDGsF6GcTwWkgJPNRkskD5yqCZZmghZYHI=",
"owner": "kamadorueda",
"repo": "alejandra",
"rev": "69d2075e432c562099965829d8bc4da701b10d20",
"type": "github"
},
"original": {
"owner": "kamadorueda",
"repo": "alejandra",
"type": "github"
}
},
"crane": {
"flake": false,
"locked": {
"lastModified": 1644785799,
"narHash": "sha256-VpAJO1L0XeBvtCuNGK4IDKp6ENHIpTrlaZT7yfBCvwo=",
"owner": "ipetkov",
"repo": "crane",
"rev": "fc7a94f841347c88f2cb44217b2a3faa93e2a0b2",
"type": "github"
},
"original": {
"owner": "ipetkov",
"repo": "crane",
"type": "github"
}
},
"dream2nix": {
"inputs": {
"alejandra": "alejandra",
"crane": "crane",
"flake-utils-pre-commit": "flake-utils-pre-commit",
"gomod2nix": "gomod2nix",
"mach-nix": "mach-nix",
"nixpkgs": "nixpkgs",
"node2nix": "node2nix",
"poetry2nix": "poetry2nix",
"pre-commit-hooks": "pre-commit-hooks"
},
"locked": {
"lastModified": 1653944295,
"narHash": "sha256-xoFmfL71JS/wP5SvkupqDB7SNhDFmb77dyiyniNAwYs=",
"owner": "nix-community",
"repo": "dream2nix",
"rev": "ca7f4d0a7fb79813b446ebce097c3db538b37b8c",
"type": "github"
},
"original": {
"owner": "nix-community",
"repo": "dream2nix",
"type": "github"
}
},
"flake-utils-pre-commit": {
"locked": {
"lastModified": 1644229661,
"narHash": "sha256-1YdnJAsNy69bpcjuoKdOYQX0YxZBiCYZo4Twxerqv7k=",
"owner": "numtide",
"repo": "flake-utils",
"rev": "3cecb5b042f7f209c56ffd8371b2711a290ec797",
"type": "github"
},
"original": {
"owner": "numtide",
"repo": "flake-utils",
"type": "github"
}
},
"flakeCompat": {
"flake": false,
"locked": {
"lastModified": 1648199409,
"narHash": "sha256-JwPKdC2PoVBkG6E+eWw3j6BMR6sL3COpYWfif7RVb8Y=",
"owner": "edolstra",
"repo": "flake-compat",
"rev": "64a525ee38886ab9028e6f61790de0832aa3ef03",
"type": "github"
},
"original": {
"owner": "edolstra",
"repo": "flake-compat",
"type": "github"
}
},
"gomod2nix": {
"flake": false,
"locked": {
"lastModified": 1627572165,
"narHash": "sha256-MFpwnkvQpauj799b4QTBJQFEddbD02+Ln5k92QyHOSk=",
"owner": "tweag",
"repo": "gomod2nix",
"rev": "67f22dd738d092c6ba88e420350ada0ed4992ae8",
"type": "github"
},
"original": {
"owner": "tweag",
"repo": "gomod2nix",
"type": "github"
}
},
"mach-nix": {
"flake": false,
"locked": {
"lastModified": 1634711045,
"narHash": "sha256-m5A2Ty88NChLyFhXucECj6+AuiMZPHXNbw+9Kcs7F6Y=",
"owner": "DavHau",
"repo": "mach-nix",
"rev": "4433f74a97b94b596fa6cd9b9c0402104aceef5d",
"type": "github"
},
"original": {
"id": "mach-nix",
"type": "indirect"
}
},
"nixpkgs": {
"locked": {
"lastModified": 1653581809,
"narHash": "sha256-Uvka0V5MTGbeOfWte25+tfRL3moECDh1VwokWSZUdoY=",
"owner": "NixOS",
"repo": "nixpkgs",
"rev": "83658b28fe638a170a19b8933aa008b30640fbd1",
"type": "github"
},
"original": {
"id": "nixpkgs",
"ref": "nixos-unstable",
"type": "indirect"
}
},
"node2nix": {
"flake": false,
"locked": {
"lastModified": 1634916276,
"narHash": "sha256-lov2b/8ydYjq+MhKQugmWV2lFnq35AU5RTRBTfLq7B4=",
"owner": "svanderburg",
"repo": "node2nix",
"rev": "644e90c0304038a446ed53efc97e9eb1e2831e71",
"type": "github"
},
"original": {
"owner": "svanderburg",
"repo": "node2nix",
"type": "github"
}
},
"poetry2nix": {
"flake": false,
"locked": {
"lastModified": 1632969109,
"narHash": "sha256-jPDclkkiAy5m2gGLBlKgH+lQtbF7tL4XxBrbSzw+Ioc=",
"owner": "nix-community",
"repo": "poetry2nix",
"rev": "aee8f04296c39d88155e05d25cfc59dfdd41cc77",
"type": "github"
},
"original": {
"owner": "nix-community",
"ref": "1.21.0",
"repo": "poetry2nix",
"type": "github"
}
},
"pre-commit-hooks": {
"inputs": {
"flake-utils": [
"dream2nix",
"flake-utils-pre-commit"
],
"nixpkgs": [
"dream2nix",
"nixpkgs"
]
},
"locked": {
"lastModified": 1646153636,
"narHash": "sha256-AlWHMzK+xJ1mG267FdT8dCq/HvLCA6jwmx2ZUy5O8tY=",
"owner": "cachix",
"repo": "pre-commit-hooks.nix",
"rev": "b6bc0b21e1617e2b07d8205e7fae7224036dfa4b",
"type": "github"
},
"original": {
"owner": "cachix",
"repo": "pre-commit-hooks.nix",
"type": "github"
}
},
"root": {
"inputs": {
"dream2nix": "dream2nix",
"src": "src"
}
},
"src": {
"flake": false,
"locked": {
"lastModified": 1653912557,
"narHash": "sha256-vWyVk9pnzj/OaPhcOiWGq4pA4k5Ml78YDBKbofUKApE=",
"owner": "NorfairKing",
"repo": "cabal2json",
"rev": "001af89f29ef048fc610517f2d07e694e5fbe56b",
"type": "github"
},
"original": {
"owner": "NorfairKing",
"repo": "cabal2json",
"type": "github"
}
}
},
"root": "root",
"version": 7
}


@@ -0,0 +1,29 @@
{
inputs = {
dream2nix.url = "github:nix-community/dream2nix";
src.url = "github:NorfairKing/cabal2json";
src.flake = false;
};
outputs = {
self,
dream2nix,
src,
} @ inp: let
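    # initialize dream2nix for the given systems, using this flake's
    # directory as the project root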
dream2nix = inp.dream2nix.lib2.init {
systems = ["x86_64-linux"];
config.projectRoot = ./.;
};
in
(dream2nix.makeFlakeOutputs {
source = src;
settings = [
{
subsystemInfo.noDev = true;
}
];
})
// {
# checks = self.packages;
};
}


@@ -0,0 +1,122 @@
{lib, ...}: let
l = lib // builtins;
in {
type = "pure";
build = {
lib,
pkgs,
stdenv,
# dream2nix inputs
externals,
utils,
...
}: {
### FUNCTIONS
    # (AttrSet -> Bool) -> AttrSet -> [x]
getCyclicDependencies, # name: version: -> [ {name=; version=; } ]
getDependencies, # name: version: -> [ {name=; version=; } ]
getSource, # name: version: -> store-path
# to get information about the original source spec
getSourceSpec, # name: version: -> {type="git"; url=""; hash="";}
### ATTRIBUTES
subsystemAttrs, # attrset
defaultPackageName, # string
defaultPackageVersion, # string
# all exported (top-level) package names and versions
# attrset of pname -> version,
packages,
# all existing package names and versions
# attrset of pname -> versions,
# where versions is a list of version strings
packageVersions,
# function which applies overrides to a package
# It must be applied by the builder to each individual derivation
# Example:
# produceDerivation name (mkDerivation {...})
produceDerivation,
...
} @ args: let
b = builtins;
all-cabal-hashes = pkgs.runCommand "all-cabal-hashes" {} ''
mkdir $out
cd $out
tar --strip-components 1 -xf ${pkgs.all-cabal-hashes}
'';
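    # the unpacked tree follows the all-cabal-hashes layout assumed further
    # below: ${all-cabal-hashes}/<pname>/<version>/<pname>.cabal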
# the main package
defaultPackage = allPackages."${defaultPackageName}"."${defaultPackageVersion}";
# packages to export
packages =
lib.mapAttrs
(name: version: {
"${version}" = allPackages.${name}.${version};
})
args.packages;
# manage packages in attrset to prevent duplicated evaluation
allPackages =
lib.mapAttrs
(name: versions:
lib.genAttrs
versions
(version: makeOnePackage name version))
packageVersions;
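    # shape of allPackages (and of the exported `packages` above):
    # { ${name} = { ${version} = <derivation>; }; }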
# Generates a derivation for a specific package name + version
makeOnePackage = name: version: let
pkg = pkgs.haskell.packages.ghc8107.mkDerivation (rec {
pname = utils.sanitizeDerivationName name;
inherit version;
license = null;
src = getSource name version;
isLibrary = true;
isExecutable = true;
doCheck = false;
doBenchmark = false;
libraryToolDepends = libraryHaskellDepends;
executableHaskellDepends = libraryHaskellDepends;
testHaskellDepends = libraryHaskellDepends;
testToolDepends = libraryHaskellDepends;
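        # NOTE: all dependency lists above are currently aliased to
        # libraryHaskellDepends; hspec and QuickCheck are added to every
        # package below, presumably a stop-gap until per-package test and
        # tool dependencies are translated from the cabal files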
libraryHaskellDepends =
(with pkgs.haskell.packages.ghc8107; [
# Cabal_3_2_1_0
hspec
QuickCheck
])
++ (
map
(dep: allPackages."${dep.name}"."${dep.version}")
(getDependencies name version)
);
# TODO: Implement build phases
}
/*
For all transitive dependencies, overwrite cabal file with the one
from all-cabal-hashes.
We want to ensure that the cabal file is the latest revision.
See: https://github.com/haskell-infra/hackage-trustees/blob/master/revisions-information.md
*/
// (l.optionalAttrs (name != defaultPackageName) {
preConfigure = ''
cp ${all-cabal-hashes}/${name}/${version}/${name}.cabal ./
'';
})
# enable tests only for the top-level package
// (l.optionalAttrs (name == defaultPackageName) {
doCheck = true;
}));
in
# apply packageOverrides to current derivation
produceDerivation name pkg;
in {
inherit defaultPackage packages;
};
}


@@ -0,0 +1,378 @@
{
dlib,
lib,
...
}: let
l = lib // builtins;
hiddenPackagesDefault = {
# TODO: unblock these packages and implement actual logic to interpret the
# flags found in cabal files
Win32 = null;
# copied from stacklock2nix
array = null;
base = null;
binary = null;
bytestring = null;
Cabal = null;
containers = null;
deepseq = null;
directory = null;
dns-internal = null;
fast-digits-internal = null;
filepath = null;
ghc = null;
ghc-boot = null;
ghc-boot-th = null;
ghc-compact = null;
ghc-heap = null;
ghc-prim = null;
ghci = null;
haskeline = null;
hpc = null;
integer-gmp = null;
libiserv = null;
mtl = null;
parsec = null;
pretty = null;
process = null;
rts = null;
stm = null;
template-haskell = null;
terminfo = null;
text = null;
time = null;
transformers = null;
unix = null;
xhtml = null;
};
in {
type = "ifd";
/*
Automatically generate unit tests for this translator using project sources
from the specified list.
!!! Your first action should be adding a project here. This will simplify
your work because you will be able to use `nix run .#tests-unit` to
test your implementation for correctness.
*/
generateUnitTestsForProjects = [
(builtins.fetchTarball {
url = "https://github.com/NorfairKing/cabal2json/tarball/8b864d93e3e99eb547a0d377da213a1fae644902";
sha256 = "0zd38mzfxz8jxdlcg3fy6gqq7bwpkfann9w0vd6n8aasyz8xfbpj";
})
];
discoverProject = tree:
l.any
(filename: l.hasSuffix ".cabal" filename)
(l.attrNames tree.files);
# translate from a given source and a project specification to a dream-lock.
translate = {
translatorName,
pkgs,
utils,
...
}: let
haskellUtils = import ./utils.nix {inherit dlib lib pkgs;};
all-cabal-hashes = let
all-cabal-hashes' = pkgs.runCommand "all-cabal-hashes" {} ''
mkdir $out
cd $out
tar --strip-components 1 -xf ${pkgs.all-cabal-hashes}
'';
names = dlib.listDirs all-cabal-hashes';
getVersions = name: dlib.listDirs "${all-cabal-hashes'}/${name}";
in
l.genAttrs names
(name:
l.genAttrs
(getVersions name)
(
version:
(l.fromJSON (l.readFile "${all-cabal-hashes'}/${name}/${version}/${name}.json"))
.package-hashes
));
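    # resulting shape: all-cabal-hashes.${name}.${version} is the
    # `package-hashes` attrset of that release, providing e.g. the SHA256
    # used for the sourceSpec further below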
in
{
project,
tree,
...
} @ args: let
# get the root source and project source
rootSource = tree.fullPath;
projectSource = "${tree.fullPath}/${project.relPath}";
projectTree = tree.getNodeFromPath project.relPath;
# parse the cabal file
cabalFiles =
l.filter
(l.hasSuffix ".cabal")
(l.attrNames projectTree.files);
cabalFile = projectTree.getNodeFromPath (l.head cabalFiles);
cabal = haskellUtils.fromCabal cabalFile.fullPath project.name;
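      # the package version comes as a list of components; join it into a
      # dotted version string (illustrative example: [1 0 0] -> "1.0.0")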
defaultPackageVersion =
l.concatStringsSep
"."
(l.map l.toString cabal.description.package.version);
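      # stack.yaml.lock points at a stackage snapshot; fetch and parse it so
      # that the snapshot packages can be merged with the lock file's own
      # package entries further below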
stackLock =
haskellUtils.fromYaml
(projectTree.getNodeFromPath "stack.yaml.lock").fullPath;
snapshotEntry = l.head (stackLock.snapshots);
      snapshotYamlFile = pkgs.fetchurl {
        url = snapshotEntry.completed.url;
        sha256 = snapshotEntry.completed.sha256;
        # This is a plain text file, therefore enable HTTP compression
        # (curlOpts is supported by pkgs.fetchurl, but not by builtins.fetchurl)
        curlOpts = "--compressed";
      };
snapshot = haskellUtils.fromYaml snapshotYamlFile;
hidden =
hiddenPackagesDefault;
# TODO: find out what to do with the hidden packages from the snapshot
# Currently it looks like those should not be included
# // (
# l.genAttrs
# (l.attrNames snapshot.hidden)
# (name: null)
# );
serializedRawObjects =
l.map
parseStackLockEntry
(stackLock.packages ++ snapshot.packages);
allCandidates =
l.map
(rawObj: dlib.nameVersionPair rawObj.name rawObj.version)
serializedRawObjects;
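      # parsed cabal files for all candidates, accessible as
      # cabalData.${name}.${version} (generated in one batch via IFD, see
      # batchCabalData in ./utils.nix)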
cabalData =
haskellUtils.batchCabalData
allCandidates;
parseStackLockEntry = entry:
if entry ? completed
then parseHackageUrl entry.completed.hackage
else parseHackageUrl entry.hackage;
parseHackageUrl = url:
# example:
# AC-Angle-1.0@sha256:e1ffee97819283b714598b947de323254e368f6ae7d4db1d3618fa933f80f065,544
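      # ...which parses to roughly (hash shortened for readability):
      #   { name = "AC-Angle"; version = "1.0"; hash = "e1ffee97..."; }
      # the trailing ",544" is split into `length` but not returned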
let
splitAtAt = l.splitString "@" url;
nameVersion = l.head splitAtAt;
hashAll = l.last splitAtAt;
nameVersionPieces = l.splitString "-" nameVersion;
version = l.last nameVersionPieces;
name = l.concatStringsSep "-" (l.init nameVersionPieces);
hashChecksumSplit = l.splitString ":" hashAll;
hashType = l.head hashChecksumSplit;
hashAndLength = l.last hashChecksumSplit;
hashLengthSplit = l.splitString "," hashAndLength;
hash = l.head hashLengthSplit;
length = l.last hashLengthSplit;
in {
inherit name version hash;
};
getHackageUrl = finalObj: "https://hackage.haskell.org/package/${finalObj.name}-${finalObj.version}.tar.gz";
# downloadCabalFile = finalObj: let
# source' = builtins.fetchurl {
# url = finalObj.sourceSpec.url;
# sha256 = finalObj.sourceSpec.hash;
# };
# source = utils.extractSource {
# source = source';
# };
# in "${source}/${finalObj.name}.cabal";
getDependencyNames = finalObj: objectsByName: let
cabal = with finalObj;
cabalData.${name}.${version};
targetBuildDepends =
cabal.library.condTreeData.build-info.targetBuildDepends or [];
buildToolDepends =
cabal.library.condTreeData.build-info.buildToolDepends or [];
# testTargetBuildDepends = l.flatten (
# l.mapAttrsToList
# (suiteName: suite: suite.condTreeData.build-info.targetBuildDepends)
# cabal.test-suites or {}
# );
defaultFlags = l.filter (flag: flag.default) cabal.package-flags;
defaultFlagNames = l.map (flag: flag.name) defaultFlags;
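        # NOTE: the default flag names do not appear to be consumed yet;
        # see the TODO about interpreting cabal flags at the top of this file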
collectBuildDepends = condTreeComponent:
l.concatMap
(attrs: attrs.targetBuildDepends)
(l.collect
(x: x ? targetBuildDepends)
condTreeComponent);
condBuildDepends =
l.concatMap
(component: collectBuildDepends component)
cabal.library.condTreeComponents or [];
depNames =
l.map
(dep: dep.package-name)
(targetBuildDepends ++ buildToolDepends ++ condBuildDepends);
in
l.filter
(name:
# ensure package is not a hidden package
(! hidden ? ${name})
# ignore packages which are not part of the snapshot or lock file
&& (objectsByName ? ${name}))
depNames;
in
dlib.simpleTranslate2
({objectsByKey, ...}: rec {
inherit translatorName;
# relative path of the project within the source tree.
location = project.relPath;
# the name of the subsystem
subsystemName = "haskell";
# Extract subsystem specific attributes.
# The structure of this should be defined in:
# ./src/specifications/{subsystem}
subsystemAttrs = {};
# name of the default package
defaultPackage = cabal.description.package.name;
/*
List the package candidates which should be exposed to the user.
Only top-level packages should be listed here.
Users will not be interested in all individual dependencies.
*/
exportedPackages = {
"${defaultPackage}" = defaultPackageVersion;
};
/*
a list of raw package objects
If the upstream format is a deep attrset, this list should contain
a flattened representation of all entries.
*/
serializedRawObjects =
l.map
parseStackLockEntry
(stackLock.packages ++ snapshot.packages);
/*
Define extractor functions which each extract one property from
a given raw object.
(Each rawObj comes from serializedRawObjects).
Extractors can access the fields extracted by other extractors
by accessing finalObj.
*/
extractors = {
name = rawObj: finalObj:
rawObj.name;
version = rawObj: finalObj:
rawObj.version;
dependencies = rawObj: finalObj: let
depNames = getDependencyNames finalObj objectsByKey.name;
in
l.map
(depName: {
name = depName;
version = objectsByKey.name.${depName}.version;
})
depNames;
sourceSpec = rawObj: finalObj:
# example
# https://hackage.haskell.org/package/AC-Angle-1.0/AC-Angle-1.0.tar.gz
{
type = "http";
url = getHackageUrl finalObj;
hash = with finalObj; "sha256:${all-cabal-hashes.${name}.${version}.SHA256}";
};
};
/*
Optionally define extra extractors which will be used to key all
final objects, so objects can be accessed via:
`objectsByKey.${keyName}.${value}`
*/
keys = {
          /*
          The `name` key is required: it is used to resolve dependency
          versions via `objectsByKey.name` (see the `dependencies` extractor
          above and `extraObjects` below).
          */
name = rawObj: finalObj:
finalObj.name;
};
/*
Optionally add extra objects (list of `finalObj`) to be added to
the dream-lock.
*/
extraObjects = [
{
name = defaultPackage;
version = defaultPackageVersion;
dependencies = let
testTargetBuildDepends = l.flatten (
l.mapAttrsToList
(suiteName: suite:
suite.condTreeData.build-info.targetBuildDepends
++ suite.condTreeData.build-info.buildToolDepends)
cabal.test-suites or {}
);
depNames =
l.map
(dep: dep.package-name)
(
cabal.library.condTreeData.build-info.targetBuildDepends
or []
++ cabal.library.condTreeData.build-info.buildToolDepends or []
++ testTargetBuildDepends
);
in
l.map
(depName: {
name = depName;
version = objectsByKey.name.${depName}.version;
})
(l.filter
(name:
(! hidden ? ${name})
&& (name != defaultPackage))
depNames);
sourceSpec = {
type = "path";
path = projectTree.fullPath;
};
}
];
});
extraArgs = {};
}


@@ -0,0 +1,76 @@
{
lib,
dlib,
pkgs,
}: let
l = lib // builtins;
flakeCompat = import (builtins.fetchTarball {
url = "https://github.com/edolstra/flake-compat/tarball/b4a34015c698c7793d592d66adbab377907a2be8";
sha256 = "1qc703yg0babixi6wshn5wm2kgl5y1drcswgszh4xxzbrwkk9sv7";
});
in rec {
all-cabal-hashes = pkgs.runCommand "all-cabal-hashes" {} ''
mkdir $out
cd $out
tar --strip-components 1 -xf ${pkgs.all-cabal-hashes}
'';
cabal2jsonSrc = builtins.fetchTarball {
url = "https://github.com/NorfairKing/cabal2json/tarball/8b864d93e3e99eb547a0d377da213a1fae644902";
sha256 = "0zd38mzfxz8jxdlcg3fy6gqq7bwpkfann9w0vd6n8aasyz8xfbpj";
};
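  # evaluate the pinned cabal2json flake through flake-compat so its
  # package set can be used from this plain (non-flake) nix code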
cabal2jsonFlake = flakeCompat {
src = cabal2jsonSrc;
};
cabal2json = cabal2jsonFlake.defaultNix.packages.${pkgs.system}.cabal2json;
# parse cabal file via IFD
fromCabal = file: name: let
file' = l.path {path = file;};
jsonFile = pkgs.runCommand "${name}.cabal.json" {} ''
${cabal2json}/bin/cabal2json ${file'} > $out
'';
in
l.fromJSON (l.readFile jsonFile);
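  # parse a yaml file (e.g. stack.yaml.lock or a snapshot file) into a nix
  # value via IFD, using yaml2json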
fromYaml = file: let
file' = l.path {path = file;};
jsonFile = pkgs.runCommand "yaml.json" {} ''
${pkgs.yaml2json}/bin/yaml2json < ${file'} > $out
'';
in
l.fromJSON (l.readFile jsonFile);
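  # convert the cabal files of all given candidates ({name, version} attrs)
  # to json within a single derivation (one IFD instead of one per package)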
batchCabal2Json = candidates: let
candidatesJsonStr = l.toJSON candidates;
convertOne = name: version: ''
cabalFile=${all-cabal-hashes}/${name}/${version}/${name}.cabal
      if [ -e "$cabalFile" ]; then
echo "converting cabal to json: ${name}-${version}"
mkdir -p $out/${name}/${version}
${cabal2json}/bin/cabal2json \
$cabalFile \
> $out/${name}/${version}/cabal.json
else
echo "all-cabal-hashes" seems to be outdated
exit 1
fi
'';
in
pkgs.runCommand "cabal-json-files" {}
(l.concatStringsSep "\n"
(l.map (c: convertOne c.name c.version) candidates));
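  # read the tree produced by batchCabal2Json back into a nested attrset:
  # { ${name} = { ${version} = <parsed cabal data>; }; }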
batchCabalData = candidates: let
batchJson = batchCabal2Json candidates;
in
l.mapAttrs
(name: _:
l.mapAttrs
(version: _: l.fromJSON (l.readFile "${batchJson}/${name}/${version}/cabal.json"))
(l.readDir "${batchJson}/${name}"))
(l.readDir batchJson);
}