Use new cleanSourceWith to filter component source (#209)

When trying to build haskell packages from local source using nix
(source code you have used `git clone` to retrieve and have modified)
it is often annoying that small changes can trigger a lot of components
to be built unnecessarily.  For instance if you change the code for
a test in a package it will often trigger builds of the library and
all its dependencies.

To avoid this problem we could manually set up `cleanSourceWith` calls
in nix to filter out files that are not needed, but doing this for every
component would be difficult and error prone.

This change automates this process using the information in the
package's `.cabal` file to filter the source of each component
automatically when using `callCabalProjectToNix` (we should be able to
add it to `callStackToNix` too).

## How does it work?

* A new version of `cleanSourceWith` is used that adds a `subDir`
  argument.  It allows descending into a subdirectory to compose with
  cleaning the source.

* A new `--full` argument is passed to `plan-to-nix` by
  `callCabalPlanToNix` telling it to output extra information about
  the cabal packages in the plan (directories and filenames).

* `callCabalProjectToNix` uses the new `subDir` argument of
  `cleanSourceWith` when setting the packageSrc.

* `comp-builder.nix` and `setup-builder.nix` use a new
  `cleanCabalComponent` function to further clean the packageSrc
  using the new information in the `package` and `component`.

## hpack wildcards
`package.yaml` files can include wildcards and these are not handled
yet (they do not map to anything in the `.cabal` file and so are lost).
They will need to be added manually to the appropriate field
(eg. `packages.xyz.package.dataFiles`)

If your hpack file does not contain wildcards, or if you are willing to
add the wildcard entries manually in nix, it is still useful to
enable cleaning on hpack.

For example:

```
packages.ghc-toolkit.package.cleanHpack = true;
packages.ghc-toolkit.components.library.extraSrcFiles = [
  "genapply/**/**"
  "boot-libs/**/**"
  "ghc-libdir/**/**"
  ];
```
This commit is contained in:
Hamish Mackenzie 2019-08-26 12:27:47 +02:00 committed by GitHub
parent b4afacb1b5
commit 0366537651
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
16 changed files with 381 additions and 94 deletions

View File

@ -43,6 +43,11 @@
}:
let
# TODO fix cabal wildcard support so hpack wildcards can be mapped to cabal wildcards
cleanSrc = if cabal-generator == "hpack" && !(package.cleanHpack or false)
then builtins.trace ("Cleaning component source not supported for hpack package: " + name) src
else haskellLib.cleanCabalComponent package component src;
fullName = if haskellLib.isAll componentId
then "${name}-all"
else "${name}-${componentId.ctype}-${componentId.cname}";
@ -116,12 +121,14 @@ in stdenv.lib.fix (drv:
stdenv.mkDerivation ({
name = fullName;
inherit src doCheck doCrossCheck dontPatchELF dontStrip;
src = cleanSrc;
inherit doCheck doCrossCheck dontPatchELF dontStrip;
passthru = {
inherit (package) identifier;
config = component;
inherit configFiles executableToolDepends;
inherit configFiles executableToolDepends cleanSrc;
env = shellWrappers;
# The directory containing the haddock documentation.

View File

@ -3,28 +3,48 @@
{ setup-depends, package, name, src, flags }:
let
component = {
depends = setup-depends;
libs = [];
frameworks = [];
doExactConfig = false;
# We have to set hsSourceDirs or cleanCabalComponent will
# include everything (and as a result all the components of
# the package will depend on eveything in the package).
# TODO find a better way
hsSourceDirs = ["setup-src"];
includeDirs = [];
asmSources = [];
cSources = [];
cmmSources = [];
cxxSources = [];
jsSources = [];
extraSrcFiles = [ "Setup.hs" "Setup.lhs" ];
};
cleanSrc = haskellLib.cleanCabalComponent package component src;
fullName = "${name}-setup";
includeGhcPackage = lib.any (p: p.identifier.name == "ghc") setup-depends;
configFiles = makeSetupConfigFiles {
inherit (package) identifier;
inherit fullName flags;
component = {
depends = setup-depends;
libs = [];
frameworks = [];
doExactConfig = false;
};
};
inherit fullName flags component;
};
in
stdenv.lib.fix (drv:
stdenv.mkDerivation {
name = "${fullName}";
inherit src;
src = cleanSrc;
nativeBuildInputs = [ghc];
passthru = {
inherit (package) identifier;
config = component;
inherit configFiles cleanSrc;
};
CABAL_CONFIG = configFiles + /cabal.config;
phases = ["unpackPhase" "buildPhase" "installPhase"];
buildPhase = ''

View File

@ -178,12 +178,13 @@ let
mkLocalHackageRepo = import ./mk-local-hackage-repo { inherit (self) hackageTarball; inherit pkgs; };
dotCabal = { index-state, sha256 }@args:
pkgs.runCommand "dot-cabal-at-${builtins.replaceStrings [":"] [""] index-state}" { nativeBuildInputs = [ pkgs.cabal-install ]; } ''
dotCabal = { index-state, sha256, cabal-install ? pkgs.cabal-install }@args:
pkgs.runCommand "dot-cabal-at-${builtins.replaceStrings [":"] [""] index-state}" { nativeBuildInputs = [ cabal-install ]; } ''
mkdir -p $out/.cabal
cat <<EOF > $out/.cabal/config
repository cached
url: file:${self.mkLocalHackageRepo args}
url: file:${self.mkLocalHackageRepo
(builtins.removeAttrs args ["cabal-install"])}
secure: True
root-keys:
key-threshold: 0
@ -205,13 +206,18 @@ let
# Resulting nix files are added to nix-plan subdirectory.
callCabalProjectToNix = import ./lib/call-cabal-project-to-nix.nix {
index-state-hashes = import indexStateHashesPath;
inherit (buildPackages) dotCabal;
pkgs = buildPackages.pkgs; # buildPackages;
inherit (buildPackages) dotCabal haskellLib;
pkgs = buildPackages.pkgs;
inherit (buildPackages.pkgs.haskellPackages) hpack;
inherit (buildPackages.pkgs) runCommand cabal-install ghc symlinkJoin cacert;
inherit (buildPackages) nix-tools;
};
# Loads a plan and filters the package directories using cleanSourceWith
importAndFilterProject = import ./lib/import-and-filter-project.nix {
inherit pkgs haskellLib;
};
# References to the unpacked sources, for caching in a Hydra jobset.
source-pins = self.callPackage ./lib/make-source-pins.nix {
sources = [ hackageSrc stackageSrc pkgs.path ];
@ -238,7 +244,7 @@ let
tar xzf ${tarball}
mv "${name}-${version}" $out
'';
in let plan-pkgs = import (callCabalProjectToNix { inherit src; index-state = builtins.trace "Using index-state: ${index-state}" index-state; });
in let plan-pkgs = (callCabalProjectToNix { inherit src; index-state = builtins.trace "Using index-state: ${index-state}" index-state; }).pkgs;
in let pkg-set = mkCabalProjectPkgSet { inherit plan-pkgs; };
in pkg-set.config.hsPkgs.${name};
});

View File

@ -177,8 +177,49 @@ A derivation containing the `nix-tools` [command-line tools](commands.md).
## callStackToNix
Runs `stack-to-nix` and produces the output needed for
`importAndFilterProject`.
**Example**:
```nix
pkgSet = mkStackPkgSet {
stack-pkgs = (importAndFilterProject (callStackToNix {
src = ./.;
})).pkgs;
pkg-def-extras = [];
modules = [];
};
```
## callCabalProjectToNix
Runs `cabal new-configure` and `plan-to-nix` and produces the output
needed for `importAndFilterProject`.
**Example**:
```nix
pkgSet = mkCabalProjectPkgSet {
plan-pkgs = (importAndFilterProject (callCabalProjectToNix {
index-state = "2019-04-30T00:00:00Z";
src = ./.;
})).pkgs;
```
## importAndFilterProject
Imports from a derivation created by `callStackToNix`
or `callCabalProjectToNix`.
The result is an attrset with the following values:
| Attribute | Type | Description |
|----------------|------|-----------------------------------------------------|
| `pkgs` | attrset | that can be passed to `mkStackPkgSet` (as `stack-pkgs`) or `mkCabalProjectPkgSet` (as `plan-pkgs`). |
| `nix` | | this can be built and cached so that the amount built in the evaluation phase is not too great (helps to avoid timeouts on Hydra). |
## hackage
## stackage

View File

@ -1,4 +1,4 @@
{ dotCabal, pkgs, runCommand, nix-tools, cabal-install, ghc, hpack, symlinkJoin, cacert, index-state-hashes }:
{ dotCabal, pkgs, runCommand, nix-tools, cabal-install, ghc, hpack, symlinkJoin, cacert, index-state-hashes, haskellLib }:
let defaultGhc = ghc;
defaultCabalInstall = cabal-install;
in { index-state ? null, index-sha256 ? null, src, ghc ? defaultGhc,
@ -10,19 +10,19 @@ assert (if (builtins.compareVersions cabal-install.version "2.4.0.0") < 0
else true);
let
maybeCleanedSource =
if pkgs.lib.canCleanSource src
then pkgs.lib.cleanSourceWith {
if haskellLib.canCleanSource src
then haskellLib.cleanSourceWith {
inherit src;
filter = path: type:
type == "directory" ||
pkgs.lib.any (i: (pkgs.lib.hasSuffix i path)) [ ".project" ".cabal" "package.yaml" ]; }
else src;
# Using origSrc bypasses any cleanSourceWith so that it will work when
# Using origSrcSubDir bypasses any cleanSourceWith so that it will work when
# access to the store is restricted. If origSrc was already in the store
# you can pass the project in as a string.
rawCabalProject =
let origSrcDir = maybeCleanedSource.origSrc or maybeCleanedSource;
let origSrcDir = maybeCleanedSource.origSrcSubDir or maybeCleanedSource;
in if cabalProject != null
then cabalProject
else
@ -81,7 +81,7 @@ let
parseBlock = block:
let
x = span (pkgs.lib.strings.hasPrefix " ") (pkgs.lib.splitString "\n" block);
attrs = parseBlockLines x.fst;
attrs = parseBlockLines x.fst;
in
if attrs."--sha256" or "" == ""
then {
@ -137,7 +137,7 @@ let
}
else replaceSoureRepos rawCabalProject;
plan = runCommand "plan-to-nix-pkgs" {
plan-nix = runCommand "plan-to-nix-pkgs" {
nativeBuildInputs = [ nix-tools ghc hpack cabal-install pkgs.rsync pkgs.git ];
} (''
tmp=$(mktemp -d)
@ -154,7 +154,8 @@ let
export GIT_SSL_CAINFO=${cacert}/etc/ssl/certs/ca-bundle.crt
HOME=${dotCabal {
index-state = index-state-found;
sha256 = index-sha256-found; }} cabal new-configure \
sha256 = index-sha256-found;
inherit cabal-install; }} cabal new-configure \
--with-ghc=${ghc.targetPrefix}ghc \
--with-ghc-pkg=${ghc.targetPrefix}ghc-pkg
@ -179,45 +180,9 @@ let
# run `plan-to-nix` in $out. This should produce files right there with the
# proper relative paths.
(cd $out && plan-to-nix --plan-json $tmp/dist-newstyle/cache/plan.json -o .)
(cd $out && plan-to-nix --full --plan-json $tmp/dist-newstyle/cache/plan.json -o .)
# move pkgs.nix to default.nix ensure we can just nix `import` the result.
mv $out/pkgs.nix $out/default.nix
'');
in
# TODO: We really want this (symlinks) instead of copying the source over each and
# every time. However this will not work with sandboxed builds. They won't
# have access to `plan` or `src` paths. So while they will see all the
# links, they won't be able to read any of them.
#
# We should be able to fix this if we propagaed the build inputs properly.
# As we are `import`ing the produced nix-path here, we seem to be losing the
# dependencies though.
#
# I guess the end-result is that ifd's don't work well with symlinks.
#
# symlinkJoin {
# name = "plan-and-src";
# # todo: should we clean `src` to drop any .git, .nix, ... other irelevant files?
# buildInputs = [ plan src ];
# }
runCommand "plan-to-nix-pkgs-with-src" { nativeBuildInputs = [ pkgs.rsync ]; } (''
mkdir $out
# todo: should we clean `src` to drop any .git, .nix, ... other irelevant files?
rsync -a "${src}/" "$out/"
rsync -a ${plan}/ $out/
'' +
( pkgs.lib.strings.concatStrings (
pkgs.lib.lists.zipListsWith (n: f: ''
mkdir -p $out/.source-repository-packages/${builtins.toString n}
rsync -a "${f}/" "$out/.source-repository-packages/${builtins.toString n}/"
'')
(pkgs.lib.lists.range 0 ((builtins.length fixedProject.sourceRepos) - 1))
fixedProject.sourceRepos
)
) + ''
# Rsync will have made $out read only and that can cause problems when
# nix sandboxing is enabled (since it can prevent nix from moving the directory
# out of the chroot sandbox).
chmod +w $out
'')
in { projectNix = plan-nix; inherit src; inherit (fixedProject) sourceRepos; }

View File

@ -9,6 +9,7 @@
{ src, stackYaml ? null, ignorePackageYaml ? false }:
let
stackToNixArgs = builtins.concatStringsSep " " [
"--full"
"--stack-yaml=${src}/${if stackYaml == null then "stack.yaml" else stackYaml}"
(if ignorePackageYaml then "--ignore-package-yaml" else "")
"-o ."
@ -30,14 +31,4 @@ let
# move pkgs.nix to default.nix ensure we can just nix `import` the result.
mv $out/pkgs.nix $out/default.nix
'';
in
runCommand "stack-to-nix-pkgs-with-src" { nativeBuildInputs = [ pkgs.rsync ]; } ''
mkdir $out
# todo: should we clean `src` to drop any .git, .nix, ... other irelevant files?
rsync -a "${src}/" "$out/"
rsync -a ${stack}/ $out/
# Rsync will have made $out read only and that can cause problems when
# nix sandboxing is enabled (since it can prevent nix from moving the directory
# out of the chroot sandbox).
chmod +w $out
''
in { projectNix = stack; inherit src; sourceRepos = []; }

View File

@ -0,0 +1,85 @@
# Use cleanSourceWith to filter just the files needed for a particular
# component of the package
{ lib, cleanSourceWith }: package: component: src:
let
  # Root of the (possibly already cleaned) source.  origSrcSubDir is set by
  # the subDir-aware cleanSourceWith, origSrc by the nixpkgs one.  When
  # neither is present we cannot clean further.
  srcStr' = src.origSrcSubDir or src.origSrc or null;
  # Transform
  #  "." -> ""
  #  "./." -> ""
  #  "./xyz" -> "xyz"
  normalizeRelativePath = rel:
    if rel == "." || rel == "./."
      then ""
      else lib.strings.removePrefix "./" rel;
  # Like normalizeRelativePath but with a trailing / when needed
  normalizeRelativeDir = dir:
    let p = normalizeRelativePath dir;
    in if p == "" then "" else p + "/";
in
  # Cleaning only works when we know the source root and when the plan was
  # generated with `--full` (detailLevel "FullDetails" — otherwise the file
  # and directory lists consulted below are empty and we would filter out
  # everything).
  if srcStr' == null || package.detailLevel != "FullDetails"
    then src
    else
      let
        srcStr = toString srcStr';
        dataDir = normalizeRelativeDir package.dataDir;
        # An empty hsSourceDirs means the package root itself is a source dir.
        hsSourceDirs = builtins.map normalizeRelativeDir component.hsSourceDirs
          ++ (if component.hsSourceDirs == [] then [""] else []);
        includeDirs = builtins.map normalizeRelativeDir component.includeDirs;
        dirsNeeded = [dataDir]
          ++ hsSourceDirs
          ++ includeDirs;
        # Build a predicate for the file globs in `list`, resolved relative to
        # `dir` (which is "" or ends in "/").  Entries containing `*` match by
        # the prefix up to the first `*`; entries without `*` must match the
        # relative path exactly.
        fileMatch = dir: list:
          let
            prefixes = builtins.map (f: dir + f) (
              lib.lists.remove null (lib.lists.flatten (
                builtins.map (f: builtins.match "([^*]*)[*].*" f) list)));
            # Wildcard-free entries are relative to `dir` too.  (Fix: these
            # were previously prefixed with `dataDir`, which broke exact
            # matches for license/extra-src/extra-doc files whenever the
            # package set a non-empty data-dir.)
            exactMatches = builtins.map (f: dir + f) (
              lib.lists.remove null (lib.lists.flatten (
                builtins.map (f: builtins.match "([^*]*)" f) list)));
          in rPath: lib.any (d: lib.strings.hasPrefix d rPath) prefixes
            || lib.any (d: d == rPath) exactMatches;
        dataFileMatch = fileMatch dataDir package.dataFiles;
        licenseMatch = fileMatch "" package.licenseFiles;
        extraSrcMatch = fileMatch "" (
          package.extraSrcFiles
            ++ component.extraSrcFiles);
        extraDocMatch = fileMatch "" package.extraDocFiles;
        otherSourceFiles =
          component.asmSources
            ++ component.cmmSources
            ++ component.cSources
            ++ component.cxxSources
            ++ component.jsSources;
      in cleanSourceWith {
        inherit src;
        filter = path: type:
          # Every path the filter sees must be inside the source root.
          assert (if !lib.strings.hasPrefix (srcStr + "/") (path + "/")
            then throw ("Unexpected path " + path + " (expected something in " + srcStr + "/)")
            else true);
          let
            srcStrLen = lib.strings.stringLength srcStr;
            # Path relative to the source root (no leading "/").
            rPath = lib.strings.substring (srcStrLen + 1) (lib.strings.stringLength path - srcStrLen - 1) path;
            # This is a handy way to find out why different files are included
            # traceReason = reason: v: if v then builtins.trace (rPath + " : " + reason) true else false;
            traceReason = reason: v: v;
          in
            # Keep directories that are ancestors of anything we need.
            traceReason "directory is needed" (
              lib.any (d: lib.strings.hasPrefix (rPath + "/") d) (
                dirsNeeded
                  ++ package.extraSrcFiles
                  ++ component.extraSrcFiles
                  ++ package.extraDocFiles
                  # dataDir already ends in "/" (normalizeRelativeDir), so do
                  # not insert another separator (fix: the previous
                  # `dataDir + "/" + f` produced "data//f", which never
                  # prefix-matched directories under the data-dir).
                  ++ builtins.map (f: dataDir + f) package.dataFiles
                  ++ otherSourceFiles))
            || traceReason "cabal package definition" (lib.strings.hasSuffix ".cabal" rPath)
            || traceReason "hpack package defintion" (rPath == "package.yaml")
            || traceReason "data file" (lib.strings.hasPrefix dataDir rPath
                && dataFileMatch rPath)
            || traceReason "haskell source dir" (lib.any (d: lib.strings.hasPrefix d rPath) hsSourceDirs)
            || traceReason "include dir" (lib.any (d: lib.strings.hasPrefix d rPath) includeDirs)
            || traceReason "license file" (licenseMatch rPath)
            || traceReason "extra source file" (extraSrcMatch rPath)
            || traceReason "extra doc file" (extraDocMatch rPath)
            || traceReason "other source file" (lib.any (f: f == rPath) otherSourceFiles);
      }

41
lib/clean-source-with.nix Normal file
View File

@ -0,0 +1,41 @@
# This is a replacement for the cleanSourceWith function in nixpkgs
# https://github.com/NixOS/nixpkgs/blob/1d9d31a0eb8e8358830528538a391df52f6a075a/lib/sources.nix#L41
# It adds a subDir argument in a way that allows descending into a subdirectory
# to compose with cleaning the source with a filter.
{ lib }: rec {
# Filter `src` with `filter`, optionally descending into `subDir`.
# The result can itself be passed back in as `src`, so successive calls
# compose both their filters and their subDirs.
cleanSourceWith = { filter ? path: type: true, src, subDir ? "" }:
let
# subDir with a leading "/" ("" when no subDir was given).
subDir' = if subDir == "" then "" else "/" + subDir;
# In case this is mixed with older versions of cleanSourceWith
isFiltered = src ? _isLibCleanSourceWith;
isFilteredEx = src ? _isLibCleanSourceWithEx;
# Unwrap back to the original, unfiltered source path.
origSrc = if isFiltered || isFilteredEx then src.origSrc else src;
# Subdirectory accumulated across composed calls, relative to origSrc.
origSubDir = if isFilteredEx then src.origSubDir + subDir' else subDir';
origSrcSubDir = toString origSrc + origSubDir;
# Filter of the source we were given (if any), so filters compose.
parentFilter = if isFiltered || isFilteredEx
then path: type: src.filter path type
else path: type: true;
filter' = path: type:
# Include parent paths based on the parent filter
(lib.strings.hasPrefix (path + "/") (origSrcSubDir + "/")
&& parentFilter path type)
# Children only if both filters return true
|| (lib.strings.hasPrefix (origSrcSubDir + "/") path
&& (filter path type && parentFilter path type));
in {
inherit origSrc origSubDir origSrcSubDir;
filter = filter';
# Filter from origSrc, then index into the subdirectory of the result.
outPath = (builtins.filterSource filter' origSrc) + origSubDir;
_isLibCleanSourceWithEx = true;
# It is only safe for older cleanSourceWith to filter this one
# if we are still looking at the root of origSrc
_isLibCleanSourceWith = origSubDir == "";
};
# True when a path string carries store context (filterSource on such a
# path would fail under restricted evaluation).  Uses builtins.hasContext
# when available, otherwise falls back to a storeDir prefix check.
pathHasContext = builtins.hasContext or (lib.hasPrefix builtins.storeDir);
# A source can be cleaned if it came from (either version of)
# cleanSourceWith, or is a local path with no store context.
canCleanSource = src:
src ? _isLibCleanSourceWithEx
|| src ? _isLibCleanSourceWith
|| !(pathHasContext (toString src));
}

View File

@ -112,4 +112,11 @@ with haskellLib;
(mapAttrs (_: package: package.components.${group} // { recurseForDerivations = true; })
(filterAttrs (name: package: (package.isHaskell or false) && packageSel package) haskellPackages))
// { recurseForDerivations = true; };
# Replacement for lib.cleanSourceWith that has a subDir argument.
inherit (import ./clean-source-with.nix { inherit lib; }) cleanSourceWith canCleanSource;
# Use cleanSourceWith to filter just the files needed for a particular
# component of a package
cleanCabalComponent = import ./clean-cabal-component.nix { inherit lib cleanSourceWith; };
}

View File

@ -0,0 +1,36 @@
# This function takes the output of `callCabalProjectToNix` or
# `callStackToNix`. It imports the projectNix that was produced
# and combines it with the original source.
{ pkgs, haskellLib }:
{ projectNix, sourceRepos, src }:
let
# The generated plan/stack package set expression.
project = import "${projectNix}";
in {
# The derivation containing the generated nix files; can be built and
# cached on its own (e.g. by Hydra) to limit import-from-derivation work.
nix = projectNix;
pkgs = project // {
extras = hackage: let old = (project.extras hackage).packages; in {
# Rewrite each local package so its src points back at the original
# (cleanable) source instead of the copy inside projectNix.
packages = pkgs.lib.attrsets.mapAttrs (name: value:
if builtins.isFunction value
then value
else {...}@args:
let oldPkg = import value args;
# Paths outside projectNix are left untouched.
packageSrc = if !pkgs.lib.strings.hasPrefix (toString projectNix) (toString oldPkg.src.content)
then toString oldPkg.src.content
else let
# Package directory relative to the project root.
subDir = pkgs.lib.strings.removePrefix "/" (
pkgs.lib.strings.removePrefix (toString projectNix)
(toString oldPkg.src.content));
srcRepoPrefix = ".source-repository-packages/";
in if pkgs.lib.strings.hasPrefix srcRepoPrefix subDir
then
# source-repository-package stanzas are stored by index under
# .source-repository-packages/<n>; look the fetched repo up.
pkgs.lib.lists.elemAt sourceRepos (
pkgs.lib.strings.toInt (pkgs.lib.strings.removePrefix srcRepoPrefix subDir))
else if haskellLib.canCleanSource src
then haskellLib.cleanSourceWith { inherit src subDir; }
else src + (if subDir == "" then "" else "/" + subDir);
in oldPkg // {
# mkDefault so user modules can still override src.
src = (pkgs.lib).mkDefault packageSrc;
}) old;
};
};
}

View File

@ -19,7 +19,11 @@
with lib;
with types;
{
# Work around issue that can cause _lots_ of files to be copied into the store.
# See https://github.com/NixOS/nixpkgs/pull/64691
let path = types.path // { check = x: types.path.check (x.origSrc or x); };
in {
# This is how the Nix expressions generated by *-to-nix receive
# their flags argument.
config._module.args = mod_args // { flags = config.flags; };
@ -82,6 +86,46 @@ with types;
type = listOfFilteringNulls unspecified;
default = [];
};
detailLevel = mkOption {
type = str;
default = "MinimalDetails";
};
licenseFiles = mkOption {
type = listOfFilteringNulls unspecified;
default = [];
};
dataDir = mkOption {
type = str;
default = "";
};
dataFiles = mkOption {
type = listOfFilteringNulls unspecified;
default = [];
};
extraSrcFiles = mkOption {
type = listOfFilteringNulls unspecified;
default = [];
};
extraTmpFiles = mkOption {
type = listOfFilteringNulls unspecified;
default = [];
};
extraDocFiles = mkOption {
type = listOfFilteringNulls unspecified;
default = [];
};
cleanHpack = mkOption {
type = bool;
default = false;
};
};
components = let
@ -108,6 +152,50 @@ with types;
type = listOfFilteringNulls unspecified;
default = [];
};
modules = mkOption {
type = listOfFilteringNulls unspecified;
default = [];
};
asmSources = mkOption {
type = listOfFilteringNulls unspecified;
default = [];
};
cmmSources = mkOption {
type = listOfFilteringNulls unspecified;
default = [];
};
cSources = mkOption {
type = listOfFilteringNulls unspecified;
default = [];
};
cxxSources = mkOption {
type = listOfFilteringNulls unspecified;
default = [];
};
jsSources = mkOption {
type = listOfFilteringNulls unspecified;
default = [];
};
hsSourceDirs = mkOption {
type = listOfFilteringNulls unspecified;
default = [];
};
includeDirs = mkOption {
type = listOfFilteringNulls unspecified;
default = [];
};
includes = mkOption {
type = listOfFilteringNulls unspecified;
default = [];
};
mainPath = mkOption {
type = listOfFilteringNulls unspecified;
default = [];
};
extraSrcFiles = mkOption {
type = listOfFilteringNulls unspecified;
default = [];
};
};
};
in {

View File

@ -1,7 +1,7 @@
{
"url": "https://github.com/input-output-hk/nix-tools",
"rev": "3d021291ddf148220050adef350ac51bf27d415f",
"date": "2019-05-31T11:38:58+08:00",
"sha256": "1zgkmm1a3xhclwvnfcbcsqp9zl91g20csiirk2rqyn8k8vy8wwkb",
"rev": "a9f227d9238102fb424157156712ef9b64413096",
"date": "2019-07-18T12:21:16+12:00",
"sha256": "19r5an3hrjn1a4mm2n0fhaa5b1fi975pyjcisbnrhwfajlqb362f",
"fetchSubmodules": false
}

View File

@ -7,7 +7,7 @@
"test-framework-hunit".revision = (((hackage."test-framework-hunit")."0.3.0.2").revisions).default;
"test-framework-hunit".flags.base4 = true;
"test-framework-hunit".flags.base3 = false;
"http-client".revision = (((hackage."http-client")."0.5.14").revisions).default;
"http-client".revision = (((hackage."http-client")."0.6.4").revisions).default;
"http-client".flags.network-uri = true;
"cookie".revision = (((hackage."cookie")."0.4.4").revisions).default;
"void".revision = (((hackage."void")."0.7.3").revisions).default;
@ -21,7 +21,6 @@
"semigroupoids".flags.containers = true;
"semigroupoids".flags.contravariant = true;
"free".revision = (((hackage."free")."5.1.1").revisions).default;
"tf-random".revision = (((hackage."tf-random")."0.5").revisions).default;
"Only".revision = (((hackage."Only")."0.1").revisions).default;
"cereal".revision = (((hackage."cereal")."0.5.8.0").revisions).default;
"cereal".flags.bytestring-builder = false;
@ -106,7 +105,7 @@
"distributive".flags.tagged = true;
"asn1-encoding".revision = (((hackage."asn1-encoding")."0.9.5").revisions).default;
"vector-th-unbox".revision = (((hackage."vector-th-unbox")."0.2.1.6").revisions).default;
"QuickCheck".revision = (((hackage."QuickCheck")."2.12.6.1").revisions).default;
"QuickCheck".revision = (((hackage."QuickCheck")."2.13.1").revisions).default;
"QuickCheck".flags.templatehaskell = true;
"scientific".revision = (((hackage."scientific")."0.3.6.2").revisions).default;
"scientific".flags.integer-simple = false;
@ -124,6 +123,7 @@
"optparse-applicative".revision = (((hackage."optparse-applicative")."0.14.3.0").revisions).default;
"network".revision = (((hackage."network")."3.1.0.0").revisions).default;
"connection".revision = (((hackage."connection")."0.3.0").revisions).default;
"splitmix".revision = (((hackage."splitmix")."0.0.2").revisions).default;
"async".revision = (((hackage."async")."2.2.1").revisions).default;
"async".flags.bench = false;
"dlist".revision = (((hackage."dlist")."0.8.0.6").revisions).default;
@ -216,9 +216,9 @@
"reflection".revision = (((hackage."reflection")."2.1.4").revisions).default;
"reflection".flags.slow = false;
"reflection".flags.template-haskell = true;
"these".revision = (((hackage."these")."0.7.6").revisions).default;
"these".revision = (((hackage."these")."0.8").revisions).default;
"regex-tdfa-text".revision = (((hackage."regex-tdfa-text")."1.0.0.3").revisions).default;
"dependent-sum".revision = (((hackage."dependent-sum")."0.4").revisions).default;
"dependent-sum".revision = (((hackage."dependent-sum")."0.5").revisions).default;
"socks".revision = (((hackage."socks")."0.6.0").revisions).default;
"streaming-commons".revision = (((hackage."streaming-commons")."0.2.1.0").revisions).default;
"streaming-commons".flags.use-bytestring-builder = false;
@ -266,6 +266,7 @@
"deriving-compat".flags.new-functor-classes = true;
"text".revision = (((hackage."text")."1.2.3.1").revisions).default;
"Cabal".revision = (((hackage."Cabal")."2.4.0.1").revisions).default;
"assoc".revision = (((hackage."assoc")."1").revisions).default;
"Diff".revision = (((hackage."Diff")."0.3.4").revisions).default;
"unordered-containers".revision = (((hackage."unordered-containers")."0.2.10.0").revisions).default;
"unordered-containers".flags.debug = false;
@ -292,7 +293,7 @@
"prettyprinter".flags.buildreadme = false;
"cryptohash-sha512".revision = (((hackage."cryptohash-sha512")."0.11.100.1").revisions).default;
"tasty-hunit".revision = (((hackage."tasty-hunit")."0.10.0.2").revisions).default;
"pretty-show".revision = (((hackage."pretty-show")."1.9.5").revisions).default;
"pretty-show".revision = (((hackage."pretty-show")."1.8.2").revisions).default;
"transformers".revision = (((hackage."transformers")."0.5.6.2").revisions).default;
"hashable".revision = (((hackage."hashable")."1.2.7.0").revisions).default;
"hashable".flags.sse2 = true;
@ -354,7 +355,6 @@
"array".revision = (((hackage."array")."0.5.3.0").revisions).default;
"repline".revision = (((hackage."repline")."0.2.1.0").revisions).default;
"xml".revision = (((hackage."xml")."1.3.14").revisions).default;
"erf".revision = (((hackage."erf")."2.0.0.0").revisions).default;
"lens-family-core".revision = (((hackage."lens-family-core")."1.2.3").revisions).default;
"integer-gmp".revision = (((hackage."integer-gmp")."1.0.2.0").revisions).default;
};

View File

@ -1,14 +1,14 @@
{ stdenv, mkCabalProjectPkgSet, callCabalProjectToNix }:
{ stdenv, mkCabalProjectPkgSet, callCabalProjectToNix, importAndFilterProject }:
with stdenv.lib;
let
pkgSet = mkCabalProjectPkgSet {
plan-pkgs = import (callCabalProjectToNix {
plan-pkgs = (importAndFilterProject (callCabalProjectToNix {
index-state = "2019-04-30T00:00:00Z";
# reuse the cabal-simple test project
src = ./.;
});
})).pkgs;
};
packages = pkgSet.config.hsPkgs;
in

View File

@ -1,14 +1,14 @@
{ stdenv, mkCabalProjectPkgSet, callCabalProjectToNix }:
{ stdenv, mkCabalProjectPkgSet, callCabalProjectToNix, importAndFilterProject }:
with stdenv.lib;
let
pkgSet = mkCabalProjectPkgSet {
plan-pkgs = import (callCabalProjectToNix {
plan-pkgs = (importAndFilterProject (callCabalProjectToNix {
index-state = "2019-04-30T00:00:00Z";
# reuse the cabal-simple test project
src = ../cabal-simple;
});
})).pkgs;
};
packages = pkgSet.config.hsPkgs;
in

View File

@ -1,12 +1,12 @@
{ stdenv, mkStackPkgSet, callStackToNix }:
{ stdenv, mkStackPkgSet, callStackToNix, importAndFilterProject }:
with stdenv.lib;
let
pkgSet = mkStackPkgSet {
stack-pkgs = import (callStackToNix {
stack-pkgs = (importAndFilterProject (callStackToNix {
src = ../stack-simple;
});
})).pkgs;
pkg-def-extras = [];
modules = [];
};