Commit b91dcad4bc: Also fix the hash in goPackages.inflect, the only user of the fetcher at the moment. Closes #12002 (a different `inflect` fix), fixes #12012. Using fetchzip-derived functions is likely more efficient than fetchhg, and it is lighter on dependencies (the hash is the same as with fetchhg in this case).
# This function downloads and unpacks an archive file, such as a zip
# or tar file. This is primarily useful for dynamically generated
# archives, such as GitHub's /archive URLs, where the unpacked content
# of the zip file doesn't change, but the zip file itself may
# (e.g. due to minor changes in the compression algorithm, or changes
# in timestamps).
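#
# A rough usage sketch (the URL and sha256 below are illustrative
# placeholders, not taken from this file); since the hash is recursive,
# it covers the unpacked tree rather than the archive bytes:
#
#   fetchzip {
#     url = "https://github.com/example/project/archive/v1.0.tar.gz";
#     sha256 = "0000000000000000000000000000000000000000000000000000";
#   }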

{ lib, fetchurl, unzip }:

{ # Optionally move the contents of the unpacked tree up one level.
  stripRoot ? true
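  # The URL of the archive to download and unpack.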
, url
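  # Extra shell commands appended to postFetch; they run after the unpacked
  # contents have been moved into $out.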
, extraPostFetch ? ""
|
|
, ... } @ args:
|
|
|
|

lib.overrideDerivation (fetchurl ({
  name = args.name or (baseNameOf url);
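
  # The hash given by the caller is a recursive (NAR) hash of the unpacked
  # output, not of the downloaded archive, so a re-generated archive with
  # identical contents still verifies.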
  recursiveHash = true;
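
  # Download to a temporary file instead of $out; postFetch below is
  # responsible for creating $out.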
  downloadToTemp = true;

  postFetch =
    ''
      export PATH=${unzip}/bin:$PATH
      mkdir $out

      unpackDir="$TMPDIR/unpack"
      mkdir "$unpackDir"
      cd "$unpackDir"
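
      # Give the downloaded file its name from the URL so that unpackFile
      # can choose an unpacker based on the file extension.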
      renamed="$TMPDIR/${baseNameOf url}"
      mv "$downloadedFile" "$renamed"
      unpackFile "$renamed"
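
      # Make the globs below also match hidden files when moving the contents.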
      shopt -s dotglob
    ''
    + (if stripRoot then ''
      if [ $(ls "$unpackDir" | wc -l) != 1 ]; then
        echo "error: zip file must contain a single file or directory."
        exit 1
      fi
      fn=$(cd "$unpackDir" && echo *)
      if [ -f "$unpackDir/$fn" ]; then
        mv "$unpackDir/$fn" "$out"
      else
        mv "$unpackDir/$fn"/* "$out/"
      fi
    '' else ''
      mv "$unpackDir"/* "$out/"
    '') #*/
    + extraPostFetch;
} // removeAttrs args [ "stripRoot" "extraPostFetch" ]))
# Hackety-hack: we actually need unzip hooks, too
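# (presumably: putting unzip in nativeBuildInputs pulls in its setup hook so
# that unpackFile above can handle .zip archives, hence the overrideDerivation
# wrapper around the derivation fetchurl already built.)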
(x: { nativeBuildInputs = x.nativeBuildInputs ++ [ unzip ]; })