#! /usr/bin/env nix-shell
#! nix-shell -i perl -p perl perlPackages.NetAmazonS3 perlPackages.FileSlurp nixUnstable nixUnstable.perl-bindings

# This command uploads tarballs to tarballs.nixos.org, the
# content-addressed cache used by fetchurl as a fallback for when
# upstream tarballs disappear or change. Usage:
#
# 1) To upload one or more files:
#
#    $ copy-tarballs.pl --file /path/to/tarball.tar.gz
#
# 2) To upload all files obtained via calls to fetchurl in a Nix derivation:
#
#    $ copy-tarballs.pl --expr '(import <nixpkgs> {}).hello'
use strict;
use warnings;

use File::Basename;
use File::Path;
use File::Slurp;
use JSON;
use Net::Amazon::S3;
use Nix::Store;

isValidPath("/nix/store/aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa-foo"); # FIXME: forces Nix::Store initialisation
# Print the command-line synopsis and abort.
sub usage {
    my $synopsis = "Syntax: $0 [--dry-run] [--exclude REGEXP] [--expr EXPR | --file FILES...]\n";
    die $synopsis;
}
# Command-line state, filled in by the flag-parsing loop below.
my $dryRun = 0;    # --dry-run: count what would be mirrored, upload nothing
my $expr;          # --expr: Nix expression whose fetchurl calls are mirrored
my @fileNames;     # --file: explicit list of files to upload
my $exclude;       # --exclude: regexp of URLs to skip

# Minimal hand-rolled flag parsing over @ARGV.
while (@ARGV) {
    my $flag = shift @ARGV;
    if ($flag eq "--expr") {
        $expr = shift @ARGV or die "--expr requires an argument";
    } elsif ($flag eq "--file") {
        # Everything remaining on the command line is a file name.
        @fileNames = @ARGV;
        last;
    } elsif ($flag eq "--dry-run") {
        $dryRun = 1;
    } elsif ($flag eq "--exclude") {
        $exclude = shift @ARGV or die "--exclude requires an argument";
    } else {
        usage();
    }
}
# S3 setup. Credentials come from the environment; dies early when unset.
my $aws_access_key_id = $ENV{'AWS_ACCESS_KEY_ID'} or die "AWS_ACCESS_KEY_ID not set\n";
my $aws_secret_access_key = $ENV{'AWS_SECRET_ACCESS_KEY'} or die "AWS_SECRET_ACCESS_KEY not set\n";

my $s3 = Net::Amazon::S3->new(
    { aws_access_key_id     => $aws_access_key_id,
      aws_secret_access_key => $aws_secret_access_key,
      retry                 => 1,
      host                  => "s3-eu-west-1.amazonaws.com",
    });

my $bucket = $s3->bucket("nixpkgs-tarballs") or die;
# Local cache of keys known to exist in the bucket, so repeated runs
# don't have to query S3 for every hash again. Loaded leniently
# (err_mode 'quiet'): a missing cache file just yields an empty cache.
my $doWrite = 0;
my $cacheFile = ($ENV{"HOME"} or die "\$HOME is not set") . "/.cache/nix/copy-tarballs";
my %cache;
$cache{$_} = 1 foreach read_file($cacheFile, err_mode => 'quiet', chomp => 1);

# Only persist the cache once it has been fully loaded; if the script
# dies before this point, END below leaves the file untouched.
$doWrite = 1;

# Write the (possibly updated) cache back when the script exits.
END {
    File::Path::mkpath(dirname($cacheFile), 0, 0755);
    write_file($cacheFile, map { "$_\n" } keys %cache) if $doWrite;
}
# Return true if an object for the given hash already exists in the
# bucket. Consults the local %cache first and records positive answers
# in it, so each key is looked up on S3 at most once per run.
sub alreadyMirrored {
    my ($algo, $hash) = @_;
    my $key = "$algo/$hash";
    return 1 if defined $cache{$key};
    my $res = defined $bucket->get_key($key);
    $cache{$key} = 1 if $res;
    return $res;
}
# Upload the file $fn to the bucket under sha512/<base-16 hash>, and
# create redirect objects for the other hash formats pointing at it.
# $name is stored as the original file name in the object's metadata.
# Dies if hashing or any S3 operation fails.
sub uploadFile {
    my ($fn, $name) = @_;

    # Hash the file in every format a fetchurl caller might look it up
    # by (second hashFile arg presumably selects base-32 vs base-16
    # encoding — names suggest so; confirm against Nix::Store docs).
    my $md5_16 = hashFile("md5", 0, $fn) or die;
    my $sha1_16 = hashFile("sha1", 0, $fn) or die;
    my $sha256_32 = hashFile("sha256", 1, $fn) or die;
    my $sha256_16 = hashFile("sha256", 0, $fn) or die;
    my $sha512_32 = hashFile("sha512", 1, $fn) or die;
    my $sha512_16 = hashFile("sha512", 0, $fn) or die;

    my $mainKey = "sha512/$sha512_16";

    # Create redirects from the other hash types. (Named sub, not a
    # closure: it only touches its arguments and the file-scoped
    # $bucket/%cache, so defining it nested here is safe.)
    sub redirect {
        my ($name, $dest) = @_;
        #print STDERR "linking $name to $dest...\n";
        $bucket->add_key($name, "", { 'x-amz-website-redirect-location' => "/" . $dest })
            or die "failed to create redirect from $name to $dest\n";
        $cache{$name} = 1;
    }
    redirect "md5/$md5_16", $mainKey;
    redirect "sha1/$sha1_16", $mainKey;
    redirect "sha256/$sha256_32", $mainKey;
    redirect "sha256/$sha256_16", $mainKey;
    redirect "sha512/$sha512_32", $mainKey;

    # Upload the file as sha512/<hash-in-base-16>.
    print STDERR "uploading $fn to $mainKey...\n";
    $bucket->add_key_filename($mainKey, $fn, { 'x-amz-meta-original-name' => $name })
        or die "failed to upload $fn to $mainKey\n";
    $cache{$mainKey} = 1;
}
# Mode 1 (--file): upload the explicitly named files. Errors on one
# file are reported but don't stop the others; exit status is 1 if any
# file failed.
if (scalar @fileNames) {
    my $res = 0;
    foreach my $fn (@fileNames) {
        eval {
            if (alreadyMirrored("sha512", hashFile("sha512", 0, $fn))) {
                print STDERR "$fn is already mirrored\n";
            } else {
                uploadFile($fn, basename $fn);
            }
        };
        if ($@) {
            warn "$@";
            $res = 1;
        }
    }
    exit $res;
}
# Mode 2 (--expr): mirror every fetchurl call reachable from the given
# Nix expression, as discovered by find-tarballs.nix.
elsif (defined $expr) {

    # Evaluate find-tarballs.nix; it prints a JSON list of
    # {url, type, hash, name} records on stdout.
    my $pid = open(my $jsonFH, "-|", "nix-instantiate", "--eval", "--json", "--strict",
                   "<nixpkgs/maintainers/scripts/find-tarballs.nix>",
                   "--arg", "expr", $expr)
        or die "$0: cannot run nix-instantiate: $!\n";
    my $stdout = do { local $/; <$jsonFH> };   # slurp the whole output
    close $jsonFH;                             # closing a pipe sets $? from the child
    die "$0: evaluation failed\n" if $?;

    my $fetches = decode_json($stdout);

    print STDERR "evaluation returned ", scalar(@{$fetches}), " tarballs\n";

    # Check every fetchurl call discovered by find-tarballs.nix.
    my $mirrored = 0;
    my $have = 0;

    foreach my $fetch (sort { $a->{url} cmp $b->{url} } @{$fetches}) {
        my $url = $fetch->{url};
        my $algo = $fetch->{type};
        my $hash = $fetch->{hash};
        my $name = $fetch->{name};

        # With DEBUG set, just list what would be checked.
        if (defined $ENV{DEBUG}) {
            print "$url $algo $hash\n";
            next;
        }

        if ($url !~ /^http:/ && $url !~ /^https:/ && $url !~ /^ftp:/ && $url !~ /^mirror:/) {
            print STDERR "skipping $url (unsupported scheme)\n";
            next;
        }

        next if defined $exclude && $url =~ /$exclude/;

        if (alreadyMirrored($algo, $hash)) {
            $have++;
            next;
        }

        my $storePath = makeFixedOutputPath(0, $algo, $hash, $name);

        print STDERR "mirroring $url ($storePath)...\n";

        if ($dryRun) {
            $mirrored++;
            next;
        }

        # Substitute the output.
        if (!isValidPath($storePath)) {
            system("nix-store", "-r", $storePath);
        }

        # Otherwise download the file using nix-prefetch-url.
        if (!isValidPath($storePath)) {
            $ENV{QUIET} = 1;
            $ENV{PRINT_PATH} = 1;
            my $fh;
            my $prefetchPid = open($fh, "-|", "nix-prefetch-url", "--type", $algo, $url, $hash) or die;
            waitpid($prefetchPid, 0) or die;
            if ($? != 0) {
                print STDERR "failed to fetch $url: $?\n";
                next;
            }
            # First output line is the hash, second the store path.
            <$fh>; my $storePath2 = <$fh>; chomp $storePath2;
            if ($storePath ne $storePath2) {
                warn "strange: $storePath != $storePath2\n";
                next;
            }
        }

        uploadFile($storePath, $url);
        $mirrored++;
    }

    print STDERR "mirrored $mirrored files, already have $have files\n";
}
# Neither --file nor --expr given: print the synopsis and abort.
else {
    usage();
}