#! /usr/bin/env nix-shell
#! nix-shell -i perl -p perl perlPackages.NetAmazonS3 perlPackages.FileSlurp nixUnstable nixUnstable.perl-bindings

# This command uploads tarballs to tarballs.nixos.org, the
# content-addressed cache used by fetchurl as a fallback for when
# upstream tarballs disappear or change. Usage:
#
# 1) To upload one or more files:
#
#    $ copy-tarballs.pl --file /path/to/tarball.tar.gz
#
# 2) To upload all files obtained via calls to fetchurl in a Nix derivation:
#
#    $ copy-tarballs.pl --expr '(import <nixpkgs> {}).hello'
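#
# The S3 credentials for the nixpkgs-tarballs bucket are taken from the
# AWS_ACCESS_KEY_ID and AWS_SECRET_ACCESS_KEY environment variables.
# Pass --dry-run to report what would be uploaded without writing to S3,
# and --exclude REGEXP to skip URLs matching REGEXP.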

use strict;
use warnings;
use File::Basename;
use File::Path;
use File::Slurp;
use JSON;
use Net::Amazon::S3;
use Nix::Store;

isValidPath("/nix/store/aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa-foo"); # FIXME: forces Nix::Store initialisation

sub usage {
    die "Syntax: $0 [--dry-run] [--exclude REGEXP] [--expr EXPR | --file FILES...]\n";
}

my $dryRun = 0;
my $expr;
my @fileNames;
my $exclude;

while (@ARGV) {
    my $flag = shift @ARGV;

    if ($flag eq "--expr") {
        $expr = shift @ARGV or die "--expr requires an argument";
    } elsif ($flag eq "--file") {
        @fileNames = @ARGV;
        last;
    } elsif ($flag eq "--dry-run") {
        $dryRun = 1;
    } elsif ($flag eq "--exclude") {
        $exclude = shift @ARGV or die "--exclude requires an argument";
    } else {
        usage();
    }
}


# S3 setup.
my $aws_access_key_id = $ENV{'AWS_ACCESS_KEY_ID'} or die "AWS_ACCESS_KEY_ID not set\n";
my $aws_secret_access_key = $ENV{'AWS_SECRET_ACCESS_KEY'} or die "AWS_SECRET_ACCESS_KEY not set\n";

my $s3 = Net::Amazon::S3->new(
    { aws_access_key_id     => $aws_access_key_id,
      aws_secret_access_key => $aws_secret_access_key,
      retry                 => 1,
      host                  => "s3-eu-west-1.amazonaws.com",
    });

my $bucket = $s3->bucket("nixpkgs-tarballs") or die;

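# Keys (of the form "<algo>/<hash>") known to exist in the bucket are
# cached in ~/.cache/nix/copy-tarballs so that repeated runs can skip
# the S3 lookup.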
my $doWrite = 0;
my $cacheFile = ($ENV{"HOME"} or die "\$HOME is not set") . "/.cache/nix/copy-tarballs";
my %cache;
$cache{$_} = 1 foreach read_file($cacheFile, err_mode => 'quiet', chomp => 1);
$doWrite = 1;

END {
    File::Path::mkpath(dirname($cacheFile), 0, 0755);
    write_file($cacheFile, map { "$_\n" } keys %cache) if $doWrite;
}

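# Return true if a file with the given hash is already in the bucket,
# consulting (and updating) the local key cache first.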
sub alreadyMirrored {
    my ($algo, $hash) = @_;
    my $key = "$algo/$hash";
    return 1 if defined $cache{$key};
    my $res = defined $bucket->get_key($key);
    $cache{$key} = 1 if $res;
    return $res;
}

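# Upload $fn as sha512/<hash-in-base-16> and add redirect objects for the
# other hash forms (md5, sha1, base-16/base-32 sha256, base-32 sha512)
# pointing at it.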
sub uploadFile {
    my ($fn, $name) = @_;

    my $md5_16 = hashFile("md5", 0, $fn) or die;
    my $sha1_16 = hashFile("sha1", 0, $fn) or die;
    my $sha256_32 = hashFile("sha256", 1, $fn) or die;
    my $sha256_16 = hashFile("sha256", 0, $fn) or die;
    my $sha512_32 = hashFile("sha512", 1, $fn) or die;
    my $sha512_16 = hashFile("sha512", 0, $fn) or die;

    my $mainKey = "sha512/$sha512_16";

    # Create redirects from the other hash types.
    sub redirect {
        my ($name, $dest) = @_;
        #print STDERR "linking $name to $dest...\n";
        $bucket->add_key($name, "", { 'x-amz-website-redirect-location' => "/" . $dest })
            or die "failed to create redirect from $name to $dest\n";
        $cache{$name} = 1;
    }
    redirect "md5/$md5_16", $mainKey;
    redirect "sha1/$sha1_16", $mainKey;
    redirect "sha256/$sha256_32", $mainKey;
    redirect "sha256/$sha256_16", $mainKey;
    redirect "sha512/$sha512_32", $mainKey;

    # Upload the file as sha512/<hash-in-base-16>.
    print STDERR "uploading $fn to $mainKey...\n";
    $bucket->add_key_filename($mainKey, $fn, { 'x-amz-meta-original-name' => $name })
        or die "failed to upload $fn to $mainKey\n";
    $cache{$mainKey} = 1;
}

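# Upload the files given on the command line (--file).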
if (scalar @fileNames) {
    my $res = 0;
    foreach my $fn (@fileNames) {
        eval {
            if (alreadyMirrored("sha512", hashFile("sha512", 0, $fn))) {
                print STDERR "$fn is already mirrored\n";
            } else {
                uploadFile($fn, basename $fn);
            }
        };
        if ($@) {
            warn "$@";
            $res = 1;
        }
    }
    exit $res;
}

elsif (defined $expr) {
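    # Mirror every tarball fetched via fetchurl by the given Nix expression (--expr).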

    # Evaluate find-tarballs.nix.
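    # It emits a JSON list with one { url, name, type, hash } entry per fetchurl call.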
    my $pid = open(JSON, "-|", "nix-instantiate", "--eval", "--json", "--strict",
                   "<nixpkgs/maintainers/scripts/find-tarballs.nix>",
                   "--arg", "expr", $expr);
    my $stdout = <JSON>;
    waitpid($pid, 0);
    die "$0: evaluation failed\n" if $?;
    close JSON;

    my $fetches = decode_json($stdout);

    print STDERR "evaluation returned ", scalar(@{$fetches}), " tarballs\n";

    # Check every fetchurl call discovered by find-tarballs.nix.
    my $mirrored = 0;
    my $have = 0;
    foreach my $fetch (sort { $a->{url} cmp $b->{url} } @{$fetches}) {
        my $url = $fetch->{url};
        my $algo = $fetch->{type};
        my $hash = $fetch->{hash};
        my $name = $fetch->{name};

        if (defined $ENV{DEBUG}) {
            print "$url $algo $hash\n";
            next;
        }

        if ($url !~ /^http:/ && $url !~ /^https:/ && $url !~ /^ftp:/ && $url !~ /^mirror:/) {
            print STDERR "skipping $url (unsupported scheme)\n";
            next;
        }

        next if defined $exclude && $url =~ /$exclude/;

        if (alreadyMirrored($algo, $hash)) {
            $have++;
            next;
        }

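        # Compute the fixed-output store path that this fetchurl call produces.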
        my $storePath = makeFixedOutputPath(0, $algo, $hash, $name);

        print STDERR "mirroring $url ($storePath)...\n";

        if ($dryRun) {
            $mirrored++;
            next;
        }

        # Substitute the output.
        if (!isValidPath($storePath)) {
            system("nix-store", "-r", $storePath);
        }

        # Otherwise download the file using nix-prefetch-url.
        if (!isValidPath($storePath)) {
            $ENV{QUIET} = 1;
            $ENV{PRINT_PATH} = 1;
            my $fh;
            my $pid = open($fh, "-|", "nix-prefetch-url", "--type", $algo, $url, $hash) or die;
            waitpid($pid, 0) or die;
            if ($? != 0) {
                print STDERR "failed to fetch $url: $?\n";
                next;
            }
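            # With PRINT_PATH set, nix-prefetch-url prints the hash on the first
            # line and the store path on the second.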
            <$fh>; my $storePath2 = <$fh>; chomp $storePath2;
            if ($storePath ne $storePath2) {
                warn "strange: $storePath != $storePath2\n";
                next;
            }
        }

        uploadFile($storePath, $url);
        $mirrored++;
    }

    print STDERR "mirrored $mirrored files, already have $have files\n";
}

else {
    usage();
}