about summary refs log tree commit diff
path: root/scripts
diff options
context:
space:
mode:
Diffstat (limited to 'scripts')
-rw-r--r--  scripts/Makefile.am                      |   8
-rwxr-xr-x  scripts/copy-from-other-stores.pl.in     |  66
-rw-r--r--  scripts/download-from-binary-cache.pl.in | 539
-rwxr-xr-x  scripts/download-using-manifests.pl.in   | 146
-rwxr-xr-x  scripts/nix-build.in                     |   5
-rwxr-xr-x  scripts/nix-channel.in                   |  40
-rwxr-xr-x  scripts/nix-install-package.in           |  33
-rwxr-xr-x  scripts/nix-push.in                      | 318
8 files changed, 829 insertions, 326 deletions
diff --git a/scripts/Makefile.am b/scripts/Makefile.am
index 18a59dbdb..506b1aeb4 100644
--- a/scripts/Makefile.am
+++ b/scripts/Makefile.am
@@ -7,17 +7,14 @@ noinst_SCRIPTS = nix-profile.sh \
find-runtime-roots.pl build-remote.pl nix-reduce-build \
copy-from-other-stores.pl nix-http-export.cgi
-nix-pull nix-push: download-using-manifests.pl
-
-install-exec-local: download-using-manifests.pl copy-from-other-stores.pl find-runtime-roots.pl
+install-exec-local: download-using-manifests.pl copy-from-other-stores.pl download-from-binary-cache.pl find-runtime-roots.pl
$(INSTALL) -d $(DESTDIR)$(sysconfdir)/profile.d
$(INSTALL_DATA) nix-profile.sh $(DESTDIR)$(sysconfdir)/profile.d/nix.sh
$(INSTALL) -d $(DESTDIR)$(libexecdir)/nix
$(INSTALL_PROGRAM) find-runtime-roots.pl $(DESTDIR)$(libexecdir)/nix
$(INSTALL_PROGRAM) build-remote.pl $(DESTDIR)$(libexecdir)/nix
$(INSTALL) -d $(DESTDIR)$(libexecdir)/nix/substituters
- $(INSTALL_PROGRAM) download-using-manifests.pl $(DESTDIR)$(libexecdir)/nix/substituters
- $(INSTALL_PROGRAM) copy-from-other-stores.pl $(DESTDIR)$(libexecdir)/nix/substituters
+ $(INSTALL_PROGRAM) download-using-manifests.pl copy-from-other-stores.pl download-from-binary-cache.pl $(DESTDIR)$(libexecdir)/nix/substituters
$(INSTALL) -d $(DESTDIR)$(sysconfdir)/nix
include ../substitute.mk
@@ -29,6 +26,7 @@ EXTRA_DIST = nix-collect-garbage.in \
nix-build.in \
download-using-manifests.pl.in \
copy-from-other-stores.pl.in \
+ download-from-binary-cache.pl.in \
nix-copy-closure.in \
find-runtime-roots.pl.in \
build-remote.pl.in \
diff --git a/scripts/copy-from-other-stores.pl.in b/scripts/copy-from-other-stores.pl.in
index b930b7207..3ee6f075b 100755
--- a/scripts/copy-from-other-stores.pl.in
+++ b/scripts/copy-from-other-stores.pl.in
@@ -36,42 +36,45 @@ sub findStorePath {
if ($ARGV[0] eq "--query") {
while (<STDIN>) {
- my $cmd = $_; chomp $cmd;
+ chomp;
+ my ($cmd, @args) = split " ", $_;
if ($cmd eq "have") {
- my $storePath = <STDIN>; chomp $storePath;
- print STDOUT (defined findStorePath($storePath) ? "1\n" : "0\n");
+ foreach my $storePath (@args) {
+ print "$storePath\n" if defined findStorePath($storePath);
+ }
+ print "\n";
}
elsif ($cmd eq "info") {
- my $storePath = <STDIN>; chomp $storePath;
- my ($store, $sourcePath) = findStorePath($storePath);
- if (!defined $store) {
- print "0\n";
- next; # not an error
+ foreach my $storePath (@args) {
+ my ($store, $sourcePath) = findStorePath($storePath);
+ next unless defined $store;
+
+ $ENV{"NIX_DB_DIR"} = "$store/var/nix/db";
+
+ my $deriver = `@bindir@/nix-store --query --deriver $storePath`;
+ die "cannot query deriver of `$storePath'" if $? != 0;
+ chomp $deriver;
+ $deriver = "" if $deriver eq "unknown-deriver";
+
+ my @references = split "\n",
+ `@bindir@/nix-store --query --references $storePath`;
+ die "cannot query references of `$storePath'" if $? != 0;
+
+ my $narSize = `@bindir@/nix-store --query --size $storePath`;
+ die "cannot query size of `$storePath'" if $? != 0;
+ chomp $narSize;
+
+ print "$storePath\n";
+ print "$deriver\n";
+ print scalar @references, "\n";
+ print "$_\n" foreach @references;
+ print "$narSize\n";
+ print "$narSize\n";
}
- print "1\n";
-
- $ENV{"NIX_DB_DIR"} = "$store/var/nix/db";
-
- my $deriver = `@bindir@/nix-store --query --deriver $storePath`;
- die "cannot query deriver of `$storePath'" if $? != 0;
- chomp $deriver;
- $deriver = "" if $deriver eq "unknown-deriver";
-
- my @references = split "\n",
- `@bindir@/nix-store --query --references $storePath`;
- die "cannot query references of `$storePath'" if $? != 0;
-
- my $narSize = `@bindir@/nix-store --query --size $storePath`;
- die "cannot query size of `$storePath'" if $? != 0;
- chomp $narSize;
-
- print "$deriver\n";
- print scalar @references, "\n";
- print "$_\n" foreach @references;
- print "$narSize\n";
- print "$narSize\n";
+
+ print "\n";
}
else { die "unknown command `$cmd'"; }
@@ -84,9 +87,10 @@ elsif ($ARGV[0] eq "--substitute") {
my $storePath = $ARGV[1];
my ($store, $sourcePath) = findStorePath $storePath;
die unless $store;
- print "\n*** Copying `$storePath' from `$sourcePath'\n\n";
+ print STDERR "\n*** Copying `$storePath' from `$sourcePath'\n\n";
system("$binDir/nix-store --dump $sourcePath | $binDir/nix-store --restore $storePath") == 0
or die "cannot copy `$sourcePath' to `$storePath'";
+ print "\n"; # no hash to verify
}
diff --git a/scripts/download-from-binary-cache.pl.in b/scripts/download-from-binary-cache.pl.in
new file mode 100644
index 000000000..76306405c
--- /dev/null
+++ b/scripts/download-from-binary-cache.pl.in
@@ -0,0 +1,539 @@
+#! @perl@ -w @perlFlags@
+
+use DBI;
+use File::Basename;
+use IO::Select;
+use Nix::Config;
+use Nix::Store;
+use Nix::Utils;
+use WWW::Curl::Easy;
+use WWW::Curl::Multi;
+use List::MoreUtils qw(any uniq);
+use strict;
+
+
+Nix::Config::readConfig;
+
+my @caches;
+my $gotCaches = 0;
+
+my $maxParallelRequests = int($Nix::Config::config{"binary-caches-parallel-connections"} // 150);
+$maxParallelRequests = 1 if $maxParallelRequests < 1;
+
+my $debug = ($ENV{"NIX_DEBUG_SUBST"} // "") eq 1;
+
+my ($dbh, $queryCache, $insertNAR, $queryNAR, $insertNARExistence, $queryNARExistence);
+
+my $curlm = WWW::Curl::Multi->new;
+my $activeRequests = 0;
+my $curlIdCount = 1;
+my %requests;
+my %scheduled;
+my $caBundle = $ENV{"CURL_CA_BUNDLE"} // $ENV{"OPENSSL_X509_CERT_FILE"};
+
+
+sub addRequest {
+ my ($storePath, $url, $head) = @_;
+
+ my $curl = WWW::Curl::Easy->new;
+ my $curlId = $curlIdCount++;
+ $requests{$curlId} = { storePath => $storePath, url => $url, handle => $curl, content => "", type => $head ? "HEAD" : "GET" };
+
+ $curl->setopt(CURLOPT_PRIVATE, $curlId);
+ $curl->setopt(CURLOPT_URL, $url);
+ $curl->setopt(CURLOPT_WRITEDATA, \$requests{$curlId}->{content});
+ $curl->setopt(CURLOPT_FOLLOWLOCATION, 1);
+ $curl->setopt(CURLOPT_CAINFO, $caBundle) if defined $caBundle;
+ $curl->setopt(CURLOPT_USERAGENT, "Nix/$Nix::Config::version");
+ $curl->setopt(CURLOPT_NOBODY, 1) if $head;
+ $curl->setopt(CURLOPT_FAILONERROR, 1);
+
+ if ($activeRequests >= $maxParallelRequests) {
+ $scheduled{$curlId} = 1;
+ } else {
+ $curlm->add_handle($curl);
+ $activeRequests++;
+ }
+
+ return $requests{$curlId};
+}
+
+
+sub processRequests {
+ while ($activeRequests) {
+ my ($rfds, $wfds, $efds) = $curlm->fdset();
+ #print STDERR "R = @{$rfds}, W = @{$wfds}, E = @{$efds}\n";
+
+ # Sleep until we can read or write some data.
+ if (scalar @{$rfds} + scalar @{$wfds} + scalar @{$efds} > 0) {
+ IO::Select->select(IO::Select->new(@{$rfds}), IO::Select->new(@{$wfds}), IO::Select->new(@{$efds}), 0.1);
+ }
+
+ if ($curlm->perform() != $activeRequests) {
+ while (my ($id, $result) = $curlm->info_read) {
+ if ($id) {
+ my $request = $requests{$id} or die;
+ my $handle = $request->{handle};
+ $request->{result} = $result;
+ $request->{httpStatus} = $handle->getinfo(CURLINFO_RESPONSE_CODE);
+
+ print STDERR "$request->{type} on $request->{url} [$request->{result}, $request->{httpStatus}]\n" if $debug;
+
+ $activeRequests--;
+ delete $request->{handle};
+
+ if (scalar(keys %scheduled) > 0) {
+ my $id2 = (keys %scheduled)[0];
+ $curlm->add_handle($requests{$id2}->{handle});
+ $activeRequests++;
+ delete $scheduled{$id2};
+ }
+ }
+ }
+ }
+ }
+}
+
+
+sub initCache {
+ my $dbPath = "$Nix::Config::stateDir/binary-cache-v1.sqlite";
+
+ # Open/create the database.
+ $dbh = DBI->connect("dbi:SQLite:dbname=$dbPath", "", "")
+ or die "cannot open database `$dbPath'";
+ $dbh->{RaiseError} = 1;
+ $dbh->{PrintError} = 0;
+
+ $dbh->do("pragma synchronous = off"); # we can always reproduce the cache
+ $dbh->do("pragma journal_mode = truncate");
+
+ # Initialise the database schema, if necessary.
+ $dbh->do(<<EOF);
+ create table if not exists BinaryCaches (
+ id integer primary key autoincrement not null,
+ url text unique not null,
+ timestamp integer not null,
+ storeDir text not null,
+ wantMassQuery integer not null
+ );
+EOF
+
+ $dbh->do(<<EOF);
+ create table if not exists NARs (
+ cache integer not null,
+ storePath text not null,
+ url text not null,
+ compression text not null,
+ fileHash text,
+ fileSize integer,
+ narHash text,
+ narSize integer,
+ refs text,
+ deriver text,
+ system text,
+ timestamp integer not null,
+ primary key (cache, storePath),
+ foreign key (cache) references BinaryCaches(id) on delete cascade
+ );
+EOF
+
+ $dbh->do(<<EOF);
+ create table if not exists NARExistence (
+ cache integer not null,
+ storePath text not null,
+ exist integer not null,
+ timestamp integer not null,
+ primary key (cache, storePath),
+ foreign key (cache) references BinaryCaches(id) on delete cascade
+ );
+EOF
+
+ $queryCache = $dbh->prepare("select id, storeDir, wantMassQuery from BinaryCaches where url = ?") or die;
+
+ $insertNAR = $dbh->prepare(
+ "insert or replace into NARs(cache, storePath, url, compression, fileHash, fileSize, narHash, " .
+ "narSize, refs, deriver, system, timestamp) values (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)") or die;
+
+ $queryNAR = $dbh->prepare("select * from NARs where cache = ? and storePath = ?") or die;
+
+ $insertNARExistence = $dbh->prepare(
+ "insert or replace into NARExistence(cache, storePath, exist, timestamp) values (?, ?, ?, ?)") or die;
+
+ $queryNARExistence = $dbh->prepare("select exist from NARExistence where cache = ? and storePath = ?") or die;
+}
+
+
+sub getAvailableCaches {
+ return if $gotCaches;
+ $gotCaches = 1;
+
+ sub strToList {
+ my ($s) = @_;
+ return map { s/\/+$//; $_ } split(/ /, $s);
+ }
+
+ my @urls = strToList
+ ($Nix::Config::config{"binary-caches"}
+ // ($Nix::Config::storeDir eq "/nix/store" ? "http://nixos.org/binary-cache" : ""));
+
+ my $urlsFiles = $Nix::Config::config{"binary-cache-files"}
+ // "/nix/var/nix/profiles/per-user/root/channels/binary-caches/*";
+ foreach my $urlFile (glob $urlsFiles) {
+ next unless -f $urlFile;
+ open FILE, "<$urlFile" or die "cannot open ‘$urlFile’\n";
+ my $url = <FILE>; chomp $url;
+ close FILE;
+ push @urls, strToList($url);
+ }
+
+ # Allow Nix daemon users to override the binary caches to a subset
+ # of those listed in the config file. Note that ‘untrusted-*’
+ # denotes options passed by the client.
+ if (defined $Nix::Config::config{"untrusted-binary-caches"}) {
+ my @untrustedUrls = strToList $Nix::Config::config{"untrusted-binary-caches"};
+ my @trustedUrls = (@urls, strToList($Nix::Config::config{"trusted-binary-caches"} // ""));
+ @urls = ();
+ foreach my $url (@untrustedUrls) {
+ die "binary cache ‘$url’ is not trusted (please add it to ‘trusted-binary-caches’ in $Nix::Config::confDir/nix.conf)\n"
+ unless any { $url eq $_ } @trustedUrls;
+ push @urls, $url;
+ }
+ }
+
+ foreach my $url (uniq @urls) {
+
+ # FIXME: not atomic.
+ $queryCache->execute($url);
+ my $res = $queryCache->fetchrow_hashref();
+ if (defined $res) {
+ next if $res->{storeDir} ne $Nix::Config::storeDir;
+ push @caches, { id => $res->{id}, url => $url, wantMassQuery => $res->{wantMassQuery} };
+ next;
+ }
+
+ # Get the cache info file.
+ my $request = addRequest(undef, $url . "/nix-cache-info");
+ processRequests;
+
+ if ($request->{result} != 0) {
+ print STDERR "could not download ‘$request->{url}’ (" .
+ ($request->{result} != 0 ? "Curl error $request->{result}" : "HTTP status $request->{httpStatus}") . ")\n";
+ next;
+ }
+
+ my $storeDir = "/nix/store";
+ my $wantMassQuery = 0;
+ foreach my $line (split "\n", $request->{content}) {
+ unless ($line =~ /^(.*): (.*)$/) {
+ print STDERR "bad cache info file ‘$request->{url}’\n";
+ return undef;
+ }
+ if ($1 eq "StoreDir") { $storeDir = $2; }
+ elsif ($1 eq "WantMassQuery") { $wantMassQuery = int($2); }
+ }
+
+ $dbh->do("insert into BinaryCaches(url, timestamp, storeDir, wantMassQuery) values (?, ?, ?, ?)",
+ {}, $url, time(), $storeDir, $wantMassQuery);
+ my $id = $dbh->last_insert_id("", "", "", "");
+ next if $storeDir ne $Nix::Config::storeDir;
+ push @caches, { id => $id, url => $url, wantMassQuery => $wantMassQuery };
+ }
+}
+
+
+sub processNARInfo {
+ my ($storePath, $cache, $request) = @_;
+
+ if ($request->{result} != 0) {
+ if ($request->{result} != 37 && $request->{httpStatus} != 404) {
+ print STDERR "could not download ‘$request->{url}’ (" .
+ ($request->{result} != 0 ? "Curl error $request->{result}" : "HTTP status $request->{httpStatus}") . ")\n";
+ } else {
+ $insertNARExistence->execute($cache->{id}, basename($storePath), 0, time())
+ unless $request->{url} =~ /^file:/;
+ }
+ return undef;
+ }
+
+ my ($storePath2, $url, $fileHash, $fileSize, $narHash, $narSize, $deriver, $system);
+ my $compression = "bzip2";
+ my @refs;
+ foreach my $line (split "\n", $request->{content}) {
+ unless ($line =~ /^(.*): (.*)$/) {
+ print STDERR "bad NAR info file ‘$request->{url}’\n";
+ return undef;
+ }
+ if ($1 eq "StorePath") { $storePath2 = $2; }
+ elsif ($1 eq "URL") { $url = $2; }
+ elsif ($1 eq "Compression") { $compression = $2; }
+ elsif ($1 eq "FileHash") { $fileHash = $2; }
+ elsif ($1 eq "FileSize") { $fileSize = int($2); }
+ elsif ($1 eq "NarHash") { $narHash = $2; }
+ elsif ($1 eq "NarSize") { $narSize = int($2); }
+ elsif ($1 eq "References") { @refs = split / /, $2; }
+ elsif ($1 eq "Deriver") { $deriver = $2; }
+ elsif ($1 eq "System") { $system = $2; }
+ }
+ return undef if $storePath ne $storePath2;
+ if ($storePath ne $storePath2 || !defined $url || !defined $narHash) {
+ print STDERR "bad NAR info file ‘$request->{url}’\n";
+ return undef;
+ }
+
+ # Cache the result.
+ $insertNAR->execute(
+ $cache->{id}, basename($storePath), $url, $compression, $fileHash, $fileSize,
+ $narHash, $narSize, join(" ", @refs), $deriver, $system, time())
+ unless $request->{url} =~ /^file:/;
+
+ return
+ { url => $url
+ , compression => $compression
+ , fileHash => $fileHash
+ , fileSize => $fileSize
+ , narHash => $narHash
+ , narSize => $narSize
+ , refs => [ @refs ]
+ , deriver => $deriver
+ , system => $system
+ };
+}
+
+
+sub getCachedInfoFrom {
+ my ($storePath, $cache) = @_;
+
+ $queryNAR->execute($cache->{id}, basename($storePath));
+ my $res = $queryNAR->fetchrow_hashref();
+ return undef unless defined $res;
+
+ return
+ { url => $res->{url}
+ , compression => $res->{compression}
+ , fileHash => $res->{fileHash}
+ , fileSize => $res->{fileSize}
+ , narHash => $res->{narHash}
+ , narSize => $res->{narSize}
+ , refs => [ split " ", $res->{refs} ]
+ , deriver => $res->{deriver}
+ } if defined $res;
+}
+
+
+sub negativeHit {
+ my ($storePath, $cache) = @_;
+ $queryNARExistence->execute($cache->{id}, basename($storePath));
+ my $res = $queryNARExistence->fetchrow_hashref();
+ return defined $res && $res->{exist} == 0;
+}
+
+
+sub positiveHit {
+ my ($storePath, $cache) = @_;
+ return 1 if defined getCachedInfoFrom($storePath, $cache);
+ $queryNARExistence->execute($cache->{id}, basename($storePath));
+ my $res = $queryNARExistence->fetchrow_hashref();
+ return defined $res && $res->{exist} == 1;
+}
+
+
+sub printInfo {
+ my ($storePath, $info) = @_;
+ print "$storePath\n";
+ print $info->{deriver} ? "$Nix::Config::storeDir/$info->{deriver}" : "", "\n";
+ print scalar @{$info->{refs}}, "\n";
+ print "$Nix::Config::storeDir/$_\n" foreach @{$info->{refs}};
+ print $info->{fileSize} || 0, "\n";
+ print $info->{narSize} || 0, "\n";
+}
+
+
+sub infoUrl {
+ my ($binaryCacheUrl, $storePath) = @_;
+ my $pathHash = substr(basename($storePath), 0, 32);
+ my $infoUrl = "$binaryCacheUrl/$pathHash.narinfo";
+}
+
+
+sub printInfoParallel {
+ my @paths = @_;
+
+ # First print all paths for which we have cached info.
+ my @left;
+ foreach my $storePath (@paths) {
+ my $found = 0;
+ foreach my $cache (@caches) {
+ my $info = getCachedInfoFrom($storePath, $cache);
+ if (defined $info) {
+ printInfo($storePath, $info);
+ $found = 1;
+ last;
+ }
+ }
+ push @left, $storePath if !$found;
+ }
+
+ return if scalar @left == 0;
+
+ foreach my $cache (@caches) {
+
+ my @left2;
+ %requests = ();
+ foreach my $storePath (@left) {
+ if (negativeHit($storePath, $cache)) {
+ push @left2, $storePath;
+ next;
+ }
+ addRequest($storePath, infoUrl($cache->{url}, $storePath));
+ }
+
+ processRequests;
+
+ foreach my $request (values %requests) {
+ my $info = processNARInfo($request->{storePath}, $cache, $request);
+ if (defined $info) {
+ printInfo($request->{storePath}, $info);
+ } else {
+ push @left2, $request->{storePath};
+ }
+ }
+
+ @left = @left2;
+ }
+}
+
+
+sub printSubstitutablePaths {
+ my @paths = @_;
+
+ # First look for paths that have cached info.
+ my @left;
+ foreach my $storePath (@paths) {
+ my $found = 0;
+ foreach my $cache (@caches) {
+ next unless $cache->{wantMassQuery};
+ if (positiveHit($storePath, $cache)) {
+ print "$storePath\n";
+ $found = 1;
+ last;
+ }
+ }
+ push @left, $storePath if !$found;
+ }
+
+ return if scalar @left == 0;
+
+ # For remaining paths, do HEAD requests.
+ foreach my $cache (@caches) {
+ next unless $cache->{wantMassQuery};
+ my @left2;
+ %requests = ();
+ foreach my $storePath (@left) {
+ if (negativeHit($storePath, $cache)) {
+ push @left2, $storePath;
+ next;
+ }
+ addRequest($storePath, infoUrl($cache->{url}, $storePath), 1);
+ }
+
+ processRequests;
+
+ foreach my $request (values %requests) {
+ if ($request->{result} != 0) {
+ if ($request->{result} != 37 && $request->{httpStatus} != 404) {
+ print STDERR "could not check ‘$request->{url}’ (" .
+ ($request->{result} != 0 ? "Curl error $request->{result}" : "HTTP status $request->{httpStatus}") . ")\n";
+ } else {
+ $insertNARExistence->execute($cache->{id}, basename($request->{storePath}), 0, time())
+ unless $request->{url} =~ /^file:/;
+ }
+ push @left2, $request->{storePath};
+ } else {
+ $insertNARExistence->execute($cache->{id}, basename($request->{storePath}), 1, time())
+ unless $request->{url} =~ /^file:/;
+ print "$request->{storePath}\n";
+ }
+ }
+
+ @left = @left2;
+ }
+}
+
+
+sub downloadBinary {
+ my ($storePath) = @_;
+
+ foreach my $cache (@caches) {
+ my $info = getCachedInfoFrom($storePath, $cache);
+
+ unless (defined $info) {
+ next if negativeHit($storePath, $cache);
+ my $request = addRequest($storePath, infoUrl($cache->{url}, $storePath));
+ processRequests;
+ $info = processNARInfo($storePath, $cache, $request);
+ }
+
+ next unless defined $info;
+
+ my $decompressor;
+ if ($info->{compression} eq "bzip2") { $decompressor = "$Nix::Config::bzip2 -d"; }
+ elsif ($info->{compression} eq "xz") { $decompressor = "$Nix::Config::xz -d"; }
+ else {
+ print STDERR "unknown compression method ‘$info->{compression}’\n";
+ next;
+ }
+ my $url = "$cache->{url}/$info->{url}"; # FIXME: handle non-relative URLs
+ print STDERR "\n*** Downloading ‘$url’ into ‘$storePath’...\n";
+ Nix::Utils::checkURL $url;
+ if (system("$Nix::Config::curl --fail --location --insecure '$url' | $decompressor | $Nix::Config::binDir/nix-store --restore $storePath") != 0) {
+ die "download of `$info->{url}' failed" . ($! ? ": $!" : "") . "\n" unless $? == 0;
+ next;
+ }
+
+ # Tell Nix about the expected hash so it can verify it.
+ print "$info->{narHash}\n";
+
+ print STDERR "\n";
+ return;
+ }
+
+ print STDERR "could not download ‘$storePath’ from any binary cache\n";
+}
+
+
+initCache();
+
+
+if ($ARGV[0] eq "--query") {
+
+ while (<STDIN>) {
+ getAvailableCaches;
+ chomp;
+ my ($cmd, @args) = split " ", $_;
+
+ if ($cmd eq "have") {
+ printSubstitutablePaths(@args);
+ print "\n";
+ }
+
+ elsif ($cmd eq "info") {
+ printInfoParallel(@args);
+ print "\n";
+ }
+
+ else { die "unknown command `$cmd'"; }
+
+ flush STDOUT;
+ }
+
+}
+
+elsif ($ARGV[0] eq "--substitute") {
+ my $storePath = $ARGV[1] or die;
+ getAvailableCaches;
+ downloadBinary($storePath);
+}
+
+else {
+ die;
+}
diff --git a/scripts/download-using-manifests.pl.in b/scripts/download-using-manifests.pl.in
index ef663dabb..24f7c98e0 100755
--- a/scripts/download-using-manifests.pl.in
+++ b/scripts/download-using-manifests.pl.in
@@ -4,6 +4,7 @@ use strict;
use Nix::Config;
use Nix::Manifest;
use Nix::Store;
+use Nix::Utils;
use POSIX qw(strftime);
use File::Temp qw(tempdir);
@@ -15,6 +16,9 @@ my $logFile = "$Nix::Config::logDir/downloads";
# estimating the expected download size.
my $fast = 1;
+# ‘--insecure’ is fine because Nix verifies the hash of the result.
+my $curl = "$Nix::Config::curl --fail --location --insecure";
+
# Open the manifest cache and update it if necessary.
my $dbh = updateManifestDB();
@@ -38,7 +42,7 @@ sub parseHash {
# given path.
sub computeSmallestDownload {
my $targetPath = shift;
-
+
# Build a graph of all store paths that might contribute to the
# construction of $targetPath, and the special node "start". The
# edges are either patch operations, or downloads of full NAR
@@ -93,7 +97,7 @@ sub computeSmallestDownload {
my $patchList = $dbh->selectall_arrayref(
"select * from Patches where storePath = ?",
{ Slice => {} }, $u);
-
+
foreach my $patch (@{$patchList}) {
if (isValidPath($patch->{basePath})) {
my ($baseHashAlgo, $baseHash) = parseHash $patch->{baseHash};
@@ -106,7 +110,7 @@ sub computeSmallestDownload {
$hash =~ s/.*://;
$hashCache->{$baseHashAlgo}->{$patch->{basePath}} = $hash;
}
-
+
next if $hash ne $baseHash;
}
push @queue, $patch->{basePath};
@@ -117,7 +121,7 @@ sub computeSmallestDownload {
my $narFileList = $dbh->selectall_arrayref(
"select * from NARs where storePath = ?",
{ Slice => {} }, $u);
-
+
foreach my $narFile (@{$narFileList}) {
# !!! how to handle files whose size is not known in advance?
# For now, assume some arbitrary size (1 GB).
@@ -173,58 +177,56 @@ sub computeSmallestDownload {
if ($ARGV[0] eq "--query") {
while (<STDIN>) {
- my $cmd = $_; chomp $cmd;
+ chomp;
+ my ($cmd, @args) = split " ", $_;
if ($cmd eq "have") {
- my $storePath = <STDIN>; chomp $storePath;
- print STDOUT (
- scalar @{$dbh->selectcol_arrayref("select 1 from NARs where storePath = ?", {}, $storePath)} > 0
- ? "1\n" : "0\n");
+ foreach my $storePath (@args) {
+ print "$storePath\n" if scalar @{$dbh->selectcol_arrayref("select 1 from NARs where storePath = ?", {}, $storePath)} > 0;
+ }
+ print "\n";
}
elsif ($cmd eq "info") {
- my $storePath = <STDIN>; chomp $storePath;
+ foreach my $storePath (@args) {
+
+ my $infos = $dbh->selectall_arrayref(
+ "select * from NARs where storePath = ?",
+ { Slice => {} }, $storePath);
+
+ next unless scalar @{$infos} > 0;
+ my $info = @{$infos}[0];
+
+ print "$storePath\n";
+ print "$info->{deriver}\n";
+ my @references = split " ", $info->{refs};
+ print scalar @references, "\n";
+ print "$_\n" foreach @references;
+
+ my @path = computeSmallestDownload $storePath;
+
+ my $downloadSize = 0;
+ while (scalar @path > 0) {
+ my $edge = pop @path;
+ my $u = $edge->{start};
+ my $v = $edge->{end};
+ if ($edge->{type} eq "patch") {
+ $downloadSize += $edge->{info}->{size} || 0;
+ }
+ elsif ($edge->{type} eq "narfile") {
+ $downloadSize += $edge->{info}->{size} || 0;
+ }
+ }
- my $infos = $dbh->selectall_arrayref(
- "select * from NARs where storePath = ?",
- { Slice => {} }, $storePath);
-
- my $info;
- if (scalar @{$infos} > 0) {
- $info = @{$infos}[0];
- }
- else {
- print "0\n";
- next; # not an error
- }
+ print "$downloadSize\n";
- print "1\n";
- print "$info->{deriver}\n";
- my @references = split " ", $info->{refs};
- print scalar @references, "\n";
- print "$_\n" foreach @references;
-
- my @path = computeSmallestDownload $storePath;
-
- my $downloadSize = 0;
- while (scalar @path > 0) {
- my $edge = pop @path;
- my $u = $edge->{start};
- my $v = $edge->{end};
- if ($edge->{type} eq "patch") {
- $downloadSize += $edge->{info}->{size} || 0;
- }
- elsif ($edge->{type} eq "narfile") {
- $downloadSize += $edge->{info}->{size} || 0;
- }
+ my $narSize = $info->{narSize} || 0;
+ print "$narSize\n";
}
- print "$downloadSize\n";
-
- my $narSize = $info->{narSize} || 0;
- print "$narSize\n";
+ print "\n";
}
-
+
else { die "unknown command `$cmd'"; }
}
@@ -273,16 +275,6 @@ $dbh->disconnect;
my $curStep = 1;
my $maxStep = scalar @path;
-sub downloadFile {
- my $url = shift;
- $ENV{"PRINT_PATH"} = 1;
- $ENV{"QUIET"} = 1;
- my ($hash, $path) = `$Nix::Config::binDir/nix-prefetch-url '$url'`;
- die "download of `$url' failed" . ($! ? ": $!" : "") . "\n" unless $? == 0;
- chomp $path;
- return $path;
-}
-
my $finalNarHash;
while (scalar @path > 0) {
@@ -314,13 +306,16 @@ while (scalar @path > 0) {
# Download the patch.
print STDERR " downloading patch...\n";
- my $patchPath = downloadFile "$patch->{url}";
+ my $patchPath = "$tmpDir/patch";
+ Nix::Utils::checkURL $patch->{url};
+ system("$curl '$patch->{url}' -o $patchPath") == 0
+ or die "cannot download patch `$patch->{url}'\n";
# Apply the patch to the NAR archive produced in step 1 (for
# the already present path) or a later step (for patch sequences).
print STDERR " applying patch...\n";
system("$Nix::Config::libexecDir/bspatch $tmpNar $tmpNar2 $patchPath") == 0
- or die "cannot apply patch `$patchPath' to $tmpNar";
+ or die "cannot apply patch `$patchPath' to $tmpNar\n";
if ($curStep < $maxStep) {
# The archive will be used as the base of the next patch.
@@ -330,7 +325,7 @@ while (scalar @path > 0) {
# into the target path.
print STDERR " unpacking patched archive...\n";
system("$Nix::Config::binDir/nix-store --restore $v < $tmpNar2") == 0
- or die "cannot unpack $tmpNar2 into `$v'";
+ or die "cannot unpack $tmpNar2 into `$v'\n";
}
$finalNarHash = $patch->{narHash};
@@ -342,20 +337,16 @@ while (scalar @path > 0) {
my $size = $narFile->{size} || -1;
print LOGFILE "$$ narfile $narFile->{url} $size $v\n";
-
- # Download the archive.
- print STDERR " downloading archive...\n";
- my $narFilePath = downloadFile "$narFile->{url}";
+ Nix::Utils::checkURL $narFile->{url};
if ($curStep < $maxStep) {
# The archive will be used a base to a patch.
- system("$Nix::Config::bzip2 -d < '$narFilePath' > $tmpNar") == 0
- or die "cannot unpack `$narFilePath' into `$v'";
+ system("$curl '$narFile->{url}' | $Nix::Config::bzip2 -d > $tmpNar") == 0
+ or die "cannot download and unpack `$narFile->{url}' into `$v'\n";
} else {
# Unpack the archive into the target path.
- print STDERR " unpacking archive...\n";
- system("$Nix::Config::bzip2 -d < '$narFilePath' | $Nix::Config::binDir/nix-store --restore '$v'") == 0
- or die "cannot unpack `$narFilePath' into `$v'";
+ system("$curl '$narFile->{url}' | $Nix::Config::bzip2 -d | $Nix::Config::binDir/nix-store --restore '$v'") == 0
+ or die "cannot download and unpack `$narFile->{url}' into `$v'\n";
}
$finalNarHash = $narFile->{narHash};
@@ -365,21 +356,10 @@ while (scalar @path > 0) {
}
-# Make sure that the hash declared in the manifest matches what we
-# downloaded and unpacked.
-
-if (defined $finalNarHash) {
- my ($hashAlgo, $hash) = parseHash $finalNarHash;
-
- # The hash in the manifest can be either in base-16 or base-32.
- # Handle both.
- my $hash2 = hashPath($hashAlgo, $hashAlgo eq "sha256" && length($hash) != 64, $targetPath);
-
- die "hash mismatch in downloaded path $targetPath; expected $hash, got $hash2\n"
- if $hash ne $hash2;
-} else {
- die "cannot check integrity of the downloaded path since its hash is not known\n";
-}
+# Tell Nix about the expected hash so it can verify it.
+die "cannot check integrity of the downloaded path since its hash is not known\n"
+ unless defined $finalNarHash;
+print "$finalNarHash\n";
print STDERR "\n";
diff --git a/scripts/nix-build.in b/scripts/nix-build.in
index afe0679a4..aa3f4661a 100755
--- a/scripts/nix-build.in
+++ b/scripts/nix-build.in
@@ -58,6 +58,11 @@ EOF
# '` hack
}
+ elsif ($arg eq "--version") {
+ print "nix-build (Nix) $Nix::Config::version\n";
+ exit 0;
+ }
+
elsif ($arg eq "--add-drv-link") {
$drvLink = "./derivation";
}
diff --git a/scripts/nix-channel.in b/scripts/nix-channel.in
index 6883ffd18..e057cc916 100755
--- a/scripts/nix-channel.in
+++ b/scripts/nix-channel.in
@@ -22,7 +22,7 @@ my $nixDefExpr = "$home/.nix-defexpr";
my $userName = getpwuid($<) or die "cannot figure out user name";
my $profile = "$Nix::Config::stateDir/profiles/per-user/$userName/channels";
mkpath(dirname $profile, 0, 0755);
-
+
my %channels;
@@ -77,20 +77,14 @@ sub removeChannel {
# channels.
sub update {
my @channelNames = @_;
-
- readChannels;
-
- # Create the manifests directory if it doesn't exist.
- mkdir $manifestDir, 0755 unless -e $manifestDir;
- # Do we have write permission to the manifests directory?
- die "$0: you do not have write permission to `$manifestDir'!\n" unless -W $manifestDir;
+ readChannels;
# Download each channel.
my $exprs = "";
foreach my $name (keys %channels) {
next if scalar @channelNames > 0 && ! grep { $_ eq $name } @{channelNames};
-
+
my $url = $channels{$name};
my $origUrl = "$url/MANIFEST";
@@ -101,11 +95,20 @@ sub update {
die "$0: unable to check `$url'\n" if $? != 0;
$headers =~ s/\r//g;
$url = $1 if $headers =~ /^Location:\s*(.*)\s*$/m;
-
- # Pull the channel manifest.
- $ENV{'NIX_ORIG_URL'} = $origUrl;
- system("$Nix::Config::binDir/nix-pull", "--skip-wrong-store", "$url/MANIFEST") == 0
- or die "cannot pull manifest from `$url'\n";
+
+ # Check if the channel advertises a binary cache.
+ my $binaryCacheURL = `$Nix::Config::curl --silent '$url'/binary-cache-url`;
+ my $extraAttrs = "";
+ if ($? == 0 && $binaryCacheURL ne "") {
+ $extraAttrs .= "binaryCacheURL = \"$binaryCacheURL\"; ";
+ } else {
+ # No binary cache, so pull the channel manifest.
+ mkdir $manifestDir, 0755 unless -e $manifestDir;
+ die "$0: you do not have write permission to `$manifestDir'!\n" unless -W $manifestDir;
+ $ENV{'NIX_ORIG_URL'} = $origUrl;
+ system("$Nix::Config::binDir/nix-pull", "--skip-wrong-store", "$url/MANIFEST") == 0
+ or die "cannot pull manifest from `$url'\n";
+ }
# Download the channel tarball.
my $fullURL = "$url/nixexprs.tar.bz2";
@@ -120,7 +123,7 @@ sub update {
my $cname = $name;
$cname .= $1 if basename($url) =~ /(-\d.*)$/;
- $exprs .= "'f: f { name = \"$cname\"; channelName = \"$name\"; src = builtins.storePath \"$path\"; }' ";
+ $exprs .= "'f: f { name = \"$cname\"; channelName = \"$name\"; src = builtins.storePath \"$path\"; $extraAttrs }' ";
}
# Unpack the channel tarballs into the Nix store and install them
@@ -189,11 +192,16 @@ while (scalar @ARGV) {
update(@ARGV);
last;
}
-
+
elsif ($arg eq "--help") {
usageError;
}
+ elsif ($arg eq "--version") {
+ print "nix-channel (Nix) $Nix::Config::version\n";
+ exit 0;
+ }
+
else {
die "unknown argument `$arg'; try `--help'";
}
diff --git a/scripts/nix-install-package.in b/scripts/nix-install-package.in
index 951c2918f..656452938 100755
--- a/scripts/nix-install-package.in
+++ b/scripts/nix-install-package.in
@@ -3,6 +3,7 @@
use strict;
use File::Temp qw(tempdir);
use Nix::Config;
+use Nix::Utils;
sub usageError {
@@ -72,7 +73,7 @@ my $tmpDir = tempdir("nix-install-package.XXXXXX", CLEANUP => 1, TMPDIR => 1)
sub barf {
my $msg = shift;
- print "$msg\n";
+ print "\nInstallation failed: $msg\n";
<STDIN> if $interactive;
exit 1;
}
@@ -92,7 +93,6 @@ open PKGFILE, "<$pkgFile" or barf "cannot open `$pkgFile': $!";
my $contents = <PKGFILE>;
close PKGFILE;
-my $urlRE = "(?: [a-zA-Z][a-zA-Z0-9\+\-\.]*\:[a-zA-Z0-9\%\/\?\:\@\&\=\+\$\,\-\_\.\!\~\*\']+ )";
my $nameRE = "(?: [A-Za-z0-9\+\-\.\_\?\=]+ )"; # see checkStoreName()
my $systemRE = "(?: [A-Za-z0-9\+\-\_]+ )";
my $pathRE = "(?: \/ [\/A-Za-z0-9\+\-\.\_\?\=]* )";
@@ -101,7 +101,7 @@ my $pathRE = "(?: \/ [\/A-Za-z0-9\+\-\.\_\?\=]* )";
# store path. We'll let nix-env do that.
$contents =~
- / ^ \s* (\S+) \s+ ($urlRE) \s+ ($nameRE) \s+ ($systemRE) \s+ ($pathRE) \s+ ($pathRE) /x
+ / ^ \s* (\S+) \s+ ($Nix::Utils::urlRE) \s+ ($nameRE) \s+ ($systemRE) \s+ ($pathRE) \s+ ($pathRE) ( \s+ ($Nix::Utils::urlRE) )? /x
or barf "invalid package contents";
my $version = $1;
my $manifestURL = $2;
@@ -109,6 +109,7 @@ my $drvName = $3;
my $system = $4;
my $drvPath = $5;
my $outPath = $6;
+my $binaryCacheURL = $8;
barf "invalid package version `$version'" unless $version eq "NIXPKG1";
@@ -122,17 +123,25 @@ if ($interactive) {
}
-# Store the manifest in the temporary directory so that we don't
-# pollute /nix/var/nix/manifests. This also requires that we don't
-# use the Nix daemon (because otherwise download-using-manifests won't
-# see our NIX_MANIFESTS_DIRS environment variable).
-$ENV{NIX_MANIFESTS_DIR} = $tmpDir;
-$ENV{NIX_REMOTE} = "";
+if (defined $binaryCacheURL) {
+ push @extraNixEnvArgs, "--option", "binary-caches", $binaryCacheURL;
-print "\nPulling manifests...\n";
-system("$Nix::Config::binDir/nix-pull", $manifestURL) == 0
- or barf "nix-pull failed: $?";
+} else {
+
+ # Store the manifest in the temporary directory so that we don't
+ # pollute /nix/var/nix/manifests. This also requires that we
+ # don't use the Nix daemon (because otherwise
+ # download-using-manifests won't see our NIX_MANIFESTS_DIRS
+ # environment variable).
+ $ENV{NIX_MANIFESTS_DIR} = $tmpDir;
+ $ENV{NIX_REMOTE} = "";
+
+ print "\nPulling manifests...\n";
+ system("$Nix::Config::binDir/nix-pull", $manifestURL) == 0
+ or barf "nix-pull failed: $?";
+
+}
print "\nInstalling package...\n";
diff --git a/scripts/nix-push.in b/scripts/nix-push.in
index a1c02190b..1edd8e773 100755
--- a/scripts/nix-push.in
+++ b/scripts/nix-push.in
@@ -1,85 +1,85 @@
#! @perl@ -w @perlFlags@
use strict;
+use File::Basename;
use File::Temp qw(tempdir);
+use File::Path qw(mkpath);
use File::stat;
+use File::Copy;
use Nix::Config;
+use Nix::Store;
use Nix::Manifest;
-my $hashAlgo = "sha256";
-
my $tmpDir = tempdir("nix-push.XXXXXX", CLEANUP => 1, TMPDIR => 1)
or die "cannot create a temporary directory";
my $nixExpr = "$tmpDir/create-nars.nix";
-my $manifest = "$tmpDir/MANIFEST";
-
-my $curl = "$Nix::Config::curl --fail --silent";
-my $extraCurlFlags = ${ENV{'CURL_FLAGS'}};
-$curl = "$curl $extraCurlFlags" if defined $extraCurlFlags;
# Parse the command line.
-my $localCopy;
-my $localArchivesDir;
-my $localManifestFile;
-
-my $targetArchivesUrl;
-
-my $archivesPutURL;
-my $archivesGetURL;
-my $manifestPutURL;
+my $compressionType = "xz";
+my $force = 0;
+my $destDir;
+my $writeManifest = 0;
+my $archivesURL;
+my @roots;
sub showSyntax {
print STDERR <<EOF
-Usage: nix-push --copy ARCHIVES_DIR MANIFEST_FILE PATHS...
- or: nix-push ARCHIVES_PUT_URL ARCHIVES_GET_URL MANIFEST_PUT_URL PATHS...
+Usage: nix-push --dest DIR [--manifest] [--url-prefix URL] PATHS...
-`nix-push' copies or uploads the closure of PATHS to the given
-destination.
+`nix-push' packs the closure of PATHS into a set of NAR files stored
+in DIR. Optionally generate a manifest.
EOF
; # `
exit 1;
}
-showSyntax if scalar @ARGV < 1;
-
-if ($ARGV[0] eq "--copy") {
- showSyntax if scalar @ARGV < 3;
- $localCopy = 1;
- shift @ARGV;
- $localArchivesDir = shift @ARGV;
- $localManifestFile = shift @ARGV;
- if ($ARGV[0] eq "--target") {
- shift @ARGV;
- $targetArchivesUrl = shift @ARGV;
- }
- else {
- $targetArchivesUrl = "file://$localArchivesDir";
+for (my $n = 0; $n < scalar @ARGV; $n++) {
+ my $arg = $ARGV[$n];
+
+ if ($arg eq "--help") {
+ showSyntax;
+ } elsif ($arg eq "--bzip2") {
+ $compressionType = "bzip2";
+ } elsif ($arg eq "--force") {
+ $force = 1;
+ } elsif ($arg eq "--dest") {
+ $n++;
+ die "$0: `$arg' requires an argument\n" unless $n < scalar @ARGV;
+ $destDir = $ARGV[$n];
+ mkpath($destDir, 0, 0755);
+ } elsif ($arg eq "--manifest") {
+ $writeManifest = 1;
+ } elsif ($arg eq "--url-prefix") {
+ $n++;
+ die "$0: `$arg' requires an argument\n" unless $n < scalar @ARGV;
+ $archivesURL = $ARGV[$n];
+ } elsif (substr($arg, 0, 1) eq "-") {
+ showSyntax;
+ } else {
+ push @roots, $arg;
}
}
-else {
- showSyntax if scalar @ARGV < 3;
- $localCopy = 0;
- $archivesPutURL = shift @ARGV;
- $archivesGetURL = shift @ARGV;
- $manifestPutURL = shift @ARGV;
-}
+
+showSyntax if !defined $destDir;
+
+$archivesURL = "file://$destDir" unless defined $archivesURL;
# From the given store paths, determine the set of requisite store
# paths, i.e, the paths required to realise them.
my %storePaths;
-foreach my $path (@ARGV) {
+foreach my $path (@roots) {
die unless $path =~ /^\//;
- # Get all paths referenced by the normalisation of the given
+ # Get all paths referenced by the normalisation of the given
# Nix expression.
my $pid = open(READ,
"$Nix::Config::binDir/nix-store --query --requisites --force-realise " .
"--include-outputs '$path'|") or die;
-
+
while (<READ>) {
chomp;
die "bad: $_" unless /^\//;
@@ -92,8 +92,8 @@ foreach my $path (@ARGV) {
my @storePaths = keys %storePaths;
-# For each path, create a Nix expression that turns the path into
-# a Nix archive.
+# Create a list of Nix derivations that turn each path into a Nix
+# archive.
open NIX, ">$nixExpr";
print NIX "[";
@@ -101,10 +101,10 @@ foreach my $storePath (@storePaths) {
die unless ($storePath =~ /\/[0-9a-z]{32}[^\"\\\$]*$/);
# Construct a Nix expression that creates a Nix archive.
- my $nixexpr =
+ my $nixexpr =
"(import <nix/nar.nix> " .
- "{ storePath = builtins.storePath \"$storePath\"; hashAlgo = \"$hashAlgo\"; }) ";
-
+ "{ storePath = builtins.storePath \"$storePath\"; hashAlgo = \"sha256\"; compressionType = \"$compressionType\"; }) ";
+
print NIX $nixexpr;
}
@@ -112,172 +112,132 @@ print NIX "]";
close NIX;
-# Instantiate store derivations from the Nix expression.
-my @storeExprs;
-print STDERR "instantiating store derivations...\n";
-my $pid = open(READ, "$Nix::Config::binDir/nix-instantiate $nixExpr|")
- or die "cannot run nix-instantiate";
+# Build the Nix expression.
+print STDERR "building compressed archives...\n";
+my @narPaths;
+my $pid = open(READ, "$Nix::Config::binDir/nix-build $nixExpr -o $tmpDir/result |")
+ or die "cannot run nix-build";
while (<READ>) {
chomp;
die unless /^\//;
- push @storeExprs, $_;
+ push @narPaths, $_;
}
-close READ or die "nix-instantiate failed: $?";
-
+close READ or die "nix-build failed: $?";
-# Build the derivations.
-print STDERR "creating archives...\n";
-my @narPaths;
-
-my @tmp = @storeExprs;
-while (scalar @tmp > 0) {
- my $n = scalar @tmp;
- if ($n > 256) { $n = 256 };
- my @tmp2 = @tmp[0..$n - 1];
- @tmp = @tmp[$n..scalar @tmp - 1];
-
- my $pid = open(READ, "$Nix::Config::binDir/nix-store --realise @tmp2|")
- or die "cannot run nix-store";
- while (<READ>) {
- chomp;
- die unless (/^\//);
- push @narPaths, "$_";
- }
- close READ or die "nix-store failed: $?";
+# Write the cache info file.
+my $cacheInfoFile = "$destDir/nix-cache-info";
+if (! -e $cacheInfoFile) {
+ open FILE, ">$cacheInfoFile" or die "cannot create $cacheInfoFile: $!";
+ print FILE "StoreDir: $Nix::Config::storeDir\n";
+ print FILE "WantMassQuery: 0\n"; # by default, don't hit this cache for "nix-env -qas"
+ close FILE;
}
-# Create the manifest.
-print STDERR "creating manifest...\n";
+# Copy the archives and the corresponding NAR info files.
+print STDERR "copying archives...\n";
+
+my $totalNarSize = 0;
+my $totalCompressedSize = 0;
my %narFiles;
-my %patches;
-my @narArchives;
for (my $n = 0; $n < scalar @storePaths; $n++) {
my $storePath = $storePaths[$n];
my $narDir = $narPaths[$n];
-
- $storePath =~ /\/([^\/]*)$/;
- my $basename = $1;
- defined $basename or die;
-
- open HASH, "$narDir/narbz2-hash" or die "cannot open narbz2-hash";
- my $narbz2Hash = <HASH>;
- chomp $narbz2Hash;
- $narbz2Hash =~ /^[0-9a-z]+$/ or die "invalid hash";
- close HASH;
-
- my $narName = "$narbz2Hash.nar.bz2";
-
- my $narFile = "$narDir/$narName";
- (-f $narFile) or die "narfile for $storePath not found";
- push @narArchives, $narFile;
-
- my $narbz2Size = stat($narFile)->size;
-
- my $references = `$Nix::Config::binDir/nix-store --query --references '$storePath'`;
- die "cannot query references for `$storePath'" if $? != 0;
- $references = join(" ", split(" ", $references));
+ my $baseName = basename $storePath;
- my $deriver = `$Nix::Config::binDir/nix-store --query --deriver '$storePath'`;
- die "cannot query deriver for `$storePath'" if $? != 0;
- chomp $deriver;
- $deriver = "" if $deriver eq "unknown-deriver";
-
- my $narHash = `$Nix::Config::binDir/nix-store --query --hash '$storePath'`;
- die "cannot query hash for `$storePath'" if $? != 0;
- chomp $narHash;
+ # Get info about the store path.
+ my ($deriver, $narHash, $time, $narSize, $refs) = queryPathInfo($storePath, 1);
# In some exceptional cases (such as VM tests that use the Nix
# store of the host), the database doesn't contain the hash. So
# compute it.
if ($narHash =~ /^sha256:0*$/) {
- $narHash = `$Nix::Config::binDir/nix-hash --type sha256 --base32 '$storePath'`;
- die "cannot hash `$storePath'" if $? != 0;
+ my $nar = "$tmpDir/nar";
+ system("$Nix::Config::binDir/nix-store --dump $storePath > $nar") == 0
+ or die "cannot dump $storePath\n";
+ $narHash = `$Nix::Config::binDir/nix-hash --type sha256 --base32 --flat $nar`;
+ die "cannot hash `$nar'" if $? != 0;
chomp $narHash;
$narHash = "sha256:$narHash";
+ $narSize = stat("$nar")->size;
+ unlink $nar or die;
}
- my $narSize = `$Nix::Config::binDir/nix-store --query --size '$storePath'`;
- die "cannot query size for `$storePath'" if $? != 0;
- chomp $narSize;
+ $totalNarSize += $narSize;
- my $url;
- if ($localCopy) {
- $url = "$targetArchivesUrl/$narName";
- } else {
- $url = "$archivesGetURL/$narName";
- }
- $narFiles{$storePath} = [
- { url => $url
- , hash => "$hashAlgo:$narbz2Hash"
- , size => $narbz2Size
- , narHash => "$narHash"
- , narSize => $narSize
- , references => $references
- , deriver => $deriver
- }
- ];
-}
-
-writeManifest $manifest, \%narFiles, \%patches;
+ # Get info about the compressed NAR.
+ open HASH, "$narDir/nar-compressed-hash" or die "cannot open nar-compressed-hash";
+ my $compressedHash = <HASH>;
+ chomp $compressedHash;
+ $compressedHash =~ /^[0-9a-z]+$/ or die "invalid hash";
+ close HASH;
+ my $narName = "$compressedHash.nar." . ($compressionType eq "xz" ? "xz" : "bz2");
-sub copyFile {
- my $src = shift;
- my $dst = shift;
- my $tmp = "$dst.tmp.$$";
- system("@coreutils@/cp", $src, $tmp) == 0 or die "cannot copy file";
- rename($tmp, $dst) or die "cannot rename file: $!";
-}
+ my $narFile = "$narDir/$narName";
+ (-f $narFile) or die "NAR file for $storePath not found";
+ my $compressedSize = stat($narFile)->size;
+ $totalCompressedSize += $compressedSize;
-# Upload/copy the archives.
-print STDERR "uploading/copying archives...\n";
+ printf STDERR "%s [%.2f MiB, %.1f%%]\n", $storePath,
+ $compressedSize / (1024 * 1024), $compressedSize / $narSize * 100;
-sub archiveExists {
- my $name = shift;
- print STDERR " HEAD on $archivesGetURL/$name\n";
- return system("$curl --head $archivesGetURL/$name > /dev/null") == 0;
-}
+ # Copy the compressed NAR.
+ my $dst = "$destDir/$narName";
+ if (! -f $dst) {
+ my $tmp = "$destDir/.tmp.$$.$narName";
+ copy($narFile, $tmp) or die "cannot copy $narFile to $tmp: $!\n";
+ rename($tmp, $dst) or die "cannot rename $tmp to $dst: $!\n";
+ }
-foreach my $narArchive (@narArchives) {
+ # Write the info file.
+ my $info;
+ $info .= "StorePath: $storePath\n";
+ $info .= "URL: $narName\n";
+ $info .= "Compression: $compressionType\n";
+ $info .= "FileHash: sha256:$compressedHash\n";
+ $info .= "FileSize: $compressedSize\n";
+ $info .= "NarHash: $narHash\n";
+ $info .= "NarSize: $narSize\n";
+ $info .= "References: " . join(" ", map { basename $_ } @{$refs}) . "\n";
+ if (defined $deriver) {
+ $info .= "Deriver: " . basename $deriver . "\n";
+ if (isValidPath($deriver)) {
+ my $drv = derivationFromPath($deriver);
+ $info .= "System: $drv->{platform}\n";
+ }
+ }
- $narArchive =~ /\/([^\/]*)$/;
- my $basename = $1;
+ my $pathHash = substr(basename($storePath), 0, 32);
- if ($localCopy) {
- # Since nix-push creates $dst atomically, if it exists we
- # don't have to copy again.
- my $dst = "$localArchivesDir/$basename";
- if (! -f "$localArchivesDir/$basename") {
- print STDERR " $narArchive\n";
- copyFile $narArchive, $dst;
- }
+ $dst = "$destDir/$pathHash.narinfo";
+ if ($force || ! -f $dst) {
+ my $tmp = "$destDir/.tmp.$$.$pathHash.narinfo";
+ open INFO, ">$tmp" or die;
+ print INFO "$info" or die;
+ close INFO or die;
+ rename($tmp, $dst) or die "cannot rename $tmp to $dst: $!\n";
}
- else {
- if (!archiveExists("$basename")) {
- print STDERR " $narArchive\n";
- system("$curl --show-error --upload-file " .
- "'$narArchive' '$archivesPutURL/$basename' > /dev/null") == 0 or
- die "curl failed on $narArchive: $?";
+
+ $narFiles{$storePath} = [
+ { url => "$archivesURL/$narName"
+ , hash => "sha256:$compressedHash"
+ , size => $compressedSize
+ , narHash => "$narHash"
+ , narSize => $narSize
+ , references => join(" ", @{$refs})
+ , deriver => $deriver
}
- }
+ ] if $writeManifest;
}
+printf STDERR "total compressed size %.2f MiB, %.1f%%\n",
+ $totalCompressedSize / (1024 * 1024), $totalCompressedSize / $totalNarSize * 100;
-# Upload the manifest.
-print STDERR "uploading manifest...\n";
-if ($localCopy) {
- copyFile $manifest, $localManifestFile;
- copyFile "$manifest.bz2", "$localManifestFile.bz2";
-} else {
- system("$curl --show-error --upload-file " .
- "'$manifest' '$manifestPutURL' > /dev/null") == 0 or
- die "curl failed on $manifest: $?";
- system("$curl --show-error --upload-file " .
- "'$manifest'.bz2 '$manifestPutURL'.bz2 > /dev/null") == 0 or
- die "curl failed on $manifest: $?";
-}
+
+# Optionally write a manifest.
+writeManifest "$destDir/MANIFEST", \%narFiles, \() if $writeManifest;