Diffstat (limited to 'scripts')
-rwxr-xr-x  scripts/build-remote.pl.in                7
-rwxr-xr-x  scripts/download-using-manifests.pl.in  376
-rw-r--r--  scripts/install-nix-from-closure.sh       5
-rw-r--r--  scripts/local.mk                         11
-rwxr-xr-x  scripts/nix-build.in                      9
-rwxr-xr-x  scripts/nix-channel.in                   25
-rwxr-xr-x  scripts/nix-generate-patches.in          51
-rwxr-xr-x  scripts/nix-install-package.in           22
-rw-r--r--  scripts/nix-profile.sh.in                76
-rwxr-xr-x  scripts/nix-pull.in                     102
10 files changed, 76 insertions, 608 deletions
diff --git a/scripts/build-remote.pl.in b/scripts/build-remote.pl.in
index ee214b930..bd8b44025 100755
--- a/scripts/build-remote.pl.in
+++ b/scripts/build-remote.pl.in
@@ -53,7 +53,7 @@ sub all { $_ || return 0 for @_; 1 }
# Initialisation.
my $loadIncreased = 0;
-my ($localSystem, $maxSilentTime, $printBuildTrace, $buildTimeout) = @ARGV;
+my ($localSystem, $maxSilentTime, $buildTimeout) = @ARGV;
my $currentLoad = $ENV{"NIX_CURRENT_LOAD"} // "/run/nix/current-load";
my $conf = $ENV{"NIX_REMOTE_SYSTEMS"} // "@sysconfdir@/nix/machines";
@@ -223,9 +223,6 @@ my @inputs = split /\s/, readline(STDIN);
my @outputs = split /\s/, readline(STDIN);
-print STDERR "@ build-remote $drvPath $hostName\n" if $printBuildTrace;
-
-
my $maybeSign = "";
$maybeSign = "--sign" if -e "$Nix::Config::confDir/signing-key.sec";
@@ -259,13 +256,11 @@ close UPLOADLOCK;
# Perform the build.
print STDERR "building ‘$drvPath’ on ‘$hostName’\n";
-print STDERR "@ build-remote-start $drvPath $hostName\n" if $printBuildTrace;
writeInt(6, $to) or die; # == cmdBuildPaths
writeStrings([$drvPath], $to);
writeInt($maxSilentTime, $to);
writeInt($buildTimeout, $to);
my $res = readInt($from);
-print STDERR "@ build-remote-done $drvPath $hostName\n" if $printBuildTrace;
if ($res != 0) {
my $msg = decode("utf-8", readString($from));
print STDERR "error: $msg on ‘$hostName’\n";
diff --git a/scripts/download-using-manifests.pl.in b/scripts/download-using-manifests.pl.in
deleted file mode 100755
index ffc49f8ff..000000000
--- a/scripts/download-using-manifests.pl.in
+++ /dev/null
@@ -1,376 +0,0 @@
-#! @perl@ -w @perlFlags@
-
-use utf8;
-use strict;
-use Nix::Config;
-use Nix::Manifest;
-use Nix::Store;
-use Nix::Utils;
-use POSIX qw(strftime);
-
-STDOUT->autoflush(1);
-binmode STDERR, ":encoding(utf8)";
-
-my $logFile = "$Nix::Config::logDir/downloads";
-
-# For queries, skip expensive calls to nix-hash etc. We're just
-# estimating the expected download size.
-my $fast = 1;
-
-my $curl = "$Nix::Config::curl --fail --location";
-
-
-# Open the manifest cache and update it if necessary.
-my $dbh = updateManifestDB();
-exit 0 unless defined $dbh; # exit if there are no manifests
-print "\n";
-
-
-# $hashCache->{$algo}->{$path} yields the $algo-hash of $path.
-my $hashCache;
-
-
-sub parseHash {
- my $hash = shift;
- if ($hash =~ /^(.+):(.+)$/) {
- return ($1, $2);
- } else {
- return ("md5", $hash);
- }
-}
-
-
-# Compute the most efficient sequence of downloads to produce the
-# given path.
-sub computeSmallestDownload {
- my $targetPath = shift;
-
- # Build a graph of all store paths that might contribute to the
- # construction of $targetPath, and the special node "start". The
- # edges are either patch operations, or downloads of full NAR
- # files. The latter edges only occur between "start" and a store
- # path.
- my %graph;
-
- $graph{"start"} = {d => 0, pred => undef, edges => []};
-
- my @queue = ();
- my $queueFront = 0;
- my %done;
-
- sub addNode {
- my $graph = shift;
- my $u = shift;
- $$graph{$u} = {d => 999999999999, pred => undef, edges => []}
- unless defined $$graph{$u};
- }
-
- sub addEdge {
- my $graph = shift;
- my $u = shift;
- my $v = shift;
- my $w = shift;
- my $type = shift;
- my $info = shift;
- addNode $graph, $u;
- push @{$$graph{$u}->{edges}},
- {weight => $w, start => $u, end => $v, type => $type, info => $info};
- my $n = scalar @{$$graph{$u}->{edges}};
- }
-
- push @queue, $targetPath;
-
- while ($queueFront < scalar @queue) {
- my $u = $queue[$queueFront++];
- next if defined $done{$u};
- $done{$u} = 1;
-
- addNode \%graph, $u;
-
- # If the path already exists, it has distance 0 from the
- # "start" node.
- if (isValidPath($u)) {
- addEdge \%graph, "start", $u, 0, "present", undef;
- }
-
- else {
-
- # Add patch edges.
- my $patchList = $dbh->selectall_arrayref(
- "select * from Patches where storePath = ?",
- { Slice => {} }, $u);
-
- foreach my $patch (@{$patchList}) {
- if (isValidPath($patch->{basePath})) {
- my ($baseHashAlgo, $baseHash) = parseHash $patch->{baseHash};
-
- my $hash = $hashCache->{$baseHashAlgo}->{$patch->{basePath}};
- if (!defined $hash) {
- $hash = $fast && $baseHashAlgo eq "sha256"
- ? queryPathHash($patch->{basePath})
- : hashPath($baseHashAlgo, $baseHashAlgo ne "md5", $patch->{basePath});
- $hash =~ s/.*://;
- $hashCache->{$baseHashAlgo}->{$patch->{basePath}} = $hash;
- }
-
- next if $hash ne $baseHash;
- }
- push @queue, $patch->{basePath};
- addEdge \%graph, $patch->{basePath}, $u, $patch->{size}, "patch", $patch;
- }
-
- # Add NAR file edges to the start node.
- my $narFileList = $dbh->selectall_arrayref(
- "select * from NARs where storePath = ?",
- { Slice => {} }, $u);
-
- foreach my $narFile (@{$narFileList}) {
- # !!! how to handle files whose size is not known in advance?
- # For now, assume some arbitrary size (1 GB).
- # This has the side-effect of preferring non-Hydra downloads.
- addEdge \%graph, "start", $u, ($narFile->{size} || 1000000000), "narfile", $narFile;
- }
- }
- }
-
-
- # Run Dijkstra's shortest path algorithm to determine the shortest
- # sequence of download and/or patch actions that will produce
- # $targetPath.
-
- my @todo = keys %graph;
-
- while (scalar @todo > 0) {
-
- # Remove the closest element from the todo list.
- # !!! inefficient, use a priority queue
- @todo = sort { -($graph{$a}->{d} <=> $graph{$b}->{d}) } @todo;
- my $u = pop @todo;
-
- my $u_ = $graph{$u};
-
- foreach my $edge (@{$u_->{edges}}) {
- my $v_ = $graph{$edge->{end}};
- if ($v_->{d} > $u_->{d} + $edge->{weight}) {
- $v_->{d} = $u_->{d} + $edge->{weight};
- # Store the edge, so edge->start is actually the
- # predecessor.
- $v_->{pred} = $edge;
- }
- }
- }
-
-
- # Retrieve the shortest path from "start" to $targetPath.
- my @path = ();
- my $cur = $targetPath;
- return () unless defined $graph{$targetPath}->{pred};
- while ($cur ne "start") {
- push @path, $graph{$cur}->{pred};
- $cur = $graph{$cur}->{pred}->{start};
- }
-
- return @path;
-}
-
-
-# Parse the arguments.
-
-if ($ARGV[0] eq "--query") {
-
- while (<STDIN>) {
- chomp;
- my ($cmd, @args) = split " ", $_;
-
- if ($cmd eq "have") {
- foreach my $storePath (@args) {
- print "$storePath\n" if scalar @{$dbh->selectcol_arrayref("select 1 from NARs where storePath = ?", {}, $storePath)} > 0;
- }
- print "\n";
- }
-
- elsif ($cmd eq "info") {
- foreach my $storePath (@args) {
-
- my $infos = $dbh->selectall_arrayref(
- "select * from NARs where storePath = ?",
- { Slice => {} }, $storePath);
-
- next unless scalar @{$infos} > 0;
- my $info = @{$infos}[0];
-
- print "$storePath\n";
- print "$info->{deriver}\n";
- my @references = split " ", $info->{refs};
- print scalar @references, "\n";
- print "$_\n" foreach @references;
-
- my @path = computeSmallestDownload $storePath;
-
- my $downloadSize = 0;
- while (scalar @path > 0) {
- my $edge = pop @path;
- my $u = $edge->{start};
- my $v = $edge->{end};
- if ($edge->{type} eq "patch") {
- $downloadSize += $edge->{info}->{size} || 0;
- }
- elsif ($edge->{type} eq "narfile") {
- $downloadSize += $edge->{info}->{size} || 0;
- }
- }
-
- print "$downloadSize\n";
-
- my $narSize = $info->{narSize} || 0;
- print "$narSize\n";
- }
-
- print "\n";
- }
-
- else { die "unknown command ‘$cmd’"; }
- }
-
- exit 0;
-}
-
-elsif ($ARGV[0] ne "--substitute") {
- die;
-}
-
-
-die unless scalar @ARGV == 3;
-my $targetPath = $ARGV[1];
-my $destPath = $ARGV[2];
-$fast = 0;
-
-
-# Create a temporary directory.
-my $tmpDir = mkTempDir("nix-download");
-
-my $tmpNar = "$tmpDir/nar";
-my $tmpNar2 = "$tmpDir/nar2";
-
-
-open LOGFILE, ">>$logFile" or die "cannot open log file $logFile";
-
-my $date = strftime ("%F %H:%M:%S UTC", gmtime (time));
-print LOGFILE "$$ get $targetPath $date\n";
-
-print STDERR "\n*** Trying to download/patch ‘$targetPath’\n";
-
-
-# Compute the shortest path.
-my @path = computeSmallestDownload $targetPath;
-die "don't know how to produce $targetPath\n" if scalar @path == 0;
-
-
-# We don't need the manifest anymore, so close it as an optimisation:
-# if we still have SQLite locks blocking other processes (we
-# shouldn't), this gets rid of them.
-$dbh->disconnect;
-
-
-# Traverse the shortest path, perform the actions described by the
-# edges.
-my $curStep = 1;
-my $maxStep = scalar @path;
-
-my $finalNarHash;
-
-while (scalar @path > 0) {
- my $edge = pop @path;
- my $u = $edge->{start};
- my $v = $edge->{end};
-
- print STDERR "\n*** Step $curStep/$maxStep: ";
-
- if ($edge->{type} eq "present") {
- print STDERR "using already present path ‘$v’\n";
- print LOGFILE "$$ present $v\n";
-
- if ($curStep < $maxStep) {
- # Since this is not the last step, the path will be used
- # as a base for one or more patches. So turn the base path
- # into a NAR archive, to which we can apply the patch.
- print STDERR " packing base path...\n";
- system("$Nix::Config::binDir/nix-store --dump $v > $tmpNar") == 0
- or die "cannot dump ‘$v’";
- }
- }
-
- elsif ($edge->{type} eq "patch") {
- my $patch = $edge->{info};
- print STDERR "applying patch ‘$patch->{url}’ to ‘$u’ to create ‘$v’\n";
-
- print LOGFILE "$$ patch $patch->{url} $patch->{size} $patch->{baseHash} $u $v\n";
-
- # Download the patch.
- print STDERR " downloading patch...\n";
- my $patchPath = "$tmpDir/patch";
- checkURL $patch->{url};
- system("$curl '$patch->{url}' -o $patchPath") == 0
- or die "cannot download patch ‘$patch->{url}’\n";
-
- # Apply the patch to the NAR archive produced in step 1 (for
- # the already present path) or a later step (for patch sequences).
- print STDERR " applying patch...\n";
- system("$Nix::Config::libexecDir/nix/bspatch $tmpNar $tmpNar2 $patchPath") == 0
- or die "cannot apply patch ‘$patchPath’ to $tmpNar\n";
-
- if ($curStep < $maxStep) {
- # The archive will be used as the base of the next patch.
- rename "$tmpNar2", "$tmpNar" or die "cannot rename NAR archive: $!";
- } else {
- # This was the last patch. Unpack the final NAR archive
- # into the target path.
- print STDERR " unpacking patched archive...\n";
- system("$Nix::Config::binDir/nix-store --restore $destPath < $tmpNar2") == 0
- or die "cannot unpack $tmpNar2 to ‘$v’\n";
- }
-
- $finalNarHash = $patch->{narHash};
- }
-
- elsif ($edge->{type} eq "narfile") {
- my $narFile = $edge->{info};
- print STDERR "downloading ‘$narFile->{url}’ to ‘$v’\n";
-
- my $size = $narFile->{size} || -1;
- print LOGFILE "$$ narfile $narFile->{url} $size $v\n";
-
- checkURL $narFile->{url};
-
- my $decompressor =
- $narFile->{compressionType} eq "bzip2" ? "| $Nix::Config::bzip2 -d" :
- $narFile->{compressionType} eq "xz" ? "| $Nix::Config::xz -d" :
- $narFile->{compressionType} eq "none" ? "" :
- die "unknown compression type ‘$narFile->{compressionType}’";
-
- if ($curStep < $maxStep) {
- # The archive will be used as a base for a patch.
- system("$curl '$narFile->{url}' $decompressor > $tmpNar") == 0
- or die "cannot download and unpack ‘$narFile->{url}’ to ‘$v’\n";
- } else {
- # Unpack the archive to the target path.
- system("$curl '$narFile->{url}' $decompressor | $Nix::Config::binDir/nix-store --restore '$destPath'") == 0
- or die "cannot download and unpack ‘$narFile->{url}’ to ‘$v’\n";
- }
-
- $finalNarHash = $narFile->{narHash};
- }
-
- $curStep++;
-}
-
-
-# Tell Nix about the expected hash so it can verify it.
-die "cannot check integrity of the downloaded path since its hash is not known\n"
- unless defined $finalNarHash;
-print "$finalNarHash\n";
-
-
-print STDERR "\n";
-print LOGFILE "$$ success\n";
-close LOGFILE;
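
The heart of the deleted script is computeSmallestDownload above: it models full NAR downloads and binary patches as weighted edges out of a synthetic "start" node and runs Dijkstra's algorithm to find the cheapest way to materialise the target path (the code itself flags the sort-based relaxation as inefficient). Below is a minimal, self-contained Perl sketch of that step on a hypothetical graph, where the new path can be produced either by one large NAR download or by fetching the old version's NAR and applying a small patch; all store paths and sizes are invented.

#!/usr/bin/env perl
# Sketch of the shortest-download computation; not the original code.
use strict;
use warnings;

# Hypothetical graph: nodes are store paths plus a synthetic "start" node,
# edge weights are download sizes in bytes.
my %graph = (
    "start" => [
        { to => "/nix/store/aaa-hello-2.9",  weight =>  60_000, type => "narfile" },
        { to => "/nix/store/bbb-hello-2.10", weight => 950_000, type => "narfile" },
    ],
    "/nix/store/aaa-hello-2.9" => [
        { to => "/nix/store/bbb-hello-2.10", weight => 12_000, type => "patch" },
    ],
    "/nix/store/bbb-hello-2.10" => [],
);

sub smallest_download {
    my ($graph, $target) = @_;
    my (%dist, %pred);
    $dist{$_} = 9**18 for keys %$graph;     # effectively "infinity"
    $dist{"start"} = 0;
    my %todo = map { $_ => 1 } keys %$graph;

    while (%todo) {
        # Extract the unvisited node with the smallest distance (the original
        # did the same by sorting the todo list on every iteration).
        my ($u) = sort { $dist{$a} <=> $dist{$b} } keys %todo;
        delete $todo{$u};
        for my $edge (@{ $graph->{$u} }) {
            my $v = $edge->{to};
            if ($dist{$u} + $edge->{weight} < $dist{$v}) {
                $dist{$v} = $dist{$u} + $edge->{weight};
                $pred{$v} = { %$edge, from => $u };
            }
        }
    }

    # Walk the predecessor edges back from the target to "start".
    my @plan;
    for (my $v = $target; $v ne "start"; $v = $pred{$v}{from}) {
        return () unless defined $pred{$v};  # target unreachable
        unshift @plan, $pred{$v};
    }
    return @plan;
}

my @plan = smallest_download(\%graph, "/nix/store/bbb-hello-2.10");
printf "%s %s (%d bytes)\n", $_->{type}, $_->{to}, $_->{weight} for @plan;
# narfile /nix/store/aaa-hello-2.9 (60000 bytes)
# patch /nix/store/bbb-hello-2.10 (12000 bytes)

The printed plan (fetch the old NAR, then apply the small patch) is exactly the kind of edge sequence the script's main loop then executed step by step.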
diff --git a/scripts/install-nix-from-closure.sh b/scripts/install-nix-from-closure.sh
index bef5cd4f1..509acc41f 100644
--- a/scripts/install-nix-from-closure.sh
+++ b/scripts/install-nix-from-closure.sh
@@ -1,4 +1,4 @@
-#! /usr/bin/env bash
+#!/bin/sh
set -e
@@ -49,7 +49,10 @@ for i in $(cd $self/store >/dev/null && echo *); do
fi
if ! [ -e "$dest/store/$i" ]; then
cp -Rp "$self/store/$i" "$i_tmp"
+ chmod -R a-w "$i_tmp"
+ chmod +w "$i_tmp"
mv "$i_tmp" "$dest/store/$i"
+ chmod -w "$dest/store/$i"
fi
done
echo "" >&2
diff --git a/scripts/local.mk b/scripts/local.mk
index cdac56bf1..13b13a86b 100644
--- a/scripts/local.mk
+++ b/scripts/local.mk
@@ -2,26 +2,18 @@ nix_bin_scripts := \
$(d)/nix-build \
$(d)/nix-channel \
$(d)/nix-copy-closure \
- $(d)/nix-generate-patches \
$(d)/nix-install-package \
- $(d)/nix-pull \
$(d)/nix-push
bin-scripts += $(nix_bin_scripts)
-nix_substituters := \
- $(d)/copy-from-other-stores.pl \
- $(d)/download-from-binary-cache.pl \
- $(d)/download-using-manifests.pl
-
nix_noinst_scripts := \
$(d)/build-remote.pl \
$(d)/find-runtime-roots.pl \
$(d)/resolve-system-dependencies.pl \
$(d)/nix-http-export.cgi \
$(d)/nix-profile.sh \
- $(d)/nix-reduce-build \
- $(nix_substituters)
+ $(d)/nix-reduce-build
noinst-scripts += $(nix_noinst_scripts)
@@ -31,7 +23,6 @@ $(eval $(call install-file-as, $(d)/nix-profile.sh, $(profiledir)/nix.sh, 0644))
$(eval $(call install-program-in, $(d)/find-runtime-roots.pl, $(libexecdir)/nix))
$(eval $(call install-program-in, $(d)/build-remote.pl, $(libexecdir)/nix))
$(eval $(call install-program-in, $(d)/resolve-system-dependencies.pl, $(libexecdir)/nix))
-$(foreach prog, $(nix_substituters), $(eval $(call install-program-in, $(prog), $(libexecdir)/nix/substituters)))
$(eval $(call install-symlink, nix-build, $(bindir)/nix-shell))
clean-files += $(nix_bin_scripts) $(nix_noinst_scripts)
diff --git a/scripts/nix-build.in b/scripts/nix-build.in
index b93e5ab13..78a69c94e 100755
--- a/scripts/nix-build.in
+++ b/scripts/nix-build.in
@@ -110,13 +110,6 @@ for (my $n = 0; $n < scalar @ARGV; $n++) {
$n += 2;
}
- elsif ($arg eq "--log-type") {
- $n++;
- die "$0: ‘$arg’ requires an argument\n" unless $n < scalar @ARGV;
- push @instArgs, ($arg, $ARGV[$n]);
- push @buildArgs, ($arg, $ARGV[$n]);
- }
-
elsif ($arg eq "--option") {
die "$0: ‘$arg’ requires two arguments\n" unless $n + 2 < scalar @ARGV;
push @instArgs, ($arg, $ARGV[$n + 1], $ARGV[$n + 2]);
@@ -124,7 +117,7 @@ for (my $n = 0; $n < scalar @ARGV; $n++) {
$n += 2;
}
- elsif ($arg eq "--max-jobs" || $arg eq "-j" || $arg eq "--max-silent-time" || $arg eq "--log-type" || $arg eq "--cores" || $arg eq "--timeout" || $arg eq '--add-root') {
+ elsif ($arg eq "--max-jobs" || $arg eq "-j" || $arg eq "--max-silent-time" || $arg eq "--cores" || $arg eq "--timeout" || $arg eq '--add-root') {
$n++;
die "$0: ‘$arg’ requires an argument\n" unless $n < scalar @ARGV;
push @buildArgs, ($arg, $ARGV[$n]);
diff --git a/scripts/nix-channel.in b/scripts/nix-channel.in
index 5191b5855..65084ff1f 100755
--- a/scripts/nix-channel.in
+++ b/scripts/nix-channel.in
@@ -12,8 +12,6 @@ binmode STDERR, ":encoding(utf8)";
Nix::Config::readConfig;
-my $manifestDir = $Nix::Config::manifestDir;
-
# Turn on caching in nix-prefetch-url.
my $channelCache = "$Nix::Config::stateDir/channel-cache";
@@ -75,7 +73,6 @@ sub removeChannel {
my ($name) = @_;
readChannels;
my $url = $channels{$name};
- deleteOldManifests($url . "/MANIFEST", undef) if defined $url;
delete $channels{$name};
writeChannels;
@@ -84,8 +81,7 @@ sub removeChannel {
}
-# Fetch Nix expressions and pull manifests from the subscribed
-# channels.
+# Fetch Nix expressions and binary cache URLs from the subscribed channels.
sub update {
my @channelNames = @_;
@@ -97,7 +93,6 @@ sub update {
next if scalar @channelNames > 0 && ! grep { $_ eq $name } @{channelNames};
my $url = $channels{$name};
- my $origUrl = "$url/MANIFEST";
# We want to download the url to a file to see if it's a tarball while also checking if we
# got redirected in the process, so that we can grab the various parts of a nix channel
@@ -132,22 +127,8 @@ sub update {
if ($ret != 0) {
# Check if the channel advertises a binary cache.
my $binaryCacheURL = `$Nix::Config::curl --silent '$url'/binary-cache-url`;
- my $getManifest = ($Nix::Config::config{"force-manifest"} // "false") eq "true";
- if ($? == 0 && $binaryCacheURL ne "") {
- $extraAttrs .= "binaryCacheURL = \"$binaryCacheURL\"; ";
- deleteOldManifests($origUrl, undef);
- } else {
- $getManifest = 1;
- }
-
- if ($getManifest) {
- # No binary cache, so pull the channel manifest.
- mkdir $manifestDir, 0755 unless -e $manifestDir;
- die "$0: you do not have write permission to ‘$manifestDir’!\n" unless -W $manifestDir;
- $ENV{'NIX_ORIG_URL'} = $origUrl;
- system("$Nix::Config::binDir/nix-pull", "--skip-wrong-store", "$url/MANIFEST") == 0
- or die "cannot pull manifest from ‘$url’\n";
- }
+ $extraAttrs .= "binaryCacheURL = \"$binaryCacheURL\"; "
+ if $? == 0 && $binaryCacheURL ne "";
# Download the channel tarball.
my $fullURL = "$url/nixexprs.tar.xz";
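
With the manifest fallback removed, the only per-channel probe nix-channel now performs is a fetch of "$url/binary-cache-url"; if that succeeds and returns something non-empty, the URL is recorded as an extra attribute of the channel. A minimal sketch of that probe in isolation; the channel URL and the bare curl invocation are illustrative (the script uses $Nix::Config::curl inside its update loop).

#!/usr/bin/env perl
# Sketch of the binary-cache probe; URL and curl invocation are examples.
use strict;
use warnings;

my $url  = "https://nixos.org/channels/nixpkgs-unstable";   # hypothetical channel
my $curl = "curl";

my $binaryCacheURL = `$curl --silent '$url'/binary-cache-url`;
my $ok = $? == 0;
chomp $binaryCacheURL;                     # keep the sketch's output tidy

my $extraAttrs = "";
$extraAttrs .= "binaryCacheURL = \"$binaryCacheURL\"; "
    if $ok && $binaryCacheURL ne "";

print $extraAttrs
    ? "channel advertises a binary cache: $binaryCacheURL\n"
    : "channel does not advertise a binary cache\n";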
diff --git a/scripts/nix-generate-patches.in b/scripts/nix-generate-patches.in
deleted file mode 100755
index 0a29c0548..000000000
--- a/scripts/nix-generate-patches.in
+++ /dev/null
@@ -1,51 +0,0 @@
-#! @perl@ -w @perlFlags@
-
-use strict;
-use Nix::Manifest;
-use Nix::GeneratePatches;
-use Nix::Utils;
-
-if (scalar @ARGV != 5) {
- print STDERR <<EOF;
-Usage: nix-generate-patches NAR-DIR PATCH-DIR PATCH-URI OLD-MANIFEST NEW-MANIFEST
-
-This command generates binary patches between NAR files listed in
-OLD-MANIFEST and NEW-MANIFEST. The patches are written to the
-directory PATCH-DIR, and the prefix PATCH-URI is used to generate URIs
-for the patches. The patches are added to NEW-MANIFEST. All NARs are
-required to exist in NAR-DIR. Patches are generated between
-succeeding versions of packages with the same name.
-EOF
- exit 1;
-}
-
-my $narPath = $ARGV[0];
-my $patchesPath = $ARGV[1];
-my $patchesURL = $ARGV[2];
-my $srcManifest = $ARGV[3];
-my $dstManifest = $ARGV[4];
-
-my (%srcNarFiles, %srcLocalPaths, %srcPatches);
-readManifest $srcManifest, \%srcNarFiles, \%srcPatches;
-
-my (%dstNarFiles, %dstLocalPaths, %dstPatches);
-readManifest $dstManifest, \%dstNarFiles, \%dstPatches;
-
-my $tmpDir = mkTempDir("nix-generate-patches");
-
-generatePatches \%srcNarFiles, \%dstNarFiles, \%srcPatches, \%dstPatches,
- $narPath, $patchesPath, $patchesURL, $tmpDir;
-
-propagatePatches \%srcPatches, \%dstNarFiles, \%dstPatches;
-
-# Optionally add all new patches to the manifest in $NIX_ALL_PATCHES.
-my $allPatchesFile = $ENV{"NIX_ALL_PATCHES"};
-if (defined $allPatchesFile) {
- my (%dummy, %allPatches);
- readManifest("$patchesPath/all-patches", \%dummy, \%allPatches)
- if -f $allPatchesFile;
- copyPatches \%dstPatches, \%allPatches;
- writeManifest($allPatchesFile, {}, \%allPatches, 0);
-}
-
-writeManifest $dstManifest, \%dstNarFiles, \%dstPatches;
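
nix-generate-patches, deleted above, paired the NARs of successive versions of the same package and recorded a bsdiff patch for each pair in the manifest. A rough, self-contained sketch of a single pairing step; the file names and URL are hypothetical, Digest::SHA stands in for the Nix hashing helpers, and a bsdiff binary is assumed to be on $PATH.

#!/usr/bin/env perl
# Sketch of one patch-generation step; not the original implementation.
use strict;
use warnings;
use Digest::SHA;

my $oldNar   = "nars/hello-2.9.nar";                                  # hypothetical
my $newNar   = "nars/hello-2.10.nar";                                 # hypothetical
my $patch    = "patches/hello-2.9-to-2.10.bsdiff";                    # hypothetical
my $patchURL = "https://example.org/patches/hello-2.9-to-2.10.bsdiff";

system("bsdiff", $oldNar, $newNar, $patch) == 0
    or die "bsdiff failed: $?";

sub sha256_of { "sha256:" . Digest::SHA->new(256)->addfile($_[0])->hexdigest }

# Roughly the per-patch metadata the manifest carried; these fields match
# the Patches rows that download-using-manifests.pl queried above.
my %entry = (
    url      => $patchURL,
    size     => -s $patch,
    baseHash => sha256_of($oldNar),
    narHash  => sha256_of($newNar),
);
printf "%-8s %s\n", $_, $entry{$_} for sort keys %entry;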
diff --git a/scripts/nix-install-package.in b/scripts/nix-install-package.in
index b442c708b..ba349774a 100755
--- a/scripts/nix-install-package.in
+++ b/scripts/nix-install-package.in
@@ -89,7 +89,7 @@ my $pathRE = "(?: \/ [\/A-Za-z0-9\+\-\.\_\?\=]* )";
# store path. We'll let nix-env do that.
$contents =~
- / ^ \s* (\S+) \s+ ($Nix::Utils::urlRE) \s+ ($nameRE) \s+ ($systemRE) \s+ ($pathRE) \s+ ($pathRE) ( \s+ ($Nix::Utils::urlRE) )? /x
+ / ^ \s* (\S+) \s+ (\S+) \s+ ($nameRE) \s+ ($systemRE) \s+ ($pathRE) \s+ ($pathRE) ( \s+ ($Nix::Utils::urlRE) )? /x
or barf "invalid package contents";
my $version = $1;
my $manifestURL = $2;
@@ -111,25 +111,9 @@ if ($interactive) {
}
-if (defined $binaryCacheURL) {
+die "$0: package does not supply a binary cache\n" unless defined $binaryCacheURL;
- push @extraNixEnvArgs, "--option", "extra-binary-caches", $binaryCacheURL;
-
-} else {
-
- # Store the manifest in the temporary directory so that we don't
- # pollute /nix/var/nix/manifests. This also requires that we
- # don't use the Nix daemon (because otherwise
- # download-using-manifests won't see our NIX_MANIFESTS_DIRS
- # environment variable).
- $ENV{NIX_MANIFESTS_DIR} = $tmpDir;
- $ENV{NIX_REMOTE} = "";
-
- print "\nPulling manifests...\n";
- system("$Nix::Config::binDir/nix-pull", $manifestURL) == 0
- or barf "nix-pull failed: $?";
-
-}
+push @extraNixEnvArgs, "--option", "extra-binary-caches", $binaryCacheURL;
print "\nInstalling package...\n";
diff --git a/scripts/nix-profile.sh.in b/scripts/nix-profile.sh.in
index 6616b12b0..5e01de951 100644
--- a/scripts/nix-profile.sh.in
+++ b/scripts/nix-profile.sh.in
@@ -1,24 +1,71 @@
-if [ -n "$HOME" ]; then
- NIX_LINK="$HOME/.nix-profile"
-
- # Set the default profile.
- if ! [ -L "$NIX_LINK" ]; then
- echo "creating $NIX_LINK" >&2
- _NIX_DEF_LINK=@localstatedir@/nix/profiles/default
- @coreutils@/ln -s "$_NIX_DEF_LINK" "$NIX_LINK"
+if [ -n "$HOME" ] && [ -n "$USER" ]; then
+ __savedpath="$PATH"
+ export PATH=@coreutils@
+
+ # Set up the per-user profile.
+ # This part should be kept in sync with nixpkgs:nixos/modules/programs/shell.nix
+
+ : ${NIX_LINK:=$HOME/.nix-profile}
+
+ : ${NIX_USER_PROFILE_DIR:=@localstatedir@/nix/profiles/per-user/$USER}
+
+ mkdir -m 0755 -p "$NIX_USER_PROFILE_DIR"
+
+ if [ "$(stat --printf '%u' "$NIX_USER_PROFILE_DIR")" != "$(id -u)" ]; then
+ echo "Nix: WARNING: bad ownership on "$NIX_USER_PROFILE_DIR", should be $(id -u)" >&2
fi
- export PATH=$NIX_LINK/bin:$NIX_LINK/sbin:$PATH
+ if [ -w "$HOME" ]; then
+ if ! [ -L "$NIX_LINK" ]; then
+ echo "Nix: creating $NIX_LINK" >&2
+ if [ "$USER" != root ]; then
+ if ! ln -s "$NIX_USER_PROFILE_DIR"/profile "$NIX_LINK"; then
+ echo "Nix: WARNING: could not create $NIX_LINK -> $NIX_USER_PROFILE_DIR/profile" >&2
+ fi
+ else
+ # Root installs in the system-wide profile by default.
+ ln -s @localstatedir@/nix/profiles/default "$NIX_LINK"
+ fi
+ fi
- # Subscribe the user to the Nixpkgs channel by default.
- if [ ! -e "$HOME/.nix-channels" ]; then
- echo "https://nixos.org/channels/nixpkgs-unstable nixpkgs" > "$HOME/.nix-channels"
+ # Subscribe the user to the unstable Nixpkgs channel by default.
+ if [ ! -e "$HOME/.nix-channels" ]; then
+ echo "https://nixos.org/channels/nixpkgs-unstable nixpkgs" > "$HOME/.nix-channels"
+ fi
+
+ # Create the per-user garbage collector roots directory.
+ __user_gcroots=@localstatedir@/nix/gcroots/per-user/"$USER"
+ mkdir -m 0755 -p "$__user_gcroots"
+ if [ "$(stat --printf '%u' "$__user_gcroots")" != "$(id -u)" ]; then
+ echo "Nix: WARNING: bad ownership on $__user_gcroots, should be $(id -u)" >&2
+ fi
+ unset __user_gcroots
+
+ # Set up a default Nix expression from which to install stuff.
+ __nix_defexpr="$HOME"/.nix-defexpr
+ [ -L "$__nix_defexpr" ] && rm -f "$__nix_defexpr"
+ mkdir -m 0755 -p "$__nix_defexpr"
+ if [ "$USER" != root ] && [ ! -L "$__nix_defexpr"/channels_root ]; then
+ ln -s @localstatedir@/nix/profiles/per-user/root/channels "$__nix_defexpr"/channels_root
+ fi
+ unset __nix_defexpr
fi
# Append ~/.nix-defexpr/channels/nixpkgs to $NIX_PATH so that
# <nixpkgs> paths work when the user has fetched the Nixpkgs
# channel.
- export NIX_PATH=${NIX_PATH:+$NIX_PATH:}nixpkgs=$HOME/.nix-defexpr/channels/nixpkgs
+ export NIX_PATH="${NIX_PATH:+$NIX_PATH:}nixpkgs=$HOME/.nix-defexpr/channels/nixpkgs"
+
+ # Set up environment.
+ # This part should be kept in sync with nixpkgs:nixos/modules/programs/environment.nix
+ export NIX_USER_PROFILE_DIR
+ export NIX_PROFILES="@localstatedir@/nix/profiles/default $NIX_USER_PROFILE_DIR"
+
+ for i in $NIX_PROFILES; do
+ if [ -d "$i/lib/aspell" ]; then
+ export ASPELL_CONF="dict-dir $i/lib/aspell"
+ fi
+ done
# Set $SSL_CERT_FILE so that Nixpkgs applications like curl work.
if [ -e /etc/ssl/certs/ca-certificates.crt ]; then # NixOS, Ubuntu, Debian, Gentoo, Arch
@@ -34,4 +81,7 @@ if [ -n "$HOME" ]; then
elif [ -e "$NIX_LINK/etc/ca-bundle.crt" ]; then # old cacert in Nix profile
export SSL_CERT_FILE="$NIX_LINK/etc/ca-bundle.crt"
fi
+
+ export PATH="$NIX_LINK/bin:$NIX_LINK/sbin:$__savedpath"
+ unset __savedpath
fi
diff --git a/scripts/nix-pull.in b/scripts/nix-pull.in
deleted file mode 100755
index 995b50935..000000000
--- a/scripts/nix-pull.in
+++ /dev/null
@@ -1,102 +0,0 @@
-#! @perl@ -w @perlFlags@
-
-use utf8;
-use strict;
-use Nix::Config;
-use Nix::Manifest;
-
-binmode STDERR, ":encoding(utf8)";
-
-my $manifestDir = $Nix::Config::manifestDir;
-
-
-# Prevent access problems in shared-store installations.
-umask 0022;
-
-
-# Create the manifests directory if it doesn't exist.
-if (! -e $manifestDir) {
- mkdir $manifestDir, 0755 or die "cannot create directory ‘$manifestDir’";
-}
-
-
-# Make sure that the manifests directory is scanned for GC roots.
-my $gcRootsDir = "$Nix::Config::stateDir/gcroots";
-my $manifestDirLink = "$gcRootsDir/manifests";
-if (! -l $manifestDirLink) {
- symlink($manifestDir, $manifestDirLink) or die "cannot create symlink ‘$manifestDirLink’";
-}
-
-
-# Process the URLs specified on the command line.
-
-sub downloadFile {
- my $url = shift;
- $ENV{"PRINT_PATH"} = 1;
- $ENV{"QUIET"} = 1;
- my ($dummy, $path) = `$Nix::Config::binDir/nix-prefetch-url '$url'`;
- die "cannot fetch ‘$url’" if $? != 0;
- die "nix-prefetch-url did not return a path" unless defined $path;
- chomp $path;
- return $path;
-}
-
-sub processURL {
- my $url = shift;
-
- $url =~ s/\/$//;
-
- my $manifest;
-
- my $origUrl = $ENV{'NIX_ORIG_URL'} || $url;
-
- # First see if a bzipped manifest is available.
- if (system("$Nix::Config::curl --fail --silent --location --head '$url'.bz2 > /dev/null") == 0) {
- print "fetching list of Nix archives at ‘$url.bz2’...\n";
- $manifest = downloadFile "$url.bz2";
- }
-
- # Otherwise, just get the uncompressed manifest.
- else {
- print "fetching list of Nix archives at ‘$url’...\n";
- $manifest = downloadFile $url;
- }
-
- my $baseName = "unnamed";
- if ($url =~ /\/([^\/]+)\/[^\/]+$/) { # get the second-to-last component
- $baseName = $1;
- }
-
- my $hash = `$Nix::Config::binDir/nix-hash --flat '$manifest'`
- or die "cannot hash ‘$manifest’";
- chomp $hash;
-
- my $urlFile = "$manifestDir/$baseName-$hash.url";
- open URL, ">$urlFile" or die "cannot create ‘$urlFile’";
- print URL $origUrl;
- close URL;
-
- my $finalPath = "$manifestDir/$baseName-$hash.nixmanifest";
-
- unlink $finalPath if -e $finalPath;
-
- symlink("$manifest", "$finalPath")
- or die "cannot link ‘$finalPath’ to ‘$manifest’";
-
- deleteOldManifests($origUrl, $urlFile);
-}
-
-while (@ARGV) {
- my $url = shift @ARGV;
- if ($url eq "--help") {
- exec "man nix-pull" or die;
- } elsif ($url eq "--skip-wrong-store") {
- # No-op, no longer supported.
- } else {
- processURL $url;
- }
-}
-
-
-# Update the cache.
-updateManifestDB();