author     Eelco Dolstra <e.dolstra@tudelft.nl>    2007-08-09 23:16:44 +0000
committer  Eelco Dolstra <e.dolstra@tudelft.nl>    2007-08-09 23:16:44 +0000
commit     f881f7a017059fb501668aa85d41e873fe8f5285 (patch)
tree       a8af6e6b159db1e01fd09f170e6f16718c390c3e
parent     ef240bc0d564d8de8877724756c1e7765c74ff75 (diff)
* nix-prefetch-url: support caching.  If the environment variable
  NIX_DOWNLOAD_CACHE is set, then nix-prefetch-url will store the hash
  and timestamp of downloaded files in the directory $NIX_DOWNLOAD_CACHE.
  This allows it to figure out if the file is still in the Nix store.
-rw-r--r--  scripts/nix-channel.in       |  3
-rw-r--r--  scripts/nix-prefetch-url.in  | 66
-rw-r--r--  scripts/nix-pull.in          |  2
3 files changed, 58 insertions(+), 13 deletions(-)
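
The intended use of the new cache is roughly the following (a sketch; the
cache directory and URL are illustrative and not part of the commit):

    # First run: download the file, record its hash and timestamp under
    # $NIX_DOWNLOAD_CACHE, and add the contents to the Nix store.
    export NIX_DOWNLOAD_CACHE=/var/cache/nix-download
    mkdir -p "$NIX_DOWNLOAD_CACHE"
    nix-prefetch-url http://example.org/foo.tar.bz2

    # Later runs: if the server has nothing newer than the cached timestamp
    # and the previously added store path is still valid, nix-prefetch-url
    # answers from the cache without downloading the file again.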
diff --git a/scripts/nix-channel.in b/scripts/nix-channel.in
index 41a75adf1..fd6639ec1 100644
--- a/scripts/nix-channel.in
+++ b/scripts/nix-channel.in
@@ -98,7 +98,8 @@ sub update {
         my $fullURL = "$url/nixexprs.tar.bz2";
         print "downloading Nix expressions from `$fullURL'...\n";
         $ENV{"PRINT_PATH"} = 1;
-        my ($hash, $path) = `@bindir@/nix-prefetch-url '$fullURL' 2> /dev/null`;
+        $ENV{"QUIET"} = 1;
+        my ($hash, $path) = `@bindir@/nix-prefetch-url '$fullURL'`;
         die "cannot fetch `$fullURL'" if $? != 0;
         chomp $path;
         $inputs .= '"' . $channelName . '"' . " " . $path . " ";
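
Expressed as a shell sketch rather than the Perl above (this only restates
what the two environment variables do):

    # PRINT_PATH=1 makes nix-prefetch-url also print the resulting store
    # path after the hash; QUIET=1 suppresses its "hash is ..." message
    # (see the change to nix-prefetch-url.in below), so stderr no longer
    # has to be thrown away with `2> /dev/null' and real download errors
    # remain visible.
    PRINT_PATH=1 QUIET=1 @bindir@/nix-prefetch-url "$url/nixexprs.tar.bz2"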
diff --git a/scripts/nix-prefetch-url.in b/scripts/nix-prefetch-url.in
index 2b4478501..2c55f9f24 100644
--- a/scripts/nix-prefetch-url.in
+++ b/scripts/nix-prefetch-url.in
@@ -36,6 +36,12 @@ if test -n "$expHash"; then
 fi
 
+doDownload() {
+    @curl@ $cacheFlags --fail -# --show-error --location --max-redirs 20 --disable-epsv \
+        --cookie-jar $tmpPath/cookies "$url" -o $tmpFile
+}
+
+
 # If we don't know the hash or a file with that hash doesn't exist,
 # download the file and add it to the store.
 if test -z "$finalPath"; then
@@ -44,22 +50,60 @@ if test -z "$finalPath"; then
     tmpFile=$tmpPath/$name
     mkdir $tmpPath
 
+    # Optionally do timestamp-based caching of the download.
+    # Actually, the only thing that we cache in $NIX_DOWNLOAD_CACHE is
+    # the hash and the timestamp of the file at $url.  The caching of
+    # the file *contents* is done in the Nix store, where it can be
+    # garbage-collected independently.
+    if test -n "$NIX_DOWNLOAD_CACHE"; then
+        urlHash="$(echo -n "$url" | nix-hash --type sha256 --base32 --flat /dev/stdin)"
+        echo "$url" > "$NIX_DOWNLOAD_CACHE/$urlHash.url"
+        cachedHashFN="$NIX_DOWNLOAD_CACHE/$urlHash.$hashType"
+        cachedTimestampFN="$NIX_DOWNLOAD_CACHE/$urlHash.stamp"
+        cacheFlags="--remote-time"
+        if test -e "$cachedTimestampFN" -a -e "$cachedHashFN"; then
+            # Only download the file if it is newer than the cached version.
+            cacheFlags="$cacheFlags --time-cond $cachedTimestampFN"
+        fi
+    fi
+
     # Perform the download.
-    @curl@ --fail --location --max-redirs 20 --disable-epsv \
-        --cookie-jar $tmpPath/cookies "$url" > $tmpFile
+    doDownload
+
+    if test -n "$NIX_DOWNLOAD_CACHE" -a ! -e $tmpFile; then
+        # Curl didn't create $tmpFile, so apparently there's no newer
+        # file on the server.
+        hash=$(cat $cachedHashFN)
+        finalPath=$(@bindir@/nix-store --print-fixed-path "$hashType" "$hash" "$name")
+        if ! @bindir@/nix-store --check-validity "$finalPath" 2> /dev/null; then
+            echo "cached contents of \`$url' disappeared, redownloading..." >&2
+            finalPath=
+            cacheFlags="--remote-time"
+            doDownload
+        fi
+    fi
+
+    if test -z "$finalPath"; then
+
+        # Compute the hash.
+        hash=$(@bindir@/nix-hash --type "$hashType" $hashFormat --flat $tmpFile)
+        if ! test -n "$QUIET"; then echo "hash is $hash" >&2; fi
 
-    # Compute the hash.
-    hash=$(@bindir@/nix-hash --type "$hashType" $hashFormat --flat $tmpFile)
-    if ! test -n "$QUIET"; then echo "hash is $hash" >&2; fi
+        if test -n "$NIX_DOWNLOAD_CACHE"; then
+            echo $hash > $cachedHashFN
+            touch -r $tmpFile $cachedTimestampFN
+        fi
 
-    # Add the downloaded file to the Nix store.
-    finalPath=$(@bindir@/nix-store --add-fixed "$hashType" $tmpFile)
+        # Add the downloaded file to the Nix store.
+        finalPath=$(@bindir@/nix-store --add-fixed "$hashType" $tmpFile)
 
-    if test -n "$tmpPath"; then rm -rf $tmpPath || true; fi
+        if test -n "$tmpPath"; then rm -rf $tmpPath || true; fi
 
-    if test -n "$expHash" -a "$expHash" != "$hash"; then
-        echo "hash mismatch for URL \`$url'" >&2
-        exit 1
+        if test -n "$expHash" -a "$expHash" != "$hash"; then
+            echo "hash mismatch for URL \`$url'" >&2
+            exit 1
+        fi
+
     fi
 fi
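
Condensed, the caching flow added above works like this (a sketch that
restates the script; nothing here goes beyond what the diff does):

    # Cache key: base32 SHA-256 of the URL.
    urlHash=$(echo -n "$url" | nix-hash --type sha256 --base32 --flat /dev/stdin)
    # Per URL, $NIX_DOWNLOAD_CACHE holds three small files:
    #   $urlHash.url        - the URL itself, for reference
    #   $urlHash.$hashType  - hash of the last downloaded contents
    #   $urlHash.stamp      - mtime of that download (set via curl --remote-time)
    # On later runs curl gets --time-cond $urlHash.stamp, so it only writes
    # $tmpFile if the server has something newer.  If it does not, the cached
    # hash is mapped back to a store path with `nix-store --print-fixed-path'
    # and reused, provided `nix-store --check-validity' confirms that the
    # path still exists; otherwise the file is simply downloaded again.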
diff --git a/scripts/nix-pull.in b/scripts/nix-pull.in
index 46f9f147c..94ac74425 100644
--- a/scripts/nix-pull.in
+++ b/scripts/nix-pull.in
@@ -39,7 +39,7 @@ sub processURL {
     $url =~ s/\/$//;
     print "obtaining list of Nix archives at $url...\n";
-    system("@curl@ --fail --silent --show-error --location --max-redirs 20 " .
+    system("@curl@ --fail -# --show-error --location --max-redirs 20 " .
            "'$url' > '$manifest'") == 0
         or die "curl failed: $?";
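
The flag change only affects progress reporting; roughly (a sketch, with
the manifest path as a placeholder):

    # before: --silent hides curl's progress output entirely
    @curl@ --fail --silent --show-error --location --max-redirs 20 "$url" > "$manifest"
    # after: -# (--progress-bar) draws a compact progress bar instead,
    # while --show-error still reports failures
    @curl@ --fail -# --show-error --location --max-redirs 20 "$url" > "$manifest"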