author | Eelco Dolstra <e.dolstra@tudelft.nl> | 2004-12-13 13:47:38 +0000
---|---|---
committer | Eelco Dolstra <e.dolstra@tudelft.nl> | 2004-12-13 13:47:38 +0000
commit | 862f4c154e883611ba9dfefe921c87e6423075ea (patch) |
tree | 63d8c96c29c18f322f0d70cf5c1a5224e306d835 /scripts |
parent | dca48aed349375b8515a32ac58dce48f48f7264e (diff) |
* Patch deployment. `download.pl' (intended to be used in the
substitute mechanism) creates a store path by downloading full NAR
archives and/or patches specified in the available manifests.
Any combination of present paths, full downloads, and patches can be
used to construct the target path. In particular, patches can be
chained in sequence; and full NAR archives of the target path can be
omitted (i.e., patch-only deployment is possible). A shortest path
algorithm is used to find the smallest set of files to be downloaded
(the edge weights are currently file sizes, but one can imagine
taking the network speed to the various sources into account).
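The selection step can be pictured as a single-source shortest-path problem: nodes are store paths (plus a virtual start node), edges are "already present", "download full NAR archive", or "download patch", and edge weights are download sizes. The following is a minimal sketch of that idea, not the actual `download.pl'; the store paths, sizes, and the virtual "start" node are invented for illustration.

```perl
#! /usr/bin/perl -w
use strict;

# Minimal sketch (not download.pl itself): choose the cheapest combination of
# present paths, full NAR downloads, and patches that produces a target path.
# All store paths and sizes below are invented.

# Edges: [from, to, weight in bytes, action].
my @edges = (
    # Paths already in the local store cost nothing.
    ["start", "/nix/store/aaa-foo-1.0", 0, "already present"],
    # A full NAR archive of the target.
    ["start", "/nix/store/bbb-foo-1.1", 5_000_000, "download NAR"],
    # A binary patch from the old path to the new one.
    ["/nix/store/aaa-foo-1.0", "/nix/store/bbb-foo-1.1", 50_000, "download patch"],
);
my $target = "/nix/store/bbb-foo-1.1";

# Adjacency lists.
my %graph;
push @{$graph{$_->[0]}}, $_ foreach @edges;

# Dijkstra's algorithm; a linear scan replaces a priority queue, which is
# fine for graphs of this size.
my (%dist, %prev, %done);
$dist{"start"} = 0;
while (1) {
    my $u;
    foreach my $v (keys %dist) {
        next if $done{$v};
        $u = $v if !defined $u || $dist{$v} < $dist{$u};
    }
    last unless defined $u;
    $done{$u} = 1;
    foreach my $edge (@{$graph{$u} || []}) {
        my ($from, $to, $weight, $action) = @$edge;
        if (!defined $dist{$to} || $dist{$u} + $weight < $dist{$to}) {
            $dist{$to} = $dist{$u} + $weight;
            $prev{$to} = $edge;
        }
    }
}

die "no way to produce $target\n" unless defined $dist{$target};

# Walk back from the target to print the cheapest plan.
my @plan;
for (my $v = $target; $v ne "start"; $v = $prev{$v}->[0]) {
    unshift @plan, $prev{$v};
}
foreach my $step (@plan) {
    my ($from, $to, $weight, $action) = @$step;
    print "$action: $to ($weight bytes)\n";
}
```

With these made-up numbers the plan keeps the present foo-1.0 and fetches only the 50 kB patch instead of the 5 MB full archive.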
Patches are binary deltas between two store paths. To be precise,
they are the output of the `bsdiff' program applied to the NAR
archives obtained by dumping (`nix-store --dump') the two store
paths. The advantage of diff'ing NAR archives (and not, say, doing
file-by-file diffs) is that file renames/moves are handled
automatically. The disadvantage is that we cannot optimise creation
of unchanged files (by hard-linking).
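A hedged sketch of producing such a delta by hand, assuming `bsdiff' and `nix-store' are in $PATH; the store paths and file names are invented:

```perl
#! /usr/bin/perl -w
use strict;

# Sketch only: dump two store paths to NAR archives and delta them with bsdiff.
my $basePath   = "/nix/store/aaa-foo-1.0";
my $targetPath = "/nix/store/bbb-foo-1.1";
my $patchFile  = "foo-1.0-to-1.1.patch";

# Dump a store path to a NAR archive.
sub dumpNar {
    my ($storePath, $narFile) = @_;
    system("nix-store --dump '$storePath' > '$narFile'") == 0
        or die "cannot dump $storePath: $?";
}

dumpNar $basePath, "base.nar";
dumpNar $targetPath, "target.nar";

# bsdiff oldfile newfile patchfile
system("bsdiff base.nar target.nar '$patchFile'") == 0
    or die "bsdiff failed: $?";

print "wrote binary delta $patchFile\n";
```

The receiving side would presumably run `bspatch' on the NAR dump of the base path and feed the result to `nix-store --restore' to materialise the target path.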
Diffstat (limited to 'scripts')
-rw-r--r-- | scripts/nix-pull.in | 18
-rw-r--r-- | scripts/readmanifest.pm.in | 121

2 files changed, 105 insertions, 34 deletions
```diff
diff --git a/scripts/nix-pull.in b/scripts/nix-pull.in
index a802760a5..66d99ff7c 100644
--- a/scripts/nix-pull.in
+++ b/scripts/nix-pull.in
@@ -19,14 +19,24 @@ my $confFile = "@sysconfdir@/nix/prebuilts.conf";
 my %storePaths2urls;
 my %urls2hashes;
 my %successors;
-sub doURL {
+
+sub processURL {
     my $url = shift;
-    processURL $manifest, $url, \%storePaths2urls, \%urls2hashes, \%successors;
+
+    $url =~ s/\/$//;
+    print "obtaining list of Nix archives at $url...\n";
+
+    system("@curl@ --fail --silent --show-error --location --max-redirs 20 " .
+           "'$url' > '$manifest'") == 0
+        or die "curl failed: $?";
+
+    readManifest $manifest, \%storePaths2urls, \%urls2hashes, \%successors;
 }
+
 if (scalar @ARGV > 0) {
     while (@ARGV) {
         my $url = shift @ARGV;
-        doURL $url;
+        processURL $url;
     }
 }
 else {
     open CONFFILE, "<$confFile";
@@ -34,7 +44,7 @@ if (scalar @ARGV > 0) {
         chomp;
         if (/^\s*(\S+)\s*(\#.*)?$/) {
             my $url = $1;
-            doURL $url;
+            processURL $url;
         }
     }
     close CONFFILE;
diff --git a/scripts/readmanifest.pm.in b/scripts/readmanifest.pm.in
index d5527bf3b..8d6694ff2 100644
--- a/scripts/readmanifest.pm.in
+++ b/scripts/readmanifest.pm.in
@@ -1,27 +1,24 @@
 use strict;
-sub processURL {
+sub readManifest {
     my $manifest = shift;
-    my $url = shift;
-    my $storePaths2urls = shift;
-    my $urls2hashes = shift;
+    my $narFiles = shift;
+    my $patches = shift;
     my $successors = shift;
-    $url =~ s/\/$//;
-    print "obtaining list of Nix archives at $url...\n";
-
-    system("@curl@ --fail --silent --show-error --location --max-redirs 20 " .
-           "'$url' > '$manifest'") == 0
-        or die "curl failed: $?";
-
     open MANIFEST, "<$manifest";
     my $inside = 0;
+    my $type;
     my $storePath;
-    my $narurl;
+    my $url;
     my $hash;
+    my $size;
     my @preds;
+    my $basePath;
+    my $baseHash;
+    my $patchType;
     while (<MANIFEST>) {
         chomp;
@@ -29,38 +26,102 @@ sub processURL {
         next if (/^$/);
         if (!$inside) {
-            if (/^\{$/) {
+            if (/^\{$/) {
+                $type = "narfile";
                 $inside = 1;
                 undef $storePath;
-                undef $narurl;
+                undef $url;
                 undef $hash;
+                $size = 999999999;
                 @preds = ();
             }
+
+            elsif (/^patch \{$/) {
+                $type = "patch";
+                $inside = 1;
+                undef $url;
+                undef $hash;
+                undef $size;
+                undef $basePath;
+                undef $baseHash;
+                undef $patchType;
+            }
+
             else { die "bad line: $_"; }
         }
         else {
+
             if (/^\}$/) {
                 $inside = 0;
-                $$storePaths2urls{$storePath} = $narurl;
-                $$urls2hashes{$narurl} = $hash;
+                if ($type eq "narfile") {
+
+                    $$narFiles{$storePath} = []
+                        unless defined $$narFiles{$storePath};
+
+                    my $narFileList = $$narFiles{$storePath};
+
+                    my $found = 0;
+                    foreach my $narFile (@{$narFileList}) {
+                        if ($narFile->{url} eq $url) {
+                            if ($narFile->{hash} eq $hash) {
+                                $found = 1;
+                            } else {
+                                die "conflicting hashes for URL $url, " .
+                                    "namely $narFile->{hash} and $hash";
+                            }
+                        }
+                    }
+                    if (!$found) {
+                        push @{$narFileList},
+                            {url => $url, hash => $hash, size => $size};
+                    }
+
+                    foreach my $p (@preds) {
+                        $$successors{$p} = $storePath;
+                    }
-                foreach my $p (@preds) {
-                    $$successors{$p} = $storePath;
+
                 }
+
+                elsif ($type eq "patch") {
+
+                    $$patches{$storePath} = []
+                        unless defined $$patches{$storePath};
+
+                    my $patchList = $$patches{$storePath};
+
+                    my $found = 0;
+                    foreach my $patch (@{$patchList}) {
+                        if ($patch->{url} eq $url) {
+                            if ($patch->{hash} eq $hash) {
+                                $found = 1 if ($patch->{basePath} eq $basePath);
+                            } else {
+                                die "conflicting hashes for URL $url, " .
+                                    "namely $patch->{hash} and $hash";
+                            }
+                        }
+                    }
+                    if (!$found) {
+                        push @{$patchList},
+                            { url => $url, hash => $hash, size => $size
+                            , basePath => $basePath, baseHash => $baseHash
+                            };
+                    }
+                }
             }
-            elsif (/^\s*StorePath:\s*(\/\S+)\s*$/) {
-                $storePath = $1;
-            }
-            elsif (/^\s*NarURL:\s*(\S+)\s*$/) {
-                $narurl = $1;
-            }
-            elsif (/^\s*MD5:\s*(\S+)\s*$/) {
-                $hash = $1;
-            }
-            elsif (/^\s*SuccOf:\s*(\/\S+)\s*$/) {
-                push @preds, $1;
-            }
+
+            elsif (/^\s*StorePath:\s*(\/\S+)\s*$/) { $storePath = $1; }
+            elsif (/^\s*Hash:\s*(\S+)\s*$/) { $hash = $1; }
+            elsif (/^\s*URL:\s*(\S+)\s*$/) { $url = $1; }
+            elsif (/^\s*Size:\s*(\d+)\s*$/) { $size = $1; }
+            elsif (/^\s*SuccOf:\s*(\/\S+)\s*$/) { push @preds, $1; }
+            elsif (/^\s*BasePath:\s*(\/\S+)\s*$/) { $basePath = $1; }
+            elsif (/^\s*BaseHash:\s*(\S+)\s*$/) { $baseHash = $1; }
+            elsif (/^\s*Type:\s*(\S+)\s*$/) { $patchType = $1; }
+
+            # Compatibility;
+            elsif (/^\s*NarURL:\s*(\S+)\s*$/) { $url = $1; }
+            elsif (/^\s*MD5:\s*(\S+)\s*$/) { $hash = $1; }
+
             else { die "bad line: $_"; }
         }
     }
```