about summary refs log tree commit diff
path: root/scripts
diff options
context:
space:
mode:
authorEelco Dolstra <eelco.dolstra@logicblox.com>2012-06-29 14:26:31 -0400
committerEelco Dolstra <eelco.dolstra@logicblox.com>2012-06-29 15:24:52 -0400
commit4911a10a4e51102a21a5d123a852c75d2ec92dbc (patch)
tree6e02ca6d92cc037efc89801960f4997a7d54e168 /scripts
parent49cd7387ad6546571ca31a41f208091b482defaa (diff)
Use XZ compression in binary caches
XZ compresses significantly better than bzip2. Here are the compression ratios and execution times (using 4 cores in parallel) on my /var/run/current-system (3.1 GiB):

bzip2: total compressed size 849.56 MiB, 30.8% [2m08]
xz -6: total compressed size 641.84 MiB, 23.4% [6m53]
xz -7: total compressed size 621.82 MiB, 22.6% [7m19]
xz -8: total compressed size 599.33 MiB, 21.8% [7m18]
xz -9: total compressed size 588.18 MiB, 21.4% [7m40]

Note that compression takes much longer. More importantly, however, decompression is much faster:

bzip2: 1m47.274s
xz -6: 0m55.446s
xz -7: 0m54.119s
xz -8: 0m52.388s
xz -9: 0m51.842s

The only downside to using -9 is that decompression takes a fair amount (~65 MB) of memory.
Diffstat (limited to 'scripts')
-rwxr-xr-xscripts/nix-push.in26
1 file changed, 13 insertions, 13 deletions
diff --git a/scripts/nix-push.in b/scripts/nix-push.in
index db94b51fd..9edd87319 100755
--- a/scripts/nix-push.in
+++ b/scripts/nix-push.in
@@ -118,7 +118,7 @@ close READ or die "nix-build failed: $?";
print STDERR "uploading/copying archives...\n";
my $totalNarSize = 0;
-my $totalNarBz2Size = 0;
+my $totalCompressedSize = 0;
for (my $n = 0; $n < scalar @storePaths; $n++) {
my $storePath = $storePaths[$n];
@@ -146,22 +146,22 @@ for (my $n = 0; $n < scalar @storePaths; $n++) {
$totalNarSize += $narSize;
# Get info about the compressed NAR.
- open HASH, "$narDir/narbz2-hash" or die "cannot open narbz2-hash";
- my $narBz2Hash = <HASH>;
- chomp $narBz2Hash;
- $narBz2Hash =~ /^[0-9a-z]+$/ or die "invalid hash";
+ open HASH, "$narDir/nar-compressed-hash" or die "cannot open nar-compressed-hash";
+ my $compressedHash = <HASH>;
+ chomp $compressedHash;
+ $compressedHash =~ /^[0-9a-z]+$/ or die "invalid hash";
close HASH;
- my $narName = "$narBz2Hash.nar.bz2";
+ my $narName = "$compressedHash.nar.bz2";
my $narFile = "$narDir/$narName";
(-f $narFile) or die "NAR file for $storePath not found";
- my $narBz2Size = stat($narFile)->size;
- $totalNarBz2Size += $narBz2Size;
+ my $compressedSize = stat($narFile)->size;
+ $totalCompressedSize += $compressedSize;
printf STDERR "%s [%.2f MiB, %.1f%%]\n", $storePath,
- $narBz2Size / (1024 * 1024), $narBz2Size / $narSize * 100;
+ $compressedSize / (1024 * 1024), $compressedSize / $narSize * 100;
# Upload the compressed NAR.
if ($localCopy) {
@@ -184,13 +184,13 @@ for (my $n = 0; $n < scalar @storePaths; $n++) {
my $info;
$info .= "StorePath: $storePath\n";
$info .= "URL: $narName\n";
- $info .= "CompressedHash: sha256:$narBz2Hash\n";
- $info .= "CompressedSize: $narBz2Size\n";
+ $info .= "CompressedHash: sha256:$compressedHash\n";
+ $info .= "CompressedSize: $compressedSize\n";
$info .= "NarHash: $narHash\n";
$info .= "NarSize: $narSize\n";
$info .= "References: " . join(" ", map { basename $_ } @{$refs}) . "\n";
if (defined $deriver) {
- $info .= "Deriver: " . basename $deriver, "\n";
+ $info .= "Deriver: " . basename $deriver . "\n";
if (isValidPath($deriver)) {
my $drv = derivationFromPath($deriver);
$info .= "System: $drv->{platform}\n";
@@ -214,4 +214,4 @@ for (my $n = 0; $n < scalar @storePaths; $n++) {
}
printf STDERR "total compressed size %.2f MiB, %.1f%%\n",
- $totalNarBz2Size / (1024 * 1024), $totalNarBz2Size / $totalNarSize * 100;
+ $totalCompressedSize / (1024 * 1024), $totalCompressedSize / $totalNarSize * 100;