-rw-r--r--  src/libexpr/primops/fetchGit.cc            11
-rw-r--r--  src/libstore/builtins/unpack-channel.cc    12
-rw-r--r--  src/libstore/download.cc                   12
-rw-r--r--  src/libutil/serialise.hh                    1
-rw-r--r--  src/libutil/tarfile.cc                     22
-rw-r--r--  src/libutil/tarfile.hh                      5
-rw-r--r--  src/nix-prefetch-url/nix-prefetch-url.cc    4
7 files changed, 43 insertions, 24 deletions
diff --git a/src/libexpr/primops/fetchGit.cc b/src/libexpr/primops/fetchGit.cc
index 7ef3b3823..9d0c64291 100644
--- a/src/libexpr/primops/fetchGit.cc
+++ b/src/libexpr/primops/fetchGit.cc
@@ -4,6 +4,7 @@
#include "store-api.hh"
#include "pathlocks.hh"
#include "hash.hh"
+#include "tarfile.hh"
#include <sys/time.h>
@@ -164,14 +165,16 @@ GitInfo exportGit(ref<Store> store, const std::string & uri,
if (e.errNo != ENOENT) throw;
}
- // FIXME: should pipe this, or find some better way to extract a
- // revision.
- auto tar = runProgram("git", true, { "-C", cacheDir, "archive", gitInfo.rev });
+ auto source = sinkToSource([&](Sink & sink) {
+ RunOptions gitOptions("git", { "-C", cacheDir, "archive", gitInfo.rev });
+ gitOptions.standardOut = &sink;
+ runProgram2(gitOptions);
+ });
Path tmpDir = createTempDir();
AutoDelete delTmpDir(tmpDir, true);
- runProgram("tar", true, { "x", "-C", tmpDir }, tar);
+ unpackTarfile(*source, tmpDir);
gitInfo.storePath = store->addToStore(name, tmpDir);
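
For reference, the new extraction path above pulled out into a self-contained sketch: the stdout of `git archive <rev>` is turned into a Source via sinkToSource and fed straight to the in-process unpacker, so no external tar is spawned and the archive is never held in one big string. The helper name extractGitRev and the exact includes are illustrative only, not part of this change.

    // Sketch only; assumes Nix's libutil declarations used in the hunk above
    // (sinkToSource, RunOptions, runProgram2, createTempDir, unpackTarfile).
    #include "serialise.hh"
    #include "tarfile.hh"
    #include "util.hh"

    static nix::Path extractGitRev(const nix::Path & cacheDir, const std::string & rev)
    {
        using namespace nix;

        /* Stream `git archive <rev>` into a Source; the lambda runs lazily
           as the Source is read from. */
        auto source = sinkToSource([&](Sink & sink) {
            RunOptions gitOptions("git", { "-C", cacheDir, "archive", rev });
            gitOptions.standardOut = &sink;   // child stdout is written to the sink
            runProgram2(gitOptions);
        });

        Path tmpDir = createTempDir();
        unpackTarfile(*source, tmpDir);       // in-process unpacking, no external tar
        return tmpDir;
    }
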
diff --git a/src/libstore/builtins/unpack-channel.cc b/src/libstore/builtins/unpack-channel.cc
index 5fc68cd66..d18e3ddaf 100644
--- a/src/libstore/builtins/unpack-channel.cc
+++ b/src/libstore/builtins/unpack-channel.cc
@@ -1,5 +1,4 @@
#include "builtins.hh"
-#include "compression.hh"
#include "tarfile.hh"
namespace nix {
@@ -18,16 +17,7 @@ void builtinUnpackChannel(const BasicDerivation & drv)
createDirs(out);
- auto source = sinkToSource([&](Sink & sink) {
- auto decompressor =
- hasSuffix(src, ".bz2") ? makeDecompressionSink("bzip2", sink) :
- hasSuffix(src, ".xz") ? makeDecompressionSink("xz", sink) :
- makeDecompressionSink("none", sink);
- readFile(src, *decompressor);
- decompressor->finish();
- });
-
- unpackTarfile(*source, out);
+ unpackTarfile(src, out);
auto entries = readDirectory(out);
if (entries.size() != 1)
diff --git a/src/libstore/download.cc b/src/libstore/download.cc
index e80663dff..61e88c5c1 100644
--- a/src/libstore/download.cc
+++ b/src/libstore/download.cc
@@ -8,6 +8,7 @@
#include "compression.hh"
#include "pathlocks.hh"
#include "finally.hh"
+#include "tarfile.hh"
#ifdef ENABLE_S3
#include <aws/core/client/ClientConfiguration.h>
@@ -903,12 +904,15 @@ CachedDownloadResult Downloader::downloadCached(
unpackedStorePath = "";
}
if (unpackedStorePath.empty()) {
- printInfo(format("unpacking '%1%'...") % url);
+ printInfo("unpacking '%s'...", url);
Path tmpDir = createTempDir();
AutoDelete autoDelete(tmpDir, true);
- // FIXME: this requires GNU tar for decompression.
- runProgram("tar", true, {"xf", store->toRealPath(storePath), "-C", tmpDir, "--strip-components", "1"});
- unpackedStorePath = store->addToStore(name, tmpDir, true, htSHA256, defaultPathFilter, NoRepair);
+ unpackTarfile(store->toRealPath(storePath), tmpDir, baseNameOf(url));
+ auto members = readDirectory(tmpDir);
+ if (members.size() != 1)
+ throw nix::Error("tarball '%s' contains an unexpected number of top-level files", url);
+ auto topDir = tmpDir + "/" + members.begin()->name;
+ unpackedStorePath = store->addToStore(name, topDir, true, htSHA256, defaultPathFilter, NoRepair);
}
replaceSymlink(unpackedStorePath, unpackedLink);
storePath = unpackedStorePath;
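
The GNU-specific `--strip-components 1` has no counterpart in the in-process unpacker, so the hunk above unpacks into a temporary directory, insists on exactly one top-level entry, and imports that entry into the store. The same pattern as a hypothetical helper (the name stripSingleTopLevel is not from this change):

    // Illustrative only; readDirectory and Error behave as in Nix's libutil.
    #include "util.hh"

    static nix::Path stripSingleTopLevel(const nix::Path & tmpDir, const std::string & url)
    {
        auto members = nix::readDirectory(tmpDir);
        if (members.size() != 1)
            throw nix::Error("tarball '%s' contains an unexpected number of top-level files", url);
        /* Equivalent of tar's --strip-components=1: descend into the single
           top-level directory before adding it to the store. */
        return tmpDir + "/" + members.begin()->name;
    }
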
diff --git a/src/libutil/serialise.hh b/src/libutil/serialise.hh
index 128e287f3..5780c93a6 100644
--- a/src/libutil/serialise.hh
+++ b/src/libutil/serialise.hh
@@ -77,7 +77,6 @@ struct BufferedSource : Source
size_t read(unsigned char * data, size_t len) override;
-
bool hasData();
protected:
diff --git a/src/libutil/tarfile.cc b/src/libutil/tarfile.cc
index ae6d512bd..f7d3ad417 100644
--- a/src/libutil/tarfile.cc
+++ b/src/libutil/tarfile.cc
@@ -1,4 +1,5 @@
#include "rust-ffi.hh"
+#include "compression.hh"
extern "C" {
rust::CBox2<rust::Result<std::tuple<>>> unpack_tarfile(rust::Source source, rust::StringSlice dest_dir);
@@ -6,9 +7,28 @@ extern "C" {
namespace nix {
-void unpackTarfile(Source & source, Path destDir)
+void unpackTarfile(Source & source, const Path & destDir)
{
unpack_tarfile(source, destDir).use()->unwrap();
}
+void unpackTarfile(const Path & tarFile, const Path & destDir,
+ std::optional<std::string> baseName)
+{
+ if (!baseName) baseName = baseNameOf(tarFile);
+
+ auto source = sinkToSource([&](Sink & sink) {
+ // FIXME: look at first few bytes to determine compression type.
+ auto decompressor =
+ // FIXME: add .gz support
+ hasSuffix(*baseName, ".bz2") ? makeDecompressionSink("bzip2", sink) :
+ hasSuffix(*baseName, ".xz") ? makeDecompressionSink("xz", sink) :
+ makeDecompressionSink("none", sink);
+ readFile(tarFile, *decompressor);
+ decompressor->finish();
+ });
+
+ unpackTarfile(*source, destDir);
+}
+
}
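
The first FIXME above points at sniffing the compression type from the file's leading bytes instead of trusting the name. One possible shape for that check, shown purely as an illustration and not part of this commit (the function name is made up; the magic numbers are the standard xz and bzip2 signatures):

    // Not part of this commit: a sketch of the magic-byte detection the FIXME
    // hints at. Returns a method name accepted by makeDecompressionSink.
    #include <fstream>
    #include <string>

    static std::string guessCompressionMethod(const std::string & path)
    {
        std::ifstream in(path, std::ios::binary);
        char buf[6] = {};
        in.read(buf, sizeof(buf));
        std::string header(buf, in.gcount());

        static const std::string xzMagic("\xfd" "7zXZ\0", 6);    // FD 37 7A 58 5A 00
        if (header.compare(0, xzMagic.size(), xzMagic) == 0) return "xz";
        if (header.compare(0, 3, "BZh") == 0) return "bzip2";    // bzip2 signature
        return "none";
    }
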
diff --git a/src/libutil/tarfile.hh b/src/libutil/tarfile.hh
index c3e95fb0c..ce0911e2a 100644
--- a/src/libutil/tarfile.hh
+++ b/src/libutil/tarfile.hh
@@ -2,6 +2,9 @@
namespace nix {
-void unpackTarfile(Source & source, Path destDir);
+void unpackTarfile(Source & source, const Path & destDir);
+
+void unpackTarfile(const Path & tarFile, const Path & destDir,
+ std::optional<std::string> baseName = {});
}
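
A rough usage sketch of the two declarations above (the paths and names are invented): the second overload takes an optional base name because callers such as nix-prefetch-url below unpack a temporary file whose own name carries no extension, and the suffix check in tarfile.cc needs the original name to pick a decompressor.

    // Usage sketch only; paths are made up for illustration.
    #include "tarfile.hh"
    #include <string>

    void example()
    {
        // Compression inferred from the tarball's own file name.
        nix::unpackTarfile("/tmp/channel.tar.xz", "/tmp/unpacked-1");

        // Downloaded temp file with no useful extension: pass the original
        // base name so the ".xz"/".bz2" suffix check still applies.
        nix::unpackTarfile("/tmp/nix-prefetch-XXXXXX", "/tmp/unpacked-2",
            std::string("source.tar.bz2"));
    }
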
diff --git a/src/nix-prefetch-url/nix-prefetch-url.cc b/src/nix-prefetch-url/nix-prefetch-url.cc
index f54706a8a..78c883833 100644
--- a/src/nix-prefetch-url/nix-prefetch-url.cc
+++ b/src/nix-prefetch-url/nix-prefetch-url.cc
@@ -9,6 +9,7 @@
#include "legacy.hh"
#include "finally.hh"
#include "progress-bar.hh"
+#include "tarfile.hh"
#include <iostream>
@@ -192,8 +193,7 @@ static int _main(int argc, char * * argv)
if (hasSuffix(baseNameOf(uri), ".zip"))
runProgram("unzip", true, {"-qq", tmpFile, "-d", unpacked});
else
- // FIXME: this requires GNU tar for decompression.
- runProgram("tar", true, {"xf", tmpFile, "-C", unpacked});
+ unpackTarfile(tmpFile, unpacked, baseNameOf(uri));
/* If the archive unpacks to a single file/directory, then use
that as the top-level. */