author    | John Ericson <John.Ericson@Obsidian.Systems> | 2020-07-27 14:29:32 +0000
committer | John Ericson <John.Ericson@Obsidian.Systems> | 2020-07-27 14:29:32 +0000
commit    | d5bb67cfa4da130a9949a9b4eb8aba6cb74ea5c7 (patch)
tree      | ed365ca233fa7edfba9551539c4a903f109abef0 /src/libstore
parent    | 048e916f6477acc7e57e1d85e832d3efb42ad3f6 (diff)
parent    | d7c0f094cbcfe1ae4ccc3d54baec00b66ccb1ed0 (diff)
Merge remote-tracking branch 'upstream/master' into optional-derivation-output-storepath
Diffstat (limited to 'src/libstore')
-rw-r--r-- | src/libstore/build.cc         |   2
-rw-r--r-- | src/libstore/daemon.cc        |  62
-rw-r--r-- | src/libstore/derivations.cc   |   8
-rw-r--r-- | src/libstore/derivations.hh   |   4
-rw-r--r-- | src/libstore/export-import.cc |  12
-rw-r--r-- | src/libstore/filetransfer.cc  |  39
-rw-r--r-- | src/libstore/filetransfer.hh  |   8
-rw-r--r-- | src/libstore/local-store.cc   | 172
-rw-r--r-- | src/libstore/local-store.hh   |   2
-rw-r--r-- | src/libstore/references.cc    |  15
-rw-r--r-- | src/libstore/store-api.cc     |  88
-rw-r--r-- | src/libstore/store-api.hh     |   2
12 files changed, 212 insertions, 202 deletions
diff --git a/src/libstore/build.cc b/src/libstore/build.cc
index 1c88d91bc..109368f8a 100644
--- a/src/libstore/build.cc
+++ b/src/libstore/build.cc
@@ -2774,7 +2774,7 @@ struct RestrictedStore : public LocalFSStore
         goal.addDependency(info.path);
     }
 
-    StorePath addToStoreFromDump(const string & dump, const string & name,
+    StorePath addToStoreFromDump(Source & dump, const string & name,
         FileIngestionMethod method = FileIngestionMethod::Recursive, HashType hashAlgo = htSHA256, RepairFlag repair = NoRepair) override
     {
         auto path = next->addToStoreFromDump(dump, name, method, hashAlgo, repair);
diff --git a/src/libstore/daemon.cc b/src/libstore/daemon.cc
index e574ea1a7..df295084a 100644
--- a/src/libstore/daemon.cc
+++ b/src/libstore/daemon.cc
@@ -173,31 +173,6 @@ struct TunnelSource : BufferedSource
     }
 };
 
-/* If the NAR archive contains a single file at top-level, then save
-   the contents of the file to `s'.  Otherwise barf. */
-struct RetrieveRegularNARSink : ParseSink
-{
-    bool regular;
-    string s;
-
-    RetrieveRegularNARSink() : regular(true) { }
-
-    void createDirectory(const Path & path)
-    {
-        regular = false;
-    }
-
-    void receiveContents(unsigned char * data, unsigned int len)
-    {
-        s.append((const char *) data, len);
-    }
-
-    void createSymlink(const Path & path, const string & target)
-    {
-        regular = false;
-    }
-};
-
 struct ClientSettings
 {
     bool keepFailed;
@@ -375,25 +350,28 @@ static void performOp(TunnelLogger * logger, ref<Store> store,
     }
 
     case wopAddToStore: {
-        std::string s, baseName;
+        HashType hashAlgo;
+        std::string baseName;
         FileIngestionMethod method;
         {
-            bool fixed; uint8_t recursive;
-            from >> baseName >> fixed /* obsolete */ >> recursive >> s;
+            bool fixed;
+            uint8_t recursive;
+            std::string hashAlgoRaw;
+            from >> baseName >> fixed /* obsolete */ >> recursive >> hashAlgoRaw;
             if (recursive > (uint8_t) FileIngestionMethod::Recursive)
                 throw Error("unsupported FileIngestionMethod with value of %i; you may need to upgrade nix-daemon", recursive);
             method = FileIngestionMethod { recursive };
             /* Compatibility hack. */
            if (!fixed) {
-                s = "sha256";
+                hashAlgoRaw = "sha256";
                 method = FileIngestionMethod::Recursive;
             }
+            hashAlgo = parseHashType(hashAlgoRaw);
         }
-        HashType hashAlgo = parseHashType(s);
 
-        StringSink savedNAR;
-        TeeSource savedNARSource(from, savedNAR);
-        RetrieveRegularNARSink savedRegular;
+        StringSink saved;
+        TeeSource savedNARSource(from, saved);
+        RetrieveRegularNARSink savedRegular { saved };
 
         if (method == FileIngestionMethod::Recursive) {
             /* Get the entire NAR dump from the client and save it to
@@ -407,11 +385,9 @@ static void performOp(TunnelLogger * logger, ref<Store> store,
         logger->startWork();
         if (!savedRegular.regular) throw Error("regular file expected");
 
-        auto path = store->addToStoreFromDump(
-            method == FileIngestionMethod::Recursive ? *savedNAR.s : savedRegular.s,
-            baseName,
-            method,
-            hashAlgo);
+        // FIXME: try to stream directly from `from`.
+        StringSource dumpSource { *saved.s };
+        auto path = store->addToStoreFromDump(dumpSource, baseName, method, hashAlgo);
         logger->stopWork();
 
         to << store->printStorePath(path);
@@ -727,15 +703,15 @@ static void performOp(TunnelLogger * logger, ref<Store> store,
         if (!trusted)
             info.ultimate = false;
 
-        std::string saved;
         std::unique_ptr<Source> source;
         if (GET_PROTOCOL_MINOR(clientVersion) >= 21)
             source = std::make_unique<TunnelSource>(from, to);
         else {
-            TeeParseSink tee(from);
-            parseDump(tee, tee.source);
-            saved = std::move(*tee.saved.s);
-            source = std::make_unique<StringSource>(saved);
+            StringSink saved;
+            TeeSource tee { from, saved };
+            ParseSink ether;
+            parseDump(ether, tee);
+            source = std::make_unique<StringSource>(std::move(*saved.s));
         }
 
         logger->startWork();
diff --git a/src/libstore/derivations.cc b/src/libstore/derivations.cc
index e74e46dda..7d0a5abeb 100644
--- a/src/libstore/derivations.cc
+++ b/src/libstore/derivations.cc
@@ -23,14 +23,6 @@ StorePath DerivationOutput::path(const Store & store, std::string_view drvName)
     }, output);
 }
 
-const StorePath BasicDerivation::findOutput(const Store & store, const string & id) const
-{
-    auto i = outputs.find(id);
-    if (i == outputs.end())
-        throw Error("derivation has no output '%s'", id);
-    return i->second.path(store, name);
-}
-
 
 bool BasicDerivation::isBuiltin() const
 {
diff --git a/src/libstore/derivations.hh b/src/libstore/derivations.hh
index fd8828373..133ffe50e 100644
--- a/src/libstore/derivations.hh
+++ b/src/libstore/derivations.hh
@@ -50,10 +50,6 @@ struct BasicDerivation
     BasicDerivation() { }
     virtual ~BasicDerivation() { };
 
-    /* Return the path corresponding to the output identifier `id' in
-       the given derivation. */
-    const StorePath findOutput(const Store & store, const std::string & id) const;
-
     bool isBuiltin() const;
 
     /* Return true iff this is a fixed-output derivation. */
diff --git a/src/libstore/export-import.cc b/src/libstore/export-import.cc
index 082d0f1d1..b963d64d7 100644
--- a/src/libstore/export-import.cc
+++ b/src/libstore/export-import.cc
@@ -60,8 +60,10 @@ StorePaths Store::importPaths(Source & source, CheckSigsFlag checkSigs)
         if (n != 1) throw Error("input doesn't look like something created by 'nix-store --export'");
 
         /* Extract the NAR from the source. */
-        TeeParseSink tee(source);
-        parseDump(tee, tee.source);
+        StringSink saved;
+        TeeSource tee { source, saved };
+        ParseSink ether;
+        parseDump(ether, tee);
 
         uint32_t magic = readInt(source);
         if (magic != exportMagic)
@@ -77,15 +79,15 @@ StorePaths Store::importPaths(Source & source, CheckSigsFlag checkSigs)
         if (deriver != "")
             info.deriver = parseStorePath(deriver);
 
-        info.narHash = hashString(htSHA256, *tee.saved.s);
-        info.narSize = tee.saved.s->size();
+        info.narHash = hashString(htSHA256, *saved.s);
+        info.narSize = saved.s->size();
 
         // Ignore optional legacy signature.
         if (readInt(source) == 1)
            readString(source);
 
        // Can't use underlying source, which would have been exhausted
-        auto source = StringSource { *tee.saved.s };
+        auto source = StringSource { *saved.s };
        addToStore(info, source, NoRepair, checkSigs);
 
         res.push_back(info.path);
diff --git a/src/libstore/filetransfer.cc b/src/libstore/filetransfer.cc
index beb508e67..4149f8155 100644
--- a/src/libstore/filetransfer.cc
+++ b/src/libstore/filetransfer.cc
@@ -124,7 +124,7 @@ struct curlFileTransfer : public FileTransfer
             if (requestHeaders) curl_slist_free_all(requestHeaders);
             try {
                 if (!done)
-                    fail(FileTransferError(Interrupted, "download of '%s' was interrupted", request.uri));
+                    fail(FileTransferError(Interrupted, nullptr, "download of '%s' was interrupted", request.uri));
             } catch (...) {
                 ignoreException();
             }
@@ -145,6 +145,7 @@ struct curlFileTransfer : public FileTransfer
 
         LambdaSink finalSink;
         std::shared_ptr<CompressionSink> decompressionSink;
+        std::optional<StringSink> errorSink;
 
         std::exception_ptr writeException;
 
@@ -154,9 +155,19 @@ struct curlFileTransfer : public FileTransfer
             size_t realSize = size * nmemb;
            result.bodySize += realSize;
 
-            if (!decompressionSink)
+            if (!decompressionSink) {
                 decompressionSink = makeDecompressionSink(encoding, finalSink);
+                if (! successfulStatuses.count(getHTTPStatus())) {
+                    // In this case we want to construct a TeeSink, to keep
+                    // the response around (which we figure won't be big
+                    // like an actual download should be) to improve error
+                    // messages.
+                    errorSink = StringSink { };
+                }
+            }
 
+            if (errorSink)
+                (*errorSink)((unsigned char *) contents, realSize);
             (*decompressionSink)((unsigned char *) contents, realSize);
 
             return realSize;
@@ -412,16 +423,21 @@ struct curlFileTransfer : public FileTransfer
 
                 attempt++;
 
+                std::shared_ptr<std::string> response;
+                if (errorSink)
+                    response = errorSink->s;
                 auto exc =
                     code == CURLE_ABORTED_BY_CALLBACK && _isInterrupted
-                    ? FileTransferError(Interrupted, fmt("%s of '%s' was interrupted", request.verb(), request.uri))
+                    ? FileTransferError(Interrupted, response, "%s of '%s' was interrupted", request.verb(), request.uri)
                     : httpStatus != 0
                     ? FileTransferError(err,
+                        response,
                         fmt("unable to %s '%s': HTTP error %d ('%s')",
                             request.verb(), request.uri, httpStatus, statusMsg)
                         + (code == CURLE_OK ? "" : fmt(" (curl error: %s)", curl_easy_strerror(code)))
                         )
                     : FileTransferError(err,
+                        response,
                         fmt("unable to %s '%s': %s (%d)",
                             request.verb(), request.uri, curl_easy_strerror(code), code));
@@ -679,7 +695,7 @@ struct curlFileTransfer : public FileTransfer
                 auto s3Res = s3Helper.getObject(bucketName, key);
                 FileTransferResult res;
                 if (!s3Res.data)
-                    throw FileTransferError(NotFound, fmt("S3 object '%s' does not exist", request.uri));
+                    throw FileTransferError(NotFound, nullptr, "S3 object '%s' does not exist", request.uri);
                 res.data = s3Res.data;
                 callback(std::move(res));
 #else
@@ -824,6 +840,21 @@ void FileTransfer::download(FileTransferRequest && request, Sink & sink)
     }
 }
 
+template<typename... Args>
+FileTransferError::FileTransferError(FileTransfer::Error error, std::shared_ptr<string> response, const Args & ... args)
+    : Error(args...), error(error), response(response)
+{
+    const auto hf = hintfmt(args...);
+    // FIXME: Due to https://github.com/NixOS/nix/issues/3841 we don't know how
+    // to print different messages for different verbosity levels. For now
+    // we add some heuristics for detecting when we want to show the response.
+    if (response && (response->size() < 1024 || response->find("<html>") != string::npos)) {
+        err.hint = hintfmt("%1%\n\nresponse body:\n\n%2%", normaltxt(hf.str()), *response);
+    } else {
+        err.hint = hf;
+    }
+}
+
 bool isUri(const string & s)
 {
     if (s.compare(0, 8, "channel:") == 0) return true;
diff --git a/src/libstore/filetransfer.hh b/src/libstore/filetransfer.hh
index 11dca2fe0..25ade0add 100644
--- a/src/libstore/filetransfer.hh
+++ b/src/libstore/filetransfer.hh
@@ -103,10 +103,12 @@ class FileTransferError : public Error
 {
 public:
     FileTransfer::Error error;
+    std::shared_ptr<string> response; // intentionally optional
+
     template<typename... Args>
-    FileTransferError(FileTransfer::Error error, const Args & ... args)
-        : Error(args...), error(error)
-    { }
+    FileTransferError(FileTransfer::Error error, std::shared_ptr<string> response, const Args & ... args);
+
+    virtual const char* sname() const override { return "FileTransferError"; }
 };
 
 bool isUri(const string & s);
diff --git a/src/libstore/local-store.cc b/src/libstore/local-store.cc
index d1a3b95c1..1e6b34cd1 100644
--- a/src/libstore/local-store.cc
+++ b/src/libstore/local-store.cc
@@ -1026,82 +1026,26 @@ void LocalStore::addToStore(const ValidPathInfo & info, Source & source,
 }
 
 
-StorePath LocalStore::addToStoreFromDump(const string & dump, const string & name,
-    FileIngestionMethod method, HashType hashAlgo, RepairFlag repair)
-{
-    Hash h = hashString(hashAlgo, dump);
-
-    auto dstPath = makeFixedOutputPath(method, h, name);
-
-    addTempRoot(dstPath);
-
-    if (repair || !isValidPath(dstPath)) {
-
-        /* The first check above is an optimisation to prevent
-           unnecessary lock acquisition. */
-
-        auto realPath = Store::toRealPath(dstPath);
-
-        PathLocks outputLock({realPath});
-
-        if (repair || !isValidPath(dstPath)) {
-
-            deletePath(realPath);
-
-            autoGC();
-
-            if (method == FileIngestionMethod::Recursive) {
-                StringSource source(dump);
-                restorePath(realPath, source);
-            } else
-                writeFile(realPath, dump);
-
-            canonicalisePathMetaData(realPath, -1);
-
-            /* Register the SHA-256 hash of the NAR serialisation of
-               the path in the database.  We may just have computed it
-               above (if called with recursive == true and hashAlgo ==
-               sha256); otherwise, compute it here. */
-            HashResult hash;
-            if (method == FileIngestionMethod::Recursive) {
-                hash.first = hashAlgo == htSHA256 ? h : hashString(htSHA256, dump);
-                hash.second = dump.size();
-            } else
-                hash = hashPath(htSHA256, realPath);
-
-            optimisePath(realPath); // FIXME: combine with hashPath()
-
-            ValidPathInfo info(dstPath);
-            info.narHash = hash.first;
-            info.narSize = hash.second;
-            info.ca = FixedOutputHash { .method = method, .hash = h };
-            registerValidPath(info);
-        }
-
-        outputLock.setDeletion(true);
-    }
-
-    return dstPath;
-}
-
-
 StorePath LocalStore::addToStore(const string & name, const Path & _srcPath,
     FileIngestionMethod method, HashType hashAlgo,
     PathFilter & filter, RepairFlag repair)
 {
     Path srcPath(absPath(_srcPath));
+    auto source = sinkToSource([&](Sink & sink) {
+        if (method == FileIngestionMethod::Recursive)
+            dumpPath(srcPath, sink, filter);
+        else
+            readFile(srcPath, sink);
+    });
+    return addToStoreFromDump(*source, name, method, hashAlgo, repair);
+}
 
-    if (method != FileIngestionMethod::Recursive)
-        return addToStoreFromDump(readFile(srcPath), name, method, hashAlgo, repair);
-
-    /* For computing the NAR hash. */
-    auto sha256Sink = std::make_unique<HashSink>(htSHA256);
 
-    /* For computing the store path. In recursive SHA-256 mode, this
-       is the same as the NAR hash, so no need to do it again. */
-    std::unique_ptr<HashSink> hashSink =
-        hashAlgo == htSHA256
-        ? nullptr
-        : std::make_unique<HashSink>(hashAlgo);
+StorePath LocalStore::addToStoreFromDump(Source & source0, const string & name,
+    FileIngestionMethod method, HashType hashAlgo, RepairFlag repair)
+{
+    /* For computing the store path. */
+    auto hashSink = std::make_unique<HashSink>(hashAlgo);
+    TeeSource source { source0, *hashSink };
 
     /* Read the source path into memory, but only if it's up to
        narBufferSize bytes. If it's larger, write it to a temporary
@@ -1109,55 +1053,49 @@ StorePath LocalStore::addToStore(const string & name, const Path & _srcPath,
        destination store path is already valid, we just delete the
       temporary path. Otherwise, we move it to the destination store
       path. */
-    bool inMemory = true;
-    std::string nar;
-
-    auto source = sinkToSource([&](Sink & sink) {
-
-        LambdaSink sink2([&](const unsigned char * buf, size_t len) {
-            (*sha256Sink)(buf, len);
-            if (hashSink) (*hashSink)(buf, len);
-
-            if (inMemory) {
-                if (nar.size() + len > settings.narBufferSize) {
-                    inMemory = false;
-                    sink << 1;
-                    sink((const unsigned char *) nar.data(), nar.size());
-                    nar.clear();
-                } else {
-                    nar.append((const char *) buf, len);
-                }
-            }
-
-            if (!inMemory) sink(buf, len);
-        });
-
-        dumpPath(srcPath, sink2, filter);
-    });
+    bool inMemory = false;
+
+    std::string dump;
+
+    /* Fill out buffer, and decide whether we are working strictly in
+       memory based on whether we break out because the buffer is full
+       or the original source is empty */
+    while (dump.size() < settings.narBufferSize) {
+        auto oldSize = dump.size();
+        constexpr size_t chunkSize = 65536;
+        auto want = std::min(chunkSize, settings.narBufferSize - oldSize);
+        dump.resize(oldSize + want);
+        auto got = 0;
+        try {
+            got = source.read((uint8_t *) dump.data() + oldSize, want);
+        } catch (EndOfFile &) {
+            inMemory = true;
+            break;
+        }
+        dump.resize(oldSize + got);
+    }
 
     std::unique_ptr<AutoDelete> delTempDir;
     Path tempPath;
 
-    try {
-        /* Wait for the source coroutine to give us some dummy
-           data. This is so that we don't create the temporary
-           directory if the NAR fits in memory. */
-        readInt(*source);
+    if (!inMemory) {
+        /* Drain what we pulled so far, and then keep on pulling */
+        StringSource dumpSource { dump };
+        ChainSource bothSource { dumpSource, source };
 
         auto tempDir = createTempDir(realStoreDir, "add");
         delTempDir = std::make_unique<AutoDelete>(tempDir);
         tempPath = tempDir + "/x";
 
-        restorePath(tempPath, *source);
+        if (method == FileIngestionMethod::Recursive)
+            restorePath(tempPath, bothSource);
+        else
+            writeFile(tempPath, bothSource);
 
-    } catch (EndOfFile &) {
-        if (!inMemory) throw;
-        /* The NAR fits in memory, so we didn't do restorePath(). */
+        dump.clear();
     }
 
-    auto sha256 = sha256Sink->finish();
-
-    Hash hash = hashSink ? hashSink->finish().first : sha256.first;
+    auto [hash, size] = hashSink->finish();
 
     auto dstPath = makeFixedOutputPath(method, hash, name);
 
@@ -1179,22 +1117,34 @@ StorePath LocalStore::addToStore(const string & name, const Path & _srcPath,
             autoGC();
 
             if (inMemory) {
+                StringSource dumpSource { dump };
                 /* Restore from the NAR in memory. */
-                StringSource source(nar);
-                restorePath(realPath, source);
+                if (method == FileIngestionMethod::Recursive)
+                    restorePath(realPath, dumpSource);
+                else
+                    writeFile(realPath, dumpSource);
             } else {
                 /* Move the temporary path we restored above. */
                 if (rename(tempPath.c_str(), realPath.c_str()))
                     throw Error("renaming '%s' to '%s'", tempPath, realPath);
             }
 
+            /* For computing the nar hash. In recursive SHA-256 mode, this
+               is the same as the store hash, so no need to do it again. */
+            auto narHash = std::pair { hash, size };
+            if (method != FileIngestionMethod::Recursive || hashAlgo != htSHA256) {
+                HashSink narSink { htSHA256 };
+                dumpPath(realPath, narSink);
+                narHash = narSink.finish();
+            }
+
             canonicalisePathMetaData(realPath, -1); // FIXME: merge into restorePath
 
             optimisePath(realPath);
 
             ValidPathInfo info(dstPath);
-            info.narHash = sha256.first;
-            info.narSize = sha256.second;
+            info.narHash = narHash.first;
+            info.narSize = narHash.second;
             info.ca = FixedOutputHash { .method = method, .hash = hash };
             registerValidPath(info);
         }
diff --git a/src/libstore/local-store.hh b/src/libstore/local-store.hh
index c0e5d0286..355c2814f 100644
--- a/src/libstore/local-store.hh
+++ b/src/libstore/local-store.hh
@@ -153,7 +153,7 @@ public:
        in `dump', which is either a NAR serialisation (if recursive ==
       true) or simply the contents of a regular file (if recursive ==
       false). */
-    StorePath addToStoreFromDump(const string & dump, const string & name,
+    StorePath addToStoreFromDump(Source & dump, const string & name,
         FileIngestionMethod method = FileIngestionMethod::Recursive, HashType hashAlgo = htSHA256, RepairFlag repair = NoRepair) override;
 
     StorePath addTextToStore(const string & name, const string & s,
diff --git a/src/libstore/references.cc b/src/libstore/references.cc
index a10d536a3..968e2e425 100644
--- a/src/libstore/references.cc
+++ b/src/libstore/references.cc
@@ -48,13 +48,12 @@ static void search(const unsigned char * s, size_t len,
 
 struct RefScanSink : Sink
 {
-    HashSink hashSink;
     StringSet hashes;
     StringSet seen;
 
     string tail;
 
-    RefScanSink() : hashSink(htSHA256) { }
+    RefScanSink() { }
 
     void operator () (const unsigned char * data, size_t len);
 };
@@ -62,8 +61,6 @@ struct RefScanSink : Sink
 
 void RefScanSink::operator () (const unsigned char * data, size_t len)
 {
-    hashSink(data, len);
-
     /* It's possible that a reference spans the previous and current
       fragment, so search in the concatenation of the tail of the
       previous fragment and the start of the current fragment. */
@@ -82,7 +79,9 @@ void RefScanSink::operator () (const unsigned char * data, size_t len)
 PathSet scanForReferences(const string & path,
     const PathSet & refs, HashResult & hash)
 {
-    RefScanSink sink;
+    RefScanSink refsSink;
+    HashSink hashSink { htSHA256 };
+    TeeSink sink { refsSink, hashSink };
     std::map<string, Path> backMap;
 
     /* For efficiency (and a higher hit rate), just search for the
@@ -97,7 +96,7 @@ PathSet scanForReferences(const string & path,
         assert(s.size() == refLength);
         assert(backMap.find(s) == backMap.end());
         // parseHash(htSHA256, s);
-        sink.hashes.insert(s);
+        refsSink.hashes.insert(s);
         backMap[s] = i;
     }
 
@@ -106,13 +105,13 @@ PathSet scanForReferences(const string & path,
 
     /* Map the hashes found back to their store paths. */
     PathSet found;
-    for (auto & i : sink.seen) {
+    for (auto & i : refsSink.seen) {
         std::map<string, Path>::iterator j;
         if ((j = backMap.find(i)) == backMap.end()) abort();
         found.insert(j->second);
     }
 
-    hash = sink.hashSink.finish();
+    hash = hashSink.finish();
 
     return found;
 }
diff --git a/src/libstore/store-api.cc b/src/libstore/store-api.cc
index 5b9f79049..ab21b0bd5 100644
--- a/src/libstore/store-api.cc
+++ b/src/libstore/store-api.cc
@@ -222,20 +222,73 @@ StorePath Store::computeStorePathForText(const string & name, const string & s,
 }
 
 
+/*
+The aim of this function is to compute in one pass the correct ValidPathInfo for
+the files that we are trying to add to the store. To accomplish that in one
+pass, given the different kind of inputs that we can take (normal nar archives,
+nar archives with non SHA-256 hashes, and flat files), we set up a net of sinks
+and aliases. Also, since the dataflow is obfuscated by this, we include here a
+graphviz diagram:
+
+digraph graphname {
+    node [shape=box]
+    fileSource -> narSink
+    narSink [style=dashed]
+    narSink -> unsualHashTee [style = dashed, label = "Recursive && !SHA-256"]
+    narSink -> narHashSink [style = dashed, label = "else"]
+    unsualHashTee -> narHashSink
+    unsualHashTee -> caHashSink
+    fileSource -> parseSink
+    parseSink [style=dashed]
+    parseSink-> fileSink [style = dashed, label = "Flat"]
+    parseSink -> blank [style = dashed, label = "Recursive"]
+    fileSink -> caHashSink
+}
+*/
 ValidPathInfo Store::addToStoreSlow(std::string_view name, const Path & srcPath,
     FileIngestionMethod method, HashType hashAlgo,
     std::optional<Hash> expectedCAHash)
 {
-    /* FIXME: inefficient: we're reading/hashing 'tmpFile' three
-       times. */
+    HashSink narHashSink { htSHA256 };
+    HashSink caHashSink { hashAlgo };
+
+    /* Note that fileSink and unusualHashTee must be mutually exclusive, since
+       they both write to caHashSink. Note that that requisite is currently true
+       because the former is only used in the flat case. */
+    RetrieveRegularNARSink fileSink { caHashSink };
+    TeeSink unusualHashTee { narHashSink, caHashSink };
+
+    auto & narSink = method == FileIngestionMethod::Recursive && hashAlgo != htSHA256
+        ? static_cast<Sink &>(unusualHashTee)
+        : narHashSink;
+
+    /* Functionally, this means that fileSource will yield the content of
+       srcPath. The fact that we use scratchpadSink as a temporary buffer here
+       is an implementation detail. */
+    auto fileSource = sinkToSource([&](Sink & scratchpadSink) {
+        dumpPath(srcPath, scratchpadSink);
+    });
 
-    auto [narHash, narSize] = hashPath(htSHA256, srcPath);
+    /* tapped provides the same data as fileSource, but we also write all the
+       information to narSink. */
+    TeeSource tapped { *fileSource, narSink };
 
-    auto hash = method == FileIngestionMethod::Recursive
-        ? hashAlgo == htSHA256
-          ? narHash
-          : hashPath(hashAlgo, srcPath).first
-        : hashFile(hashAlgo, srcPath);
+    ParseSink blank;
+    auto & parseSink = method == FileIngestionMethod::Flat
+        ? fileSink
+        : blank;
+
+    /* The information that flows from tapped (besides being replicated in
+       narSink), is now put in parseSink. */
+    parseDump(parseSink, tapped);
+
+    /* We extract the result of the computation from the sink by calling
+       finish. */
+    auto [narHash, narSize] = narHashSink.finish();
+
+    auto hash = method == FileIngestionMethod::Recursive && hashAlgo == htSHA256
+        ? narHash
+        : caHashSink.finish().first;
 
     if (expectedCAHash && expectedCAHash != hash)
         throw Error("hash mismatch for '%s'", srcPath);
@@ -246,8 +299,8 @@ ValidPathInfo Store::addToStoreSlow(std::string_view name, const Path & srcPath,
     info.ca = FixedOutputHash { .method = method, .hash = hash };
 
     if (!isValidPath(info.path)) {
-        auto source = sinkToSource([&](Sink & sink) {
-            dumpPath(srcPath, sink);
+        auto source = sinkToSource([&](Sink & scratchpadSink) {
+            dumpPath(srcPath, scratchpadSink);
         });
         addToStore(info, *source);
     }
@@ -914,12 +967,20 @@ ref<Store> openStore(const std::string & uri_,
     throw Error("don't know how to open Nix store '%s'", uri);
 }
 
+static bool isNonUriPath(const std::string & spec) {
+    return
+        // is not a URL
+        spec.find("://") == std::string::npos
+        // Has at least one path separator, and so isn't a single word that
+        // might be special like "auto"
+        && spec.find("/") != std::string::npos;
+}
 
 StoreType getStoreType(const std::string & uri, const std::string & stateDir)
 {
     if (uri == "daemon") {
         return tDaemon;
-    } else if (uri == "local" || hasPrefix(uri, "/")) {
+    } else if (uri == "local" || isNonUriPath(uri)) {
         return tLocal;
     } else if (uri == "" || uri == "auto") {
         if (access(stateDir.c_str(), R_OK | W_OK) == 0)
@@ -943,8 +1004,9 @@ static RegisterStoreImplementation regStore([](
         return std::shared_ptr<Store>(std::make_shared<UDSRemoteStore>(params));
     case tLocal: {
         Store::Params params2 = params;
-        if (hasPrefix(uri, "/"))
-            params2["root"] = uri;
+        if (isNonUriPath(uri)) {
+            params2["root"] = absPath(uri);
+        }
         return std::shared_ptr<Store>(std::make_shared<LocalStore>(params2));
     }
     default:
diff --git a/src/libstore/store-api.hh b/src/libstore/store-api.hh
index a4be0411e..d1cb2035f 100644
--- a/src/libstore/store-api.hh
+++ b/src/libstore/store-api.hh
@@ -460,7 +460,7 @@ public:
         std::optional<Hash> expectedCAHash = {});
 
     // FIXME: remove?
-    virtual StorePath addToStoreFromDump(const string & dump, const string & name,
+    virtual StorePath addToStoreFromDump(Source & dump, const string & name,
         FileIngestionMethod method = FileIngestionMethod::Recursive, HashType hashAlgo = htSHA256, RepairFlag repair = NoRepair)
     { throw Error("addToStoreFromDump() is not supported by this store"); }
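
The LocalStore::addToStoreFromDump rewrite above buffers up to settings.narBufferSize bytes from the incoming Source and, if the stream is not exhausted by then, spills to a temporary path while replaying the buffered prefix ahead of the live stream through a ChainSource. The following is a minimal, self-contained sketch of that buffer-then-spill pattern only; the Source, StringSource and ChainSource types here are simplified stand-ins written for illustration (compile with -std=c++17), not the real classes from Nix's libutil.

// buffer_then_spill.cc - illustrative sketch, not Nix code
#include <algorithm>
#include <cstddef>
#include <iostream>
#include <stdexcept>
#include <string>

using std::size_t;

struct EndOfFile : std::runtime_error {
    EndOfFile() : std::runtime_error("end of file") { }
};

/* A data producer: read() returns 1..want bytes or throws EndOfFile. */
struct Source {
    virtual ~Source() = default;
    virtual size_t read(char * data, size_t want) = 0;
};

/* Serves bytes out of an in-memory string. */
struct StringSource : Source {
    const std::string & s;
    size_t pos = 0;
    explicit StringSource(const std::string & s) : s(s) { }
    size_t read(char * data, size_t want) override {
        if (pos == s.size()) throw EndOfFile();
        size_t n = std::min(want, s.size() - pos);
        std::copy_n(s.data() + pos, n, data);
        pos += n;
        return n;
    }
};

/* Yields source1 until it is exhausted, then continues with source2 --
   this is how the buffered prefix is stitched back onto the live stream. */
struct ChainSource : Source {
    Source & source1, & source2;
    bool useSecond = false;
    ChainSource(Source & s1, Source & s2) : source1(s1), source2(s2) { }
    size_t read(char * data, size_t want) override {
        if (!useSecond) {
            try { return source1.read(data, want); }
            catch (EndOfFile &) { useSecond = true; }
        }
        return source2.read(data, want);
    }
};

int main() {
    const size_t narBufferSize = 8;        // tiny buffer to force a spill
    std::string payload = "0123456789abcdef";
    StringSource wire { payload };          // stands in for the client/NAR stream

    /* Buffer up to narBufferSize bytes, noting whether the stream ended first. */
    std::string dump;
    bool inMemory = false;
    while (dump.size() < narBufferSize) {
        size_t old = dump.size();
        size_t want = std::min<size_t>(4, narBufferSize - old);
        dump.resize(old + want);
        size_t got = 0;
        try {
            got = wire.read(dump.data() + old, want);
        } catch (EndOfFile &) { inMemory = true; break; }
        dump.resize(old + got);
    }

    if (inMemory) {
        std::cout << "fits in memory: " << dump << "\n";
    } else {
        /* Drain what we pulled so far, then keep pulling from the stream. */
        StringSource prefix { dump };
        ChainSource both { prefix, wire };
        std::string restored;
        char buf[4];
        try { while (true) restored.append(buf, both.read(buf, sizeof buf)); }
        catch (EndOfFile &) { }
        std::cout << "spilled, restored: " << restored << "\n";  // full payload again
    }
}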
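
A second pattern that recurs throughout the diff (TeeSource tapped { *fileSource, narSink } in Store::addToStoreSlow, TeeSink sink { refsSink, hashSink } in scanForReferences) is tee-ing: one pass over the data feeds two consumers at once, which is what lets the new code hash while streaming instead of re-reading the input. The sketch below shows only the idea; Sink and TeeSink are simplified stand-ins, and CountingSink/CollectingSink are hypothetical substitutes for HashSink and RefScanSink.

// tee_sketch.cc - illustrative sketch, not Nix code
#include <cstddef>
#include <iostream>
#include <string>

using std::size_t;

struct Sink {
    virtual ~Sink() = default;
    virtual void operator()(const char * data, size_t len) = 0;
};

/* Forwards every chunk to two downstream sinks. */
struct TeeSink : Sink {
    Sink & a, & b;
    TeeSink(Sink & a, Sink & b) : a(a), b(b) { }
    void operator()(const char * data, size_t len) override { a(data, len); b(data, len); }
};

/* Stand-in for HashSink: here it just counts bytes. */
struct CountingSink : Sink {
    size_t n = 0;
    void operator()(const char *, size_t len) override { n += len; }
};

/* Stand-in for RefScanSink: here it just collects the bytes. */
struct CollectingSink : Sink {
    std::string s;
    void operator()(const char * data, size_t len) override { s.append(data, len); }
};

int main() {
    CountingSink counter;
    CollectingSink collector;
    TeeSink tee { counter, collector };
    std::string chunk = "one pass, two consumers";
    tee(chunk.data(), chunk.size());     // both sinks see the same data
    std::cout << counter.n << " bytes, copy: " << collector.s << "\n";
}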