From 9ec10046e04a509cc982102f587cf06840f02327 Mon Sep 17 00:00:00 2001 From: John Ericson Date: Sat, 11 Jul 2020 16:06:24 +0000 Subject: Narrow scope of temporary value --- src/libstore/daemon.cc | 13 ++++++++----- 1 file changed, 8 insertions(+), 5 deletions(-) diff --git a/src/libstore/daemon.cc b/src/libstore/daemon.cc index db7139374..6e0b290ed 100644 --- a/src/libstore/daemon.cc +++ b/src/libstore/daemon.cc @@ -375,21 +375,24 @@ static void performOp(TunnelLogger * logger, ref store, } case wopAddToStore: { - std::string s, baseName; + HashType hashAlgo; + std::string baseName; FileIngestionMethod method; { - bool fixed; uint8_t recursive; - from >> baseName >> fixed /* obsolete */ >> recursive >> s; + bool fixed; + uint8_t recursive; + std::string hashAlgoRaw; + from >> baseName >> fixed /* obsolete */ >> recursive >> hashAlgoRaw; if (recursive > (uint8_t) FileIngestionMethod::Recursive) throw Error("unsupported FileIngestionMethod with value of %i; you may need to upgrade nix-daemon", recursive); method = FileIngestionMethod { recursive }; /* Compatibility hack. */ if (!fixed) { - s = "sha256"; + hashAlgoRaw = "sha256"; method = FileIngestionMethod::Recursive; } + hashAlgo = parseHashType(hashAlgoRaw); } - HashType hashAlgo = parseHashType(s); StringSink savedNAR; TeeSource savedNARSource(from, savedNAR); -- cgit v1.2.3 From c86fc3a9657096b74fe967f2f0bbd120e46908f6 Mon Sep 17 00:00:00 2001 From: John Ericson Date: Sat, 11 Jul 2020 15:55:04 +0000 Subject: Crudely make `addToStoreFromDump` take `Source` not string I just as little beyond the type as possible, so the implementation changes this enables can be reviewed separately. --- src/libstore/build.cc | 2 +- src/libstore/daemon.cc | 5 ++++- src/libstore/local-store.cc | 5 ++++- src/libstore/local-store.hh | 2 +- src/libstore/store-api.hh | 2 +- 5 files changed, 11 insertions(+), 5 deletions(-) diff --git a/src/libstore/build.cc b/src/libstore/build.cc index ac2e67574..62294a08c 100644 --- a/src/libstore/build.cc +++ b/src/libstore/build.cc @@ -2774,7 +2774,7 @@ struct RestrictedStore : public LocalFSStore goal.addDependency(info.path); } - StorePath addToStoreFromDump(const string & dump, const string & name, + StorePath addToStoreFromDump(Source & dump, const string & name, FileIngestionMethod method = FileIngestionMethod::Recursive, HashType hashAlgo = htSHA256, RepairFlag repair = NoRepair) override { auto path = next->addToStoreFromDump(dump, name, method, hashAlgo, repair); diff --git a/src/libstore/daemon.cc b/src/libstore/daemon.cc index 6e0b290ed..69d7ef511 100644 --- a/src/libstore/daemon.cc +++ b/src/libstore/daemon.cc @@ -410,8 +410,11 @@ static void performOp(TunnelLogger * logger, ref store, logger->startWork(); if (!savedRegular.regular) throw Error("regular file expected"); + StringSource dumpSource { + method == FileIngestionMethod::Recursive ? *savedNAR.s : savedRegular.s + }; auto path = store->addToStoreFromDump( - method == FileIngestionMethod::Recursive ? 
*savedNAR.s : savedRegular.s, + dumpSource, baseName, method, hashAlgo); diff --git a/src/libstore/local-store.cc b/src/libstore/local-store.cc index 26b226fe8..603f36352 100644 --- a/src/libstore/local-store.cc +++ b/src/libstore/local-store.cc @@ -1033,9 +1033,12 @@ void LocalStore::addToStore(const ValidPathInfo & info, Source & source, } -StorePath LocalStore::addToStoreFromDump(const string & dump, const string & name, +StorePath LocalStore::addToStoreFromDump(Source & dumpSource, const string & name, FileIngestionMethod method, HashType hashAlgo, RepairFlag repair) { + // FIXME: See if we can use the original source to reduce memory usage. + auto dump = dumpSource.drain(); + Hash h = hashString(hashAlgo, dump); auto dstPath = makeFixedOutputPath(method, h, name); diff --git a/src/libstore/local-store.hh b/src/libstore/local-store.hh index c0e5d0286..355c2814f 100644 --- a/src/libstore/local-store.hh +++ b/src/libstore/local-store.hh @@ -153,7 +153,7 @@ public: in `dump', which is either a NAR serialisation (if recursive == true) or simply the contents of a regular file (if recursive == false). */ - StorePath addToStoreFromDump(const string & dump, const string & name, + StorePath addToStoreFromDump(Source & dump, const string & name, FileIngestionMethod method = FileIngestionMethod::Recursive, HashType hashAlgo = htSHA256, RepairFlag repair = NoRepair) override; StorePath addTextToStore(const string & name, const string & s, diff --git a/src/libstore/store-api.hh b/src/libstore/store-api.hh index a4be0411e..d1cb2035f 100644 --- a/src/libstore/store-api.hh +++ b/src/libstore/store-api.hh @@ -460,7 +460,7 @@ public: std::optional expectedCAHash = {}); // FIXME: remove? - virtual StorePath addToStoreFromDump(const string & dump, const string & name, + virtual StorePath addToStoreFromDump(Source & dump, const string & name, FileIngestionMethod method = FileIngestionMethod::Recursive, HashType hashAlgo = htSHA256, RepairFlag repair = NoRepair) { throw Error("addToStoreFromDump() is not supported by this store"); -- cgit v1.2.3 From 9de96ef7d409fedea092045c4dbae7177f88962a Mon Sep 17 00:00:00 2001 From: John Ericson Date: Sat, 11 Jul 2020 19:03:39 +0000 Subject: Dedup `LocalStore::addToStore*` The downsides is that the coroutine has byte-by-byte loop transfer. Will fix that next. --- src/libstore/local-store.cc | 83 ++++++++++++--------------------------------- src/libstore/local-store.hh | 4 +++ 2 files changed, 25 insertions(+), 62 deletions(-) diff --git a/src/libstore/local-store.cc b/src/libstore/local-store.cc index 603f36352..925ac25bf 100644 --- a/src/libstore/local-store.cc +++ b/src/libstore/local-store.cc @@ -1033,65 +1033,16 @@ void LocalStore::addToStore(const ValidPathInfo & info, Source & source, } -StorePath LocalStore::addToStoreFromDump(Source & dumpSource, const string & name, +StorePath LocalStore::addToStoreFromDump(Source & dump, const string & name, FileIngestionMethod method, HashType hashAlgo, RepairFlag repair) { - // FIXME: See if we can use the original source to reduce memory usage. - auto dump = dumpSource.drain(); - - Hash h = hashString(hashAlgo, dump); - - auto dstPath = makeFixedOutputPath(method, h, name); - - addTempRoot(dstPath); - - if (repair || !isValidPath(dstPath)) { - - /* The first check above is an optimisation to prevent - unnecessary lock acquisition. 
*/ - - auto realPath = Store::toRealPath(dstPath); - - PathLocks outputLock({realPath}); - - if (repair || !isValidPath(dstPath)) { - - deletePath(realPath); - - autoGC(); - - if (method == FileIngestionMethod::Recursive) { - StringSource source(dump); - restorePath(realPath, source); - } else - writeFile(realPath, dump); - - canonicalisePathMetaData(realPath, -1); - - /* Register the SHA-256 hash of the NAR serialisation of - the path in the database. We may just have computed it - above (if called with recursive == true and hashAlgo == - sha256); otherwise, compute it here. */ - HashResult hash; - if (method == FileIngestionMethod::Recursive) { - hash.first = hashAlgo == htSHA256 ? h : hashString(htSHA256, dump); - hash.second = dump.size(); - } else - hash = hashPath(htSHA256, realPath); - - optimisePath(realPath); // FIXME: combine with hashPath() - - ValidPathInfo info(dstPath); - info.narHash = hash.first; - info.narSize = hash.second; - info.ca = FixedOutputHash { .method = method, .hash = h }; - registerValidPath(info); + return addToStoreCommon(name, method, hashAlgo, repair, [&](auto & sink) { + while (1) { + uint8_t buf[1]; + auto n = dump.read(buf, 1); + sink(buf, n); } - - outputLock.setDeletion(true); - } - - return dstPath; + }); } @@ -1100,6 +1051,19 @@ StorePath LocalStore::addToStore(const string & name, const Path & _srcPath, { Path srcPath(absPath(_srcPath)); + return addToStoreCommon(name, method, hashAlgo, repair, [&](auto & sink) { + if (method == FileIngestionMethod::Recursive) + dumpPath(srcPath, sink, filter); + else + readFile(srcPath, sink); + }); +} + + +StorePath LocalStore::addToStoreCommon( + const string & name, FileIngestionMethod method, HashType hashAlgo, RepairFlag repair, + std::function demux) +{ /* For computing the NAR hash. */ auto sha256Sink = std::make_unique(htSHA256); @@ -1120,7 +1084,6 @@ StorePath LocalStore::addToStore(const string & name, const Path & _srcPath, std::string nar; auto source = sinkToSource([&](Sink & sink) { - LambdaSink sink2([&](const unsigned char * buf, size_t len) { (*sha256Sink)(buf, len); if (hashSink) (*hashSink)(buf, len); @@ -1138,11 +1101,7 @@ StorePath LocalStore::addToStore(const string & name, const Path & _srcPath, if (!inMemory) sink(buf, len); }); - - if (method == FileIngestionMethod::Recursive) - dumpPath(srcPath, sink2, filter); - else - readFile(srcPath, sink2); + demux(sink2); }); std::unique_ptr delTempDir; diff --git a/src/libstore/local-store.hh b/src/libstore/local-store.hh index 355c2814f..215731f87 100644 --- a/src/libstore/local-store.hh +++ b/src/libstore/local-store.hh @@ -290,6 +290,10 @@ private: specified by the ‘secret-key-files’ option. */ void signPathInfo(ValidPathInfo & info); + StorePath addToStoreCommon( + const string & name, FileIngestionMethod method, HashType hashAlgo, RepairFlag repair, + std::function demux); + Path getRealStoreDir() override { return realStoreDir; } void createUser(const std::string & userName, uid_t userId) override; -- cgit v1.2.3 From 592851fb67cd15807109d6f65fb81f6af89af966 Mon Sep 17 00:00:00 2001 From: John Ericson Date: Sat, 11 Jul 2020 23:40:49 +0000 Subject: LocalStore::addToStoreFromDump copy in chunks Rather than copying byte-by-byte, we let the coroutine know how much data we would like it to send back to us. 
--- src/libstore/local-store.cc | 16 +++++++++------- src/libstore/local-store.hh | 2 +- src/libutil/serialise.cc | 33 ++++++++++++++++++++------------- src/libutil/serialise.hh | 11 ++++++++++- 4 files changed, 40 insertions(+), 22 deletions(-) diff --git a/src/libstore/local-store.cc b/src/libstore/local-store.cc index 925ac25bf..dac7a50c4 100644 --- a/src/libstore/local-store.cc +++ b/src/libstore/local-store.cc @@ -1036,11 +1036,13 @@ void LocalStore::addToStore(const ValidPathInfo & info, Source & source, StorePath LocalStore::addToStoreFromDump(Source & dump, const string & name, FileIngestionMethod method, HashType hashAlgo, RepairFlag repair) { - return addToStoreCommon(name, method, hashAlgo, repair, [&](auto & sink) { + return addToStoreCommon(name, method, hashAlgo, repair, [&](auto & sink, size_t & wanted) { while (1) { - uint8_t buf[1]; - auto n = dump.read(buf, 1); + constexpr size_t bufSize = 1024; + uint8_t buf[bufSize]; + auto n = dump.read(buf, std::min(wanted, bufSize)); sink(buf, n); + // when control is yielded back to us wanted will be updated. } }); } @@ -1051,7 +1053,7 @@ StorePath LocalStore::addToStore(const string & name, const Path & _srcPath, { Path srcPath(absPath(_srcPath)); - return addToStoreCommon(name, method, hashAlgo, repair, [&](auto & sink) { + return addToStoreCommon(name, method, hashAlgo, repair, [&](auto & sink, size_t & _) { if (method == FileIngestionMethod::Recursive) dumpPath(srcPath, sink, filter); else @@ -1062,7 +1064,7 @@ StorePath LocalStore::addToStore(const string & name, const Path & _srcPath, StorePath LocalStore::addToStoreCommon( const string & name, FileIngestionMethod method, HashType hashAlgo, RepairFlag repair, - std::function demux) + std::function demux) { /* For computing the NAR hash. 
*/ auto sha256Sink = std::make_unique(htSHA256); @@ -1083,7 +1085,7 @@ StorePath LocalStore::addToStoreCommon( bool inMemory = true; std::string nar; - auto source = sinkToSource([&](Sink & sink) { + auto source = sinkToSource([&](Sink & sink, size_t & wanted) { LambdaSink sink2([&](const unsigned char * buf, size_t len) { (*sha256Sink)(buf, len); if (hashSink) (*hashSink)(buf, len); @@ -1101,7 +1103,7 @@ StorePath LocalStore::addToStoreCommon( if (!inMemory) sink(buf, len); }); - demux(sink2); + demux(sink2, wanted); }); std::unique_ptr delTempDir; diff --git a/src/libstore/local-store.hh b/src/libstore/local-store.hh index 215731f87..ae23004c4 100644 --- a/src/libstore/local-store.hh +++ b/src/libstore/local-store.hh @@ -292,7 +292,7 @@ private: StorePath addToStoreCommon( const string & name, FileIngestionMethod method, HashType hashAlgo, RepairFlag repair, - std::function demux); + std::function demux); Path getRealStoreDir() override { return realStoreDir; } diff --git a/src/libutil/serialise.cc b/src/libutil/serialise.cc index c8b71188f..141e9e976 100644 --- a/src/libutil/serialise.cc +++ b/src/libutil/serialise.cc @@ -165,35 +165,43 @@ size_t StringSource::read(unsigned char * data, size_t len) #endif std::unique_ptr sinkToSource( - std::function fun, + std::function fun, std::function eof) { struct SinkToSource : Source { - typedef boost::coroutines2::coroutine coro_t; + typedef boost::coroutines2::coroutine> coro_t; - std::function fun; + std::function fun; std::function eof; std::optional coro; bool started = false; - SinkToSource(std::function fun, std::function eof) + /* It would be nicer to have the co-routines have both args and a + return value, but unfortunately that was removed from Boost's + implementation for some reason, so we use some extra state instead. + */ + size_t wanted = 0; + + SinkToSource(std::function fun, std::function eof) : fun(fun), eof(eof) { } - std::string cur; + std::basic_string cur; size_t pos = 0; size_t read(unsigned char * data, size_t len) override { - if (!coro) + wanted = len < cur.size() ? 0 : len - cur.size(); + if (!coro) { coro = coro_t::pull_type([&](coro_t::push_type & yield) { - LambdaSink sink([&](const unsigned char * data, size_t len) { - if (len) yield(std::string((const char *) data, len)); + LambdaSink sink([&](const uint8_t * data, size_t len) { + if (len) yield(std::basic_string { data, len }); }); - fun(sink); + fun(sink, wanted); }); + } if (!*coro) { eof(); abort(); } @@ -203,11 +211,10 @@ std::unique_ptr sinkToSource( pos = 0; } - auto n = std::min(cur.size() - pos, len); - memcpy(data, (unsigned char *) cur.data() + pos, n); - pos += n; + auto numCopied = cur.copy(data, len, pos); + pos += numCopied; - return n; + return numCopied; } }; diff --git a/src/libutil/serialise.hh b/src/libutil/serialise.hh index 8386a4991..6cb9d1bf5 100644 --- a/src/libutil/serialise.hh +++ b/src/libutil/serialise.hh @@ -260,11 +260,20 @@ struct LambdaSource : Source /* Convert a function that feeds data into a Sink into a Source. The Source executes the function as a coroutine. 
*/ std::unique_ptr sinkToSource( - std::function fun, + std::function fun, std::function eof = []() { throw EndOfFile("coroutine has finished"); }); +static inline std::unique_ptr sinkToSource( + std::function fun, + std::function eof = []() { + throw EndOfFile("coroutine has finished"); + }) +{ + return sinkToSource([fun](Sink & s, size_t & _) { fun(s); }, eof); +} + void writePadding(size_t len, Sink & sink); void writeString(const unsigned char * buf, size_t len, Sink & sink); -- cgit v1.2.3 From 8173e7bfefc6a5771b2c9ec48bd6edd3b161dd90 Mon Sep 17 00:00:00 2001 From: John Ericson Date: Tue, 14 Jul 2020 21:11:08 +0000 Subject: Fix localhost::addToStore(...Path...) We were calculating the nar hash wrong when the file ingestion method was flat. I don't think there's anything we can do in that case but dump the file again, so that's what I do. As an optomization, we again could reuse the original dump for just the recursive and non-sha256 case, but I rather do that after this fix, and after my other PRs which deduplicate this code. --- src/libstore/local-store.cc | 33 ++++++++++++++++----------------- 1 file changed, 16 insertions(+), 17 deletions(-) diff --git a/src/libstore/local-store.cc b/src/libstore/local-store.cc index 26b226fe8..5827dfc58 100644 --- a/src/libstore/local-store.cc +++ b/src/libstore/local-store.cc @@ -1097,15 +1097,8 @@ StorePath LocalStore::addToStore(const string & name, const Path & _srcPath, { Path srcPath(absPath(_srcPath)); - /* For computing the NAR hash. */ - auto sha256Sink = std::make_unique(htSHA256); - - /* For computing the store path. In recursive SHA-256 mode, this - is the same as the NAR hash, so no need to do it again. */ - std::unique_ptr hashSink = - method == FileIngestionMethod::Recursive && hashAlgo == htSHA256 - ? nullptr - : std::make_unique(hashAlgo); + /* For computing the store path. */ + auto hashSink = std::make_unique(hashAlgo); /* Read the source path into memory, but only if it's up to narBufferSize bytes. If it's larger, write it to a temporary @@ -1114,13 +1107,12 @@ StorePath LocalStore::addToStore(const string & name, const Path & _srcPath, temporary path. Otherwise, we move it to the destination store path. */ bool inMemory = true; - std::string nar; + std::string nar; // TODO rename from "nar" to "dump" auto source = sinkToSource([&](Sink & sink) { LambdaSink sink2([&](const unsigned char * buf, size_t len) { - (*sha256Sink)(buf, len); - if (hashSink) (*hashSink)(buf, len); + (*hashSink)(buf, len); if (inMemory) { if (nar.size() + len > settings.narBufferSize) { @@ -1165,9 +1157,7 @@ StorePath LocalStore::addToStore(const string & name, const Path & _srcPath, /* The NAR fits in memory, so we didn't do restorePath(). */ } - auto sha256 = sha256Sink->finish(); - - Hash hash = hashSink ? hashSink->finish().first : sha256.first; + auto [hash, size] = hashSink->finish(); auto dstPath = makeFixedOutputPath(method, hash, name); @@ -1201,13 +1191,22 @@ StorePath LocalStore::addToStore(const string & name, const Path & _srcPath, throw Error("renaming '%s' to '%s'", tempPath, realPath); } + /* For computing the nar hash. In recursive SHA-256 mode, this + is the same as the store hash, so no need to do it again. 
*/ + auto narHash = std::pair { hash, size }; + if (method != FileIngestionMethod::Recursive || hashAlgo != htSHA256) { + HashSink narSink { htSHA256 }; + dumpPath(realPath, narSink); + narHash = narSink.finish(); + } + canonicalisePathMetaData(realPath, -1); // FIXME: merge into restorePath optimisePath(realPath); ValidPathInfo info(dstPath); - info.narHash = sha256.first; - info.narSize = sha256.second; + info.narHash = narHash.first; + info.narSize = narHash.second; info.ca = FixedOutputHash { .method = method, .hash = hash }; registerValidPath(info); } -- cgit v1.2.3 From 650c2c655810c375296b52997e2f85298c7c566a Mon Sep 17 00:00:00 2001 From: John Ericson Date: Tue, 14 Jul 2020 21:28:50 +0000 Subject: Rename variable `nar` -> `dump` according to TODO --- src/libstore/local-store.cc | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/src/libstore/local-store.cc b/src/libstore/local-store.cc index 5827dfc58..cd92f138c 100644 --- a/src/libstore/local-store.cc +++ b/src/libstore/local-store.cc @@ -1107,7 +1107,7 @@ StorePath LocalStore::addToStore(const string & name, const Path & _srcPath, temporary path. Otherwise, we move it to the destination store path. */ bool inMemory = true; - std::string nar; // TODO rename from "nar" to "dump" + std::string dump; auto source = sinkToSource([&](Sink & sink) { @@ -1115,13 +1115,13 @@ StorePath LocalStore::addToStore(const string & name, const Path & _srcPath, (*hashSink)(buf, len); if (inMemory) { - if (nar.size() + len > settings.narBufferSize) { + if (dump.size() + len > settings.narBufferSize) { inMemory = false; sink << 1; - sink((const unsigned char *) nar.data(), nar.size()); - nar.clear(); + sink((const unsigned char *) dump.data(), dump.size()); + dump.clear(); } else { - nar.append((const char *) buf, len); + dump.append((const char *) buf, len); } } @@ -1180,7 +1180,7 @@ StorePath LocalStore::addToStore(const string & name, const Path & _srcPath, if (inMemory) { /* Restore from the NAR in memory. */ - StringSource source(nar); + StringSource source(dump); if (method == FileIngestionMethod::Recursive) restorePath(realPath, source); else -- cgit v1.2.3 From d087cf48552ee82e2bc78bb6c99854bab350ee00 Mon Sep 17 00:00:00 2001 From: John Ericson Date: Wed, 15 Jul 2020 21:10:33 +0000 Subject: Revert "Revert "LocalStore::addToStore(srcPath): Handle the flat case"" This reverts commit cff2157185912025c24a1b9dc99056161634176c. --- src/libstore/local-store.cc | 20 +++++++++++++------- 1 file changed, 13 insertions(+), 7 deletions(-) diff --git a/src/libstore/local-store.cc b/src/libstore/local-store.cc index b3f4b3f7d..26b226fe8 100644 --- a/src/libstore/local-store.cc +++ b/src/libstore/local-store.cc @@ -1097,16 +1097,13 @@ StorePath LocalStore::addToStore(const string & name, const Path & _srcPath, { Path srcPath(absPath(_srcPath)); - if (method != FileIngestionMethod::Recursive) - return addToStoreFromDump(readFile(srcPath), name, method, hashAlgo, repair); - /* For computing the NAR hash. */ auto sha256Sink = std::make_unique(htSHA256); /* For computing the store path. In recursive SHA-256 mode, this is the same as the NAR hash, so no need to do it again. */ std::unique_ptr hashSink = - hashAlgo == htSHA256 + method == FileIngestionMethod::Recursive && hashAlgo == htSHA256 ? 
nullptr : std::make_unique(hashAlgo); @@ -1139,7 +1136,10 @@ StorePath LocalStore::addToStore(const string & name, const Path & _srcPath, if (!inMemory) sink(buf, len); }); - dumpPath(srcPath, sink2, filter); + if (method == FileIngestionMethod::Recursive) + dumpPath(srcPath, sink2, filter); + else + readFile(srcPath, sink2); }); std::unique_ptr delTempDir; @@ -1155,7 +1155,10 @@ StorePath LocalStore::addToStore(const string & name, const Path & _srcPath, delTempDir = std::make_unique(tempDir); tempPath = tempDir + "/x"; - restorePath(tempPath, *source); + if (method == FileIngestionMethod::Recursive) + restorePath(tempPath, *source); + else + writeFile(tempPath, *source); } catch (EndOfFile &) { if (!inMemory) throw; @@ -1188,7 +1191,10 @@ StorePath LocalStore::addToStore(const string & name, const Path & _srcPath, if (inMemory) { /* Restore from the NAR in memory. */ StringSource source(nar); - restorePath(realPath, source); + if (method == FileIngestionMethod::Recursive) + restorePath(realPath, source); + else + writeFile(realPath, source); } else { /* Move the temporary path we restored above. */ if (rename(tempPath.c_str(), realPath.c_str())) -- cgit v1.2.3 From bc109648c41f8021707b55b815e68a890a09f2f6 Mon Sep 17 00:00:00 2001 From: John Ericson Date: Wed, 15 Jul 2020 23:14:30 +0000 Subject: Get rid of `LocalStore::addToStoreCommon` I got it to just become `LocalStore::addToStoreFromDump`, cleanly taking a store and then doing nothing too fancy with it. `LocalStore::addToStore(...Path...)` is now just a simple wrapper with a bare-bones sinkToSource of the right dump command. --- src/libstore/local-store.cc | 93 +++++++++++++++++++-------------------------- src/libstore/local-store.hh | 4 -- src/libutil/serialise.cc | 13 +++++++ src/libutil/serialise.hh | 15 +++++++- 4 files changed, 67 insertions(+), 58 deletions(-) diff --git a/src/libstore/local-store.cc b/src/libstore/local-store.cc index b9fae6089..07e1679da 100644 --- a/src/libstore/local-store.cc +++ b/src/libstore/local-store.cc @@ -1033,38 +1033,22 @@ void LocalStore::addToStore(const ValidPathInfo & info, Source & source, } -StorePath LocalStore::addToStoreFromDump(Source & dump, const string & name, - FileIngestionMethod method, HashType hashAlgo, RepairFlag repair) -{ - return addToStoreCommon(name, method, hashAlgo, repair, [&](auto & sink, size_t & wanted) { - while (1) { - constexpr size_t bufSize = 1024; - uint8_t buf[bufSize]; - auto n = dump.read(buf, std::min(wanted, bufSize)); - sink(buf, n); - // when control is yielded back to us wanted will be updated. - } - }); -} - - StorePath LocalStore::addToStore(const string & name, const Path & _srcPath, FileIngestionMethod method, HashType hashAlgo, PathFilter & filter, RepairFlag repair) { Path srcPath(absPath(_srcPath)); - - return addToStoreCommon(name, method, hashAlgo, repair, [&](auto & sink, size_t & _) { + auto source = sinkToSource([&](Sink & sink, size_t & wanted) { if (method == FileIngestionMethod::Recursive) dumpPath(srcPath, sink, filter); else readFile(srcPath, sink); }); + return addToStoreFromDump(*source, name, method, hashAlgo, repair); } -StorePath LocalStore::addToStoreCommon( - const string & name, FileIngestionMethod method, HashType hashAlgo, RepairFlag repair, - std::function demux) +StorePath LocalStore::addToStoreFromDump(Source & source, const string & name, + FileIngestionMethod method, HashType hashAlgo, RepairFlag repair) { /* For computing the store path. 
*/ auto hashSink = std::make_unique(hashAlgo); @@ -1075,50 +1059,53 @@ StorePath LocalStore::addToStoreCommon( destination store path is already valid, we just delete the temporary path. Otherwise, we move it to the destination store path. */ - bool inMemory = true; - std::string dump; + bool inMemory = false; - auto source = sinkToSource([&](Sink & sink, size_t & wanted) { - LambdaSink sink2([&](const unsigned char * buf, size_t len) { - (*hashSink)(buf, len); - - if (inMemory) { - if (dump.size() + len > settings.narBufferSize) { - inMemory = false; - sink << 1; - sink((const unsigned char *) dump.data(), dump.size()); - dump.clear(); - } else { - dump.append((const char *) buf, len); - } - } + std::string dump; - if (!inMemory) sink(buf, len); - }); - demux(sink2, wanted); - }); + /* Fill out buffer, and decide whether we are working strictly in + memory based on whether we break out because the buffer is full + or the original source is empty */ + while (dump.size() < settings.narBufferSize) { + auto oldSize = dump.size(); + constexpr size_t chunkSize = 1024; + auto want = std::min(chunkSize, settings.narBufferSize - oldSize); + dump.resize(oldSize + want); + auto got = 0; + try { + got = source.read((uint8_t *) dump.data() + oldSize, want); + } catch (EndOfFile &) { + inMemory = true; + break; + } + /* Start hashing as we get data */ + (*hashSink)((const uint8_t *) dump.data() + oldSize, got); + dump.resize(oldSize + got); + } std::unique_ptr delTempDir; Path tempPath; - try { - /* Wait for the source coroutine to give us some dummy - data. This is so that we don't create the temporary - directory if the NAR fits in memory. */ - readInt(*source); + if (!inMemory) { + StringSource dumpSource { dump }; + TeeSource rest { source, *hashSink }; + ChainSource bothSource { + .source1 = dumpSource, + /* Continue hashing what's left, but don't rehash what we + already did. */ + .source2 = rest, + }; auto tempDir = createTempDir(realStoreDir, "add"); delTempDir = std::make_unique(tempDir); tempPath = tempDir + "/x"; if (method == FileIngestionMethod::Recursive) - restorePath(tempPath, *source); + restorePath(tempPath, bothSource); else - writeFile(tempPath, *source); + writeFile(tempPath, bothSource); - } catch (EndOfFile &) { - if (!inMemory) throw; - /* The NAR fits in memory, so we didn't do restorePath(). */ + dump.clear(); } auto [hash, size] = hashSink->finish(); @@ -1143,12 +1130,12 @@ StorePath LocalStore::addToStoreCommon( autoGC(); if (inMemory) { + StringSource dumpSource { dump }; /* Restore from the NAR in memory. */ - StringSource source(dump); if (method == FileIngestionMethod::Recursive) - restorePath(realPath, source); + restorePath(realPath, dumpSource); else - writeFile(realPath, source); + writeFile(realPath, dumpSource); } else { /* Move the temporary path we restored above. */ if (rename(tempPath.c_str(), realPath.c_str())) diff --git a/src/libstore/local-store.hh b/src/libstore/local-store.hh index ae23004c4..355c2814f 100644 --- a/src/libstore/local-store.hh +++ b/src/libstore/local-store.hh @@ -290,10 +290,6 @@ private: specified by the ‘secret-key-files’ option. 
*/ void signPathInfo(ValidPathInfo & info); - StorePath addToStoreCommon( - const string & name, FileIngestionMethod method, HashType hashAlgo, RepairFlag repair, - std::function demux); - Path getRealStoreDir() override { return realStoreDir; } void createUser(const std::string & userName, uid_t userId) override; diff --git a/src/libutil/serialise.cc b/src/libutil/serialise.cc index 141e9e976..4c72dc9f2 100644 --- a/src/libutil/serialise.cc +++ b/src/libutil/serialise.cc @@ -329,5 +329,18 @@ void StringSink::operator () (const unsigned char * data, size_t len) s->append((const char *) data, len); } +size_t ChainSource::read(unsigned char * data, size_t len) +{ + if (useSecond) { + return source2.read(data, len); + } else { + try { + return source1.read(data, len); + } catch (EndOfFile &) { + useSecond = true; + return this->read(data, len); + } + } +} } diff --git a/src/libutil/serialise.hh b/src/libutil/serialise.hh index 6cb9d1bf5..3e3735ca5 100644 --- a/src/libutil/serialise.hh +++ b/src/libutil/serialise.hh @@ -256,6 +256,19 @@ struct LambdaSource : Source } }; +/* Chain two sources together so after the first is exhausted, the second is + used */ +struct ChainSource : Source +{ + Source & source1, & source2; + bool useSecond = false; + ChainSource(Source & s1, Source & s2) + : source1(s1), source2(s2) + { } + + size_t read(unsigned char * data, size_t len) override; +}; + /* Convert a function that feeds data into a Sink into a Source. The Source executes the function as a coroutine. */ @@ -271,7 +284,7 @@ static inline std::unique_ptr sinkToSource( throw EndOfFile("coroutine has finished"); }) { - return sinkToSource([fun](Sink & s, size_t & _) { fun(s); }, eof); + return sinkToSource([fun](Sink & s, size_t & _) { fun(s); }, eof); } -- cgit v1.2.3 From 5602637d9ea195784368e99a226718fc95e6b978 Mon Sep 17 00:00:00 2001 From: John Ericson Date: Wed, 15 Jul 2020 23:19:41 +0000 Subject: Revert "LocalStore::addToStoreFromDump copy in chunks" This reverts commit 592851fb67cd15807109d6f65fb81f6af89af966. 
We don't need this extra feature anymore --- src/libstore/local-store.cc | 2 +- src/libutil/serialise.cc | 33 +++++++++++++-------------------- src/libutil/serialise.hh | 11 +---------- 3 files changed, 15 insertions(+), 31 deletions(-) diff --git a/src/libstore/local-store.cc b/src/libstore/local-store.cc index 07e1679da..b2b5afadd 100644 --- a/src/libstore/local-store.cc +++ b/src/libstore/local-store.cc @@ -1037,7 +1037,7 @@ StorePath LocalStore::addToStore(const string & name, const Path & _srcPath, FileIngestionMethod method, HashType hashAlgo, PathFilter & filter, RepairFlag repair) { Path srcPath(absPath(_srcPath)); - auto source = sinkToSource([&](Sink & sink, size_t & wanted) { + auto source = sinkToSource([&](Sink & sink) { if (method == FileIngestionMethod::Recursive) dumpPath(srcPath, sink, filter); else diff --git a/src/libutil/serialise.cc b/src/libutil/serialise.cc index 4c72dc9f2..00c945113 100644 --- a/src/libutil/serialise.cc +++ b/src/libutil/serialise.cc @@ -165,43 +165,35 @@ size_t StringSource::read(unsigned char * data, size_t len) #endif std::unique_ptr sinkToSource( - std::function fun, + std::function fun, std::function eof) { struct SinkToSource : Source { - typedef boost::coroutines2::coroutine> coro_t; + typedef boost::coroutines2::coroutine coro_t; - std::function fun; + std::function fun; std::function eof; std::optional coro; bool started = false; - /* It would be nicer to have the co-routines have both args and a - return value, but unfortunately that was removed from Boost's - implementation for some reason, so we use some extra state instead. - */ - size_t wanted = 0; - - SinkToSource(std::function fun, std::function eof) + SinkToSource(std::function fun, std::function eof) : fun(fun), eof(eof) { } - std::basic_string cur; + std::string cur; size_t pos = 0; size_t read(unsigned char * data, size_t len) override { - wanted = len < cur.size() ? 0 : len - cur.size(); - if (!coro) { + if (!coro) coro = coro_t::pull_type([&](coro_t::push_type & yield) { - LambdaSink sink([&](const uint8_t * data, size_t len) { - if (len) yield(std::basic_string { data, len }); + LambdaSink sink([&](const unsigned char * data, size_t len) { + if (len) yield(std::string((const char *) data, len)); }); - fun(sink, wanted); + fun(sink); }); - } if (!*coro) { eof(); abort(); } @@ -211,10 +203,11 @@ std::unique_ptr sinkToSource( pos = 0; } - auto numCopied = cur.copy(data, len, pos); - pos += numCopied; + auto n = std::min(cur.size() - pos, len); + memcpy(data, (unsigned char *) cur.data() + pos, n); + pos += n; - return numCopied; + return n; } }; diff --git a/src/libutil/serialise.hh b/src/libutil/serialise.hh index 3e3735ca5..aa6b42597 100644 --- a/src/libutil/serialise.hh +++ b/src/libutil/serialise.hh @@ -273,19 +273,10 @@ struct ChainSource : Source /* Convert a function that feeds data into a Sink into a Source. The Source executes the function as a coroutine. 
*/ std::unique_ptr sinkToSource( - std::function fun, - std::function eof = []() { - throw EndOfFile("coroutine has finished"); - }); - -static inline std::unique_ptr sinkToSource( std::function fun, std::function eof = []() { throw EndOfFile("coroutine has finished"); - }) -{ - return sinkToSource([fun](Sink & s, size_t & _) { fun(s); }, eof); -} + }); void writePadding(size_t len, Sink & sink); -- cgit v1.2.3 From 68dfb8c6aef7afebf0312c48bb5010653fc464b3 Mon Sep 17 00:00:00 2001 From: John Ericson Date: Thu, 16 Jul 2020 05:09:41 +0000 Subject: Optimize `addToStoreSlow` and remove `TeeParseSink` --- src/libstore/daemon.cc | 47 +++++++++---------------------------------- src/libstore/export-import.cc | 12 ++++++----- src/libstore/store-api.cc | 35 +++++++++++++++++++++++++------- src/libutil/archive.hh | 25 +++++++++++++++++++---- 4 files changed, 65 insertions(+), 54 deletions(-) diff --git a/src/libstore/daemon.cc b/src/libstore/daemon.cc index db7139374..573836f7f 100644 --- a/src/libstore/daemon.cc +++ b/src/libstore/daemon.cc @@ -173,31 +173,6 @@ struct TunnelSource : BufferedSource } }; -/* If the NAR archive contains a single file at top-level, then save - the contents of the file to `s'. Otherwise barf. */ -struct RetrieveRegularNARSink : ParseSink -{ - bool regular; - string s; - - RetrieveRegularNARSink() : regular(true) { } - - void createDirectory(const Path & path) - { - regular = false; - } - - void receiveContents(unsigned char * data, unsigned int len) - { - s.append((const char *) data, len); - } - - void createSymlink(const Path & path, const string & target) - { - regular = false; - } -}; - struct ClientSettings { bool keepFailed; @@ -391,9 +366,9 @@ static void performOp(TunnelLogger * logger, ref store, } HashType hashAlgo = parseHashType(s); - StringSink savedNAR; - TeeSource savedNARSource(from, savedNAR); - RetrieveRegularNARSink savedRegular; + StringSink saved; + TeeSource savedNARSource(from, saved); + RetrieveRegularNARSink savedRegular { saved }; if (method == FileIngestionMethod::Recursive) { /* Get the entire NAR dump from the client and save it to @@ -407,11 +382,7 @@ static void performOp(TunnelLogger * logger, ref store, logger->startWork(); if (!savedRegular.regular) throw Error("regular file expected"); - auto path = store->addToStoreFromDump( - method == FileIngestionMethod::Recursive ? *savedNAR.s : savedRegular.s, - baseName, - method, - hashAlgo); + auto path = store->addToStoreFromDump(*saved.s, baseName, method, hashAlgo); logger->stopWork(); to << store->printStorePath(path); @@ -727,15 +698,15 @@ static void performOp(TunnelLogger * logger, ref store, if (!trusted) info.ultimate = false; - std::string saved; std::unique_ptr source; if (GET_PROTOCOL_MINOR(clientVersion) >= 21) source = std::make_unique(from, to); else { - TeeParseSink tee(from); - parseDump(tee, tee.source); - saved = std::move(*tee.saved.s); - source = std::make_unique(saved); + StringSink saved; + TeeSource tee { from, saved }; + ParseSink ether; + parseDump(ether, tee); + source = std::make_unique(std::move(*saved.s)); } logger->startWork(); diff --git a/src/libstore/export-import.cc b/src/libstore/export-import.cc index 082d0f1d1..b963d64d7 100644 --- a/src/libstore/export-import.cc +++ b/src/libstore/export-import.cc @@ -60,8 +60,10 @@ StorePaths Store::importPaths(Source & source, CheckSigsFlag checkSigs) if (n != 1) throw Error("input doesn't look like something created by 'nix-store --export'"); /* Extract the NAR from the source. 
*/ - TeeParseSink tee(source); - parseDump(tee, tee.source); + StringSink saved; + TeeSource tee { source, saved }; + ParseSink ether; + parseDump(ether, tee); uint32_t magic = readInt(source); if (magic != exportMagic) @@ -77,15 +79,15 @@ StorePaths Store::importPaths(Source & source, CheckSigsFlag checkSigs) if (deriver != "") info.deriver = parseStorePath(deriver); - info.narHash = hashString(htSHA256, *tee.saved.s); - info.narSize = tee.saved.s->size(); + info.narHash = hashString(htSHA256, *saved.s); + info.narSize = saved.s->size(); // Ignore optional legacy signature. if (readInt(source) == 1) readString(source); // Can't use underlying source, which would have been exhausted - auto source = StringSource { *tee.saved.s }; + auto source = StringSource { *saved.s }; addToStore(info, source, NoRepair, checkSigs); res.push_back(info.path); diff --git a/src/libstore/store-api.cc b/src/libstore/store-api.cc index 5b9f79049..5c8dddba5 100644 --- a/src/libstore/store-api.cc +++ b/src/libstore/store-api.cc @@ -226,16 +226,37 @@ ValidPathInfo Store::addToStoreSlow(std::string_view name, const Path & srcPath, FileIngestionMethod method, HashType hashAlgo, std::optional expectedCAHash) { - /* FIXME: inefficient: we're reading/hashing 'tmpFile' three + /* FIXME: inefficient: we're reading/hashing 'tmpFile' two times. */ + HashSink narHashSink { htSHA256 }; + HashSink caHashSink { hashAlgo }; + RetrieveRegularNARSink fileSink { caHashSink }; - auto [narHash, narSize] = hashPath(htSHA256, srcPath); + TeeSink sinkIfNar { narHashSink, caHashSink }; - auto hash = method == FileIngestionMethod::Recursive - ? hashAlgo == htSHA256 - ? narHash - : hashPath(hashAlgo, srcPath).first - : hashFile(hashAlgo, srcPath); + /* We use the tee sink if we need to hash he nar twice */ + auto & sink = method == FileIngestionMethod::Recursive && hashAlgo != htSHA256 + ? static_cast(sinkIfNar) + : narHashSink; + + auto fileSource = sinkToSource([&](Sink & sink) { + dumpPath(srcPath, sink); + }); + + TeeSource tapped { *fileSource, sink }; + + ParseSink blank; + auto & parseSink = method == FileIngestionMethod::Flat + ? fileSink + : blank; + + parseDump(parseSink, tapped); + + auto [narHash, narSize] = narHashSink.finish(); + + auto hash = method == FileIngestionMethod::Recursive && hashAlgo == htSHA256 + ? narHash + : caHashSink.finish().first; if (expectedCAHash && expectedCAHash != hash) throw Error("hash mismatch for '%s'", srcPath); diff --git a/src/libutil/archive.hh b/src/libutil/archive.hh index 302b1bb18..57780d16a 100644 --- a/src/libutil/archive.hh +++ b/src/libutil/archive.hh @@ -63,12 +63,29 @@ struct ParseSink virtual void createSymlink(const Path & path, const string & target) { }; }; -struct TeeParseSink : ParseSink +/* If the NAR archive contains a single file at top-level, then save + the contents of the file to `s'. Otherwise barf. 
*/ +struct RetrieveRegularNARSink : ParseSink { - StringSink saved; - TeeSource source; + bool regular = true; + Sink & sink; - TeeParseSink(Source & source) : source(source, saved) { } + RetrieveRegularNARSink(Sink & sink) : sink(sink) { } + + void createDirectory(const Path & path) + { + regular = false; + } + + void receiveContents(unsigned char * data, unsigned int len) + { + sink(data, len); + } + + void createSymlink(const Path & path, const string & target) + { + regular = false; + } }; void parseDump(ParseSink & sink, Source & source); -- cgit v1.2.3 From 5517eee17e37565a1d5b7fb19f9e810068c9428d Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Thu, 16 Jul 2020 15:14:22 +0200 Subject: Generations API cleanup --- src/libstore/profiles.cc | 81 ++++++++++++++++++++++-------------------------- src/libstore/profiles.hh | 25 ++++++--------- src/nix-env/nix-env.cc | 28 +++++++---------- 3 files changed, 59 insertions(+), 75 deletions(-) diff --git a/src/libstore/profiles.cc b/src/libstore/profiles.cc index 6cfe393a4..6862b42f0 100644 --- a/src/libstore/profiles.cc +++ b/src/libstore/profiles.cc @@ -12,30 +12,24 @@ namespace nix { -static bool cmpGensByNumber(const Generation & a, const Generation & b) -{ - return a.number < b.number; -} - - /* Parse a generation name of the format `--link'. */ -static int parseName(const string & profileName, const string & name) +static std::optional parseName(const string & profileName, const string & name) { - if (string(name, 0, profileName.size() + 1) != profileName + "-") return -1; + if (string(name, 0, profileName.size() + 1) != profileName + "-") return {}; string s = string(name, profileName.size() + 1); string::size_type p = s.find("-link"); - if (p == string::npos) return -1; - int n; + if (p == string::npos) return {}; + unsigned int n; if (string2Int(string(s, 0, p), n) && n >= 0) return n; else - return -1; + return {}; } -Generations findGenerations(Path profile, int & curGen) +std::pair> findGenerations(Path profile) { Generations gens; @@ -43,30 +37,34 @@ Generations findGenerations(Path profile, int & curGen) auto profileName = std::string(baseNameOf(profile)); for (auto & i : readDirectory(profileDir)) { - int n; - if ((n = parseName(profileName, i.name)) != -1) { - Generation gen; - gen.path = profileDir + "/" + i.name; - gen.number = n; + if (auto n = parseName(profileName, i.name)) { + auto path = profileDir + "/" + i.name; struct stat st; - if (lstat(gen.path.c_str(), &st) != 0) - throw SysError("statting '%1%'", gen.path); - gen.creationTime = st.st_mtime; - gens.push_back(gen); + if (lstat(path.c_str(), &st) != 0) + throw SysError("statting '%1%'", path); + gens.push_back({ + .number = *n, + .path = path, + .creationTime = st.st_mtime + }); } } - gens.sort(cmpGensByNumber); + gens.sort([](const Generation & a, const Generation & b) + { + return a.number < b.number; + }); - curGen = pathExists(profile) + return { + gens, + pathExists(profile) ? parseName(profileName, readLink(profile)) - : -1; - - return gens; + : std::nullopt + }; } -static void makeName(const Path & profile, unsigned int num, +static void makeName(const Path & profile, GenerationNumber num, Path & outLink) { Path prefix = (format("%1%-%2%") % profile % num).str(); @@ -78,10 +76,9 @@ Path createGeneration(ref store, Path profile, Path outPath) { /* The new generation number should be higher than old the previous ones. 
*/ - int dummy; - Generations gens = findGenerations(profile, dummy); + auto [gens, dummy] = findGenerations(profile); - unsigned int num; + GenerationNumber num; if (gens.size() > 0) { Generation last = gens.back(); @@ -121,7 +118,7 @@ static void removeFile(const Path & path) } -void deleteGeneration(const Path & profile, unsigned int gen) +void deleteGeneration(const Path & profile, GenerationNumber gen) { Path generation; makeName(profile, gen, generation); @@ -129,7 +126,7 @@ void deleteGeneration(const Path & profile, unsigned int gen) } -static void deleteGeneration2(const Path & profile, unsigned int gen, bool dryRun) +static void deleteGeneration2(const Path & profile, GenerationNumber gen, bool dryRun) { if (dryRun) printInfo(format("would remove generation %1%") % gen); @@ -140,31 +137,29 @@ static void deleteGeneration2(const Path & profile, unsigned int gen, bool dryRu } -void deleteGenerations(const Path & profile, const std::set & gensToDelete, bool dryRun) +void deleteGenerations(const Path & profile, const std::set & gensToDelete, bool dryRun) { PathLocks lock; lockProfile(lock, profile); - int curGen; - Generations gens = findGenerations(profile, curGen); + auto [gens, curGen] = findGenerations(profile); - if (gensToDelete.find(curGen) != gensToDelete.end()) + if (gensToDelete.count(*curGen)) throw Error("cannot delete current generation of profile %1%'", profile); for (auto & i : gens) { - if (gensToDelete.find(i.number) == gensToDelete.end()) continue; + if (!gensToDelete.count(i.number)) continue; deleteGeneration2(profile, i.number, dryRun); } } -void deleteGenerationsGreaterThan(const Path & profile, int max, bool dryRun) +void deleteGenerationsGreaterThan(const Path & profile, GenerationNumber max, bool dryRun) { PathLocks lock; lockProfile(lock, profile); - int curGen; bool fromCurGen = false; - Generations gens = findGenerations(profile, curGen); + auto [gens, curGen] = findGenerations(profile); for (auto i = gens.rbegin(); i != gens.rend(); ++i) { if (i->number == curGen) { fromCurGen = true; @@ -186,8 +181,7 @@ void deleteOldGenerations(const Path & profile, bool dryRun) PathLocks lock; lockProfile(lock, profile); - int curGen; - Generations gens = findGenerations(profile, curGen); + auto [gens, curGen] = findGenerations(profile); for (auto & i : gens) if (i.number != curGen) @@ -200,8 +194,7 @@ void deleteGenerationsOlderThan(const Path & profile, time_t t, bool dryRun) PathLocks lock; lockProfile(lock, profile); - int curGen; - Generations gens = findGenerations(profile, curGen); + auto [gens, curGen] = findGenerations(profile); bool canDelete = false; for (auto i = gens.rbegin(); i != gens.rend(); ++i) diff --git a/src/libstore/profiles.hh b/src/libstore/profiles.hh index 78645d8b6..abe507f0e 100644 --- a/src/libstore/profiles.hh +++ b/src/libstore/profiles.hh @@ -9,37 +9,32 @@ namespace nix { +typedef unsigned int GenerationNumber; + struct Generation { - int number; + GenerationNumber number; Path path; time_t creationTime; - Generation() - { - number = -1; - } - operator bool() const - { - return number != -1; - } }; -typedef list Generations; +typedef std::list Generations; /* Returns the list of currently present generations for the specified - profile, sorted by generation number. */ -Generations findGenerations(Path profile, int & curGen); + profile, sorted by generation number. Also returns the number of + the current generation. 
*/ +std::pair> findGenerations(Path profile); class LocalFSStore; Path createGeneration(ref store, Path profile, Path outPath); -void deleteGeneration(const Path & profile, unsigned int gen); +void deleteGeneration(const Path & profile, GenerationNumber gen); -void deleteGenerations(const Path & profile, const std::set & gensToDelete, bool dryRun); +void deleteGenerations(const Path & profile, const std::set & gensToDelete, bool dryRun); -void deleteGenerationsGreaterThan(const Path & profile, const int max, bool dryRun); +void deleteGenerationsGreaterThan(const Path & profile, GenerationNumber max, bool dryRun); void deleteOldGenerations(const Path & profile, bool dryRun); diff --git a/src/nix-env/nix-env.cc b/src/nix-env/nix-env.cc index c992b7d74..5795c2c09 100644 --- a/src/nix-env/nix-env.cc +++ b/src/nix-env/nix-env.cc @@ -1208,18 +1208,17 @@ static void opSwitchProfile(Globals & globals, Strings opFlags, Strings opArgs) } -static const int prevGen = -2; +static constexpr GenerationNumber prevGen = std::numeric_limits::max(); -static void switchGeneration(Globals & globals, int dstGen) +static void switchGeneration(Globals & globals, GenerationNumber dstGen) { PathLocks lock; lockProfile(lock, globals.profile); - int curGen; - Generations gens = findGenerations(globals.profile, curGen); + auto [gens, curGen] = findGenerations(globals.profile); - Generation dst; + std::optional dst; for (auto & i : gens) if ((dstGen == prevGen && i.number < curGen) || (dstGen >= 0 && i.number == dstGen)) @@ -1227,18 +1226,16 @@ static void switchGeneration(Globals & globals, int dstGen) if (!dst) { if (dstGen == prevGen) - throw Error("no generation older than the current (%1%) exists", - curGen); + throw Error("no generation older than the current (%1%) exists", curGen.value_or(0)); else throw Error("generation %1% does not exist", dstGen); } - printInfo(format("switching from generation %1% to %2%") - % curGen % dst.number); + printInfo("switching from generation %1% to %2%", curGen.value_or(0), dst->number); if (globals.dryRun) return; - switchLink(globals.profile, dst.path); + switchLink(globals.profile, dst->path); } @@ -1249,7 +1246,7 @@ static void opSwitchGeneration(Globals & globals, Strings opFlags, Strings opArg if (opArgs.size() != 1) throw UsageError("exactly one argument expected"); - int dstGen; + GenerationNumber dstGen; if (!string2Int(opArgs.front(), dstGen)) throw UsageError("expected a generation number"); @@ -1278,8 +1275,7 @@ static void opListGenerations(Globals & globals, Strings opFlags, Strings opArgs PathLocks lock; lockProfile(lock, globals.profile); - int curGen; - Generations gens = findGenerations(globals.profile, curGen); + auto [gens, curGen] = findGenerations(globals.profile); RunPager pager; @@ -1308,14 +1304,14 @@ static void opDeleteGenerations(Globals & globals, Strings opFlags, Strings opAr if(opArgs.front().size() < 2) throw Error("invalid number of generations ‘%1%’", opArgs.front()); string str_max = string(opArgs.front(), 1, opArgs.front().size()); - int max; + GenerationNumber max; if (!string2Int(str_max, max) || max == 0) throw Error("invalid number of generations to keep ‘%1%’", opArgs.front()); deleteGenerationsGreaterThan(globals.profile, max, globals.dryRun); } else { - std::set gens; + std::set gens; for (auto & i : opArgs) { - unsigned int n; + GenerationNumber n; if (!string2Int(i, n)) throw UsageError("invalid generation number '%1%'", i); gens.insert(n); -- cgit v1.2.3 From 3dcca18c30cbc09652f5ac644a9f8750f9ced0c9 Mon Sep 17 00:00:00 2001 From: 
John Ericson Date: Thu, 16 Jul 2020 13:39:03 +0000 Subject: Fix bug in TeeSource We use this to simplify `LocalStore::addToStoreFromDump`. Also, hope I fixed build error with old clang (used in Darwin CI). --- src/libstore/local-store.cc | 14 ++++---------- src/libutil/serialise.hh | 2 +- 2 files changed, 5 insertions(+), 11 deletions(-) diff --git a/src/libstore/local-store.cc b/src/libstore/local-store.cc index b2b5afadd..96d10d9ba 100644 --- a/src/libstore/local-store.cc +++ b/src/libstore/local-store.cc @@ -1047,11 +1047,12 @@ StorePath LocalStore::addToStore(const string & name, const Path & _srcPath, } -StorePath LocalStore::addToStoreFromDump(Source & source, const string & name, +StorePath LocalStore::addToStoreFromDump(Source & source0, const string & name, FileIngestionMethod method, HashType hashAlgo, RepairFlag repair) { /* For computing the store path. */ auto hashSink = std::make_unique(hashAlgo); + TeeSource source { source0, *hashSink }; /* Read the source path into memory, but only if it's up to narBufferSize bytes. If it's larger, write it to a temporary @@ -1078,8 +1079,6 @@ StorePath LocalStore::addToStoreFromDump(Source & source, const string & name, inMemory = true; break; } - /* Start hashing as we get data */ - (*hashSink)((const uint8_t *) dump.data() + oldSize, got); dump.resize(oldSize + got); } @@ -1087,14 +1086,9 @@ StorePath LocalStore::addToStoreFromDump(Source & source, const string & name, Path tempPath; if (!inMemory) { + /* Drain what we pulled so far, and then keep on pulling */ StringSource dumpSource { dump }; - TeeSource rest { source, *hashSink }; - ChainSource bothSource { - .source1 = dumpSource, - /* Continue hashing what's left, but don't rehash what we - already did. */ - .source2 = rest, - }; + ChainSource bothSource { dumpSource, source }; auto tempDir = createTempDir(realStoreDir, "add"); delTempDir = std::make_unique(tempDir); diff --git a/src/libutil/serialise.hh b/src/libutil/serialise.hh index aa6b42597..5d9acf887 100644 --- a/src/libutil/serialise.hh +++ b/src/libutil/serialise.hh @@ -189,7 +189,7 @@ struct TeeSource : Source size_t read(unsigned char * data, size_t len) { size_t n = orig.read(data, len); - sink(data, len); + sink(data, n); return n; } }; -- cgit v1.2.3 From 16c9f6762d082155b967710a5fd3a095937d76ba Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Thu, 16 Jul 2020 17:00:42 +0200 Subject: Add command 'nix profile diff-closure' MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit This shows all changes between generations of a profile. E.g. $ nix profile diff-closures --profile /nix/var/nix/profiles/system Generation 654 -> 655: nix: 2.4pre20200617_5d69bbf → 2.4pre20200701_6ff9aa8, +42.2 KiB Generation 655 -> 656: blender-bin: 2.83.0 → 2.83.1, -294.2 KiB Generation 656 -> 657: curl: 7.68.0 → 7.70.0, +19.1 KiB firmware-linux-nonfree: 2020-01-22 → 2020-05-19, +30827.7 KiB ibus: -21.8 KiB initrd-linux: 5.4.46 → 5.4.49 ... 
--- src/nix/command.hh | 6 +++ src/nix/diff-closures.cc | 100 ++++++++++++++++++++++++++--------------------- src/nix/profile.cc | 43 +++++++++++++++++++- 3 files changed, 104 insertions(+), 45 deletions(-) diff --git a/src/nix/command.hh b/src/nix/command.hh index 1c7413300..856721ebf 100644 --- a/src/nix/command.hh +++ b/src/nix/command.hh @@ -244,4 +244,10 @@ void completeFlakeRefWithFragment( const Strings & defaultFlakeAttrPaths, std::string_view prefix); +void printClosureDiff( + ref store, + const StorePath & beforePath, + const StorePath & afterPath, + std::string_view indent); + } diff --git a/src/nix/diff-closures.cc b/src/nix/diff-closures.cc index 56ddb575b..4199dae0f 100644 --- a/src/nix/diff-closures.cc +++ b/src/nix/diff-closures.cc @@ -6,7 +6,7 @@ #include -using namespace nix; +namespace nix { struct Info { @@ -52,6 +52,60 @@ std::string showVersions(const std::set & versions) return concatStringsSep(", ", versions2); } +void printClosureDiff( + ref store, + const StorePath & beforePath, + const StorePath & afterPath, + std::string_view indent) +{ + auto beforeClosure = getClosureInfo(store, beforePath); + auto afterClosure = getClosureInfo(store, afterPath); + + std::set allNames; + for (auto & [name, _] : beforeClosure) allNames.insert(name); + for (auto & [name, _] : afterClosure) allNames.insert(name); + + for (auto & name : allNames) { + auto & beforeVersions = beforeClosure[name]; + auto & afterVersions = afterClosure[name]; + + auto totalSize = [&](const std::map> & versions) + { + uint64_t sum = 0; + for (auto & [_, paths] : versions) + for (auto & [path, _] : paths) + sum += store->queryPathInfo(path)->narSize; + return sum; + }; + + auto beforeSize = totalSize(beforeVersions); + auto afterSize = totalSize(afterVersions); + auto sizeDelta = (int64_t) afterSize - (int64_t) beforeSize; + auto showDelta = abs(sizeDelta) >= 8 * 1024; + + std::set removed, unchanged; + for (auto & [version, _] : beforeVersions) + if (!afterVersions.count(version)) removed.insert(version); else unchanged.insert(version); + + std::set added; + for (auto & [version, _] : afterVersions) + if (!beforeVersions.count(version)) added.insert(version); + + if (showDelta || !removed.empty() || !added.empty()) { + std::vector items; + if (!removed.empty() || !added.empty()) + items.push_back(fmt("%s → %s", showVersions(removed), showVersions(added))); + if (showDelta) + items.push_back(fmt("%s%+.1f KiB" ANSI_NORMAL, sizeDelta > 0 ? 
ANSI_RED : ANSI_GREEN, sizeDelta / 1024.0)); + std::cout << fmt("%s%s: %s\n", indent, name, concatStringsSep(", ", items)); + } + } +} + +} + +using namespace nix; + struct CmdDiffClosures : SourceExprCommand { std::string _before, _after; @@ -85,49 +139,7 @@ struct CmdDiffClosures : SourceExprCommand auto beforePath = toStorePath(store, Realise::Outputs, operateOn, before); auto after = parseInstallable(store, _after); auto afterPath = toStorePath(store, Realise::Outputs, operateOn, after); - - auto beforeClosure = getClosureInfo(store, beforePath); - auto afterClosure = getClosureInfo(store, afterPath); - - std::set allNames; - for (auto & [name, _] : beforeClosure) allNames.insert(name); - for (auto & [name, _] : afterClosure) allNames.insert(name); - - for (auto & name : allNames) { - auto & beforeVersions = beforeClosure[name]; - auto & afterVersions = afterClosure[name]; - - auto totalSize = [&](const std::map> & versions) - { - uint64_t sum = 0; - for (auto & [_, paths] : versions) - for (auto & [path, _] : paths) - sum += store->queryPathInfo(path)->narSize; - return sum; - }; - - auto beforeSize = totalSize(beforeVersions); - auto afterSize = totalSize(afterVersions); - auto sizeDelta = (int64_t) afterSize - (int64_t) beforeSize; - auto showDelta = abs(sizeDelta) >= 8 * 1024; - - std::set removed, unchanged; - for (auto & [version, _] : beforeVersions) - if (!afterVersions.count(version)) removed.insert(version); else unchanged.insert(version); - - std::set added; - for (auto & [version, _] : afterVersions) - if (!beforeVersions.count(version)) added.insert(version); - - if (showDelta || !removed.empty() || !added.empty()) { - std::vector items; - if (!removed.empty() || !added.empty()) - items.push_back(fmt("%s → %s", showVersions(removed), showVersions(added))); - if (showDelta) - items.push_back(fmt("%s%+.1f KiB" ANSI_NORMAL, sizeDelta > 0 ? 
ANSI_RED : ANSI_GREEN, sizeDelta / 1024.0)); - std::cout << fmt("%s: %s\n", name, concatStringsSep(", ", items)); - } - } + printClosureDiff(store, beforePath, afterPath, ""); } }; diff --git a/src/nix/profile.cc b/src/nix/profile.cc index 307e236d8..729924e3a 100644 --- a/src/nix/profile.cc +++ b/src/nix/profile.cc @@ -7,6 +7,7 @@ #include "builtins/buildenv.hh" #include "flake/flakeref.hh" #include "../nix-env/user-env.hh" +#include "profiles.hh" #include #include @@ -394,6 +395,46 @@ struct CmdProfileInfo : virtual EvalCommand, virtual StoreCommand, MixDefaultPro } }; +struct CmdProfileDiffClosures : virtual EvalCommand, virtual StoreCommand, MixDefaultProfile +{ + std::string description() override + { + return "show the closure difference between each generation of a profile"; + } + + Examples examples() override + { + return { + Example{ + "To show what changed between each generation of the NixOS system profile:", + "nix profile diff-closure --profile /nix/var/nix/profiles/system" + }, + }; + } + + void run(ref store) override + { + auto [gens, curGen] = findGenerations(*profile); + + std::optional prevGen; + bool first = true; + + for (auto & gen : gens) { + if (prevGen) { + if (!first) std::cout << "\n"; + first = false; + std::cout << fmt("Generation %d -> %d:\n", prevGen->number, gen.number); + printClosureDiff(store, + store->followLinksToStorePath(prevGen->path), + store->followLinksToStorePath(gen.path), + " "); + } + + prevGen = gen; + } + } +}; + struct CmdProfile : virtual MultiCommand, virtual Command { CmdProfile() @@ -402,6 +443,7 @@ struct CmdProfile : virtual MultiCommand, virtual Command {"remove", []() { return make_ref(); }}, {"upgrade", []() { return make_ref(); }}, {"info", []() { return make_ref(); }}, + {"diff-closures", []() { return make_ref(); }}, }) { } @@ -425,4 +467,3 @@ struct CmdProfile : virtual MultiCommand, virtual Command }; static auto r1 = registerCommand("profile"); - -- cgit v1.2.3 From 52c8be38e0563c964857491afef01eb2f543d0de Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Fri, 17 Jul 2020 12:36:12 +0200 Subject: nix profile diff-closures: Don't inherit EvalCommand --- src/nix/profile.cc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/nix/profile.cc b/src/nix/profile.cc index 729924e3a..c6cd88c49 100644 --- a/src/nix/profile.cc +++ b/src/nix/profile.cc @@ -395,7 +395,7 @@ struct CmdProfileInfo : virtual EvalCommand, virtual StoreCommand, MixDefaultPro } }; -struct CmdProfileDiffClosures : virtual EvalCommand, virtual StoreCommand, MixDefaultProfile +struct CmdProfileDiffClosures : virtual StoreCommand, MixDefaultProfile { std::string description() override { -- cgit v1.2.3 From 17f75f9cc4dd70e3e6de7e266ef2bd18a0da310b Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Fri, 17 Jul 2020 14:54:21 +0200 Subject: parseFlakeRef(): Only search for the top-level directory for CLI flakerefs --- src/libexpr/flake/flakeref.cc | 93 +++++++++++++++++++++++-------------------- tests/flakes.sh | 9 ++--- 2 files changed, 52 insertions(+), 50 deletions(-) diff --git a/src/libexpr/flake/flakeref.cc b/src/libexpr/flake/flakeref.cc index 701546671..6363446f6 100644 --- a/src/libexpr/flake/flakeref.cc +++ b/src/libexpr/flake/flakeref.cc @@ -102,56 +102,61 @@ std::pair parseFlakeRefWithFragment( percentDecode(std::string(match[6]))); } - /* Check if 'url' is a path (either absolute or relative to - 'baseDir'). If so, search upward to the root of the repo - (i.e. the directory containing .git). 
*/ - else if (std::regex_match(url, match, pathUrlRegex)) { std::string path = match[1]; - if (!baseDir && !hasPrefix(path, "/")) - throw BadURL("flake reference '%s' is not an absolute path", url); - path = absPath(path, baseDir, true); - - if (!S_ISDIR(lstat(path).st_mode)) - throw BadURL("path '%s' is not a flake (because it's not a directory)", path); - - if (!allowMissing && !pathExists(path + "/flake.nix")) - throw BadURL("path '%s' is not a flake (because it doesn't contain a 'flake.nix' file)", path); - - auto fragment = percentDecode(std::string(match[3])); - - auto flakeRoot = path; - std::string subdir; - - while (flakeRoot != "/") { - if (pathExists(flakeRoot + "/.git")) { - auto base = std::string("git+file://") + flakeRoot; - - auto parsedURL = ParsedURL{ - .url = base, // FIXME - .base = base, - .scheme = "git+file", - .authority = "", - .path = flakeRoot, - .query = decodeQuery(match[2]), - }; - - if (subdir != "") { - if (parsedURL.query.count("dir")) - throw Error("flake URL '%s' has an inconsistent 'dir' parameter", url); - parsedURL.query.insert_or_assign("dir", subdir); - } + std::string fragment = percentDecode(std::string(match[3])); + + if (baseDir) { + /* Check if 'url' is a path (either absolute or relative + to 'baseDir'). If so, search upward to the root of the + repo (i.e. the directory containing .git). */ + + path = absPath(path, baseDir, true); + + if (!S_ISDIR(lstat(path).st_mode)) + throw BadURL("path '%s' is not a flake (because it's not a directory)", path); + + if (!allowMissing && !pathExists(path + "/flake.nix")) + throw BadURL("path '%s' is not a flake (because it doesn't contain a 'flake.nix' file)", path); - if (pathExists(flakeRoot + "/.git/shallow")) - parsedURL.query.insert_or_assign("shallow", "1"); + auto flakeRoot = path; + std::string subdir; + + while (flakeRoot != "/") { + if (pathExists(flakeRoot + "/.git")) { + auto base = std::string("git+file://") + flakeRoot; + + auto parsedURL = ParsedURL{ + .url = base, // FIXME + .base = base, + .scheme = "git+file", + .authority = "", + .path = flakeRoot, + .query = decodeQuery(match[2]), + }; + + if (subdir != "") { + if (parsedURL.query.count("dir")) + throw Error("flake URL '%s' has an inconsistent 'dir' parameter", url); + parsedURL.query.insert_or_assign("dir", subdir); + } + + if (pathExists(flakeRoot + "/.git/shallow")) + parsedURL.query.insert_or_assign("shallow", "1"); + + return std::make_pair( + FlakeRef(Input::fromURL(parsedURL), get(parsedURL.query, "dir").value_or("")), + fragment); + } - return std::make_pair( - FlakeRef(Input::fromURL(parsedURL), get(parsedURL.query, "dir").value_or("")), - fragment); + subdir = std::string(baseNameOf(flakeRoot)) + (subdir.empty() ? "" : "/" + subdir); + flakeRoot = dirOf(flakeRoot); } - subdir = std::string(baseNameOf(flakeRoot)) + (subdir.empty() ? 
"" : "/" + subdir); - flakeRoot = dirOf(flakeRoot); + } else { + if (!hasPrefix(path, "/")) + throw BadURL("flake reference '%s' is not an absolute path", url); + path = canonPath(path); } fetchers::Attrs attrs; diff --git a/tests/flakes.sh b/tests/flakes.sh index 25e1847e1..5aec563ac 100644 --- a/tests/flakes.sh +++ b/tests/flakes.sh @@ -18,7 +18,6 @@ registry=$TEST_ROOT/registry.json flake1Dir=$TEST_ROOT/flake1 flake2Dir=$TEST_ROOT/flake2 flake3Dir=$TEST_ROOT/flake3 -flake4Dir=$TEST_ROOT/flake4 flake5Dir=$TEST_ROOT/flake5 flake6Dir=$TEST_ROOT/flake6 flake7Dir=$TEST_ROOT/flake7 @@ -390,14 +389,12 @@ cat > $flake3Dir/flake.nix < Date: Fri, 17 Jul 2020 14:57:22 +0000 Subject: Add back flake-compat shell.nix This was removed in the merge commit adf2fbbdc2c94644b0d1023d844c7dc0e485a20f. I think this was a mistake that occurred when resolving a conflict. --- shell.nix | 3 +++ 1 file changed, 3 insertions(+) create mode 100644 shell.nix diff --git a/shell.nix b/shell.nix new file mode 100644 index 000000000..330df0ab6 --- /dev/null +++ b/shell.nix @@ -0,0 +1,3 @@ +(import (fetchTarball https://github.com/edolstra/flake-compat/archive/master.tar.gz) { + src = ./.; +}).shellNix -- cgit v1.2.3 From bc73590151bff82b03077c34f0c5aa9f84c89e63 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Lars=20M=C3=BChmel?= Date: Fri, 17 Jul 2020 17:35:59 +0200 Subject: nix edit: call restoreSignals() before `execvp`-ing the $EDITOR Currently resizing of the terminal doesn't play nicely with nix edit when using kakoune as the editor, as it relies on the SIGWINCH signal which is trapped by nix. How this is not a problem with e.g. vim is beyond me. Virtually all other exec* calls are following a call to restoreSignals(). This commit adds this behavior to nix edit as well. --- src/nix/edit.cc | 1 + 1 file changed, 1 insertion(+) diff --git a/src/nix/edit.cc b/src/nix/edit.cc index dc9775635..378a3739c 100644 --- a/src/nix/edit.cc +++ b/src/nix/edit.cc @@ -45,6 +45,7 @@ struct CmdEdit : InstallableCommand auto args = editorFor(pos); + restoreSignals(); execvp(args.front().c_str(), stringsToCharPtrs(args).data()); std::string command; -- cgit v1.2.3 From 5526683ad3e259f1c02461c48c7e109de185383d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?J=C3=B6rg=20Thalheim?= Date: Sat, 18 Jul 2020 07:58:37 +0100 Subject: fix make's impurity on /bin/sh This is important when using tooling like BEAR to generate compilation database since the used glibc version needs to match for LD_PRELOAD to work. It might be also beneficial when building on systems other than NixOS with nix develop since /bin/sh might be not bash (which is what all nix devs use for testing). This fix is not perfect because Makefile.config.in itself is also build with make but strictly better than the status quo. 
--- Makefile.config.in | 1 + 1 file changed, 1 insertion(+) diff --git a/Makefile.config.in b/Makefile.config.in index b632444e8..5c245b8e9 100644 --- a/Makefile.config.in +++ b/Makefile.config.in @@ -19,6 +19,7 @@ LIBLZMA_LIBS = @LIBLZMA_LIBS@ OPENSSL_LIBS = @OPENSSL_LIBS@ PACKAGE_NAME = @PACKAGE_NAME@ PACKAGE_VERSION = @PACKAGE_VERSION@ +SHELL = @bash@ SODIUM_LIBS = @SODIUM_LIBS@ SQLITE3_LIBS = @SQLITE3_LIBS@ bash = @bash@ -- cgit v1.2.3 From 3294b0a4b05b8bfa9b8aa9be587dd46a67705864 Mon Sep 17 00:00:00 2001 From: Alex Kovar Date: Sat, 18 Jul 2020 10:23:43 -0500 Subject: Add newline to profile sourcing line #3393 --- scripts/install-nix-from-closure.sh | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/scripts/install-nix-from-closure.sh b/scripts/install-nix-from-closure.sh index 5824c2217..6fb0beb2b 100644 --- a/scripts/install-nix-from-closure.sh +++ b/scripts/install-nix-from-closure.sh @@ -207,7 +207,7 @@ if [ -z "$NIX_INSTALLER_NO_MODIFY_PROFILE" ]; then if [ -w "$fn" ]; then if ! grep -q "$p" "$fn"; then echo "modifying $fn..." >&2 - echo "if [ -e $p ]; then . $p; fi # added by Nix installer" >> "$fn" + echo -e "\nif [ -e $p ]; then . $p; fi # added by Nix installer" >> "$fn" fi added=1 break @@ -218,7 +218,7 @@ if [ -z "$NIX_INSTALLER_NO_MODIFY_PROFILE" ]; then if [ -w "$fn" ]; then if ! grep -q "$p" "$fn"; then echo "modifying $fn..." >&2 - echo "if [ -e $p ]; then . $p; fi # added by Nix installer" >> "$fn" + echo -e "\nif [ -e $p ]; then . $p; fi # added by Nix installer" >> "$fn" fi added=1 break -- cgit v1.2.3 From f0100f55909d00f15bfdbef89d6cdcf6c38b2f59 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?J=C3=B6rg=20Thalheim?= Date: Sat, 18 Jul 2020 08:48:13 +0100 Subject: README: improve development docs --- README.md | 25 +++---------------------- doc/manual/hacking.xml | 45 +++++++++++++++++++++++++++++++++++++++------ 2 files changed, 42 insertions(+), 28 deletions(-) diff --git a/README.md b/README.md index a1588284d..e5f7a694f 100644 --- a/README.md +++ b/README.md @@ -12,7 +12,7 @@ for more details. On Linux and macOS the easiest way to Install Nix is to run the following shell command (as a user other than root): -``` +```console $ curl -L https://nixos.org/nix/install | sh ``` @@ -20,27 +20,8 @@ Information on additional installation methods is available on the [Nix download ## Building And Developing -### Building Nix - -You can build Nix using one of the targets provided by [release.nix](./release.nix): - -``` -$ nix-build ./release.nix -A build.aarch64-linux -$ nix-build ./release.nix -A build.x86_64-darwin -$ nix-build ./release.nix -A build.i686-linux -$ nix-build ./release.nix -A build.x86_64-linux -``` - -### Development Environment - -You can use the provided `shell.nix` to get a working development environment: - -``` -$ nix-shell -$ ./bootstrap.sh -$ ./configure -$ make -``` +See our [Hacking guide](hydra.nixos.org/job/nix/master/build.x86_64-linux/latest/download-by-type/doc/manual#chap-hacking) in our manual for instruction on how to +build nix from source with nix-build or how to get a development environment. ## Additional Resources diff --git a/doc/manual/hacking.xml b/doc/manual/hacking.xml index b671811d3..c0ece76b6 100644 --- a/doc/manual/hacking.xml +++ b/doc/manual/hacking.xml @@ -4,18 +4,37 @@ Hacking -This section provides some notes on how to hack on Nix. To get +This section provides some notes on how to hack on Nix. 
To get the latest version of Nix from GitHub: -$ git clone git://github.com/NixOS/nix.git +$ git clone https://github.com/NixOS/nix.git $ cd nix -To build it and its dependencies: +To build Nix for the current operating system/architecture use + + +$ nix-build + + +or if you have a flakes-enabled nix: + + +$ nix build + + +This will build defaultPackage attribute defined in the flake.nix file. + +To build for other platforms add one of the following suffixes to it: aarch64-linux, +i686-linux, x86_64-darwin, x86_64-linux. + +i.e. + -$ nix-build release.nix -A build.x86_64-linux +nix-build -A defaultPackage.x86_64-linux + To build all dependencies and start a shell in which all @@ -27,13 +46,27 @@ $ nix-shell To build Nix itself in this shell: [nix-shell]$ ./bootstrap.sh -[nix-shell]$ configurePhase -[nix-shell]$ make +[nix-shell]$ ./configure $configureFlags +[nix-shell]$ make -j $NIX_BUILD_CORES To install it in $(pwd)/inst and test it: [nix-shell]$ make install [nix-shell]$ make installcheck +[nix-shell]$ ./inst/bin/nix --version +nix (Nix) 2.4 + + +If you have a flakes-enabled nix you can replace: + + +$ nix-shell + + +by: + + +$ nix develop -- cgit v1.2.3 From ac2fc7ba1fe6b64ec535e4ce63d13fcadf7fdba7 Mon Sep 17 00:00:00 2001 From: John Ericson Date: Mon, 20 Jul 2020 11:29:46 -0400 Subject: Apply suggestions from code review Co-authored-by: Eelco Dolstra --- src/libstore/store-api.cc | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/src/libstore/store-api.cc b/src/libstore/store-api.cc index 5c8dddba5..6c0a61766 100644 --- a/src/libstore/store-api.cc +++ b/src/libstore/store-api.cc @@ -234,7 +234,7 @@ ValidPathInfo Store::addToStoreSlow(std::string_view name, const Path & srcPath, TeeSink sinkIfNar { narHashSink, caHashSink }; - /* We use the tee sink if we need to hash he nar twice */ + /* We use the tee sink if we need to hash the nar twice */ auto & sink = method == FileIngestionMethod::Recursive && hashAlgo != htSHA256 ? static_cast(sinkIfNar) : narHashSink; @@ -250,7 +250,11 @@ ValidPathInfo Store::addToStoreSlow(std::string_view name, const Path & srcPath, ? fileSink : blank; - parseDump(parseSink, tapped); + parseDump( + parseSink, + method == FileIngestionMethod::Recursive && hashAlgo == htSHA256 + ? 
*fileSource // don't need to hash twice if we just can use the `narHash` twice + : tapped); auto [narHash, narSize] = narHashSink.finish(); -- cgit v1.2.3 From 6633605341c6e01bc72b8311e478ed9932719e7e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?J=C3=B6rg=20Thalheim?= Date: Mon, 20 Jul 2020 22:30:39 +0100 Subject: Update doc/manual/hacking.xml Co-authored-by: Eelco Dolstra --- doc/manual/hacking.xml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/manual/hacking.xml b/doc/manual/hacking.xml index c0ece76b6..d25d4b84a 100644 --- a/doc/manual/hacking.xml +++ b/doc/manual/hacking.xml @@ -12,7 +12,7 @@ $ cd nix -To build Nix for the current operating system/architecture use +To build Nix for the current operating system/architecture use $ nix-build -- cgit v1.2.3 From 9aae179f34ec2f38167585c07f18a8ab8acefafb Mon Sep 17 00:00:00 2001 From: Carlo Nucera Date: Mon, 20 Jul 2020 20:18:12 -0400 Subject: Correct bug, thoroughly document addToStoreSlow --- src/libstore/store-api.cc | 62 ++++++++++++++++++++++++++++++++++------------- 1 file changed, 45 insertions(+), 17 deletions(-) diff --git a/src/libstore/store-api.cc b/src/libstore/store-api.cc index 6c0a61766..14661722d 100644 --- a/src/libstore/store-api.cc +++ b/src/libstore/store-api.cc @@ -222,40 +222,68 @@ StorePath Store::computeStorePathForText(const string & name, const string & s, } +/* +The aim of this function is to compute in one pass the correct ValidPathInfo for +the files that we are trying to add to the store. To accomplish that in one +pass, given the different kind of inputs that we can take (normal nar archives, +nar archives with non SHA-256 hashes, and flat files), we set up a net of sinks +and aliases. Also, since the dataflow is obfuscated by this, we include here a +graphviz diagram: + +digraph graphname { + node [shape=box] + fileSource -> narSink + narSink [style=dashed] + narSink -> unsualHashTee [style = dashed, label = "Recursive && !SHA-256"] + narSink -> narHashSink [style = dashed, label = "else"] + unsualHashTee -> narHashSink + unsualHashTee -> caHashSink + fileSource -> parseSink + parseSink [style=dashed] + parseSink-> fileSink [style = dashed, label = "Flat"] + parseSink -> blank [style = dashed, label = "Recursive"] + fileSink -> caHashSink +} +*/ ValidPathInfo Store::addToStoreSlow(std::string_view name, const Path & srcPath, FileIngestionMethod method, HashType hashAlgo, std::optional expectedCAHash) { - /* FIXME: inefficient: we're reading/hashing 'tmpFile' two - times. */ HashSink narHashSink { htSHA256 }; HashSink caHashSink { hashAlgo }; - RetrieveRegularNARSink fileSink { caHashSink }; - TeeSink sinkIfNar { narHashSink, caHashSink }; + /* Note that fileSink and unusualHashTee must be mutually exclusive, since + they both write to caHashSink. Note that that requisite is currently true + because the former is only used in the flat case. */ + RetrieveRegularNARSink fileSink { caHashSink }; + TeeSink unusualHashTee { narHashSink, caHashSink }; - /* We use the tee sink if we need to hash the nar twice */ - auto & sink = method == FileIngestionMethod::Recursive && hashAlgo != htSHA256 - ? static_cast(sinkIfNar) + auto & narSink = method == FileIngestionMethod::Recursive && hashAlgo != htSHA256 + ? static_cast(unusualHashTee) : narHashSink; - auto fileSource = sinkToSource([&](Sink & sink) { - dumpPath(srcPath, sink); + /* Functionally, this means that fileSource will yield the content of + srcPath. The fact that we use scratchpadSink as a temporary buffer here + is an implementation detail. 
*/ + auto fileSource = sinkToSource([&](Sink & scratchpadSink) { + dumpPath(srcPath, scratchpadSink); }); - TeeSource tapped { *fileSource, sink }; + /* tapped provides the same data as fileSource, but we also write all the + information to narSink. */ + TeeSource tapped { *fileSource, narSink }; ParseSink blank; auto & parseSink = method == FileIngestionMethod::Flat ? fileSink : blank; - parseDump( - parseSink, - method == FileIngestionMethod::Recursive && hashAlgo == htSHA256 - ? *fileSource // don't need to hash twice if we just can use the `narHash` twice - : tapped); + /* The information that flows from tapped (besides being replicated in + narSink), is now put in parseSink. */ + parseDump(parseSink, tapped); + /* We extract the result of the computation from the sink by calling + finish. */ auto [narHash, narSize] = narHashSink.finish(); auto hash = method == FileIngestionMethod::Recursive && hashAlgo == htSHA256 @@ -271,8 +299,8 @@ ValidPathInfo Store::addToStoreSlow(std::string_view name, const Path & srcPath, info.ca = FixedOutputHash { .method = method, .hash = hash }; if (!isValidPath(info.path)) { - auto source = sinkToSource([&](Sink & sink) { - dumpPath(srcPath, sink); + auto source = sinkToSource([&](Sink & scratchpadSink) { + dumpPath(srcPath, scratchpadSink); }); addToStore(info, *source); } -- cgit v1.2.3
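As a supplementary illustration of the dataflow documented in the last patch: the point of the sink/tee arrangement in addToStoreSlow() is that the input is read exactly once while feeding one or, when an unusual hash is requested, two hash computations. Below is a minimal standalone C++ sketch of that tee idea; it does not use Nix's actual Sink/Source/TeeSink classes, and the byte-counting "hash sinks" and the needBothHashes flag are purely illustrative stand-ins that only mirror the shape of the dataflow.

    // Standalone sketch: a "tee" sink duplicates every chunk it receives to
    // several child sinks, so the data stream only has to be consumed once
    // even when two different digests over it are needed.
    #include <functional>
    #include <iostream>
    #include <string>
    #include <vector>

    using Chunk = std::string;
    using Sink = std::function<void(const Chunk &)>;   // anything that accepts data

    // Forward each chunk to all children (the role TeeSink plays in the patch).
    Sink teeSink(std::vector<Sink> children)
    {
        return [children](const Chunk & data) {
            for (auto & child : children) child(data);
        };
    }

    int main()
    {
        size_t narBytes = 0, caBytes = 0;

        // Stand-ins for the two hash sinks; they just count bytes here.
        Sink narHashSink = [&](const Chunk & d) { narBytes += d.size(); };
        Sink caHashSink  = [&](const Chunk & d) { caBytes += d.size(); };

        // Only tee when both digests are really needed, analogous to
        // `method == Recursive && hashAlgo != htSHA256` in the patch.
        bool needBothHashes = true;
        Sink sink = needBothHashes ? teeSink({narHashSink, caHashSink}) : narHashSink;

        for (const Chunk & chunk : {Chunk("nar header"), Chunk("file contents")})
            sink(chunk);   // the stream is consumed exactly once

        std::cout << narBytes << " bytes seen by the NAR hash, "
                  << caBytes << " by the CA hash\n";
    }

With needBothHashes set to false the data flows only into narHashSink, which corresponds to the recursive SHA-256 fast path described in the comment block added by the patch.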