Diffstat (limited to 'src')
-rw-r--r--  src/libexpr/flake/flakeref.cc    93
-rw-r--r--  src/libstore/build.cc             2
-rw-r--r--  src/libstore/daemon.cc           62
-rw-r--r--  src/libstore/export-import.cc    12
-rw-r--r--  src/libstore/local-store.cc     172
-rw-r--r--  src/libstore/local-store.hh       2
-rw-r--r--  src/libstore/profiles.cc         81
-rw-r--r--  src/libstore/profiles.hh         25
-rw-r--r--  src/libstore/store-api.cc        73
-rw-r--r--  src/libstore/store-api.hh         2
-rw-r--r--  src/libutil/archive.hh           25
-rw-r--r--  src/libutil/serialise.cc         13
-rw-r--r--  src/libutil/serialise.hh         15
-rw-r--r--  src/nix-env/nix-env.cc           28
-rw-r--r--  src/nix/command.hh                6
-rw-r--r--  src/nix/diff-closures.cc        100
-rw-r--r--  src/nix/edit.cc                   1
-rw-r--r--  src/nix/profile.cc               43
18 files changed, 414 insertions(+), 341 deletions(-)
diff --git a/src/libexpr/flake/flakeref.cc b/src/libexpr/flake/flakeref.cc
index 701546671..6363446f6 100644
--- a/src/libexpr/flake/flakeref.cc
+++ b/src/libexpr/flake/flakeref.cc
@@ -102,56 +102,61 @@ std::pair<FlakeRef, std::string> parseFlakeRefWithFragment(
percentDecode(std::string(match[6])));
}
- /* Check if 'url' is a path (either absolute or relative to
- 'baseDir'). If so, search upward to the root of the repo
- (i.e. the directory containing .git). */
-
else if (std::regex_match(url, match, pathUrlRegex)) {
std::string path = match[1];
- if (!baseDir && !hasPrefix(path, "/"))
- throw BadURL("flake reference '%s' is not an absolute path", url);
- path = absPath(path, baseDir, true);
-
- if (!S_ISDIR(lstat(path).st_mode))
- throw BadURL("path '%s' is not a flake (because it's not a directory)", path);
-
- if (!allowMissing && !pathExists(path + "/flake.nix"))
- throw BadURL("path '%s' is not a flake (because it doesn't contain a 'flake.nix' file)", path);
-
- auto fragment = percentDecode(std::string(match[3]));
-
- auto flakeRoot = path;
- std::string subdir;
-
- while (flakeRoot != "/") {
- if (pathExists(flakeRoot + "/.git")) {
- auto base = std::string("git+file://") + flakeRoot;
-
- auto parsedURL = ParsedURL{
- .url = base, // FIXME
- .base = base,
- .scheme = "git+file",
- .authority = "",
- .path = flakeRoot,
- .query = decodeQuery(match[2]),
- };
-
- if (subdir != "") {
- if (parsedURL.query.count("dir"))
- throw Error("flake URL '%s' has an inconsistent 'dir' parameter", url);
- parsedURL.query.insert_or_assign("dir", subdir);
- }
+ std::string fragment = percentDecode(std::string(match[3]));
+
+ if (baseDir) {
+ /* Check if 'url' is a path (either absolute or relative
+ to 'baseDir'). If so, search upward to the root of the
+ repo (i.e. the directory containing .git). */
+
+ path = absPath(path, baseDir, true);
+
+ if (!S_ISDIR(lstat(path).st_mode))
+ throw BadURL("path '%s' is not a flake (because it's not a directory)", path);
+
+ if (!allowMissing && !pathExists(path + "/flake.nix"))
+ throw BadURL("path '%s' is not a flake (because it doesn't contain a 'flake.nix' file)", path);
- if (pathExists(flakeRoot + "/.git/shallow"))
- parsedURL.query.insert_or_assign("shallow", "1");
+ auto flakeRoot = path;
+ std::string subdir;
+
+ while (flakeRoot != "/") {
+ if (pathExists(flakeRoot + "/.git")) {
+ auto base = std::string("git+file://") + flakeRoot;
+
+ auto parsedURL = ParsedURL{
+ .url = base, // FIXME
+ .base = base,
+ .scheme = "git+file",
+ .authority = "",
+ .path = flakeRoot,
+ .query = decodeQuery(match[2]),
+ };
+
+ if (subdir != "") {
+ if (parsedURL.query.count("dir"))
+ throw Error("flake URL '%s' has an inconsistent 'dir' parameter", url);
+ parsedURL.query.insert_or_assign("dir", subdir);
+ }
+
+ if (pathExists(flakeRoot + "/.git/shallow"))
+ parsedURL.query.insert_or_assign("shallow", "1");
+
+ return std::make_pair(
+ FlakeRef(Input::fromURL(parsedURL), get(parsedURL.query, "dir").value_or("")),
+ fragment);
+ }
- return std::make_pair(
- FlakeRef(Input::fromURL(parsedURL), get(parsedURL.query, "dir").value_or("")),
- fragment);
+ subdir = std::string(baseNameOf(flakeRoot)) + (subdir.empty() ? "" : "/" + subdir);
+ flakeRoot = dirOf(flakeRoot);
}
- subdir = std::string(baseNameOf(flakeRoot)) + (subdir.empty() ? "" : "/" + subdir);
- flakeRoot = dirOf(flakeRoot);
+ } else {
+ if (!hasPrefix(path, "/"))
+ throw BadURL("flake reference '%s' is not an absolute path", url);
+ path = canonPath(path);
}
fetchers::Attrs attrs;
diff --git a/src/libstore/build.cc b/src/libstore/build.cc
index 0c128d757..7d82ef09b 100644
--- a/src/libstore/build.cc
+++ b/src/libstore/build.cc
@@ -2774,7 +2774,7 @@ struct RestrictedStore : public LocalFSStore
goal.addDependency(info.path);
}
- StorePath addToStoreFromDump(const string & dump, const string & name,
+ StorePath addToStoreFromDump(Source & dump, const string & name,
FileIngestionMethod method = FileIngestionMethod::Recursive, HashType hashAlgo = htSHA256, RepairFlag repair = NoRepair) override
{
auto path = next->addToStoreFromDump(dump, name, method, hashAlgo, repair);
diff --git a/src/libstore/daemon.cc b/src/libstore/daemon.cc
index 33b6ab9b1..9e138e8d9 100644
--- a/src/libstore/daemon.cc
+++ b/src/libstore/daemon.cc
@@ -173,31 +173,6 @@ struct TunnelSource : BufferedSource
}
};
-/* If the NAR archive contains a single file at top-level, then save
- the contents of the file to `s'. Otherwise barf. */
-struct RetrieveRegularNARSink : ParseSink
-{
- bool regular;
- string s;
-
- RetrieveRegularNARSink() : regular(true) { }
-
- void createDirectory(const Path & path)
- {
- regular = false;
- }
-
- void receiveContents(unsigned char * data, unsigned int len)
- {
- s.append((const char *) data, len);
- }
-
- void createSymlink(const Path & path, const string & target)
- {
- regular = false;
- }
-};
-
struct ClientSettings
{
bool keepFailed;
@@ -375,25 +350,28 @@ static void performOp(TunnelLogger * logger, ref<Store> store,
}
case wopAddToStore: {
- std::string s, baseName;
+ HashType hashAlgo;
+ std::string baseName;
FileIngestionMethod method;
{
- bool fixed; uint8_t recursive;
- from >> baseName >> fixed /* obsolete */ >> recursive >> s;
+ bool fixed;
+ uint8_t recursive;
+ std::string hashAlgoRaw;
+ from >> baseName >> fixed /* obsolete */ >> recursive >> hashAlgoRaw;
if (recursive > (uint8_t) FileIngestionMethod::Recursive)
throw Error("unsupported FileIngestionMethod with value of %i; you may need to upgrade nix-daemon", recursive);
method = FileIngestionMethod { recursive };
/* Compatibility hack. */
if (!fixed) {
- s = "sha256";
+ hashAlgoRaw = "sha256";
method = FileIngestionMethod::Recursive;
}
+ hashAlgo = parseHashType(hashAlgoRaw);
}
- HashType hashAlgo = parseHashType(s);
- StringSink savedNAR;
- TeeSource savedNARSource(from, savedNAR);
- RetrieveRegularNARSink savedRegular;
+ StringSink saved;
+ TeeSource savedNARSource(from, saved);
+ RetrieveRegularNARSink savedRegular { saved };
if (method == FileIngestionMethod::Recursive) {
/* Get the entire NAR dump from the client and save it to
@@ -407,11 +385,9 @@ static void performOp(TunnelLogger * logger, ref<Store> store,
logger->startWork();
if (!savedRegular.regular) throw Error("regular file expected");
- auto path = store->addToStoreFromDump(
- method == FileIngestionMethod::Recursive ? *savedNAR.s : savedRegular.s,
- baseName,
- method,
- hashAlgo);
+ // FIXME: try to stream directly from `from`.
+ StringSource dumpSource { *saved.s };
+ auto path = store->addToStoreFromDump(dumpSource, baseName, method, hashAlgo);
logger->stopWork();
to << store->printStorePath(path);
@@ -727,15 +703,15 @@ static void performOp(TunnelLogger * logger, ref<Store> store,
if (!trusted)
info.ultimate = false;
- std::string saved;
std::unique_ptr<Source> source;
if (GET_PROTOCOL_MINOR(clientVersion) >= 21)
source = std::make_unique<TunnelSource>(from, to);
else {
- TeeParseSink tee(from);
- parseDump(tee, tee.source);
- saved = std::move(*tee.saved.s);
- source = std::make_unique<StringSource>(saved);
+ StringSink saved;
+ TeeSource tee { from, saved };
+ ParseSink ether;
+ parseDump(ether, tee);
+ source = std::make_unique<StringSource>(std::move(*saved.s));
}
logger->startWork();
diff --git a/src/libstore/export-import.cc b/src/libstore/export-import.cc
index e0622fb31..a0fc22264 100644
--- a/src/libstore/export-import.cc
+++ b/src/libstore/export-import.cc
@@ -60,8 +60,10 @@ StorePaths Store::importPaths(Source & source, CheckSigsFlag checkSigs)
if (n != 1) throw Error("input doesn't look like something created by 'nix-store --export'");
/* Extract the NAR from the source. */
- TeeParseSink tee(source);
- parseDump(tee, tee.source);
+ StringSink saved;
+ TeeSource tee { source, saved };
+ ParseSink ether;
+ parseDump(ether, tee);
uint32_t magic = readInt(source);
if (magic != exportMagic)
@@ -77,15 +79,15 @@ StorePaths Store::importPaths(Source & source, CheckSigsFlag checkSigs)
if (deriver != "")
info.deriver = parseStorePath(deriver);
- info.narHash = hashString(htSHA256, *tee.saved.s);
- info.narSize = tee.saved.s->size();
+ info.narHash = hashString(htSHA256, *saved.s);
+ info.narSize = saved.s->size();
// Ignore optional legacy signature.
if (readInt(source) == 1)
readString(source);
// Can't use underlying source, which would have been exhausted
- auto source = StringSource { *tee.saved.s };
+ auto source = StringSource { *saved.s };
addToStore(info, source, NoRepair, checkSigs);
res.push_back(info.path);
diff --git a/src/libstore/local-store.cc b/src/libstore/local-store.cc
index 135419af7..b053d9b76 100644
--- a/src/libstore/local-store.cc
+++ b/src/libstore/local-store.cc
@@ -1033,82 +1033,26 @@ void LocalStore::addToStore(const ValidPathInfo & info, Source & source,
}
-StorePath LocalStore::addToStoreFromDump(const string & dump, const string & name,
- FileIngestionMethod method, HashType hashAlgo, RepairFlag repair)
-{
- Hash h = hashString(hashAlgo, dump);
-
- auto dstPath = makeFixedOutputPath(method, h, name);
-
- addTempRoot(dstPath);
-
- if (repair || !isValidPath(dstPath)) {
-
- /* The first check above is an optimisation to prevent
- unnecessary lock acquisition. */
-
- auto realPath = Store::toRealPath(dstPath);
-
- PathLocks outputLock({realPath});
-
- if (repair || !isValidPath(dstPath)) {
-
- deletePath(realPath);
-
- autoGC();
-
- if (method == FileIngestionMethod::Recursive) {
- StringSource source(dump);
- restorePath(realPath, source);
- } else
- writeFile(realPath, dump);
-
- canonicalisePathMetaData(realPath, -1);
-
- /* Register the SHA-256 hash of the NAR serialisation of
- the path in the database. We may just have computed it
- above (if called with recursive == true and hashAlgo ==
- sha256); otherwise, compute it here. */
- HashResult hash = method == FileIngestionMethod::Recursive
- ? HashResult {
- hashAlgo == htSHA256 ? h : hashString(htSHA256, dump),
- dump.size(),
- }
- : hashPath(htSHA256, realPath);
-
- optimisePath(realPath); // FIXME: combine with hashPath()
-
- ValidPathInfo info(dstPath);
- info.narHash = hash.first;
- info.narSize = hash.second;
- info.ca = FixedOutputHash { .method = method, .hash = h };
- registerValidPath(info);
- }
-
- outputLock.setDeletion(true);
- }
-
- return dstPath;
-}
-
-
StorePath LocalStore::addToStore(const string & name, const Path & _srcPath,
FileIngestionMethod method, HashType hashAlgo, PathFilter & filter, RepairFlag repair)
{
Path srcPath(absPath(_srcPath));
+ auto source = sinkToSource([&](Sink & sink) {
+ if (method == FileIngestionMethod::Recursive)
+ dumpPath(srcPath, sink, filter);
+ else
+ readFile(srcPath, sink);
+ });
+ return addToStoreFromDump(*source, name, method, hashAlgo, repair);
+}
- if (method != FileIngestionMethod::Recursive)
- return addToStoreFromDump(readFile(srcPath), name, method, hashAlgo, repair);
-
- /* For computing the NAR hash. */
- auto sha256Sink = std::make_unique<HashSink>(htSHA256);
- /* For computing the store path. In recursive SHA-256 mode, this
- is the same as the NAR hash, so no need to do it again. */
- std::unique_ptr<HashSink> hashSink =
- hashAlgo == htSHA256
- ? nullptr
- : std::make_unique<HashSink>(hashAlgo);
+StorePath LocalStore::addToStoreFromDump(Source & source0, const string & name,
+ FileIngestionMethod method, HashType hashAlgo, RepairFlag repair)
+{
+ /* For computing the store path. */
+ auto hashSink = std::make_unique<HashSink>(hashAlgo);
+ TeeSource source { source0, *hashSink };
/* Read the source path into memory, but only if it's up to
narBufferSize bytes. If it's larger, write it to a temporary
@@ -1116,55 +1060,49 @@ StorePath LocalStore::addToStore(const string & name, const Path & _srcPath,
destination store path is already valid, we just delete the
temporary path. Otherwise, we move it to the destination store
path. */
- bool inMemory = true;
- std::string nar;
-
- auto source = sinkToSource([&](Sink & sink) {
-
- LambdaSink sink2([&](const unsigned char * buf, size_t len) {
- (*sha256Sink)(buf, len);
- if (hashSink) (*hashSink)(buf, len);
-
- if (inMemory) {
- if (nar.size() + len > settings.narBufferSize) {
- inMemory = false;
- sink << 1;
- sink((const unsigned char *) nar.data(), nar.size());
- nar.clear();
- } else {
- nar.append((const char *) buf, len);
- }
- }
-
- if (!inMemory) sink(buf, len);
- });
-
- dumpPath(srcPath, sink2, filter);
- });
+ bool inMemory = false;
+
+ std::string dump;
+
+ /* Fill out buffer, and decide whether we are working strictly in
+ memory based on whether we break out because the buffer is full
+ or the original source is empty */
+ while (dump.size() < settings.narBufferSize) {
+ auto oldSize = dump.size();
+ constexpr size_t chunkSize = 1024;
+ auto want = std::min(chunkSize, settings.narBufferSize - oldSize);
+ dump.resize(oldSize + want);
+ auto got = 0;
+ try {
+ got = source.read((uint8_t *) dump.data() + oldSize, want);
+ } catch (EndOfFile &) {
+ inMemory = true;
+ break;
+ }
+ dump.resize(oldSize + got);
+ }
std::unique_ptr<AutoDelete> delTempDir;
Path tempPath;
- try {
- /* Wait for the source coroutine to give us some dummy
- data. This is so that we don't create the temporary
- directory if the NAR fits in memory. */
- readInt(*source);
+ if (!inMemory) {
+ /* Drain what we pulled so far, and then keep on pulling */
+ StringSource dumpSource { dump };
+ ChainSource bothSource { dumpSource, source };
auto tempDir = createTempDir(realStoreDir, "add");
delTempDir = std::make_unique<AutoDelete>(tempDir);
tempPath = tempDir + "/x";
- restorePath(tempPath, *source);
+ if (method == FileIngestionMethod::Recursive)
+ restorePath(tempPath, bothSource);
+ else
+ writeFile(tempPath, bothSource);
- } catch (EndOfFile &) {
- if (!inMemory) throw;
- /* The NAR fits in memory, so we didn't do restorePath(). */
+ dump.clear();
}
- auto sha256 = sha256Sink->finish();
-
- Hash hash = hashSink ? hashSink->finish().first : sha256.first;
+ auto [hash, size] = hashSink->finish();
auto dstPath = makeFixedOutputPath(method, hash, name);
@@ -1186,22 +1124,34 @@ StorePath LocalStore::addToStore(const string & name, const Path & _srcPath,
autoGC();
if (inMemory) {
+ StringSource dumpSource { dump };
/* Restore from the NAR in memory. */
- StringSource source(nar);
- restorePath(realPath, source);
+ if (method == FileIngestionMethod::Recursive)
+ restorePath(realPath, dumpSource);
+ else
+ writeFile(realPath, dumpSource);
} else {
/* Move the temporary path we restored above. */
if (rename(tempPath.c_str(), realPath.c_str()))
throw Error("renaming '%s' to '%s'", tempPath, realPath);
}
+ /* For computing the nar hash. In recursive SHA-256 mode, this
+ is the same as the store hash, so no need to do it again. */
+ auto narHash = std::pair { hash, size };
+ if (method != FileIngestionMethod::Recursive || hashAlgo != htSHA256) {
+ HashSink narSink { htSHA256 };
+ dumpPath(realPath, narSink);
+ narHash = narSink.finish();
+ }
+
canonicalisePathMetaData(realPath, -1); // FIXME: merge into restorePath
optimisePath(realPath);
ValidPathInfo info(dstPath);
- info.narHash = sha256.first;
- info.narSize = sha256.second;
+ info.narHash = narHash.first;
+ info.narSize = narHash.second;
info.ca = FixedOutputHash { .method = method, .hash = hash };
registerValidPath(info);
}
diff --git a/src/libstore/local-store.hh b/src/libstore/local-store.hh
index c0e5d0286..355c2814f 100644
--- a/src/libstore/local-store.hh
+++ b/src/libstore/local-store.hh
@@ -153,7 +153,7 @@ public:
in `dump', which is either a NAR serialisation (if recursive ==
true) or simply the contents of a regular file (if recursive ==
false). */
- StorePath addToStoreFromDump(const string & dump, const string & name,
+ StorePath addToStoreFromDump(Source & dump, const string & name,
FileIngestionMethod method = FileIngestionMethod::Recursive, HashType hashAlgo = htSHA256, RepairFlag repair = NoRepair) override;
StorePath addTextToStore(const string & name, const string & s,
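Note on the new signature: the dump argument is now a Source rather than an in-memory string, so a caller that already holds a NAR serialisation can wrap it in a StringSource, as the daemon does above. A minimal sketch, assuming a LocalStore reference `store` and a std::string `nar` holding the serialisation (both hypothetical here):

    StringSource dump { nar };                       // replay the in-memory NAR as a Source
    auto path = store.addToStoreFromDump(dump, "example",
        FileIngestionMethod::Recursive, htSHA256, NoRepair);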
diff --git a/src/libstore/profiles.cc b/src/libstore/profiles.cc
index 6cfe393a4..6862b42f0 100644
--- a/src/libstore/profiles.cc
+++ b/src/libstore/profiles.cc
@@ -12,30 +12,24 @@
namespace nix {
-static bool cmpGensByNumber(const Generation & a, const Generation & b)
-{
- return a.number < b.number;
-}
-
-
/* Parse a generation name of the format
`<profilename>-<number>-link'. */
-static int parseName(const string & profileName, const string & name)
+static std::optional<GenerationNumber> parseName(const string & profileName, const string & name)
{
- if (string(name, 0, profileName.size() + 1) != profileName + "-") return -1;
+ if (string(name, 0, profileName.size() + 1) != profileName + "-") return {};
string s = string(name, profileName.size() + 1);
string::size_type p = s.find("-link");
- if (p == string::npos) return -1;
- int n;
+ if (p == string::npos) return {};
+ unsigned int n;
if (string2Int(string(s, 0, p), n) && n >= 0)
return n;
else
- return -1;
+ return {};
}
-Generations findGenerations(Path profile, int & curGen)
+std::pair<Generations, std::optional<GenerationNumber>> findGenerations(Path profile)
{
Generations gens;
@@ -43,30 +37,34 @@ Generations findGenerations(Path profile, int & curGen)
auto profileName = std::string(baseNameOf(profile));
for (auto & i : readDirectory(profileDir)) {
- int n;
- if ((n = parseName(profileName, i.name)) != -1) {
- Generation gen;
- gen.path = profileDir + "/" + i.name;
- gen.number = n;
+ if (auto n = parseName(profileName, i.name)) {
+ auto path = profileDir + "/" + i.name;
struct stat st;
- if (lstat(gen.path.c_str(), &st) != 0)
- throw SysError("statting '%1%'", gen.path);
- gen.creationTime = st.st_mtime;
- gens.push_back(gen);
+ if (lstat(path.c_str(), &st) != 0)
+ throw SysError("statting '%1%'", path);
+ gens.push_back({
+ .number = *n,
+ .path = path,
+ .creationTime = st.st_mtime
+ });
}
}
- gens.sort(cmpGensByNumber);
+ gens.sort([](const Generation & a, const Generation & b)
+ {
+ return a.number < b.number;
+ });
- curGen = pathExists(profile)
+ return {
+ gens,
+ pathExists(profile)
? parseName(profileName, readLink(profile))
- : -1;
-
- return gens;
+ : std::nullopt
+ };
}
-static void makeName(const Path & profile, unsigned int num,
+static void makeName(const Path & profile, GenerationNumber num,
Path & outLink)
{
Path prefix = (format("%1%-%2%") % profile % num).str();
@@ -78,10 +76,9 @@ Path createGeneration(ref<LocalFSStore> store, Path profile, Path outPath)
{
/* The new generation number should be higher than old the
previous ones. */
- int dummy;
- Generations gens = findGenerations(profile, dummy);
+ auto [gens, dummy] = findGenerations(profile);
- unsigned int num;
+ GenerationNumber num;
if (gens.size() > 0) {
Generation last = gens.back();
@@ -121,7 +118,7 @@ static void removeFile(const Path & path)
}
-void deleteGeneration(const Path & profile, unsigned int gen)
+void deleteGeneration(const Path & profile, GenerationNumber gen)
{
Path generation;
makeName(profile, gen, generation);
@@ -129,7 +126,7 @@ void deleteGeneration(const Path & profile, unsigned int gen)
}
-static void deleteGeneration2(const Path & profile, unsigned int gen, bool dryRun)
+static void deleteGeneration2(const Path & profile, GenerationNumber gen, bool dryRun)
{
if (dryRun)
printInfo(format("would remove generation %1%") % gen);
@@ -140,31 +137,29 @@ static void deleteGeneration2(const Path & profile, unsigned int gen, bool dryRu
}
-void deleteGenerations(const Path & profile, const std::set<unsigned int> & gensToDelete, bool dryRun)
+void deleteGenerations(const Path & profile, const std::set<GenerationNumber> & gensToDelete, bool dryRun)
{
PathLocks lock;
lockProfile(lock, profile);
- int curGen;
- Generations gens = findGenerations(profile, curGen);
+ auto [gens, curGen] = findGenerations(profile);
- if (gensToDelete.find(curGen) != gensToDelete.end())
+ if (gensToDelete.count(*curGen))
throw Error("cannot delete current generation of profile %1%'", profile);
for (auto & i : gens) {
- if (gensToDelete.find(i.number) == gensToDelete.end()) continue;
+ if (!gensToDelete.count(i.number)) continue;
deleteGeneration2(profile, i.number, dryRun);
}
}
-void deleteGenerationsGreaterThan(const Path & profile, int max, bool dryRun)
+void deleteGenerationsGreaterThan(const Path & profile, GenerationNumber max, bool dryRun)
{
PathLocks lock;
lockProfile(lock, profile);
- int curGen;
bool fromCurGen = false;
- Generations gens = findGenerations(profile, curGen);
+ auto [gens, curGen] = findGenerations(profile);
for (auto i = gens.rbegin(); i != gens.rend(); ++i) {
if (i->number == curGen) {
fromCurGen = true;
@@ -186,8 +181,7 @@ void deleteOldGenerations(const Path & profile, bool dryRun)
PathLocks lock;
lockProfile(lock, profile);
- int curGen;
- Generations gens = findGenerations(profile, curGen);
+ auto [gens, curGen] = findGenerations(profile);
for (auto & i : gens)
if (i.number != curGen)
@@ -200,8 +194,7 @@ void deleteGenerationsOlderThan(const Path & profile, time_t t, bool dryRun)
PathLocks lock;
lockProfile(lock, profile);
- int curGen;
- Generations gens = findGenerations(profile, curGen);
+ auto [gens, curGen] = findGenerations(profile);
bool canDelete = false;
for (auto i = gens.rbegin(); i != gens.rend(); ++i)
diff --git a/src/libstore/profiles.hh b/src/libstore/profiles.hh
index 78645d8b6..abe507f0e 100644
--- a/src/libstore/profiles.hh
+++ b/src/libstore/profiles.hh
@@ -9,37 +9,32 @@
namespace nix {
+typedef unsigned int GenerationNumber;
+
struct Generation
{
- int number;
+ GenerationNumber number;
Path path;
time_t creationTime;
- Generation()
- {
- number = -1;
- }
- operator bool() const
- {
- return number != -1;
- }
};
-typedef list<Generation> Generations;
+typedef std::list<Generation> Generations;
/* Returns the list of currently present generations for the specified
- profile, sorted by generation number. */
-Generations findGenerations(Path profile, int & curGen);
+ profile, sorted by generation number. Also returns the number of
+ the current generation. */
+std::pair<Generations, std::optional<GenerationNumber>> findGenerations(Path profile);
class LocalFSStore;
Path createGeneration(ref<LocalFSStore> store, Path profile, Path outPath);
-void deleteGeneration(const Path & profile, unsigned int gen);
+void deleteGeneration(const Path & profile, GenerationNumber gen);
-void deleteGenerations(const Path & profile, const std::set<unsigned int> & gensToDelete, bool dryRun);
+void deleteGenerations(const Path & profile, const std::set<GenerationNumber> & gensToDelete, bool dryRun);
-void deleteGenerationsGreaterThan(const Path & profile, const int max, bool dryRun);
+void deleteGenerationsGreaterThan(const Path & profile, GenerationNumber max, bool dryRun);
void deleteOldGenerations(const Path & profile, bool dryRun);
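findGenerations now returns the current generation as a std::optional instead of writing through an out-parameter. A usage sketch in the style of the callers above, assuming a profile path `profile`:

    auto [gens, curGen] = findGenerations(profile);
    for (auto & gen : gens)
        if (curGen && gen.number == *curGen)
            printInfo("generation %1% is current", gen.number);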
diff --git a/src/libstore/store-api.cc b/src/libstore/store-api.cc
index 2fb9c968a..80a10a2bf 100644
--- a/src/libstore/store-api.cc
+++ b/src/libstore/store-api.cc
@@ -222,20 +222,73 @@ StorePath Store::computeStorePathForText(const string & name, const string & s,
}
+/*
+The aim of this function is to compute in one pass the correct ValidPathInfo for
+the files that we are trying to add to the store. To accomplish that in one
+pass, given the different kinds of input that we can take (normal NAR archives,
+NAR archives with non-SHA-256 hashes, and flat files), we set up a network of
+sinks and aliases. Since this obscures the dataflow, here is a graphviz diagram:
+
+digraph graphname {
+ node [shape=box]
+ fileSource -> narSink
+ narSink [style=dashed]
+    narSink -> unusualHashTee [style = dashed, label = "Recursive && !SHA-256"]
+    narSink -> narHashSink [style = dashed, label = "else"]
+    unusualHashTee -> narHashSink
+    unusualHashTee -> caHashSink
+ fileSource -> parseSink
+ parseSink [style=dashed]
+ parseSink-> fileSink [style = dashed, label = "Flat"]
+ parseSink -> blank [style = dashed, label = "Recursive"]
+ fileSink -> caHashSink
+}
+*/
ValidPathInfo Store::addToStoreSlow(std::string_view name, const Path & srcPath,
FileIngestionMethod method, HashType hashAlgo,
std::optional<Hash> expectedCAHash)
{
- /* FIXME: inefficient: we're reading/hashing 'tmpFile' three
- times. */
+ HashSink narHashSink { htSHA256 };
+ HashSink caHashSink { hashAlgo };
+
+    /* Note that fileSink and unusualHashTee must be mutually exclusive, since
+       they both write to caHashSink. This requirement currently holds because
+       the former is only used in the flat case. */
+ RetrieveRegularNARSink fileSink { caHashSink };
+ TeeSink unusualHashTee { narHashSink, caHashSink };
+
+ auto & narSink = method == FileIngestionMethod::Recursive && hashAlgo != htSHA256
+ ? static_cast<Sink &>(unusualHashTee)
+ : narHashSink;
+
+ /* Functionally, this means that fileSource will yield the content of
+ srcPath. The fact that we use scratchpadSink as a temporary buffer here
+ is an implementation detail. */
+ auto fileSource = sinkToSource([&](Sink & scratchpadSink) {
+ dumpPath(srcPath, scratchpadSink);
+ });
- auto [narHash, narSize] = hashPath(htSHA256, srcPath);
+ /* tapped provides the same data as fileSource, but we also write all the
+ information to narSink. */
+ TeeSource tapped { *fileSource, narSink };
- auto hash = method == FileIngestionMethod::Recursive
- ? hashAlgo == htSHA256
- ? narHash
- : hashPath(hashAlgo, srcPath).first
- : hashFile(hashAlgo, srcPath);
+ ParseSink blank;
+ auto & parseSink = method == FileIngestionMethod::Flat
+ ? fileSink
+ : blank;
+
+ /* The information that flows from tapped (besides being replicated in
+ narSink), is now put in parseSink. */
+ parseDump(parseSink, tapped);
+
+ /* We extract the result of the computation from the sink by calling
+ finish. */
+ auto [narHash, narSize] = narHashSink.finish();
+
+ auto hash = method == FileIngestionMethod::Recursive && hashAlgo == htSHA256
+ ? narHash
+ : caHashSink.finish().first;
if (expectedCAHash && expectedCAHash != hash)
throw Error("hash mismatch for '%s'", srcPath);
@@ -246,8 +299,8 @@ ValidPathInfo Store::addToStoreSlow(std::string_view name, const Path & srcPath,
info.ca = FixedOutputHash { .method = method, .hash = hash };
if (!isValidPath(info.path)) {
- auto source = sinkToSource([&](Sink & sink) {
- dumpPath(srcPath, sink);
+ auto source = sinkToSource([&](Sink & scratchpadSink) {
+ dumpPath(srcPath, scratchpadSink);
});
addToStore(info, *source);
}
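The essential idea of the sink plumbing above is that a tee fans a single pass over the data out to two consumers (here, two hashers). A self-contained sketch of that idea with a minimal Sink interface of its own, not the libutil classes:

    #include <cstddef>

    struct Sink
    {
        virtual void operator () (const unsigned char * data, size_t len) = 0;
    };

    /* Forward every chunk to two downstream sinks, e.g. two hash sinks. */
    struct Tee : Sink
    {
        Sink & a, & b;
        Tee(Sink & a, Sink & b) : a(a), b(b) { }
        void operator () (const unsigned char * data, size_t len) override
        {
            a(data, len);
            b(data, len);
        }
    };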
diff --git a/src/libstore/store-api.hh b/src/libstore/store-api.hh
index cbabe3d77..a7288d0cc 100644
--- a/src/libstore/store-api.hh
+++ b/src/libstore/store-api.hh
@@ -461,7 +461,7 @@ public:
std::optional<Hash> expectedCAHash = {});
// FIXME: remove?
- virtual StorePath addToStoreFromDump(const string & dump, const string & name,
+ virtual StorePath addToStoreFromDump(Source & dump, const string & name,
FileIngestionMethod method = FileIngestionMethod::Recursive, HashType hashAlgo = htSHA256, RepairFlag repair = NoRepair)
{
throw Error("addToStoreFromDump() is not supported by this store");
diff --git a/src/libutil/archive.hh b/src/libutil/archive.hh
index 302b1bb18..57780d16a 100644
--- a/src/libutil/archive.hh
+++ b/src/libutil/archive.hh
@@ -63,12 +63,29 @@ struct ParseSink
virtual void createSymlink(const Path & path, const string & target) { };
};
-struct TeeParseSink : ParseSink
+/* If the NAR archive contains a single file at top-level, then save
+ the contents of the file to `s'. Otherwise barf. */
+struct RetrieveRegularNARSink : ParseSink
{
- StringSink saved;
- TeeSource source;
+ bool regular = true;
+ Sink & sink;
- TeeParseSink(Source & source) : source(source, saved) { }
+ RetrieveRegularNARSink(Sink & sink) : sink(sink) { }
+
+ void createDirectory(const Path & path)
+ {
+ regular = false;
+ }
+
+ void receiveContents(unsigned char * data, unsigned int len)
+ {
+ sink(data, len);
+ }
+
+ void createSymlink(const Path & path, const string & target)
+ {
+ regular = false;
+ }
};
void parseDump(ParseSink & sink, Source & source);
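RetrieveRegularNARSink now streams the file contents into an arbitrary Sink instead of accumulating its own string. A usage sketch following the daemon code, assuming a Source `from` carrying a NAR that contains a single regular file:

    StringSink saved;
    RetrieveRegularNARSink regular { saved };
    parseDump(regular, from);                       // the NAR parser drives the sink
    if (!regular.regular) throw Error("regular file expected");
    /* *saved.s now holds the file's contents. */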
diff --git a/src/libutil/serialise.cc b/src/libutil/serialise.cc
index c8b71188f..00c945113 100644
--- a/src/libutil/serialise.cc
+++ b/src/libutil/serialise.cc
@@ -322,5 +322,18 @@ void StringSink::operator () (const unsigned char * data, size_t len)
s->append((const char *) data, len);
}
+size_t ChainSource::read(unsigned char * data, size_t len)
+{
+ if (useSecond) {
+ return source2.read(data, len);
+ } else {
+ try {
+ return source1.read(data, len);
+ } catch (EndOfFile &) {
+ useSecond = true;
+ return this->read(data, len);
+ }
+ }
+}
}
diff --git a/src/libutil/serialise.hh b/src/libutil/serialise.hh
index 8386a4991..5d9acf887 100644
--- a/src/libutil/serialise.hh
+++ b/src/libutil/serialise.hh
@@ -189,7 +189,7 @@ struct TeeSource : Source
size_t read(unsigned char * data, size_t len)
{
size_t n = orig.read(data, len);
- sink(data, len);
+ sink(data, n);
return n;
}
};
@@ -256,6 +256,19 @@ struct LambdaSource : Source
}
};
+/* Chain two sources together so after the first is exhausted, the second is
+ used */
+struct ChainSource : Source
+{
+ Source & source1, & source2;
+ bool useSecond = false;
+ ChainSource(Source & s1, Source & s2)
+ : source1(s1), source2(s2)
+ { }
+
+ size_t read(unsigned char * data, size_t len) override;
+};
+
/* Convert a function that feeds data into a Sink into a Source. The
Source executes the function as a coroutine. */
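ChainSource is what lets addToStoreFromDump replay the prefix it buffered in memory before continuing with the rest of the original stream. A sketch of that pattern, assuming a std::string `dump` holding the buffered prefix, the original Source in `source`, and a destination path `tempPath`:

    StringSource dumpSource { dump };               // the bytes already read
    ChainSource bothSource { dumpSource, source };  // then the remainder
    restorePath(tempPath, bothSource);              // consumer sees one continuous stream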
diff --git a/src/nix-env/nix-env.cc b/src/nix-env/nix-env.cc
index c992b7d74..5795c2c09 100644
--- a/src/nix-env/nix-env.cc
+++ b/src/nix-env/nix-env.cc
@@ -1208,18 +1208,17 @@ static void opSwitchProfile(Globals & globals, Strings opFlags, Strings opArgs)
}
-static const int prevGen = -2;
+static constexpr GenerationNumber prevGen = std::numeric_limits<GenerationNumber>::max();
-static void switchGeneration(Globals & globals, int dstGen)
+static void switchGeneration(Globals & globals, GenerationNumber dstGen)
{
PathLocks lock;
lockProfile(lock, globals.profile);
- int curGen;
- Generations gens = findGenerations(globals.profile, curGen);
+ auto [gens, curGen] = findGenerations(globals.profile);
- Generation dst;
+ std::optional<Generation> dst;
for (auto & i : gens)
if ((dstGen == prevGen && i.number < curGen) ||
(dstGen >= 0 && i.number == dstGen))
@@ -1227,18 +1226,16 @@ static void switchGeneration(Globals & globals, int dstGen)
if (!dst) {
if (dstGen == prevGen)
- throw Error("no generation older than the current (%1%) exists",
- curGen);
+ throw Error("no generation older than the current (%1%) exists", curGen.value_or(0));
else
throw Error("generation %1% does not exist", dstGen);
}
- printInfo(format("switching from generation %1% to %2%")
- % curGen % dst.number);
+ printInfo("switching from generation %1% to %2%", curGen.value_or(0), dst->number);
if (globals.dryRun) return;
- switchLink(globals.profile, dst.path);
+ switchLink(globals.profile, dst->path);
}
@@ -1249,7 +1246,7 @@ static void opSwitchGeneration(Globals & globals, Strings opFlags, Strings opArg
if (opArgs.size() != 1)
throw UsageError("exactly one argument expected");
- int dstGen;
+ GenerationNumber dstGen;
if (!string2Int(opArgs.front(), dstGen))
throw UsageError("expected a generation number");
@@ -1278,8 +1275,7 @@ static void opListGenerations(Globals & globals, Strings opFlags, Strings opArgs
PathLocks lock;
lockProfile(lock, globals.profile);
- int curGen;
- Generations gens = findGenerations(globals.profile, curGen);
+ auto [gens, curGen] = findGenerations(globals.profile);
RunPager pager;
@@ -1308,14 +1304,14 @@ static void opDeleteGenerations(Globals & globals, Strings opFlags, Strings opAr
if(opArgs.front().size() < 2)
throw Error("invalid number of generations ‘%1%’", opArgs.front());
string str_max = string(opArgs.front(), 1, opArgs.front().size());
- int max;
+ GenerationNumber max;
if (!string2Int(str_max, max) || max == 0)
throw Error("invalid number of generations to keep ‘%1%’", opArgs.front());
deleteGenerationsGreaterThan(globals.profile, max, globals.dryRun);
} else {
- std::set<unsigned int> gens;
+ std::set<GenerationNumber> gens;
for (auto & i : opArgs) {
- unsigned int n;
+ GenerationNumber n;
if (!string2Int(i, n))
throw UsageError("invalid generation number '%1%'", i);
gens.insert(n);
diff --git a/src/nix/command.hh b/src/nix/command.hh
index 1c7413300..856721ebf 100644
--- a/src/nix/command.hh
+++ b/src/nix/command.hh
@@ -244,4 +244,10 @@ void completeFlakeRefWithFragment(
const Strings & defaultFlakeAttrPaths,
std::string_view prefix);
+void printClosureDiff(
+ ref<Store> store,
+ const StorePath & beforePath,
+ const StorePath & afterPath,
+ std::string_view indent);
+
}
diff --git a/src/nix/diff-closures.cc b/src/nix/diff-closures.cc
index 56ddb575b..4199dae0f 100644
--- a/src/nix/diff-closures.cc
+++ b/src/nix/diff-closures.cc
@@ -6,7 +6,7 @@
#include <regex>
-using namespace nix;
+namespace nix {
struct Info
{
@@ -52,6 +52,60 @@ std::string showVersions(const std::set<std::string> & versions)
return concatStringsSep(", ", versions2);
}
+void printClosureDiff(
+ ref<Store> store,
+ const StorePath & beforePath,
+ const StorePath & afterPath,
+ std::string_view indent)
+{
+ auto beforeClosure = getClosureInfo(store, beforePath);
+ auto afterClosure = getClosureInfo(store, afterPath);
+
+ std::set<std::string> allNames;
+ for (auto & [name, _] : beforeClosure) allNames.insert(name);
+ for (auto & [name, _] : afterClosure) allNames.insert(name);
+
+ for (auto & name : allNames) {
+ auto & beforeVersions = beforeClosure[name];
+ auto & afterVersions = afterClosure[name];
+
+ auto totalSize = [&](const std::map<std::string, std::map<StorePath, Info>> & versions)
+ {
+ uint64_t sum = 0;
+ for (auto & [_, paths] : versions)
+ for (auto & [path, _] : paths)
+ sum += store->queryPathInfo(path)->narSize;
+ return sum;
+ };
+
+ auto beforeSize = totalSize(beforeVersions);
+ auto afterSize = totalSize(afterVersions);
+ auto sizeDelta = (int64_t) afterSize - (int64_t) beforeSize;
+ auto showDelta = abs(sizeDelta) >= 8 * 1024;
+
+ std::set<std::string> removed, unchanged;
+ for (auto & [version, _] : beforeVersions)
+ if (!afterVersions.count(version)) removed.insert(version); else unchanged.insert(version);
+
+ std::set<std::string> added;
+ for (auto & [version, _] : afterVersions)
+ if (!beforeVersions.count(version)) added.insert(version);
+
+ if (showDelta || !removed.empty() || !added.empty()) {
+ std::vector<std::string> items;
+ if (!removed.empty() || !added.empty())
+ items.push_back(fmt("%s → %s", showVersions(removed), showVersions(added)));
+ if (showDelta)
+ items.push_back(fmt("%s%+.1f KiB" ANSI_NORMAL, sizeDelta > 0 ? ANSI_RED : ANSI_GREEN, sizeDelta / 1024.0));
+ std::cout << fmt("%s%s: %s\n", indent, name, concatStringsSep(", ", items));
+ }
+ }
+}
+
+}
+
+using namespace nix;
+
struct CmdDiffClosures : SourceExprCommand
{
std::string _before, _after;
@@ -85,49 +139,7 @@ struct CmdDiffClosures : SourceExprCommand
auto beforePath = toStorePath(store, Realise::Outputs, operateOn, before);
auto after = parseInstallable(store, _after);
auto afterPath = toStorePath(store, Realise::Outputs, operateOn, after);
-
- auto beforeClosure = getClosureInfo(store, beforePath);
- auto afterClosure = getClosureInfo(store, afterPath);
-
- std::set<std::string> allNames;
- for (auto & [name, _] : beforeClosure) allNames.insert(name);
- for (auto & [name, _] : afterClosure) allNames.insert(name);
-
- for (auto & name : allNames) {
- auto & beforeVersions = beforeClosure[name];
- auto & afterVersions = afterClosure[name];
-
- auto totalSize = [&](const std::map<std::string, std::map<StorePath, Info>> & versions)
- {
- uint64_t sum = 0;
- for (auto & [_, paths] : versions)
- for (auto & [path, _] : paths)
- sum += store->queryPathInfo(path)->narSize;
- return sum;
- };
-
- auto beforeSize = totalSize(beforeVersions);
- auto afterSize = totalSize(afterVersions);
- auto sizeDelta = (int64_t) afterSize - (int64_t) beforeSize;
- auto showDelta = abs(sizeDelta) >= 8 * 1024;
-
- std::set<std::string> removed, unchanged;
- for (auto & [version, _] : beforeVersions)
- if (!afterVersions.count(version)) removed.insert(version); else unchanged.insert(version);
-
- std::set<std::string> added;
- for (auto & [version, _] : afterVersions)
- if (!beforeVersions.count(version)) added.insert(version);
-
- if (showDelta || !removed.empty() || !added.empty()) {
- std::vector<std::string> items;
- if (!removed.empty() || !added.empty())
- items.push_back(fmt("%s → %s", showVersions(removed), showVersions(added)));
- if (showDelta)
- items.push_back(fmt("%s%+.1f KiB" ANSI_NORMAL, sizeDelta > 0 ? ANSI_RED : ANSI_GREEN, sizeDelta / 1024.0));
- std::cout << fmt("%s: %s\n", name, concatStringsSep(", ", items));
- }
- }
+ printClosureDiff(store, beforePath, afterPath, "");
}
};
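With the closure comparison factored out into printClosureDiff, other commands can call it directly. A call sketch, assuming two already-resolved store paths:

    printClosureDiff(store, beforePath, afterPath, "");    // no indent, as in CmdDiffClosures
    printClosureDiff(store, beforePath, afterPath, "  ");  // indented, as in 'nix profile diff-closures'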
diff --git a/src/nix/edit.cc b/src/nix/edit.cc
index dc9775635..378a3739c 100644
--- a/src/nix/edit.cc
+++ b/src/nix/edit.cc
@@ -45,6 +45,7 @@ struct CmdEdit : InstallableCommand
auto args = editorFor(pos);
+ restoreSignals();
execvp(args.front().c_str(), stringsToCharPtrs(args).data());
std::string command;
diff --git a/src/nix/profile.cc b/src/nix/profile.cc
index e04079603..7dcc0b6d4 100644
--- a/src/nix/profile.cc
+++ b/src/nix/profile.cc
@@ -7,6 +7,7 @@
#include "builtins/buildenv.hh"
#include "flake/flakeref.hh"
#include "../nix-env/user-env.hh"
+#include "profiles.hh"
#include <nlohmann/json.hpp>
#include <regex>
@@ -394,6 +395,46 @@ struct CmdProfileInfo : virtual EvalCommand, virtual StoreCommand, MixDefaultPro
}
};
+struct CmdProfileDiffClosures : virtual StoreCommand, MixDefaultProfile
+{
+ std::string description() override
+ {
+ return "show the closure difference between each generation of a profile";
+ }
+
+ Examples examples() override
+ {
+ return {
+ Example{
+ "To show what changed between each generation of the NixOS system profile:",
+ "nix profile diff-closure --profile /nix/var/nix/profiles/system"
+ },
+ };
+ }
+
+ void run(ref<Store> store) override
+ {
+ auto [gens, curGen] = findGenerations(*profile);
+
+ std::optional<Generation> prevGen;
+ bool first = true;
+
+ for (auto & gen : gens) {
+ if (prevGen) {
+ if (!first) std::cout << "\n";
+ first = false;
+ std::cout << fmt("Generation %d -> %d:\n", prevGen->number, gen.number);
+ printClosureDiff(store,
+ store->followLinksToStorePath(prevGen->path),
+ store->followLinksToStorePath(gen.path),
+ " ");
+ }
+
+ prevGen = gen;
+ }
+ }
+};
+
struct CmdProfile : virtual MultiCommand, virtual Command
{
CmdProfile()
@@ -402,6 +443,7 @@ struct CmdProfile : virtual MultiCommand, virtual Command
{"remove", []() { return make_ref<CmdProfileRemove>(); }},
{"upgrade", []() { return make_ref<CmdProfileUpgrade>(); }},
{"info", []() { return make_ref<CmdProfileInfo>(); }},
+ {"diff-closures", []() { return make_ref<CmdProfileDiffClosures>(); }},
})
{ }
@@ -425,4 +467,3 @@ struct CmdProfile : virtual MultiCommand, virtual Command
};
static auto r1 = registerCommand<CmdProfile>("profile");
-