Diffstat (limited to 'src/libstore')
-rw-r--r--  src/libstore/builtins/buildenv.hh |  2
-rw-r--r--  src/libstore/daemon.cc            | 52
-rw-r--r--  src/libstore/export-import.cc     | 12
-rw-r--r--  src/libstore/filetransfer.cc      | 18
-rw-r--r--  src/libstore/globals.hh           |  3
-rw-r--r--  src/libstore/local-store.cc       |  2
-rw-r--r--  src/libstore/local.mk             |  3
-rw-r--r--  src/libstore/profiles.cc          | 81
-rw-r--r--  src/libstore/profiles.hh          | 25
-rw-r--r--  src/libstore/sqlite.cc            |  9
-rw-r--r--  src/libstore/sqlite.hh            |  4
-rw-r--r--  src/libstore/store-api.cc         | 39
12 files changed, 126 insertions, 124 deletions
diff --git a/src/libstore/builtins/buildenv.hh b/src/libstore/builtins/buildenv.hh
index 0a37459b0..73c0f5f7f 100644
--- a/src/libstore/builtins/buildenv.hh
+++ b/src/libstore/builtins/buildenv.hh
@@ -9,7 +9,7 @@ struct Package {
     Path path;
     bool active;
     int priority;
-    Package(Path path, bool active, int priority) : path{path}, active{active}, priority{priority} {}
+    Package(const Path & path, bool active, int priority) : path{path}, active{active}, priority{priority} {}
 };
 
 typedef std::vector<Package> Packages;
diff --git a/src/libstore/daemon.cc b/src/libstore/daemon.cc
index 69d7ef511..7e16529a5 100644
--- a/src/libstore/daemon.cc
+++ b/src/libstore/daemon.cc
@@ -173,31 +173,6 @@ struct TunnelSource : BufferedSource
     }
 };
 
-/* If the NAR archive contains a single file at top-level, then save
-   the contents of the file to `s'. Otherwise barf. */
-struct RetrieveRegularNARSink : ParseSink
-{
-    bool regular;
-    string s;
-
-    RetrieveRegularNARSink() : regular(true) { }
-
-    void createDirectory(const Path & path)
-    {
-        regular = false;
-    }
-
-    void receiveContents(unsigned char * data, unsigned int len)
-    {
-        s.append((const char *) data, len);
-    }
-
-    void createSymlink(const Path & path, const string & target)
-    {
-        regular = false;
-    }
-};
-
 struct ClientSettings
 {
     bool keepFailed;
@@ -394,9 +369,9 @@ static void performOp(TunnelLogger * logger, ref<Store> store,
             hashAlgo = parseHashType(hashAlgoRaw);
         }
 
-        StringSink savedNAR;
-        TeeSource savedNARSource(from, savedNAR);
-        RetrieveRegularNARSink savedRegular;
+        StringSink saved;
+        TeeSource savedNARSource(from, saved);
+        RetrieveRegularNARSink savedRegular { saved };
 
         if (method == FileIngestionMethod::Recursive) {
             /* Get the entire NAR dump from the client and save it to
@@ -410,14 +385,9 @@ static void performOp(TunnelLogger * logger, ref<Store> store,
         logger->startWork();
         if (!savedRegular.regular) throw Error("regular file expected");
 
-        StringSource dumpSource {
-            method == FileIngestionMethod::Recursive ? *savedNAR.s : savedRegular.s
-        };
-        auto path = store->addToStoreFromDump(
-            dumpSource,
-            baseName,
-            method,
-            hashAlgo);
+        // FIXME: try to stream directly from `from`.
+        StringSource dumpSource { *saved.s };
+        auto path = store->addToStoreFromDump(dumpSource, baseName, method, hashAlgo);
         logger->stopWork();
 
         to << store->printStorePath(path);
@@ -733,15 +703,15 @@ static void performOp(TunnelLogger * logger, ref<Store> store,
         if (!trusted)
            info.ultimate = false;
 
-        std::string saved;
         std::unique_ptr<Source> source;
         if (GET_PROTOCOL_MINOR(clientVersion) >= 21)
             source = std::make_unique<TunnelSource>(from, to);
         else {
-            TeeParseSink tee(from);
-            parseDump(tee, tee.source);
-            saved = std::move(*tee.saved.s);
-            source = std::make_unique<StringSource>(saved);
+            StringSink saved;
+            TeeSource tee { from, saved };
+            ParseSink ether;
+            parseDump(ether, tee);
+            source = std::make_unique<StringSource>(std::move(*saved.s));
        }
 
         logger->startWork();
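The daemon hunks above replace the old TeeParseSink with a plain StringSink fed through a TeeSource, so the bytes are captured as a side effect of parsing the NAR. For illustration, a minimal self-contained sketch of that tee-while-reading idea, assuming only the standard library (the Source/Sink/TeeSource names mirror Nix's classes but are written from scratch here and are not the real implementations):

    #include <algorithm>
    #include <cstring>
    #include <iostream>
    #include <string>

    // Minimal stand-ins for the stream abstractions (illustration only).
    struct Source { virtual size_t read(char * data, size_t len) = 0; virtual ~Source() = default; };
    struct Sink   { virtual void operator()(const char * data, size_t len) = 0; virtual ~Sink() = default; };

    struct StringSink : Sink {            // accumulates everything written to it
        std::string s;
        void operator()(const char * data, size_t len) override { s.append(data, len); }
    };

    struct StringSource : Source {        // reads from an in-memory buffer
        const std::string & s; size_t pos = 0;
        StringSource(const std::string & s) : s(s) { }
        size_t read(char * data, size_t len) override {
            size_t n = std::min(len, s.size() - pos);
            std::memcpy(data, s.data() + pos, n);
            pos += n;
            return n;
        }
    };

    struct TeeSource : Source {           // copies every byte it hands out into a sink
        Source & inner; Sink & sink;
        TeeSource(Source & inner, Sink & sink) : inner(inner), sink(sink) { }
        size_t read(char * data, size_t len) override {
            size_t n = inner.read(data, len);
            sink(data, n);
            return n;
        }
    };

    int main()
    {
        std::string wire = "nar-archive-bytes...";
        StringSource from { wire };
        StringSink saved;
        TeeSource tee { from, saved };
        char buf[8];
        while (tee.read(buf, sizeof buf)) { }   // "parse" by draining the tee'd stream
        std::cout << saved.s << "\n";           // full copy retained as a side effect
    }

Whoever drains the TeeSource (in the daemon, the NAR parser) ends up with the complete byte stream sitting in the StringSink, which is what the new code wraps in a StringSource for addToStore().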
diff --git a/src/libstore/export-import.cc b/src/libstore/export-import.cc
index 082d0f1d1..b963d64d7 100644
--- a/src/libstore/export-import.cc
+++ b/src/libstore/export-import.cc
@@ -60,8 +60,10 @@ StorePaths Store::importPaths(Source & source, CheckSigsFlag checkSigs)
         if (n != 1) throw Error("input doesn't look like something created by 'nix-store --export'");
 
         /* Extract the NAR from the source. */
-        TeeParseSink tee(source);
-        parseDump(tee, tee.source);
+        StringSink saved;
+        TeeSource tee { source, saved };
+        ParseSink ether;
+        parseDump(ether, tee);
 
         uint32_t magic = readInt(source);
         if (magic != exportMagic)
@@ -77,15 +79,15 @@ StorePaths Store::importPaths(Source & source, CheckSigsFlag checkSigs)
         if (deriver != "")
             info.deriver = parseStorePath(deriver);
-        info.narHash = hashString(htSHA256, *tee.saved.s);
-        info.narSize = tee.saved.s->size();
+        info.narHash = hashString(htSHA256, *saved.s);
+        info.narSize = saved.s->size();
 
         // Ignore optional legacy signature.
         if (readInt(source) == 1)
             readString(source);
 
         // Can't use underlying source, which would have been exhausted
-        auto source = StringSource { *tee.saved.s };
+        auto source = StringSource { *saved.s };
 
         addToStore(info, source, NoRepair, checkSigs);
 
         res.push_back(info.path);
diff --git a/src/libstore/filetransfer.cc b/src/libstore/filetransfer.cc
index 531b85af8..beb508e67 100644
--- a/src/libstore/filetransfer.cc
+++ b/src/libstore/filetransfer.cc
@@ -22,6 +22,7 @@
 #include <queue>
 #include <random>
 #include <thread>
+#include <regex>
 
 using namespace std::string_literals;
 
@@ -56,7 +57,7 @@ struct curlFileTransfer : public FileTransfer
         Callback<FileTransferResult> callback;
         CURL * req = 0;
         bool active = false; // whether the handle has been added to the multi object
-        std::string status;
+        std::string statusMsg;
 
         unsigned int attempt = 0;
 
@@ -175,12 +176,13 @@ struct curlFileTransfer : public FileTransfer
             size_t realSize = size * nmemb;
             std::string line((char *) contents, realSize);
             printMsg(lvlVomit, format("got header for '%s': %s") % request.uri % trim(line));
-            if (line.compare(0, 5, "HTTP/") == 0) { // new response starts
+            static std::regex statusLine("HTTP/[^ ]+ +[0-9]+(.*)", std::regex::extended | std::regex::icase);
+            std::smatch match;
+            if (std::regex_match(line, match, statusLine)) {
                 result.etag = "";
-                auto ss = tokenizeString<vector<string>>(line, " ");
-                status = ss.size() >= 2 ? ss[1] : "";
                 result.data = std::make_shared<std::string>();
                 result.bodySize = 0;
+                statusMsg = trim(match[1]);
                 acceptRanges = false;
                 encoding = "";
             } else {
@@ -194,7 +196,9 @@ struct curlFileTransfer : public FileTransfer
                    the expected ETag on a 200 response, then shut
                    down the connection because we already have the
                    data. */
-                if (result.etag == request.expectedETag && status == "200") {
+                long httpStatus = 0;
+                curl_easy_getinfo(req, CURLINFO_RESPONSE_CODE, &httpStatus);
+                if (result.etag == request.expectedETag && httpStatus == 200) {
                     debug(format("shutting down on 200 HTTP response with expected ETag"));
                     return 0;
                 }
@@ -413,8 +417,8 @@ struct curlFileTransfer : public FileTransfer
                     ? FileTransferError(Interrupted, fmt("%s of '%s' was interrupted", request.verb(), request.uri))
                     : httpStatus != 0
                     ? FileTransferError(err,
-                        fmt("unable to %s '%s': HTTP error %d",
-                            request.verb(), request.uri, httpStatus)
+                        fmt("unable to %s '%s': HTTP error %d ('%s')",
+                            request.verb(), request.uri, httpStatus, statusMsg)
                         + (code == CURLE_OK ? "" : fmt(" (curl error: %s)", curl_easy_strerror(code)))
                         )
                     : FileTransferError(err,
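The header callback now recognises the start of a new response by matching the status line with a regular expression instead of a prefix check plus tokenizing, which also yields the reason phrase for later error messages. A small standalone sketch of the same match (the pattern is copied from the hunk above; the surrounding code is illustrative only):

    #include <iostream>
    #include <regex>
    #include <string>

    int main()
    {
        // Same pattern as in filetransfer.cc: protocol token, status code, rest of the line.
        static std::regex statusLine("HTTP/[^ ]+ +[0-9]+(.*)",
            std::regex::extended | std::regex::icase);

        std::string line = "HTTP/1.1 404 Not Found";
        std::smatch match;
        if (std::regex_match(line, match, statusLine))
            std::cout << "reason phrase:" << match[1] << "\n";   // " Not Found" (Nix then trims it)
    }

The numeric status itself is no longer scraped from the header text at all; the later hunk asks libcurl for it via curl_easy_getinfo(CURLINFO_RESPONSE_CODE).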
"" : fmt(" (curl error: %s)", curl_easy_strerror(code))) ) : FileTransferError(err, diff --git a/src/libstore/globals.hh b/src/libstore/globals.hh index 4d5eec7bf..d47e0b6b5 100644 --- a/src/libstore/globals.hh +++ b/src/libstore/globals.hh @@ -368,6 +368,9 @@ public: Setting<size_t> narBufferSize{this, 32 * 1024 * 1024, "nar-buffer-size", "Maximum size of NARs before spilling them to disk."}; + + Setting<std::string> flakeRegistry{this, "https://github.com/NixOS/flake-registry/raw/master/flake-registry.json", "flake-registry", + "Path or URI of the global flake registry."}; }; diff --git a/src/libstore/local-store.cc b/src/libstore/local-store.cc index 96d10d9ba..340fb5306 100644 --- a/src/libstore/local-store.cc +++ b/src/libstore/local-store.cc @@ -594,7 +594,7 @@ uint64_t LocalStore::addValidPath(State & state, (concatStringsSep(" ", info.sigs), !info.sigs.empty()) (renderContentAddress(info.ca), (bool) info.ca) .exec(); - uint64_t id = sqlite3_last_insert_rowid(state.db); + uint64_t id = state.db.getLastInsertedRowId(); /* If this is a derivation, then store the derivation outputs in the database. This is useful for the garbage collector: it can diff --git a/src/libstore/local.mk b/src/libstore/local.mk index aec4ed493..d266c8efe 100644 --- a/src/libstore/local.mk +++ b/src/libstore/local.mk @@ -61,3 +61,6 @@ $(d)/build.cc: clean-files += $(d)/schema.sql.gen.hh $(eval $(call install-file-in, $(d)/nix-store.pc, $(prefix)/lib/pkgconfig, 0644)) + +$(foreach i, $(wildcard src/libstore/builtins/*.hh), \ + $(eval $(call install-file-in, $(i), $(includedir)/nix/builtins, 0644))) diff --git a/src/libstore/profiles.cc b/src/libstore/profiles.cc index 6cfe393a4..6862b42f0 100644 --- a/src/libstore/profiles.cc +++ b/src/libstore/profiles.cc @@ -12,30 +12,24 @@ namespace nix { -static bool cmpGensByNumber(const Generation & a, const Generation & b) -{ - return a.number < b.number; -} - - /* Parse a generation name of the format `<profilename>-<number>-link'. 
diff --git a/src/libstore/profiles.cc b/src/libstore/profiles.cc
index 6cfe393a4..6862b42f0 100644
--- a/src/libstore/profiles.cc
+++ b/src/libstore/profiles.cc
@@ -12,30 +12,24 @@
 namespace nix {
 
 
-static bool cmpGensByNumber(const Generation & a, const Generation & b)
-{
-    return a.number < b.number;
-}
-
-
 /* Parse a generation name of the format
    `<profilename>-<number>-link'. */
-static int parseName(const string & profileName, const string & name)
+static std::optional<GenerationNumber> parseName(const string & profileName, const string & name)
 {
-    if (string(name, 0, profileName.size() + 1) != profileName + "-") return -1;
+    if (string(name, 0, profileName.size() + 1) != profileName + "-") return {};
     string s = string(name, profileName.size() + 1);
     string::size_type p = s.find("-link");
-    if (p == string::npos) return -1;
-    int n;
+    if (p == string::npos) return {};
+    unsigned int n;
     if (string2Int(string(s, 0, p), n) && n >= 0)
         return n;
     else
-        return -1;
+        return {};
 }
 
 
-Generations findGenerations(Path profile, int & curGen)
+std::pair<Generations, std::optional<GenerationNumber>> findGenerations(Path profile)
 {
     Generations gens;
 
@@ -43,30 +37,34 @@ Generations findGenerations(Path profile, int & curGen)
     auto profileName = std::string(baseNameOf(profile));
 
     for (auto & i : readDirectory(profileDir)) {
-        int n;
-        if ((n = parseName(profileName, i.name)) != -1) {
-            Generation gen;
-            gen.path = profileDir + "/" + i.name;
-            gen.number = n;
+        if (auto n = parseName(profileName, i.name)) {
+            auto path = profileDir + "/" + i.name;
             struct stat st;
-            if (lstat(gen.path.c_str(), &st) != 0)
-                throw SysError("statting '%1%'", gen.path);
-            gen.creationTime = st.st_mtime;
-            gens.push_back(gen);
+            if (lstat(path.c_str(), &st) != 0)
+                throw SysError("statting '%1%'", path);
+            gens.push_back({
+                .number = *n,
+                .path = path,
+                .creationTime = st.st_mtime
+            });
         }
     }
 
-    gens.sort(cmpGensByNumber);
+    gens.sort([](const Generation & a, const Generation & b)
+    {
+        return a.number < b.number;
+    });
 
-    curGen = pathExists(profile)
+    return {
+        gens,
+        pathExists(profile)
         ? parseName(profileName, readLink(profile))
-        : -1;
-
-    return gens;
+        : std::nullopt
+    };
 }
 
 
-static void makeName(const Path & profile, unsigned int num,
+static void makeName(const Path & profile, GenerationNumber num,
     Path & outLink)
 {
     Path prefix = (format("%1%-%2%") % profile % num).str();
@@ -78,10 +76,9 @@ Path createGeneration(ref<LocalFSStore> store, Path profile, Path outPath)
 {
     /* The new generation number should be higher than old the
        previous ones. */
-    int dummy;
-    Generations gens = findGenerations(profile, dummy);
+    auto [gens, dummy] = findGenerations(profile);
 
-    unsigned int num;
+    GenerationNumber num;
     if (gens.size() > 0) {
 
         Generation last = gens.back();
@@ -121,7 +118,7 @@ static void removeFile(const Path & path)
 }
 
 
-void deleteGeneration(const Path & profile, unsigned int gen)
+void deleteGeneration(const Path & profile, GenerationNumber gen)
 {
     Path generation;
     makeName(profile, gen, generation);
@@ -129,7 +126,7 @@
 }
 
 
-static void deleteGeneration2(const Path & profile, unsigned int gen, bool dryRun)
+static void deleteGeneration2(const Path & profile, GenerationNumber gen, bool dryRun)
 {
     if (dryRun)
         printInfo(format("would remove generation %1%") % gen);
@@ -140,31 +137,29 @@
     }
 }
 
-void deleteGenerations(const Path & profile, const std::set<unsigned int> & gensToDelete, bool dryRun)
+void deleteGenerations(const Path & profile, const std::set<GenerationNumber> & gensToDelete, bool dryRun)
 {
     PathLocks lock;
     lockProfile(lock, profile);
 
-    int curGen;
-    Generations gens = findGenerations(profile, curGen);
+    auto [gens, curGen] = findGenerations(profile);
 
-    if (gensToDelete.find(curGen) != gensToDelete.end())
+    if (gensToDelete.count(*curGen))
         throw Error("cannot delete current generation of profile %1%'", profile);
 
     for (auto & i : gens) {
-        if (gensToDelete.find(i.number) == gensToDelete.end()) continue;
+        if (!gensToDelete.count(i.number)) continue;
         deleteGeneration2(profile, i.number, dryRun);
     }
 }
 
-void deleteGenerationsGreaterThan(const Path & profile, int max, bool dryRun)
+void deleteGenerationsGreaterThan(const Path & profile, GenerationNumber max, bool dryRun)
 {
     PathLocks lock;
     lockProfile(lock, profile);
 
-    int curGen;
     bool fromCurGen = false;
-    Generations gens = findGenerations(profile, curGen);
+    auto [gens, curGen] = findGenerations(profile);
     for (auto i = gens.rbegin(); i != gens.rend(); ++i) {
         if (i->number == curGen) {
             fromCurGen = true;
@@ -186,8 +181,7 @@ void deleteOldGenerations(const Path & profile, bool dryRun)
     PathLocks lock;
     lockProfile(lock, profile);
 
-    int curGen;
-    Generations gens = findGenerations(profile, curGen);
+    auto [gens, curGen] = findGenerations(profile);
 
     for (auto & i : gens)
         if (i.number != curGen)
@@ -200,8 +194,7 @@ void deleteGenerationsOlderThan(const Path & profile, time_t t, bool dryRun)
     PathLocks lock;
     lockProfile(lock, profile);
 
-    int curGen;
-    Generations gens = findGenerations(profile, curGen);
+    auto [gens, curGen] = findGenerations(profile);
 
     bool canDelete = false;
     for (auto i = gens.rbegin(); i != gens.rend(); ++i)
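profiles.cc now reports "no such generation" through std::optional<GenerationNumber> and returns the generation list together with the current generation as a pair that callers unpack with structured bindings. A compact standalone sketch of the same pattern, assuming only the standard library (the names and the simplistic "current = newest" choice below are illustrative, not Nix's actual logic):

    #include <iostream>
    #include <list>
    #include <optional>
    #include <string>
    #include <utility>

    typedef unsigned int GenerationNumber;

    // Parse "<profile>-<number>-link"; an empty optional means "not a generation link".
    static std::optional<GenerationNumber> parseName(const std::string & profile, const std::string & name)
    {
        if (name.compare(0, profile.size() + 1, profile + "-") != 0) return {};
        auto rest = name.substr(profile.size() + 1);
        auto p = rest.find("-link");
        if (p == std::string::npos) return {};
        try { return std::stoul(rest.substr(0, p)); } catch (...) { return {}; }
    }

    int main()
    {
        std::list<std::string> names { "default-2-link", "default-10-link", "other-1-link" };
        std::list<GenerationNumber> gens;
        for (auto & n : names)
            if (auto g = parseName("default", n)) gens.push_back(*g);
        gens.sort();                                  // numeric order, so 2 < 10

        std::optional<GenerationNumber> current = gens.empty()
            ? std::nullopt
            : std::optional<GenerationNumber>(gens.back());   // pretend the newest is current

        auto result = std::make_pair(gens, current);  // shape of the new findGenerations() result
        auto [list, cur] = result;                    // callers use structured bindings
        std::cout << list.size() << " generations, current: "
                  << (cur ? std::to_string(*cur) : "none") << "\n";
    }

Returning the pair removes the old out-parameter and the -1 sentinel, which is why the Generation default constructor and operator bool disappear from profiles.hh below.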
diff --git a/src/libstore/profiles.hh b/src/libstore/profiles.hh
index 78645d8b6..abe507f0e 100644
--- a/src/libstore/profiles.hh
+++ b/src/libstore/profiles.hh
@@ -9,37 +9,32 @@
 namespace nix {
 
 
+typedef unsigned int GenerationNumber;
+
 struct Generation
 {
-    int number;
+    GenerationNumber number;
     Path path;
     time_t creationTime;
-    Generation()
-    {
-        number = -1;
-    }
-    operator bool() const
-    {
-        return number != -1;
-    }
 };
 
-typedef list<Generation> Generations;
+typedef std::list<Generation> Generations;
 
 
 /* Returns the list of currently present generations for the specified
-   profile, sorted by generation number. */
-Generations findGenerations(Path profile, int & curGen);
+   profile, sorted by generation number. Also returns the number of
+   the current generation. */
+std::pair<Generations, std::optional<GenerationNumber>> findGenerations(Path profile);
 
 class LocalFSStore;
 
 Path createGeneration(ref<LocalFSStore> store, Path profile, Path outPath);
 
-void deleteGeneration(const Path & profile, unsigned int gen);
+void deleteGeneration(const Path & profile, GenerationNumber gen);
 
-void deleteGenerations(const Path & profile, const std::set<unsigned int> & gensToDelete, bool dryRun);
+void deleteGenerations(const Path & profile, const std::set<GenerationNumber> & gensToDelete, bool dryRun);
 
-void deleteGenerationsGreaterThan(const Path & profile, const int max, bool dryRun);
+void deleteGenerationsGreaterThan(const Path & profile, GenerationNumber max, bool dryRun);
 
 void deleteOldGenerations(const Path & profile, bool dryRun);
 
*/ +std::pair<Generations, std::optional<GenerationNumber>> findGenerations(Path profile); class LocalFSStore; Path createGeneration(ref<LocalFSStore> store, Path profile, Path outPath); -void deleteGeneration(const Path & profile, unsigned int gen); +void deleteGeneration(const Path & profile, GenerationNumber gen); -void deleteGenerations(const Path & profile, const std::set<unsigned int> & gensToDelete, bool dryRun); +void deleteGenerations(const Path & profile, const std::set<GenerationNumber> & gensToDelete, bool dryRun); -void deleteGenerationsGreaterThan(const Path & profile, const int max, bool dryRun); +void deleteGenerationsGreaterThan(const Path & profile, GenerationNumber max, bool dryRun); void deleteOldGenerations(const Path & profile, bool dryRun); diff --git a/src/libstore/sqlite.cc b/src/libstore/sqlite.cc index 76c822c4e..31a1f0cac 100644 --- a/src/libstore/sqlite.cc +++ b/src/libstore/sqlite.cc @@ -61,6 +61,11 @@ void SQLite::exec(const std::string & stmt) }); } +uint64_t SQLite::getLastInsertedRowId() +{ + return sqlite3_last_insert_rowid(db); +} + void SQLiteStmt::create(sqlite3 * db, const string & sql) { checkInterrupt(); @@ -95,10 +100,10 @@ SQLiteStmt::Use::~Use() sqlite3_reset(stmt); } -SQLiteStmt::Use & SQLiteStmt::Use::operator () (const std::string & value, bool notNull) +SQLiteStmt::Use & SQLiteStmt::Use::operator () (std::string_view value, bool notNull) { if (notNull) { - if (sqlite3_bind_text(stmt, curArg++, value.c_str(), -1, SQLITE_TRANSIENT) != SQLITE_OK) + if (sqlite3_bind_text(stmt, curArg++, value.data(), -1, SQLITE_TRANSIENT) != SQLITE_OK) throwSQLiteError(stmt.db, "binding argument"); } else bind(); diff --git a/src/libstore/sqlite.hh b/src/libstore/sqlite.hh index dd81ab051..99f0d56ce 100644 --- a/src/libstore/sqlite.hh +++ b/src/libstore/sqlite.hh @@ -26,6 +26,8 @@ struct SQLite void isCache(); void exec(const std::string & stmt); + + uint64_t getLastInsertedRowId(); }; /* RAII wrapper to create and destroy SQLite prepared statements. */ @@ -54,7 +56,7 @@ struct SQLiteStmt ~Use(); /* Bind the next parameter. */ - Use & operator () (const std::string & value, bool notNull = true); + Use & operator () (std::string_view value, bool notNull = true); Use & operator () (const unsigned char * data, size_t len, bool notNull = true); Use & operator () (int64_t value, bool notNull = true); Use & bind(); // null diff --git a/src/libstore/store-api.cc b/src/libstore/store-api.cc index 5b9f79049..6c0a61766 100644 --- a/src/libstore/store-api.cc +++ b/src/libstore/store-api.cc @@ -226,16 +226,41 @@ ValidPathInfo Store::addToStoreSlow(std::string_view name, const Path & srcPath, FileIngestionMethod method, HashType hashAlgo, std::optional<Hash> expectedCAHash) { - /* FIXME: inefficient: we're reading/hashing 'tmpFile' three + /* FIXME: inefficient: we're reading/hashing 'tmpFile' two times. */ + HashSink narHashSink { htSHA256 }; + HashSink caHashSink { hashAlgo }; + RetrieveRegularNARSink fileSink { caHashSink }; - auto [narHash, narSize] = hashPath(htSHA256, srcPath); + TeeSink sinkIfNar { narHashSink, caHashSink }; - auto hash = method == FileIngestionMethod::Recursive - ? hashAlgo == htSHA256 - ? narHash - : hashPath(hashAlgo, srcPath).first - : hashFile(hashAlgo, srcPath); + /* We use the tee sink if we need to hash the nar twice */ + auto & sink = method == FileIngestionMethod::Recursive && hashAlgo != htSHA256 + ? 
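These two files give the SQLite handle wrapper a getLastInsertedRowId() method, which is what the local-store.cc hunk earlier switches to instead of calling sqlite3_last_insert_rowid() on the raw handle. A minimal standalone sketch of that kind of wrapper, using only the public SQLite C API (the Db class is illustrative, not Nix's SQLite struct):

    #include <cstdint>
    #include <iostream>
    #include <sqlite3.h>

    // Tiny RAII-style wrapper around a sqlite3 handle.
    struct Db {
        sqlite3 * db = nullptr;
        Db(const char * path) { sqlite3_open(path, &db); }
        ~Db() { sqlite3_close(db); }
        void exec(const char * sql) { sqlite3_exec(db, sql, nullptr, nullptr, nullptr); }
        uint64_t getLastInsertedRowId() { return sqlite3_last_insert_rowid(db); }
    };

    int main()
    {
        Db db(":memory:");
        db.exec("CREATE TABLE t (x)");
        db.exec("INSERT INTO t VALUES (42)");
        // Same information as a direct sqlite3_last_insert_rowid(db) call,
        // but reached through a method on the wrapper object.
        std::cout << db.getLastInsertedRowId() << "\n";   // prints 1
    }

Keeping the raw sqlite3* private to the wrapper means callers such as LocalStore::addValidPath no longer need to know the underlying C API at all.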
static_cast<Sink &>(sinkIfNar) + : narHashSink; + + auto fileSource = sinkToSource([&](Sink & sink) { + dumpPath(srcPath, sink); + }); + + TeeSource tapped { *fileSource, sink }; + + ParseSink blank; + auto & parseSink = method == FileIngestionMethod::Flat + ? fileSink + : blank; + + parseDump( + parseSink, + method == FileIngestionMethod::Recursive && hashAlgo == htSHA256 + ? *fileSource // don't need to hash twice if we just can use the `narHash` twice + : tapped); + + auto [narHash, narSize] = narHashSink.finish(); + + auto hash = method == FileIngestionMethod::Recursive && hashAlgo == htSHA256 + ? narHash + : caHashSink.finish().first; if (expectedCAHash && expectedCAHash != hash) throw Error("hash mismatch for '%s'", srcPath); |
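addToStoreSlow() now dumps the path once and routes the stream through hash sinks (with a tee when two different digests are needed) instead of re-reading the file for every hash. A minimal standalone sketch of fanning one pass of data out to two accumulating sinks, assuming only the standard library (the toy checksum stands in for Nix's HashSink; none of this is the real implementation):

    #include <cstdint>
    #include <iostream>
    #include <string>

    // Minimal Sink interface plus a tee that forwards every write to two sinks.
    struct Sink { virtual void operator()(const char * data, size_t len) = 0; virtual ~Sink() = default; };

    struct ChecksumSink : Sink {
        uint64_t sum = 0, size = 0;
        void operator()(const char * data, size_t len) override {
            for (size_t i = 0; i < len; ++i) sum = sum * 31 + (unsigned char) data[i];
            size += len;
        }
    };

    struct TeeSink : Sink {
        Sink & a; Sink & b;
        TeeSink(Sink & a, Sink & b) : a(a), b(b) { }
        void operator()(const char * data, size_t len) override { a(data, len); b(data, len); }
    };

    int main()
    {
        ChecksumSink narHashSink, caHashSink;
        TeeSink both { narHashSink, caHashSink };

        std::string dump = "pretend this is a NAR dump";
        // A single pass over the data feeds both "hashes", like the one parseDump() pass above.
        both(dump.data(), dump.size());

        std::cout << narHashSink.sum << " " << caHashSink.sum
                  << " (" << narHashSink.size << " bytes)\n";
    }

When the recursive method is used with SHA-256, one digest already serves both purposes, which is why the hunk above skips the tee entirely in that case.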