author | Eelco Dolstra <e.dolstra@tudelft.nl> | 2011-12-16 23:33:01 +0000
---|---|---
committer | Eelco Dolstra <e.dolstra@tudelft.nl> | 2011-12-16 23:33:01 +0000
commit | 194d21f9f63ceb034f3e8294f89aa6bf6a217bc9 (patch) |
tree | 7eb6da5955482a82f4d34b60dcb10514a4a55f59 /src |
parent | 3c3107da86ff71a08ce44027ee5899acf486796a (diff) |
parent | 273b288a7e862ac1918064537ff130cc751fa9fd (diff) |
* Sync with the trunk.
Diffstat (limited to 'src')
-rw-r--r-- | src/libexpr/common-opts.cc | 11
-rw-r--r-- | src/libexpr/common-opts.hh | 2
-rw-r--r-- | src/libexpr/primops.cc | 17
-rw-r--r-- | src/libmain/Makefile.am | 7
-rw-r--r-- | src/libmain/shared.cc | 51
-rw-r--r-- | src/libstore/Makefile.am | 11
-rw-r--r-- | src/libstore/build.cc | 24
-rw-r--r-- | src/libstore/globals.cc | 30
-rw-r--r-- | src/libstore/globals.hh | 4
-rw-r--r-- | src/libstore/local-store.cc | 66
-rw-r--r-- | src/libstore/local-store.hh | 4
-rw-r--r-- | src/libstore/references.cc | 4
-rw-r--r-- | src/libstore/remote-store.cc | 51
-rw-r--r-- | src/libstore/remote-store.hh | 2
-rw-r--r-- | src/libstore/store-api.cc | 11
-rw-r--r-- | src/libstore/store-api.hh | 16
-rw-r--r-- | src/libstore/worker-protocol.hh | 8
-rw-r--r-- | src/libutil/hash.cc | 38
-rw-r--r-- | src/libutil/hash.hh | 8
-rw-r--r-- | src/libutil/serialise.cc | 144
-rw-r--r-- | src/libutil/serialise.hh | 106
-rw-r--r-- | src/nix-env/nix-env.cc | 2
-rw-r--r-- | src/nix-hash/nix-hash.cc | 6
-rw-r--r-- | src/nix-instantiate/nix-instantiate.cc | 3
-rw-r--r-- | src/nix-store/nix-store.cc | 24
-rw-r--r-- | src/nix-worker/nix-worker.cc | 86
26 files changed, 461 insertions, 275 deletions
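Most of this sync is a rework of the serialisation layer in src/libutil/serialise.{cc,hh}: Sink and Source switch from unsigned int to size_t lengths, Source gains a read() method that may return fewer bytes than requested, and new BufferedSink/BufferedSource base classes (which FdSink/FdSource now derive from) buffer I/O, so call sites such as RemoteStore and nix-worker have to call flush() explicitly. A minimal standalone sketch of that buffering pattern follows; the Sink/BufferedSink names and the flush/write split mirror the diff below, but StdoutSink and the main() driver are illustrative additions, not part of the patch.

```cpp
#include <cstddef>
#include <cstdio>
#include <cstring>
#include <vector>

/* Abstract consumer of byte ranges, as in src/libutil/serialise.hh. */
struct Sink
{
    virtual ~Sink() { }
    virtual void operator () (const unsigned char * data, size_t len) = 0;
};

/* Collect small writes in a buffer and hand them to write() in larger
   chunks; large writes bypass the buffer entirely. */
struct BufferedSink : Sink
{
    size_t bufSize, bufPos;
    std::vector<unsigned char> buffer;

    BufferedSink(size_t size = 32 * 1024)
        : bufSize(size), bufPos(0), buffer(size) { }

    void operator () (const unsigned char * data, size_t len)
    {
        /* If the data doesn't fit in the buffer, flush what we have and
           write the rest unbuffered; otherwise just append it. */
        if (bufPos + len >= bufSize) {
            flush();
            write(data, len);
        } else {
            memcpy(&buffer[bufPos], data, len);
            bufPos += len;
        }
    }

    void flush()
    {
        if (bufPos == 0) return;
        size_t n = bufPos;
        bufPos = 0;
        write(&buffer[0], n);
    }

    /* Underlying unbuffered write, e.g. writeFull(fd, ...) in FdSink. */
    virtual void write(const unsigned char * data, size_t len) = 0;
};

/* Illustrative concrete sink (not in the patch): writes to stdout. */
struct StdoutSink : BufferedSink
{
    /* Flush from the most-derived destructor, as FdSink does; the base
       destructor cannot safely call the virtual write(). */
    ~StdoutSink() { flush(); }

    void write(const unsigned char * data, size_t len)
    {
        fwrite(data, 1, len, stdout);
    }
};

int main()
{
    StdoutSink sink;
    const char * msg = "hello, buffered world\n";
    sink((const unsigned char *) msg, strlen(msg));
    sink.flush();   /* call sites must flush, as RemoteStore now does */
    return 0;
}
```

FdSink flushes from its own destructor for the same reason, and the to.flush() calls added throughout remote-store.cc and nix-worker.cc exist because writes now sit in this buffer until flushed.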
diff --git a/src/libexpr/common-opts.cc b/src/libexpr/common-opts.cc index d029d2ec3..e0865d9fc 100644 --- a/src/libexpr/common-opts.cc +++ b/src/libexpr/common-opts.cc @@ -44,4 +44,15 @@ bool parseSearchPathArg(const string & arg, Strings::iterator & i, } +Path lookupFileArg(EvalState & state, string s) +{ + if (s.size() > 2 && s.at(0) == '<' && s.at(s.size() - 1) == '>') { + Path p = state.findFile(s.substr(1, s.size() - 2)); + if (p == "") throw Error(format("file `%1%' was not found in the Nix search path (add it using $NIX_PATH or -I)") % p); + return p; + } else + return absPath(s); +} + + } diff --git a/src/libexpr/common-opts.hh b/src/libexpr/common-opts.hh index 6b7247fc3..c28641e90 100644 --- a/src/libexpr/common-opts.hh +++ b/src/libexpr/common-opts.hh @@ -14,6 +14,8 @@ bool parseOptionArg(const string & arg, Strings::iterator & i, bool parseSearchPathArg(const string & arg, Strings::iterator & i, const Strings::iterator & argsEnd, EvalState & state); +Path lookupFileArg(EvalState & state, string s); + } diff --git a/src/libexpr/primops.cc b/src/libexpr/primops.cc index b11562baa..dc361c043 100644 --- a/src/libexpr/primops.cc +++ b/src/libexpr/primops.cc @@ -363,9 +363,8 @@ static void prim_derivationStrict(EvalState & state, Value * * args, Value & v) foreach (PathSet::iterator, j, refs) { drv.inputSrcs.insert(*j); if (isDerivation(*j)) - drv.inputDrvs[*j] = store -> queryDerivationOutputNames(*j); + drv.inputDrvs[*j] = store->queryDerivationOutputNames(*j); } - explicitlyPassed = true; } else if (path.at(0) == '!') { size_t index; @@ -387,7 +386,7 @@ static void prim_derivationStrict(EvalState & state, Value * * args, Value & v) debug(format("derivation uses `%1%'") % path); if (!useDrvAsSrc && isDerivation(path)) if (explicitlyPassed) - drv.inputDrvs[path] = store -> queryDerivationOutputNames(path); + drv.inputDrvs[path] = store->queryDerivationOutputNames(path); else if (drv.inputDrvs.find(path) == drv.inputDrvs.end()) drv.inputDrvs[path] = singleton<StringSet>(output); else @@ -416,17 +415,7 @@ static void prim_derivationStrict(EvalState & state, Value * * args, Value & v) HashType ht = parseHashType(outputHashAlgo); if (ht == htUnknown) throw EvalError(format("unknown hash algorithm `%1%'") % outputHashAlgo); - Hash h(ht); - if (outputHash.size() == h.hashSize * 2) - /* hexadecimal representation */ - h = parseHash(ht, outputHash); - else if (outputHash.size() == hashLength32(h)) - /* base-32 representation */ - h = parseHash32(ht, outputHash); - else - throw Error(format("hash `%1%' has wrong length for hash type `%2%'") - % outputHash % outputHashAlgo); - string s = outputHash; + Hash h = parseHash16or32(ht, outputHash); outputHash = printHash(h); if (outputHashRecursive) outputHashAlgo = "r:" + outputHashAlgo; diff --git a/src/libmain/Makefile.am b/src/libmain/Makefile.am index ababc3595..404353c62 100644 --- a/src/libmain/Makefile.am +++ b/src/libmain/Makefile.am @@ -7,13 +7,6 @@ libmain_la_LIBADD = ../libstore/libstore.la @BDW_GC_LIBS@ pkginclude_HEADERS = shared.hh AM_CXXFLAGS = \ - -DNIX_STORE_DIR=\"$(storedir)\" \ - -DNIX_DATA_DIR=\"$(datadir)\" \ - -DNIX_STATE_DIR=\"$(localstatedir)/nix\" \ - -DNIX_LOG_DIR=\"$(localstatedir)/log/nix\" \ - -DNIX_CONF_DIR=\"$(sysconfdir)/nix\" \ - -DNIX_LIBEXEC_DIR=\"$(libexecdir)\" \ - -DNIX_BIN_DIR=\"$(bindir)\" \ -DNIX_VERSION=\"$(VERSION)\" \ -I$(srcdir)/.. 
-I$(srcdir)/../libutil \ -I$(srcdir)/../libstore diff --git a/src/libmain/shared.cc b/src/libmain/shared.cc index 8532cdaad..d3b73f8fd 100644 --- a/src/libmain/shared.cc +++ b/src/libmain/shared.cc @@ -65,7 +65,7 @@ void printMissing(StoreAPI & store, const PathSet & paths) } if (!willSubstitute.empty()) { - printMsg(lvlInfo, format("these paths will be downloaded/copied (%.2f MiB download, %.2f MiB unpacked):") + printMsg(lvlInfo, format("these paths will be fetched (%.2f MiB download, %.2f MiB unpacked):") % (downloadSize / (1024.0 * 1024.0)) % (narSize / (1024.0 * 1024.0))); foreach (PathSet::iterator, i, willSubstitute) @@ -90,23 +90,6 @@ static void setLogType(string lt) } -static void closeStore() -{ - try { - throw; - } catch (std::exception & e) { - printMsg(lvlError, - format("FATAL: unexpected exception (closing store and aborting): %1%") % e.what()); - } - try { - store.reset((StoreAPI *) 0); - } catch (...) { - ignoreException(); - } - abort(); -} - - RemoveTempRoots::~RemoveTempRoots() { removeTempRoots(); @@ -120,30 +103,8 @@ static bool showTrace = false; processor. */ static void initAndRun(int argc, char * * argv) { - /* Setup Nix paths. */ - nixStore = canonPath(getEnv("NIX_STORE_DIR", getEnv("NIX_STORE", NIX_STORE_DIR))); - nixDataDir = canonPath(getEnv("NIX_DATA_DIR", NIX_DATA_DIR)); - nixLogDir = canonPath(getEnv("NIX_LOG_DIR", NIX_LOG_DIR)); - nixStateDir = canonPath(getEnv("NIX_STATE_DIR", NIX_STATE_DIR)); - nixDBPath = getEnv("NIX_DB_DIR", nixStateDir + "/db"); - nixConfDir = canonPath(getEnv("NIX_CONF_DIR", NIX_CONF_DIR)); - nixLibexecDir = canonPath(getEnv("NIX_LIBEXEC_DIR", NIX_LIBEXEC_DIR)); - nixBinDir = canonPath(getEnv("NIX_BIN_DIR", NIX_BIN_DIR)); - - string subs = getEnv("NIX_SUBSTITUTERS", "default"); - if (subs == "default") { - substituters.push_back(nixLibexecDir + "/nix/substituters/copy-from-other-stores.pl"); - substituters.push_back(nixLibexecDir + "/nix/substituters/download-using-manifests.pl"); - } else - substituters = tokenizeString(subs, ":"); - - /* Get some settings from the configuration file. */ - thisSystem = querySetting("system", SYSTEM); - maxBuildJobs = queryIntSetting("build-max-jobs", 1); - buildCores = queryIntSetting("build-cores", 1); - maxSilentTime = queryIntSetting("build-max-silent-time", 0); - buildTimeout = queryIntSetting("build-timeout", 0); - + setDefaultsFromEnvironment(); + /* Catch SIGINT. */ struct sigaction act; act.sa_handler = sigintHandler; @@ -260,12 +221,6 @@ static void initAndRun(int argc, char * * argv) exit. */ RemoveTempRoots removeTempRoots __attribute__((unused)); - /* Make sure that the database gets closed properly, even if - terminate() is called (which happens sometimes due to bugs in - destructor/exceptions interaction, but that needn't preclude a - clean shutdown of the database). */ - std::set_terminate(closeStore); - run(remaining); /* Close the Nix database. */ diff --git a/src/libstore/Makefile.am b/src/libstore/Makefile.am index e19256b92..39a61233b 100644 --- a/src/libstore/Makefile.am +++ b/src/libstore/Makefile.am @@ -15,7 +15,16 @@ libstore_la_LIBADD = ../libutil/libutil.la ../boost/format/libformat.la ${aterm_ EXTRA_DIST = schema.sql AM_CXXFLAGS = -Wall \ - ${sqlite_include} -I$(srcdir)/.. -I$(srcdir)/../libutil + ${sqlite_include} -I$(srcdir)/.. 
-I$(srcdir)/../libutil \ + -DNIX_STORE_DIR=\"$(storedir)\" \ + -DNIX_DATA_DIR=\"$(datadir)\" \ + -DNIX_STATE_DIR=\"$(localstatedir)/nix\" \ + -DNIX_LOG_DIR=\"$(localstatedir)/log/nix\" \ + -DNIX_CONF_DIR=\"$(sysconfdir)/nix\" \ + -DNIX_LIBEXEC_DIR=\"$(libexecdir)\" \ + -DNIX_BIN_DIR=\"$(bindir)\" \ + -I$(srcdir)/.. -I$(srcdir)/../libutil \ + -I$(srcdir)/../libstore local-store.lo: schema.sql.hh diff --git a/src/libstore/build.cc b/src/libstore/build.cc index d12f41d66..a8ef9b23e 100644 --- a/src/libstore/build.cc +++ b/src/libstore/build.cc @@ -1650,6 +1650,9 @@ void DerivationGoal::startBuilder() (format("nixbld:!:%1%:\n") % (buildUser.enabled() ? buildUser.getGID() : getgid())).str()); + /* Create /etc/hosts with localhost entry. */ + writeFile(chrootRootDir + "/etc/hosts", "127.0.0.1 localhost\n"); + /* Bind-mount a user-configurable set of directories from the host file system. The `/dev/pts' directory must be mounted separately so that newly-created pseudo-terminals show @@ -2199,9 +2202,7 @@ void SubstitutionGoal::tryNext() if (subs.size() == 0) { /* None left. Terminate this goal and let someone else deal with it. */ - printMsg(lvlError, - format("path `%1%' is required, but there is no substituter that can build it") - % storePath); + debug(format("path `%1%' is required, but there is no substituter that can build it") % storePath); amDone(ecFailed); return; } @@ -2232,8 +2233,7 @@ void SubstitutionGoal::referencesValid() trace("all references realised"); if (nrFailed > 0) { - printMsg(lvlError, - format("some references of path `%1%' could not be realised") % storePath); + debug(format("some references of path `%1%' could not be realised") % storePath); amDone(ecFailed); return; } @@ -2286,9 +2286,7 @@ void SubstitutionGoal::tryToRun() return; } - printMsg(lvlInfo, - format("substituting path `%1%' using substituter `%2%'") - % storePath % sub); + printMsg(lvlInfo, format("fetching path `%1%'...") % storePath); logPipe.create(); @@ -2364,19 +2362,15 @@ void SubstitutionGoal::finished() try { if (!statusOk(status)) - throw SubstError(format("builder for `%1%' %2%") + throw SubstError(format("fetching path `%1%' %2%") % storePath % statusToString(status)); if (!pathExists(storePath)) - throw SubstError( - format("substitute did not produce path `%1%'") - % storePath); + throw SubstError(format("substitute did not produce path `%1%'") % storePath); } catch (SubstError & e) { - printMsg(lvlInfo, - format("substitution of path `%1%' using substituter `%2%' failed: %3%") - % storePath % sub % e.msg()); + printMsg(lvlInfo, e.msg()); if (printBuildTrace) { printMsg(lvlError, format("@ substituter-failed %1% %2% %3%") diff --git a/src/libstore/globals.cc b/src/libstore/globals.cc index 2e9dc8823..5c22f1406 100644 --- a/src/libstore/globals.cc +++ b/src/libstore/globals.cc @@ -1,3 +1,5 @@ +#include "config.h" + #include "globals.hh" #include "util.hh" @@ -138,5 +140,33 @@ void reloadSettings() settings.clear(); } + +void setDefaultsFromEnvironment() +{ + /* Setup Nix paths. 
*/ + nixStore = canonPath(getEnv("NIX_STORE_DIR", getEnv("NIX_STORE", NIX_STORE_DIR))); + nixDataDir = canonPath(getEnv("NIX_DATA_DIR", NIX_DATA_DIR)); + nixLogDir = canonPath(getEnv("NIX_LOG_DIR", NIX_LOG_DIR)); + nixStateDir = canonPath(getEnv("NIX_STATE_DIR", NIX_STATE_DIR)); + nixDBPath = getEnv("NIX_DB_DIR", nixStateDir + "/db"); + nixConfDir = canonPath(getEnv("NIX_CONF_DIR", NIX_CONF_DIR)); + nixLibexecDir = canonPath(getEnv("NIX_LIBEXEC_DIR", NIX_LIBEXEC_DIR)); + nixBinDir = canonPath(getEnv("NIX_BIN_DIR", NIX_BIN_DIR)); + + string subs = getEnv("NIX_SUBSTITUTERS", "default"); + if (subs == "default") { + substituters.push_back(nixLibexecDir + "/nix/substituters/copy-from-other-stores.pl"); + substituters.push_back(nixLibexecDir + "/nix/substituters/download-using-manifests.pl"); + } else + substituters = tokenizeString(subs, ":"); + + /* Get some settings from the configuration file. */ + thisSystem = querySetting("system", SYSTEM); + maxBuildJobs = queryIntSetting("build-max-jobs", 1); + buildCores = queryIntSetting("build-cores", 1); + maxSilentTime = queryIntSetting("build-max-silent-time", 0); + buildTimeout = queryIntSetting("build-timeout", 0); +} + } diff --git a/src/libstore/globals.hh b/src/libstore/globals.hh index 231c1f850..12a9b9ca1 100644 --- a/src/libstore/globals.hh +++ b/src/libstore/globals.hh @@ -114,7 +114,9 @@ void overrideSetting(const string & name, const Strings & value); void reloadSettings(); - +void setDefaultsFromEnvironment(); + + } diff --git a/src/libstore/local-store.cc b/src/libstore/local-store.cc index 8ca5daa9f..29817df9d 100644 --- a/src/libstore/local-store.cc +++ b/src/libstore/local-store.cc @@ -327,10 +327,9 @@ void LocalStore::openDB(bool create) if (sqlite3_exec(db, ("pragma synchronous = " + syncMode + ";").c_str(), 0, 0, 0) != SQLITE_OK) throwSQLiteError(db, "setting synchronous mode"); - /* Set the SQLite journal mode. WAL mode is fastest, but doesn't - seem entirely stable at the moment (Oct. 2010). Thus, use - truncate mode by default. */ - string mode = queryBoolSetting("use-sqlite-wal", false) ? "wal" : "truncate"; + /* Set the SQLite journal mode. WAL mode is fastest, so it's the + default. */ + string mode = queryBoolSetting("use-sqlite-wal", true) ? "wal" : "truncate"; string prevMode; { SQLiteStmt stmt; @@ -367,7 +366,7 @@ void LocalStore::openDB(bool create) stmtRegisterValidPath.create(db, "insert into ValidPaths (path, hash, registrationTime, deriver, narSize) values (?, ?, ?, ?, ?);"); stmtUpdatePathInfo.create(db, - "update ValidPaths set narSize = ? where path = ?;"); + "update ValidPaths set narSize = ?, hash = ? where path = ?;"); stmtAddReference.create(db, "insert or replace into Refs (referrer, reference) values (?, ?);"); stmtQueryPathInfo.create(db, @@ -684,7 +683,7 @@ ValidPathInfo LocalStore::queryPathInfo(const Path & path) } -/* Update path info in the database. Currently only updated the +/* Update path info in the database. Currently only updates the narSize field. 
*/ void LocalStore::updatePathInfo(const ValidPathInfo & info) { @@ -693,6 +692,7 @@ void LocalStore::updatePathInfo(const ValidPathInfo & info) stmtUpdatePathInfo.bind64(info.narSize); else stmtUpdatePathInfo.bind(); // null + stmtUpdatePathInfo.bind("sha256:" + printHash(info.hash)); stmtUpdatePathInfo.bind(info.path); if (sqlite3_step(stmtUpdatePathInfo) != SQLITE_DONE) throwSQLiteError(db, format("updating info of path `%1%' in database") % info.path); @@ -1125,16 +1125,14 @@ struct HashAndWriteSink : Sink HashAndWriteSink(Sink & writeSink) : writeSink(writeSink), hashSink(htSHA256) { } - virtual void operator () - (const unsigned char * data, unsigned int len) + virtual void operator () (const unsigned char * data, size_t len) { writeSink(data, len); hashSink(data, len); } Hash currentHash() { - HashSink hashSinkClone(hashSink); - return hashSinkClone.finish().first; + return hashSink.currentHash().first; } }; @@ -1180,7 +1178,7 @@ void LocalStore::exportPath(const Path & path, bool sign, PathSet references; queryReferences(path, references); - writeStringSet(references, hashAndWriteSink); + writeStrings(references, hashAndWriteSink); Path deriver = queryDeriver(path); writeString(deriver, hashAndWriteSink); @@ -1223,11 +1221,11 @@ struct HashAndReadSource : Source { hashing = true; } - virtual void operator () - (unsigned char * data, unsigned int len) + size_t read(unsigned char * data, size_t len) { - readSource(data, len); - if (hashing) hashSink(data, len); + size_t n = readSource.read(data, len); + if (hashing) hashSink(data, n); + return n; } }; @@ -1267,7 +1265,7 @@ Path LocalStore::importPath(bool requireSignature, Source & source) Path dstPath = readStorePath(hashAndReadSource); - PathSet references = readStorePaths(hashAndReadSource); + PathSet references = readStorePaths<PathSet>(hashAndReadSource); Path deriver = readString(hashAndReadSource); if (deriver != "") assertStorePath(deriver); @@ -1278,7 +1276,7 @@ Path LocalStore::importPath(bool requireSignature, Source & source) bool haveSignature = readInt(hashAndReadSource) == 1; if (requireSignature && !haveSignature) - throw Error("imported archive lacks a signature"); + throw Error(format("imported archive of `%1%' lacks a signature") % dstPath); if (haveSignature) { string signature = readString(hashAndReadSource); @@ -1354,6 +1352,19 @@ Path LocalStore::importPath(bool requireSignature, Source & source) } +Paths LocalStore::importPaths(bool requireSignature, Source & source) +{ + Paths res; + while (true) { + unsigned long long n = readLongLong(source); + if (n == 0) break; + if (n != 1) throw Error("input doesn't look like something created by `nix-store --export'"); + res.push_back(importPath(requireSignature, source)); + } + return res; +} + + void LocalStore::deleteFromStore(const Path & path, unsigned long long & bytesFreed, unsigned long long & blocksFreed) { @@ -1369,7 +1380,7 @@ void LocalStore::deleteFromStore(const Path & path, unsigned long long & bytesFr PathSet referrers; queryReferrers(path, referrers); referrers.erase(path); /* ignore self-references */ if (!referrers.empty()) - throw PathInUse(format("cannot delete path `%1%' because it is in use by `%2%'") + throw PathInUse(format("cannot delete path `%1%' because it is in use by %2%") % path % showPaths(referrers)); invalidatePath(path); } @@ -1409,6 +1420,8 @@ void LocalStore::verifyStore(bool checkContents) if (checkContents) { printMsg(lvlInfo, "checking hashes..."); + Hash nullHash(htSHA256); + foreach (PathSet::iterator, i, validPaths) { try { 
ValidPathInfo info = queryPathInfo(*i); @@ -1417,17 +1430,30 @@ void LocalStore::verifyStore(bool checkContents) printMsg(lvlTalkative, format("checking contents of `%1%'") % *i); HashResult current = hashPath(info.hash.type, *i); - if (current.first != info.hash) { + if (info.hash != nullHash && info.hash != current.first) { printMsg(lvlError, format("path `%1%' was modified! " "expected hash `%2%', got `%3%'") % *i % printHash(info.hash) % printHash(current.first)); } else { + + bool update = false; + + /* Fill in missing hashes. */ + if (info.hash == nullHash) { + printMsg(lvlError, format("fixing missing hash on `%1%'") % *i); + info.hash = current.first; + update = true; + } + /* Fill in missing narSize fields (from old stores). */ if (info.narSize == 0) { printMsg(lvlError, format("updating size field on `%1%' to %2%") % *i % current.second); info.narSize = current.second; - updatePathInfo(info); + update = true; } + + if (update) updatePathInfo(info); + } } catch (Error & e) { diff --git a/src/libstore/local-store.hh b/src/libstore/local-store.hh index b97e2f406..2739c4eea 100644 --- a/src/libstore/local-store.hh +++ b/src/libstore/local-store.hh @@ -148,7 +148,7 @@ public: void exportPath(const Path & path, bool sign, Sink & sink); - Path importPath(bool requireSignature, Source & source); + Paths importPaths(bool requireSignature, Source & source); void buildDerivations(const PathSet & drvPaths); @@ -261,6 +261,8 @@ private: Path createTempDirInStore(); + Path importPath(bool requireSignature, Source & source); + void checkDerivationOutputs(const Path & drvPath, const Derivation & drv); }; diff --git a/src/libstore/references.cc b/src/libstore/references.cc index ade9c9aa2..c1f9e3ba7 100644 --- a/src/libstore/references.cc +++ b/src/libstore/references.cc @@ -57,11 +57,11 @@ struct RefScanSink : Sink RefScanSink() : hashSink(htSHA256) { } - void operator () (const unsigned char * data, unsigned int len); + void operator () (const unsigned char * data, size_t len); }; -void RefScanSink::operator () (const unsigned char * data, unsigned int len) +void RefScanSink::operator () (const unsigned char * data, size_t len) { hashSink(data, len); diff --git a/src/libstore/remote-store.cc b/src/libstore/remote-store.cc index 84c87246f..ce99c205e 100644 --- a/src/libstore/remote-store.cc +++ b/src/libstore/remote-store.cc @@ -27,13 +27,15 @@ Path readStorePath(Source & from) } -PathSet readStorePaths(Source & from) +template<class T> T readStorePaths(Source & from) { - PathSet paths = readStringSet(from); - foreach (PathSet::iterator, i, paths) assertStorePath(*i); + T paths = readStrings<T>(from); + foreach (typename T::iterator, i, paths) assertStorePath(*i); return paths; } +template PathSet readStorePaths(Source & from); + RemoteStore::RemoteStore() { @@ -65,6 +67,7 @@ void RemoteStore::openConnection() /* Send the magic greeting, check for the reply. 
*/ try { writeInt(WORKER_MAGIC_1, to); + to.flush(); unsigned int magic = readInt(from); if (magic != WORKER_MAGIC_2) throw Error("protocol mismatch"); @@ -166,6 +169,7 @@ void RemoteStore::connectToDaemon() RemoteStore::~RemoteStore() { try { + to.flush(); fdSocket.close(); if (child != -1) child.wait(true); @@ -213,7 +217,7 @@ PathSet RemoteStore::queryValidPaths() openConnection(); writeInt(wopQueryValidPaths, to); processStderr(); - return readStorePaths(from); + return readStorePaths<PathSet>(from); } @@ -240,7 +244,7 @@ bool RemoteStore::querySubstitutablePathInfo(const Path & path, if (reply == 0) return false; info.deriver = readString(from); if (info.deriver != "") assertStorePath(info.deriver); - info.references = readStorePaths(from); + info.references = readStorePaths<PathSet>(from); info.downloadSize = readLongLong(from); info.narSize = GET_PROTOCOL_MINOR(daemonVersion) >= 7 ? readLongLong(from) : 0; return true; @@ -258,7 +262,7 @@ ValidPathInfo RemoteStore::queryPathInfo(const Path & path) info.deriver = readString(from); if (info.deriver != "") assertStorePath(info.deriver); info.hash = parseHash(htSHA256, readString(from)); - info.references = readStorePaths(from); + info.references = readStorePaths<PathSet>(from); info.registrationTime = readInt(from); info.narSize = readLongLong(from); return info; @@ -283,7 +287,7 @@ void RemoteStore::queryReferences(const Path & path, writeInt(wopQueryReferences, to); writeString(path, to); processStderr(); - PathSet references2 = readStorePaths(from); + PathSet references2 = readStorePaths<PathSet>(from); references.insert(references2.begin(), references2.end()); } @@ -295,7 +299,7 @@ void RemoteStore::queryReferrers(const Path & path, writeInt(wopQueryReferrers, to); writeString(path, to); processStderr(); - PathSet referrers2 = readStorePaths(from); + PathSet referrers2 = readStorePaths<PathSet>(from); referrers.insert(referrers2.begin(), referrers2.end()); } @@ -318,7 +322,7 @@ PathSet RemoteStore::queryDerivationOutputs(const Path & path) writeInt(wopQueryDerivationOutputs, to); writeString(path, to); processStderr(); - return readStorePaths(from); + return readStorePaths<PathSet>(from); } @@ -338,7 +342,7 @@ Path RemoteStore::addToStore(const Path & _srcPath, openConnection(); Path srcPath(absPath(_srcPath)); - + writeInt(wopAddToStore, to); writeString(baseNameOf(srcPath), to); /* backwards compatibility hack */ @@ -358,7 +362,7 @@ Path RemoteStore::addTextToStore(const string & name, const string & s, writeInt(wopAddTextToStore, to); writeString(name, to); writeString(s, to); - writeStringSet(references, to); + writeStrings(references, to); processStderr(); return readStorePath(from); @@ -377,14 +381,14 @@ void RemoteStore::exportPath(const Path & path, bool sign, } -Path RemoteStore::importPath(bool requireSignature, Source & source) +Paths RemoteStore::importPaths(bool requireSignature, Source & source) { openConnection(); - writeInt(wopImportPath, to); + writeInt(wopImportPaths, to); /* We ignore requireSignature, since the worker forces it to true - anyway. */ + anyway. 
*/ processStderr(0, &source); - return readStorePath(from); + return readStorePaths<Paths>(from); } @@ -392,7 +396,7 @@ void RemoteStore::buildDerivations(const PathSet & drvPaths) { openConnection(); writeInt(wopBuildDerivations, to); - writeStringSet(drvPaths, to); + writeStrings(drvPaths, to); processStderr(); readInt(from); } @@ -459,7 +463,7 @@ void RemoteStore::collectGarbage(const GCOptions & options, GCResults & results) writeInt(wopCollectGarbage, to); writeInt(options.action, to); - writeStringSet(options.pathsToDelete, to); + writeStrings(options.pathsToDelete, to); writeInt(options.ignoreLiveness, to); writeLongLong(options.maxFreed, to); writeInt(options.maxLinks, to); @@ -471,7 +475,7 @@ void RemoteStore::collectGarbage(const GCOptions & options, GCResults & results) processStderr(); - results.paths = readStringSet(from); + results.paths = readStrings<PathSet>(from); results.bytesFreed = readLongLong(from); results.blocksFreed = readLongLong(from); } @@ -482,7 +486,7 @@ PathSet RemoteStore::queryFailedPaths() openConnection(); writeInt(wopQueryFailedPaths, to); processStderr(); - return readStorePaths(from); + return readStorePaths<PathSet>(from); } @@ -490,7 +494,7 @@ void RemoteStore::clearFailedPaths(const PathSet & paths) { openConnection(); writeInt(wopClearFailedPaths, to); - writeStringSet(paths, to); + writeStrings(paths, to); processStderr(); readInt(from); } @@ -498,6 +502,7 @@ void RemoteStore::clearFailedPaths(const PathSet & paths) void RemoteStore::processStderr(Sink * sink, Source * source) { + to.flush(); unsigned int msg; while ((msg = readInt(from)) == STDERR_NEXT || msg == STDERR_READ || msg == STDERR_WRITE) { @@ -508,11 +513,11 @@ void RemoteStore::processStderr(Sink * sink, Source * source) } else if (msg == STDERR_READ) { if (!source) throw Error("no source"); - unsigned int len = readInt(from); + size_t len = readInt(from); unsigned char * buf = new unsigned char[len]; AutoDeleteArray<unsigned char> d(buf); - (*source)(buf, len); - writeString(string((const char *) buf, len), to); + writeString(buf, source->read(buf, len), to); + to.flush(); } else { string s = readString(from); diff --git a/src/libstore/remote-store.hh b/src/libstore/remote-store.hh index 3be9e315a..c5853ef53 100644 --- a/src/libstore/remote-store.hh +++ b/src/libstore/remote-store.hh @@ -58,7 +58,7 @@ public: void exportPath(const Path & path, bool sign, Sink & sink); - Path importPath(bool requireSignature, Source & source); + Paths importPaths(bool requireSignature, Source & source); void buildDerivations(const PathSet & drvPaths); diff --git a/src/libstore/store-api.cc b/src/libstore/store-api.cc index d67ff2c77..36ade2170 100644 --- a/src/libstore/store-api.cc +++ b/src/libstore/store-api.cc @@ -298,6 +298,17 @@ string showPaths(const PathSet & paths) } +void exportPaths(StoreAPI & store, const Paths & paths, + bool sign, Sink & sink) +{ + foreach (Paths::const_iterator, i, paths) { + writeInt(1, sink); + store.exportPath(*i, sign, sink); + } + writeInt(0, sink); +} + + } diff --git a/src/libstore/store-api.hh b/src/libstore/store-api.hh index 038465749..14890f522 100644 --- a/src/libstore/store-api.hh +++ b/src/libstore/store-api.hh @@ -154,9 +154,7 @@ public: /* Copy the contents of a path to the store and register the validity the resulting path. The resulting path is returned. - If `fixed' is true, then the output of a fixed-output - derivation is pre-loaded into the Nix store. 
The function - object `filter' can be used to exclude files (see + The function object `filter' can be used to exclude files (see libutil/archive.hh). */ virtual Path addToStore(const Path & srcPath, bool recursive = true, HashType hashAlgo = htSHA256, @@ -174,9 +172,9 @@ public: virtual void exportPath(const Path & path, bool sign, Sink & sink) = 0; - /* Import a NAR dump created by exportPath() into the Nix - store. */ - virtual Path importPath(bool requireSignature, Source & source) = 0; + /* Import a sequence of NAR dumps created by exportPaths() into + the Nix store. */ + virtual Paths importPaths(bool requireSignature, Source & source) = 0; /* Ensure that the output paths of the derivation are valid. If they are already valid, this is a no-op. Otherwise, validity @@ -345,6 +343,12 @@ ValidPathInfo decodeValidPathInfo(std::istream & str, bool hashGiven = false); +/* Export multiple paths in the format expected by ‘nix-store + --import’. */ +void exportPaths(StoreAPI & store, const Paths & paths, + bool sign, Sink & sink); + + } diff --git a/src/libstore/worker-protocol.hh b/src/libstore/worker-protocol.hh index 576e754d2..ef1e0993d 100644 --- a/src/libstore/worker-protocol.hh +++ b/src/libstore/worker-protocol.hh @@ -8,7 +8,7 @@ namespace nix { #define WORKER_MAGIC_1 0x6e697863 #define WORKER_MAGIC_2 0x6478696f -#define PROTOCOL_VERSION 0x108 +#define PROTOCOL_VERSION 0x109 #define GET_PROTOCOL_MAJOR(x) ((x) & 0xff00) #define GET_PROTOCOL_MINOR(x) ((x) & 0x00ff) @@ -29,7 +29,6 @@ typedef enum { wopSyncWithGC = 13, wopFindRoots = 14, wopExportPath = 16, - wopImportPath = 17, wopQueryDeriver = 18, wopSetOptions = 19, wopCollectGarbage = 20, @@ -39,7 +38,8 @@ typedef enum { wopQueryFailedPaths = 24, wopClearFailedPaths = 25, wopQueryPathInfo = 26, - wopQueryDerivationOutputNames = 27, + wopImportPaths = 27, + wopQueryDerivationOutputNames = 28, } WorkerOp; @@ -59,7 +59,7 @@ typedef enum { Path readStorePath(Source & from); -PathSet readStorePaths(Source & from); +template<class T> T readStorePaths(Source & from); } diff --git a/src/libutil/hash.cc b/src/libutil/hash.cc index b9e784699..bbfe7847f 100644 --- a/src/libutil/hash.cc +++ b/src/libutil/hash.cc @@ -204,6 +204,22 @@ Hash parseHash32(HashType ht, const string & s) } +Hash parseHash16or32(HashType ht, const string & s) +{ + Hash hash(ht); + if (s.size() == hash.hashSize * 2) + /* hexadecimal representation */ + hash = parseHash(ht, s); + else if (s.size() == hashLength32(hash)) + /* base-32 representation */ + hash = parseHash32(ht, s); + else + throw Error(format("hash `%1%' has wrong length for hash type `%2%'") + % s % printHashType(ht)); + return hash; +} + + bool isHash(const string & s) { if (s.length() != 32) return false; @@ -290,21 +306,13 @@ HashSink::HashSink(HashType ht) : ht(ht) start(ht, *ctx); } -HashSink::HashSink(const HashSink & h) -{ - ht = h.ht; - bytes = h.bytes; - ctx = new Ctx; - *ctx = *h.ctx; -} - HashSink::~HashSink() { + bufPos = 0; delete ctx; } -void HashSink::operator () - (const unsigned char * data, unsigned int len) +void HashSink::write(const unsigned char * data, size_t len) { bytes += len; update(ht, *ctx, data, len); @@ -312,11 +320,21 @@ void HashSink::operator () HashResult HashSink::finish() { + flush(); Hash hash(ht); nix::finish(ht, *ctx, hash.hash); return HashResult(hash, bytes); } +HashResult HashSink::currentHash() +{ + flush(); + Ctx ctx2 = *ctx; + Hash hash(ht); + nix::finish(ht, ctx2, hash.hash); + return HashResult(hash, bytes); +} + HashResult hashPath( HashType ht, const Path & 
path, PathFilter & filter) diff --git a/src/libutil/hash.hh b/src/libutil/hash.hh index 13740954d..e0b6478cc 100644 --- a/src/libutil/hash.hh +++ b/src/libutil/hash.hh @@ -58,6 +58,9 @@ string printHash32(const Hash & hash); /* Parse a base-32 representation of a hash code. */ Hash parseHash32(HashType ht, const string & s); +/* Parse a base-16 or base-32 representation of a hash code. */ +Hash parseHash16or32(HashType ht, const string & s); + /* Verify that the given string is a valid hash code. */ bool isHash(const string & s); @@ -88,7 +91,7 @@ string printHashType(HashType ht); union Ctx; -class HashSink : public Sink +class HashSink : public BufferedSink { private: HashType ht; @@ -99,8 +102,9 @@ public: HashSink(HashType ht); HashSink(const HashSink & h); ~HashSink(); - virtual void operator () (const unsigned char * data, unsigned int len); + void write(const unsigned char * data, size_t len); HashResult finish(); + HashResult currentHash(); }; diff --git a/src/libutil/serialise.cc b/src/libutil/serialise.cc index 9b4222713..c4563ffd1 100644 --- a/src/libutil/serialise.cc +++ b/src/libutil/serialise.cc @@ -2,24 +2,117 @@ #include "util.hh" #include <cstring> +#include <cerrno> namespace nix { -void FdSink::operator () (const unsigned char * data, unsigned int len) +BufferedSink::~BufferedSink() +{ + /* We can't call flush() here, because C++ for some insane reason + doesn't allow you to call virtual methods from a destructor. */ + assert(!bufPos); + if (buffer) delete[] buffer; +} + + +void BufferedSink::operator () (const unsigned char * data, size_t len) +{ + if (!buffer) buffer = new unsigned char[bufSize]; + + while (len) { + /* Optimisation: bypass the buffer if the data exceeds the + buffer size. */ + if (bufPos + len >= bufSize) { + flush(); + write(data, len); + break; + } + /* Otherwise, copy the bytes to the buffer. Flush the buffer + when it's full. */ + size_t n = bufPos + len > bufSize ? bufSize - bufPos : len; + memcpy(buffer + bufPos, data, n); + data += n; bufPos += n; len -= n; + if (bufPos == bufSize) flush(); + } +} + + +void BufferedSink::flush() +{ + if (bufPos == 0) return; + size_t n = bufPos; + bufPos = 0; // don't trigger the assert() in ~BufferedSink() + write(buffer, n); +} + + +FdSink::~FdSink() +{ + try { flush(); } catch (...) { ignoreException(); } +} + + +void FdSink::write(const unsigned char * data, size_t len) { writeFull(fd, data, len); } -void FdSource::operator () (unsigned char * data, unsigned int len) +void Source::operator () (unsigned char * data, size_t len) +{ + while (len) { + size_t n = read(data, len); + data += n; len -= n; + } +} + + +BufferedSource::~BufferedSource() { - readFull(fd, data, len); + if (buffer) delete[] buffer; } -void writePadding(unsigned int len, Sink & sink) +size_t BufferedSource::read(unsigned char * data, size_t len) +{ + if (!buffer) buffer = new unsigned char[bufSize]; + + if (!bufPosIn) bufPosIn = readUnbuffered(buffer, bufSize); + + /* Copy out the data in the buffer. */ + size_t n = len > bufPosIn - bufPosOut ? 
bufPosIn - bufPosOut : len; + memcpy(data, buffer + bufPosOut, n); + bufPosOut += n; + if (bufPosIn == bufPosOut) bufPosIn = bufPosOut = 0; + return n; +} + + +size_t FdSource::readUnbuffered(unsigned char * data, size_t len) +{ + ssize_t n; + do { + checkInterrupt(); + n = ::read(fd, (char *) data, bufSize); + } while (n == -1 && errno == EINTR); + if (n == -1) throw SysError("reading from file"); + if (n == 0) throw EndOfFile("unexpected end-of-file"); + return n; +} + + +size_t StringSource::read(unsigned char * data, size_t len) +{ + if (pos == s.size()) throw EndOfFile("end of string reached"); + size_t n = s.copy((char *) data, len, pos); + pos += n; + return n; +} + + +void writePadding(size_t len, Sink & sink) { if (len % 8) { unsigned char zero[8]; @@ -56,28 +149,36 @@ void writeLongLong(unsigned long long n, Sink & sink) } -void writeString(const string & s, Sink & sink) +void writeString(const unsigned char * buf, size_t len, Sink & sink) { - unsigned int len = s.length(); writeInt(len, sink); - sink((const unsigned char *) s.c_str(), len); + sink(buf, len); writePadding(len, sink); } -void writeStringSet(const StringSet & ss, Sink & sink) +void writeString(const string & s, Sink & sink) +{ + writeString((const unsigned char *) s.c_str(), s.size(), sink); +} + + +template<class T> void writeStrings(const T & ss, Sink & sink) { writeInt(ss.size(), sink); - for (StringSet::iterator i = ss.begin(); i != ss.end(); ++i) + foreach (typename T::const_iterator, i, ss) writeString(*i, sink); } +template void writeStrings(const Paths & ss, Sink & sink); +template void writeStrings(const PathSet & ss, Sink & sink); -void readPadding(unsigned int len, Source & source) + +void readPadding(size_t len, Source & source) { if (len % 8) { unsigned char zero[8]; - unsigned int n = 8 - (len % 8); + size_t n = 8 - (len % 8); source(zero, n); for (unsigned int i = 0; i < n; i++) if (zero[i]) throw SerialisationError("non-zero padding"); @@ -115,9 +216,19 @@ unsigned long long readLongLong(Source & source) } +size_t readString(unsigned char * buf, size_t max, Source & source) +{ + size_t len = readInt(source); + if (len > max) throw Error("string is too long"); + source(buf, len); + readPadding(len, source); + return len; +} + + string readString(Source & source) { - unsigned int len = readInt(source); + size_t len = readInt(source); unsigned char * buf = new unsigned char[len]; AutoDeleteArray<unsigned char> d(buf); source(buf, len); @@ -126,14 +237,17 @@ string readString(Source & source) } -StringSet readStringSet(Source & source) +template<class T> T readStrings(Source & source) { unsigned int count = readInt(source); - StringSet ss; + T ss; while (count--) - ss.insert(readString(source)); + ss.insert(ss.end(), readString(source)); return ss; } +template Paths readStrings(Source & source); +template PathSet readStrings(Source & source); + } diff --git a/src/libutil/serialise.hh b/src/libutil/serialise.hh index 0e797d63b..ded4b12a0 100644 --- a/src/libutil/serialise.hh +++ b/src/libutil/serialise.hh @@ -11,7 +11,25 @@ namespace nix { struct Sink { virtual ~Sink() { } - virtual void operator () (const unsigned char * data, unsigned int len) = 0; + virtual void operator () (const unsigned char * data, size_t len) = 0; +}; + + +/* A buffered abstract sink. 
*/ +struct BufferedSink : Sink +{ + size_t bufSize, bufPos; + unsigned char * buffer; + + BufferedSink(size_t bufSize = 32 * 1024) + : bufSize(bufSize), bufPos(0), buffer(0) { } + ~BufferedSink(); + + void operator () (const unsigned char * data, size_t len); + + void flush(); + + virtual void write(const unsigned char * data, size_t len) = 0; }; @@ -20,49 +38,55 @@ struct Source { virtual ~Source() { } - /* The callee should store exactly *len bytes in the buffer - pointed to by data. It should block if that much data is not - yet available, or throw an error if it is not going to be - available. */ - virtual void operator () (unsigned char * data, unsigned int len) = 0; + /* Store exactly ‘len’ bytes in the buffer pointed to by ‘data’. + It blocks until all the requested data is available, or throws + an error if it is not going to be available. */ + void operator () (unsigned char * data, size_t len); + + /* Store up to ‘len’ in the buffer pointed to by ‘data’, and + return the number of bytes stored. If blocks until at least + one byte is available. */ + virtual size_t read(unsigned char * data, size_t len) = 0; }; -/* A sink that writes data to a file descriptor. */ -struct FdSink : Sink +/* A buffered abstract source. */ +struct BufferedSource : Source { - int fd; + size_t bufSize, bufPosIn, bufPosOut; + unsigned char * buffer; - FdSink() - { - fd = -1; - } + BufferedSource(size_t bufSize = 32 * 1024) + : bufSize(bufSize), bufPosIn(0), bufPosOut(0), buffer(0) { } + ~BufferedSource(); - FdSink(int fd) - { - this->fd = fd; - } + size_t read(unsigned char * data, size_t len); - void operator () (const unsigned char * data, unsigned int len); + /* Underlying read call, to be overriden. */ + virtual size_t readUnbuffered(unsigned char * data, size_t len) = 0; }; -/* A source that reads data from a file descriptor. */ -struct FdSource : Source +/* A sink that writes data to a file descriptor. */ +struct FdSink : BufferedSink { int fd; - FdSource() - { - fd = -1; - } + FdSink() : fd(-1) { } + FdSink(int fd) : fd(fd) { } + ~FdSink(); - FdSource(int fd) - { - this->fd = fd; - } - - void operator () (unsigned char * data, unsigned int len); + void write(const unsigned char * data, size_t len); +}; + + +/* A source that reads data from a file descriptor. 
*/ +struct FdSource : BufferedSource +{ + int fd; + FdSource() : fd(-1) { } + FdSource(int fd) : fd(fd) { } + size_t readUnbuffered(unsigned char * data, size_t len); }; @@ -70,7 +94,7 @@ struct FdSource : Source struct StringSink : Sink { string s; - virtual void operator () (const unsigned char * data, unsigned int len) + void operator () (const unsigned char * data, size_t len) { s.append((const char *) data, len); } @@ -81,29 +105,25 @@ struct StringSink : Sink struct StringSource : Source { const string & s; - unsigned int pos; + size_t pos; StringSource(const string & _s) : s(_s), pos(0) { } - virtual void operator () (unsigned char * data, unsigned int len) - { - s.copy((char *) data, len, pos); - pos += len; - if (pos > s.size()) - throw Error("end of string reached"); - } + size_t read(unsigned char * data, size_t len); }; -void writePadding(unsigned int len, Sink & sink); +void writePadding(size_t len, Sink & sink); void writeInt(unsigned int n, Sink & sink); void writeLongLong(unsigned long long n, Sink & sink); +void writeString(const unsigned char * buf, size_t len, Sink & sink); void writeString(const string & s, Sink & sink); -void writeStringSet(const StringSet & ss, Sink & sink); +template<class T> void writeStrings(const T & ss, Sink & sink); -void readPadding(unsigned int len, Source & source); +void readPadding(size_t len, Source & source); unsigned int readInt(Source & source); unsigned long long readLongLong(Source & source); +size_t readString(unsigned char * buf, size_t max, Source & source); string readString(Source & source); -StringSet readStringSet(Source & source); +template<class T> T readStrings(Source & source); MakeError(SerialisationError, Error) diff --git a/src/nix-env/nix-env.cc b/src/nix-env/nix-env.cc index 3dfecb2d7..a8d9076cf 100644 --- a/src/nix-env/nix-env.cc +++ b/src/nix-env/nix-env.cc @@ -1270,7 +1270,7 @@ void run(Strings args) else if (arg == "--profile" || arg == "-p") globals.profile = absPath(needArg(i, args, arg)); else if (arg == "--file" || arg == "-f") - globals.instSource.nixExprPath = absPath(needArg(i, args, arg)); + globals.instSource.nixExprPath = lookupFileArg(globals.state, needArg(i, args, arg)); else if (arg == "--switch-profile" || arg == "-S") op = opSwitchProfile; else if (arg == "--switch-generation" || arg == "-G") diff --git a/src/nix-hash/nix-hash.cc b/src/nix-hash/nix-hash.cc index 4867234bf..5b35ccd9d 100644 --- a/src/nix-hash/nix-hash.cc +++ b/src/nix-hash/nix-hash.cc @@ -43,7 +43,7 @@ void run(Strings args) } if (op == opHash) { - for (Strings::iterator i = ss.begin(); i != ss.end(); ++i) { + foreach (Strings::iterator, i, ss) { Hash h = flat ? hashFile(ht, *i) : hashPath(ht, *i).first; if (truncate && h.hashSize > 20) h = compressHash(h, 20); std::cout << format("%1%\n") % @@ -52,8 +52,8 @@ void run(Strings args) } else { - for (Strings::iterator i = ss.begin(); i != ss.end(); ++i) { - Hash h = op == opTo16 ? parseHash32(ht, *i) : parseHash(ht, *i); + foreach (Strings::iterator, i, ss) { + Hash h = parseHash16or32(ht, *i); std::cout << format("%1%\n") % (op == opTo16 ? 
printHash(h) : printHash32(h)); } diff --git a/src/nix-instantiate/nix-instantiate.cc b/src/nix-instantiate/nix-instantiate.cc index 98eadbd69..93aa50943 100644 --- a/src/nix-instantiate/nix-instantiate.cc +++ b/src/nix-instantiate/nix-instantiate.cc @@ -138,8 +138,7 @@ void run(Strings args) } foreach (Strings::iterator, i, files) { - Path path = absPath(*i); - Expr * e = state.parseExprFromFile(path); + Expr * e = state.parseExprFromFile(lookupFileArg(state, *i)); processExpr(state, attrPaths, parseOnly, strict, autoArgs, evalOnly, xmlOutput, xmlOutputSourceLocation, e); } diff --git a/src/nix-store/nix-store.cc b/src/nix-store/nix-store.cc index 371ca54af..e92ccb153 100644 --- a/src/nix-store/nix-store.cc +++ b/src/nix-store/nix-store.cc @@ -133,14 +133,6 @@ static void opAddFixed(Strings opFlags, Strings opArgs) } -static Hash parseHash16or32(HashType ht, const string & s) -{ - return s.size() == Hash(ht).hashSize * 2 - ? parseHash(ht, s) - : parseHash32(ht, s); -} - - /* Hack to support caching in `nix-prefetch-url'. */ static void opPrintFixedPath(Strings opFlags, Strings opArgs) { @@ -594,11 +586,7 @@ static void opExport(Strings opFlags, Strings opArgs) else throw UsageError(format("unknown flag `%1%'") % *i); FdSink sink(STDOUT_FILENO); - for (Strings::iterator i = opArgs.begin(); i != opArgs.end(); ++i) { - writeInt(1, sink); - store->exportPath(*i, sign, sink); - } - writeInt(0, sink); + exportPaths(*store, opArgs, sign, sink); } @@ -612,12 +600,10 @@ static void opImport(Strings opFlags, Strings opArgs) if (!opArgs.empty()) throw UsageError("no arguments expected"); FdSource source(STDIN_FILENO); - while (true) { - unsigned long long n = readLongLong(source); - if (n == 0) break; - if (n != 1) throw Error("input doesn't look like something created by `nix-store --export'"); - cout << format("%1%\n") % store->importPath(requireSignature, source) << std::flush; - } + Paths paths = store->importPaths(requireSignature, source); + + foreach (Paths::iterator, i, paths) + cout << format("%1%\n") % *i << std::flush; } diff --git a/src/nix-worker/nix-worker.cc b/src/nix-worker/nix-worker.cc index 3587bd7fd..68567f341 100644 --- a/src/nix-worker/nix-worker.cc +++ b/src/nix-worker/nix-worker.cc @@ -56,7 +56,8 @@ static void tunnelStderr(const unsigned char * buf, size_t count) if (canSendStderr && myPid == getpid()) { try { writeInt(STDERR_NEXT, to); - writeString(string((char *) buf, count), to); + writeString(buf, count, to); + to.flush(); } catch (...) { /* Write failed; that means that the other side is gone. */ @@ -200,26 +201,20 @@ static void stopWork(bool success = true, const string & msg = "", unsigned int struct TunnelSink : Sink { Sink & to; - TunnelSink(Sink & to) : to(to) - { - } - virtual void operator () - (const unsigned char * data, unsigned int len) + TunnelSink(Sink & to) : to(to) { } + virtual void operator () (const unsigned char * data, size_t len) { writeInt(STDERR_WRITE, to); - writeString(string((const char *) data, len), to); + writeString(data, len, to); } }; -struct TunnelSource : Source +struct TunnelSource : BufferedSource { Source & from; - TunnelSource(Source & from) : from(from) - { - } - virtual void operator () - (unsigned char * data, unsigned int len) + TunnelSource(Source & from) : from(from) { } + size_t readUnbuffered(unsigned char * data, size_t len) { /* Careful: we're going to receive data from the client now, so we have to disable the SIGPOLL handler. 
*/ @@ -228,11 +223,12 @@ struct TunnelSource : Source writeInt(STDERR_READ, to); writeInt(len, to); - string s = readString(from); - if (s.size() != len) throw Error("not enough data"); - memcpy(data, (const unsigned char *) s.c_str(), len); + to.flush(); + size_t n = readString(data, len, from); startWork(); + if (n == 0) throw EndOfFile("unexpected end-of-file"); + return n; } }; @@ -241,11 +237,14 @@ struct TunnelSource : Source the contents of the file to `s'. Otherwise barf. */ struct RetrieveRegularNARSink : ParseSink { + bool regular; string s; + RetrieveRegularNARSink() : regular(true) { } + void createDirectory(const Path & path) { - throw Error("regular file expected"); + regular = false; } void receiveContents(unsigned char * data, unsigned int len) @@ -255,7 +254,7 @@ struct RetrieveRegularNARSink : ParseSink void createSymlink(const Path & path, const string & target) { - throw Error("regular file expected"); + regular = false; } }; @@ -266,10 +265,11 @@ struct SavingSourceAdapter : Source Source & orig; string s; SavingSourceAdapter(Source & orig) : orig(orig) { } - void operator () (unsigned char * data, unsigned int len) + size_t read(unsigned char * data, size_t len) { - orig(data, len); - s.append((const char *) data, len); + size_t n = orig.read(data, len); + s.append((const char *) data, n); + return n; } }; @@ -327,7 +327,7 @@ static void performOp(unsigned int clientVersion, store->queryReferrers(path, paths); else paths = store->queryDerivationOutputs(path); stopWork(); - writeStringSet(paths, to); + writeStrings(paths, to); break; } @@ -371,11 +371,11 @@ static void performOp(unsigned int clientVersion, addToStoreFromDump(). */ ParseSink sink; /* null sink; just parse the NAR */ parseDump(sink, savedNAR); - } else { + } else parseDump(savedRegular, from); - } startWork(); + if (!savedRegular.regular) throw Error("regular file expected"); Path path = dynamic_cast<LocalStore *>(store.get()) ->addToStoreFromDump(recursive ? 
savedNAR.s : savedRegular.s, baseName, recursive, hashAlgo); stopWork(); @@ -387,7 +387,7 @@ static void performOp(unsigned int clientVersion, case wopAddTextToStore: { string suffix = readString(from); string s = readString(from); - PathSet refs = readStorePaths(from); + PathSet refs = readStorePaths<PathSet>(from); startWork(); Path path = store->addTextToStore(suffix, s, refs); stopWork(); @@ -406,17 +406,17 @@ static void performOp(unsigned int clientVersion, break; } - case wopImportPath: { + case wopImportPaths: { startWork(); TunnelSource source(from); - Path path = store->importPath(true, source); + Paths paths = store->importPaths(true, source); stopWork(); - writeString(path, to); + writeStrings(paths, to); break; } case wopBuildDerivations: { - PathSet drvs = readStorePaths(from); + PathSet drvs = readStorePaths<PathSet>(from); startWork(); store->buildDerivations(drvs); stopWork(); @@ -474,7 +474,7 @@ static void performOp(unsigned int clientVersion, case wopCollectGarbage: { GCOptions options; options.action = (GCOptions::GCAction) readInt(from); - options.pathsToDelete = readStorePaths(from); + options.pathsToDelete = readStorePaths<PathSet>(from); options.ignoreLiveness = readInt(from); options.maxFreed = readLongLong(from); options.maxLinks = readInt(from); @@ -492,7 +492,7 @@ static void performOp(unsigned int clientVersion, store->collectGarbage(options, results); stopWork(); - writeStringSet(results.paths, to); + writeStrings(results.paths, to); writeLongLong(results.bytesFreed, to); writeLongLong(results.blocksFreed, to); @@ -530,7 +530,7 @@ static void performOp(unsigned int clientVersion, writeInt(res ? 1 : 0, to); if (res) { writeString(info.deriver, to); - writeStringSet(info.references, to); + writeStrings(info.references, to); writeLongLong(info.downloadSize, to); if (GET_PROTOCOL_MINOR(clientVersion) >= 7) writeLongLong(info.narSize, to); @@ -542,7 +542,7 @@ static void performOp(unsigned int clientVersion, startWork(); PathSet paths = store->queryValidPaths(); stopWork(); - writeStringSet(paths, to); + writeStrings(paths, to); break; } @@ -550,12 +550,12 @@ static void performOp(unsigned int clientVersion, startWork(); PathSet paths = store->queryFailedPaths(); stopWork(); - writeStringSet(paths, to); + writeStrings(paths, to); break; } case wopClearFailedPaths: { - PathSet paths = readStringSet(from); + PathSet paths = readStrings<PathSet>(from); startWork(); store->clearFailedPaths(paths); stopWork(); @@ -570,7 +570,7 @@ static void performOp(unsigned int clientVersion, stopWork(); writeString(info.deriver, to); writeString(printHash(info.hash), to); - writeStringSet(info.references, to); + writeStrings(info.references, to); writeInt(info.registrationTime, to); writeLongLong(info.narSize, to); break; @@ -603,8 +603,8 @@ static void processConnection() unsigned int magic = readInt(from); if (magic != WORKER_MAGIC_1) throw Error("protocol mismatch"); writeInt(WORKER_MAGIC_2, to); - writeInt(PROTOCOL_VERSION, to); + to.flush(); unsigned int clientVersion = readInt(from); /* Send startup error messages to the client. */ @@ -626,9 +626,11 @@ static void processConnection() store = boost::shared_ptr<StoreAPI>(new LocalStore()); stopWork(); + to.flush(); } catch (Error & e) { stopWork(false, e.msg()); + to.flush(); return; } @@ -648,9 +650,19 @@ static void processConnection() try { performOp(clientVersion, from, to, op); } catch (Error & e) { + /* If we're not in a state were we can send replies, then + something went wrong processing the input of the + client. 
This can happen especially if I/O errors occur + during addTextToStore() / importPath(). If that + happens, just send the error message and exit. */ + bool errorAllowed = canSendStderr; + if (!errorAllowed) printMsg(lvlError, format("error processing client input: %1%") % e.msg()); stopWork(false, e.msg(), GET_PROTOCOL_MINOR(clientVersion) >= 8 ? e.status : 0); + if (!errorAllowed) break; } + to.flush(); + assert(!canSendStderr); }; |
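The patch also replaces the single-path wopImportPath worker operation (and StoreAPI::importPath()) with wopImportPaths/importPaths(), bumping PROTOCOL_VERSION to 0x109. The stream format is the one `nix-store --export` already produced: an 8-byte marker of 1 before each exported archive and a terminating 0, which exportPaths() in store-api.cc now emits and LocalStore::importPaths() consumes. Below is a self-contained sketch of just that framing, with a length-prefixed string standing in for the real export payload (the archive plus references, deriver and optional signature); writeWord/readWord and exportAll/importAll are illustrative names, not Nix functions.

```cpp
#include <cstdint>
#include <iostream>
#include <stdexcept>
#include <string>
#include <vector>

/* Append / read a little-endian 64-bit word, standing in for the padded
   8-byte integers of Nix's wire format (writeInt / readLongLong). */
static void writeWord(std::string & out, uint64_t n)
{
    for (int i = 0; i < 8; ++i) out.push_back(char((n >> (8 * i)) & 0xff));
}

static uint64_t readWord(const std::string & in, size_t & pos)
{
    if (pos + 8 > in.size()) throw std::runtime_error("unexpected end-of-file");
    uint64_t n = 0;
    for (int i = 0; i < 8; ++i) n |= uint64_t((unsigned char) in[pos++]) << (8 * i);
    return n;
}

/* Writer side, mirroring exportPaths(): a 1 before every archive and a
   single 0 after the last one. */
static std::string exportAll(const std::vector<std::string> & dumps)
{
    std::string out;
    for (size_t i = 0; i < dumps.size(); ++i) {
        writeWord(out, 1);
        writeWord(out, dumps[i].size());   /* stand-in for the exported NAR */
        out += dumps[i];
    }
    writeWord(out, 0);
    return out;
}

/* Reader side, mirroring LocalStore::importPaths(): keep importing until
   the 0 marker; anything other than 1 means the input is not an export. */
static std::vector<std::string> importAll(const std::string & in)
{
    std::vector<std::string> res;
    size_t pos = 0;
    while (true) {
        uint64_t n = readWord(in, pos);
        if (n == 0) break;
        if (n != 1)
            throw std::runtime_error(
                "input doesn't look like something created by `nix-store --export'");
        size_t len = (size_t) readWord(in, pos);
        if (pos + len > in.size()) throw std::runtime_error("truncated archive");
        res.push_back(in.substr(pos, len));
        pos += len;
    }
    return res;
}

int main()
{
    std::vector<std::string> dumps;
    dumps.push_back("first archive");
    dumps.push_back("second archive");
    std::vector<std::string> back = importAll(exportAll(dumps));
    for (size_t i = 0; i < back.size(); ++i) std::cout << back[i] << "\n";
    return 0;
}
```

In the actual wire format both writeInt() and readLongLong() operate on the same padded 8-byte little-endian word, which is why exportPaths() can write the marker with writeInt(1, sink) while importPaths() reads it back with readLongLong(source).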