author     Eelco Dolstra <e.dolstra@tudelft.nl>    2010-11-16 17:11:46 +0000
committer  Eelco Dolstra <e.dolstra@tudelft.nl>    2010-11-16 17:11:46 +0000
commit     a3883cbd28057a3dd2573f77dcda9a26faaac555
tree       3ad605543e6bba8869f9413fb4f77f73e936cd1a
parent     fb9368b5a0b2457b28f19d4902bc0790123338a2
* Store the size of a store path in the database (to be precise, the
size of the NAR serialisation of the path, i.e., `nix-store --dump PATH'). This is useful for Hydra.
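
For orientation, here is a minimal sketch (not part of the commit) of how the hash and the size stored in the new narSize column are obtained together. It assumes the post-patch libutil headers, where hashPath() returns a HashResult, i.e. a std::pair<Hash, unsigned long long> holding the SHA-256 hash of the NAR serialisation and its size in bytes:

#include <iostream>

#include "hash.hh"   // post-patch: declares HashResult and hashPath()

using namespace nix;

/* Print what the database will record for a path: the NAR hash and
   the value that goes into ValidPaths.narSize. */
static void printNarInfo(const Path & path)
{
    HashResult res = hashPath(htSHA256, path);
    std::cout << printHash(res.first)      // SHA-256 of the NAR serialisation
              << " " << res.second         // size of the NAR in bytes
              << std::endl;
}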
-rw-r--r--  externals/Makefile.am            |  2
-rw-r--r--  src/libstore/build.cc            | 41
-rw-r--r--  src/libstore/local-store.cc      | 72
-rw-r--r--  src/libstore/local-store.hh      |  9
-rw-r--r--  src/libstore/optimise-store.cc   |  2
-rw-r--r--  src/libstore/references.cc       |  2
-rw-r--r--  src/libstore/references.hh       |  2
-rw-r--r--  src/libstore/remote-store.cc     |  6
-rw-r--r--  src/libstore/remote-store.hh     |  2
-rw-r--r--  src/libstore/schema.sql          |  3
-rw-r--r--  src/libstore/store-api.cc        | 23
-rw-r--r--  src/libstore/store-api.hh        | 40
-rw-r--r--  src/libutil/hash.cc              | 10
-rw-r--r--  src/libutil/hash.hh              |  7
-rw-r--r--  src/nix-hash/nix-hash.cc         |  2
-rw-r--r--  src/nix-store/nix-store.cc       | 12
16 files changed, 144 insertions(+), 91 deletions(-)
diff --git a/externals/Makefile.am b/externals/Makefile.am
index 060ae2c52..bdc29fcb5 100644
--- a/externals/Makefile.am
+++ b/externals/Makefile.am
@@ -47,7 +47,7 @@ build-sqlite:
else
build-sqlite: $(SQLITE)
(cd $(SQLITE) && \
- CC="$(CC)" ./configure --disable-static --prefix=$(pkglibdir)/dummy --libdir=${pkglibdir} $(SUB_CONFIGURE_FLAGS) && \
+ CC="$(CC)" CFLAGS="-DSQLITE_ENABLE_COLUMN_METADATA=1" ./configure --disable-static --prefix=$(pkglibdir)/dummy --libdir=${pkglibdir} $(SUB_CONFIGURE_FLAGS) && \
$(MAKE) )
touch build-sqlite
diff --git a/src/libstore/build.cc b/src/libstore/build.cc
index 210486fbc..8b8be3e80 100644
--- a/src/libstore/build.cc
+++ b/src/libstore/build.cc
@@ -1547,7 +1547,7 @@ void DerivationGoal::startBuilder()
/* Write closure info to `fileName'. */
writeFile(tmpDir + "/" + fileName,
- makeValidityRegistration(paths, false, false));
+ worker.store.makeValidityRegistration(paths, false, false));
}
@@ -1870,7 +1870,7 @@ PathSet parseReferenceSpecifiers(const Derivation & drv, string attr)
void DerivationGoal::computeClosure()
{
map<Path, PathSet> allReferences;
- map<Path, Hash> contentHashes;
+ map<Path, HashResult> contentHashes;
/* When using a build hook, the build hook can register the output
as valid (by doing `nix-store --import'). If so we don't have
@@ -1927,7 +1927,7 @@ void DerivationGoal::computeClosure()
if (ht == htUnknown)
throw BuildError(format("unknown hash algorithm `%1%'") % algo);
Hash h = parseHash(ht, i->second.hash);
- Hash h2 = recursive ? hashPath(ht, path) : hashFile(ht, path);
+ Hash h2 = recursive ? hashPath(ht, path).first : hashFile(ht, path);
if (h != h2)
throw BuildError(
format("output path `%1%' should have %2% hash `%3%', instead has `%4%'")
@@ -1941,7 +1941,7 @@ void DerivationGoal::computeClosure()
contained in it. Compute the SHA-256 NAR hash at the same
time. The hash is stored in the database so that we can
verify later on whether nobody has messed with the store. */
- Hash hash;
+ HashResult hash;
PathSet references = scanForReferences(path, allPaths, hash);
contentHashes[path] = hash;
@@ -1970,14 +1970,18 @@ void DerivationGoal::computeClosure()
}
/* Register each output path as valid, and register the sets of
- paths referenced by each of them. !!! this should be
- atomic so that either all paths are registered as valid, or
- none are. */
- foreach (DerivationOutputs::iterator, i, drv.outputs)
- worker.store.registerValidPath(i->second.path,
- contentHashes[i->second.path],
- allReferences[i->second.path],
- drvPath);
+ paths referenced by each of them. */
+ ValidPathInfos infos;
+ foreach (DerivationOutputs::iterator, i, drv.outputs) {
+ ValidPathInfo info;
+ info.path = i->second.path;
+ info.hash = contentHashes[i->second.path].first;
+ info.narSize = contentHashes[i->second.path].second;
+ info.references = allReferences[i->second.path];
+ info.deriver = drvPath;
+ infos.push_back(info);
+ }
+ worker.store.registerValidPaths(infos);
/* It is now safe to delete the lock files, since all future
lockers will see that the output paths are valid; they will not
@@ -2385,10 +2389,15 @@ void SubstitutionGoal::finished()
canonicalisePathMetaData(storePath);
- Hash contentHash = hashPath(htSHA256, storePath);
-
- worker.store.registerValidPath(storePath, contentHash,
- info.references, info.deriver);
+ HashResult hash = hashPath(htSHA256, storePath);
+
+ ValidPathInfo info2;
+ info2.path = storePath;
+ info2.hash = hash.first;
+ info2.narSize = hash.second;
+ info2.references = info.references;
+ info2.deriver = info.deriver;
+ worker.store.registerValidPath(info2);
outputLock->setDeletion(true);
diff --git a/src/libstore/local-store.cc b/src/libstore/local-store.cc
index c0c75e34d..9595561bc 100644
--- a/src/libstore/local-store.cc
+++ b/src/libstore/local-store.cc
@@ -327,9 +327,16 @@ void LocalStore::openDB(bool create)
throw SQLiteError(db, "initialising database schema");
}
+ /* Backwards compatibility with old (pre-release) databases. Can
+ remove this eventually. */
+ if (sqlite3_table_column_metadata(db, 0, "ValidPaths", "narSize", 0, 0, 0, 0, 0) != SQLITE_OK) {
+ if (sqlite3_exec(db, "alter table ValidPaths add column narSize integer" , 0, 0, 0) != SQLITE_OK)
+ throw SQLiteError(db, "adding column narSize");
+ }
+
/* Prepare SQL statements. */
stmtRegisterValidPath.create(db,
- "insert into ValidPaths (path, hash, registrationTime, deriver) values (?, ?, ?, ?);");
+ "insert into ValidPaths (path, hash, registrationTime, deriver, narSize) values (?, ?, ?, ?, ?);");
stmtAddReference.create(db,
"insert or replace into Refs (referrer, reference) values (?, ?);");
stmtQueryPathInfo.create(db,
@@ -431,19 +438,6 @@ void canonicalisePathMetaData(const Path & path)
}
-void LocalStore::registerValidPath(const Path & path,
- const Hash & hash, const PathSet & references,
- const Path & deriver)
-{
- ValidPathInfo info;
- info.path = path;
- info.hash = hash;
- info.references = references;
- info.deriver = deriver;
- registerValidPath(info);
-}
-
-
unsigned long long LocalStore::addValidPath(const ValidPathInfo & info)
{
SQLiteStmtUse use(stmtRegisterValidPath);
@@ -454,6 +448,10 @@ unsigned long long LocalStore::addValidPath(const ValidPathInfo & info)
stmtRegisterValidPath.bind(info.deriver);
else
stmtRegisterValidPath.bind(); // null
+ if (info.narSize != 0)
+ stmtRegisterValidPath.bind(info.narSize);
+ else
+ stmtRegisterValidPath.bind(); // null
if (sqlite3_step(stmtRegisterValidPath) != SQLITE_DONE)
throw SQLiteError(db, format("registering valid path `%1%' in database") % info.path);
unsigned long long id = sqlite3_last_insert_rowid(db);
@@ -920,10 +918,18 @@ Path LocalStore::addToStoreFromDump(const string & dump, const string & name,
the path in the database. We may just have computed it
above (if called with recursive == true and hashAlgo ==
sha256); otherwise, compute it here. */
- registerValidPath(dstPath,
- (recursive && hashAlgo == htSHA256) ? h :
- (recursive ? hashString(htSHA256, dump) : hashPath(htSHA256, dstPath)),
- PathSet(), "");
+ HashResult hash;
+ if (recursive) {
+ hash.first = hashAlgo == htSHA256 ? h : hashString(htSHA256, dump);
+ hash.second = dump.size();
+ } else
+ hash = hashPath(htSHA256, dstPath);
+
+ ValidPathInfo info;
+ info.path = dstPath;
+ info.hash = hash.first;
+ info.narSize = hash.second;
+ registerValidPath(info);
}
outputLock.setDeletion(true);
@@ -970,9 +976,15 @@ Path LocalStore::addTextToStore(const string & name, const string & s,
writeFile(dstPath, s);
canonicalisePathMetaData(dstPath);
+
+ HashResult hash = hashPath(htSHA256, dstPath);
- registerValidPath(dstPath,
- hashPath(htSHA256, dstPath), references, "");
+ ValidPathInfo info;
+ info.path = dstPath;
+ info.hash = hash.first;
+ info.narSize = hash.second;
+ info.references = references;
+ registerValidPath(info);
}
outputLock.setDeletion(true);
@@ -998,7 +1010,7 @@ struct HashAndWriteSink : Sink
Hash currentHash()
{
HashSink hashSinkClone(hashSink);
- return hashSinkClone.finish();
+ return hashSinkClone.finish().first;
}
};
@@ -1136,7 +1148,7 @@ Path LocalStore::importPath(bool requireSignature, Source & source)
Path deriver = readString(hashAndReadSource);
if (deriver != "") assertStorePath(deriver);
- Hash hash = hashAndReadSource.hashSink.finish();
+ Hash hash = hashAndReadSource.hashSink.finish().first;
hashAndReadSource.hashing = false;
bool haveSignature = readInt(hashAndReadSource) == 1;
@@ -1200,9 +1212,15 @@ Path LocalStore::importPath(bool requireSignature, Source & source)
/* !!! if we were clever, we could prevent the hashPath()
here. */
- if (deriver != "" && !isValidPath(deriver)) deriver = "";
- registerValidPath(dstPath,
- hashPath(htSHA256, dstPath), references, deriver);
+ HashResult hash = hashPath(htSHA256, dstPath);
+
+ ValidPathInfo info;
+ info.path = dstPath;
+ info.hash = hash.first;
+ info.narSize = hash.second;
+ info.references = references;
+ info.deriver = deriver != "" && isValidPath(deriver) ? deriver : "";
+ registerValidPath(info);
}
outputLock.setDeletion(true);
@@ -1263,12 +1281,14 @@ void LocalStore::verifyStore(bool checkContents)
/* Check the content hash (optionally - slow). */
printMsg(lvlTalkative, format("checking contents of `%1%'") % *i);
- Hash current = hashPath(info.hash.type, *i);
+ Hash current = hashPath(info.hash.type, *i).first;
if (current != info.hash) {
printMsg(lvlError, format("path `%1%' was modified! "
"expected hash `%2%', got `%3%'")
% *i % printHash(info.hash) % printHash(current));
}
+
+ /* !!! Check info.narSize */
}
}
}
diff --git a/src/libstore/local-store.hh b/src/libstore/local-store.hh
index 0d7ec1f49..2fd31c26d 100644
--- a/src/libstore/local-store.hh
+++ b/src/libstore/local-store.hh
@@ -97,6 +97,8 @@ public:
PathSet queryValidPaths();
+ ValidPathInfo queryPathInfo(const Path & path);
+
Hash queryPathHash(const Path & path);
void queryReferences(const Path & path, PathSet & references);
@@ -173,8 +175,7 @@ public:
execution of the derivation (or something equivalent). Also
register the hash of the file system contents of the path. The
hash must be a SHA-256 hash. */
- void registerValidPath(const Path & path,
- const Hash & hash, const PathSet & references, const Path & deriver);
+ void registerValidPath(const ValidPathInfo & info);
void registerValidPaths(const ValidPathInfos & infos);
@@ -224,10 +225,6 @@ private:
void addReference(unsigned long long referrer, unsigned long long reference);
- void registerValidPath(const ValidPathInfo & info);
-
- ValidPathInfo queryPathInfo(const Path & path);
-
void appendReferrer(const Path & from, const Path & to, bool lock);
void rewriteReferrers(const Path & path, bool purge, PathSet referrers);
diff --git a/src/libstore/optimise-store.cc b/src/libstore/optimise-store.cc
index 3ed54e24d..89be6ac65 100644
--- a/src/libstore/optimise-store.cc
+++ b/src/libstore/optimise-store.cc
@@ -68,7 +68,7 @@ static void hashAndLink(bool dryRun, HashToPath & hashToPath,
the contents of the symlink (i.e. the result of
readlink()), not the contents of the target (which may not
even exist). */
- Hash hash = hashPath(htSHA256, path);
+ Hash hash = hashPath(htSHA256, path).first;
stats.totalFiles++;
printMsg(lvlDebug, format("`%1%' has hash `%2%'") % path % printHash(hash));
diff --git a/src/libstore/references.cc b/src/libstore/references.cc
index a6f6e85fc..ade9c9aa2 100644
--- a/src/libstore/references.cc
+++ b/src/libstore/references.cc
@@ -81,7 +81,7 @@ void RefScanSink::operator () (const unsigned char * data, unsigned int len)
PathSet scanForReferences(const string & path,
- const PathSet & refs, Hash & hash)
+ const PathSet & refs, HashResult & hash)
{
RefScanSink sink;
std::map<string, Path> backMap;
diff --git a/src/libstore/references.hh b/src/libstore/references.hh
index 7d068eb51..158c08a77 100644
--- a/src/libstore/references.hh
+++ b/src/libstore/references.hh
@@ -7,7 +7,7 @@
namespace nix {
PathSet scanForReferences(const Path & path, const PathSet & refs,
- Hash & hash);
+ HashResult & hash);
}
diff --git a/src/libstore/remote-store.cc b/src/libstore/remote-store.cc
index 92d517bbb..517c886b4 100644
--- a/src/libstore/remote-store.cc
+++ b/src/libstore/remote-store.cc
@@ -247,6 +247,12 @@ bool RemoteStore::querySubstitutablePathInfo(const Path & path,
}
+ValidPathInfo RemoteStore::queryPathInfo(const Path & path)
+{
+ throw Error("not implemented");
+}
+
+
Hash RemoteStore::queryPathHash(const Path & path)
{
openConnection();
diff --git a/src/libstore/remote-store.hh b/src/libstore/remote-store.hh
index 02a1c4752..519f46fd1 100644
--- a/src/libstore/remote-store.hh
+++ b/src/libstore/remote-store.hh
@@ -29,6 +29,8 @@ public:
PathSet queryValidPaths();
+ ValidPathInfo queryPathInfo(const Path & path);
+
Hash queryPathHash(const Path & path);
void queryReferences(const Path & path, PathSet & references);
diff --git a/src/libstore/schema.sql b/src/libstore/schema.sql
index 7438632ed..c1b4a689a 100644
--- a/src/libstore/schema.sql
+++ b/src/libstore/schema.sql
@@ -3,7 +3,8 @@ create table if not exists ValidPaths (
path text unique not null,
hash text not null,
registrationTime integer not null,
- deriver text
+ deriver text,
+ narSize integer
);
create table if not exists Refs (
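
Because narSize now lives in the ValidPaths table, a tool such as Hydra can obtain sizes without re-serialising store paths. Below is a sketch of such a query using the plain SQLite C API; the database location (/nix/var/nix/db/db.sqlite) and the read-only, out-of-band access are assumptions for illustration, not something this patch provides:

#include <cstdio>

#include <sqlite3.h>

int main()
{
    sqlite3 * db;
    /* Assumed default location of the Nix SQLite database. */
    if (sqlite3_open("/nix/var/nix/db/db.sqlite", &db) != SQLITE_OK) return 1;

    /* Sum the recorded NAR sizes; rows written before this patch may
       have a NULL narSize, hence the coalesce(). */
    const char * sql = "select coalesce(sum(narSize), 0) from ValidPaths;";
    sqlite3_stmt * stmt;
    if (sqlite3_prepare_v2(db, sql, -1, &stmt, 0) != SQLITE_OK) return 1;

    if (sqlite3_step(stmt) == SQLITE_ROW)
        printf("total recorded NAR size: %lld bytes\n",
            (long long) sqlite3_column_int64(stmt, 0));

    sqlite3_finalize(stmt);
    sqlite3_close(db);
    return 0;
}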
diff --git a/src/libstore/store-api.cc b/src/libstore/store-api.cc
index 01dd51621..4b04f5751 100644
--- a/src/libstore/store-api.cc
+++ b/src/libstore/store-api.cc
@@ -190,7 +190,7 @@ std::pair<Path, Hash> computeStorePathForPath(const Path & srcPath,
bool recursive, HashType hashAlgo, PathFilter & filter)
{
HashType ht(hashAlgo);
- Hash h = recursive ? hashPath(ht, srcPath, filter) : hashFile(ht, srcPath);
+ Hash h = recursive ? hashPath(ht, srcPath, filter).first : hashFile(ht, srcPath);
string name = baseNameOf(srcPath);
Path dstPath = makeFixedOutputPath(recursive, hashAlgo, h, name);
return std::pair<Path, Hash>(dstPath, h);
@@ -216,7 +216,7 @@ Path computeStorePathForText(const string & name, const string & s,
/* Return a string accepted by decodeValidPathInfo() that
registers the specified paths as valid. Note: it's the
responsibility of the caller to provide a closure. */
-string makeValidityRegistration(const PathSet & paths,
+string StoreAPI::makeValidityRegistration(const PathSet & paths,
bool showDerivers, bool showHash)
{
string s = "";
@@ -224,18 +224,19 @@ string makeValidityRegistration(const PathSet & paths,
foreach (PathSet::iterator, i, paths) {
s += *i + "\n";
- if (showHash)
- s += printHash(store->queryPathHash(*i)) + "\n";
+ ValidPathInfo info = queryPathInfo(*i);
- Path deriver = showDerivers ? store->queryDeriver(*i) : "";
+ if (showHash) {
+ s += printHash(info.hash) + "\n";
+ s += (format("%1%\n") % info.narSize).str();
+ }
+
+ Path deriver = showDerivers ? info.deriver : "";
s += deriver + "\n";
- PathSet references;
- store->queryReferences(*i, references);
+ s += (format("%1%\n") % info.references.size()).str();
- s += (format("%1%\n") % references.size()).str();
-
- foreach (PathSet::iterator, j, references)
+ foreach (PathSet::iterator, j, info.references)
s += *j + "\n";
}
@@ -252,6 +253,8 @@ ValidPathInfo decodeValidPathInfo(std::istream & str, bool hashGiven)
string s;
getline(str, s);
info.hash = parseHash(htSHA256, s);
+ getline(str, s);
+ if (!string2Int(s, info.narSize)) throw Error("number expected");
}
getline(str, info.deriver);
string s; int n;
diff --git a/src/libstore/store-api.hh b/src/libstore/store-api.hh
index 082a9edc4..b0071da83 100644
--- a/src/libstore/store-api.hh
+++ b/src/libstore/store-api.hh
@@ -90,6 +90,21 @@ struct SubstitutablePathInfo
};
+struct ValidPathInfo
+{
+ Path path;
+ Path deriver;
+ Hash hash;
+ PathSet references;
+ time_t registrationTime;
+ unsigned long long narSize; // 0 = unknown
+ unsigned long long id; // internal use only
+ ValidPathInfo() : registrationTime(0), narSize(0) { }
+};
+
+typedef list<ValidPathInfo> ValidPathInfos;
+
+
class StoreAPI
{
public:
@@ -102,6 +117,9 @@ public:
/* Query the set of valid paths. */
virtual PathSet queryValidPaths() = 0;
+ /* Query information about a valid path. */
+ virtual ValidPathInfo queryPathInfo(const Path & path) = 0;
+
/* Queries the hash of a valid path. */
virtual Hash queryPathHash(const Path & path) = 0;
@@ -214,6 +232,12 @@ public:
/* Clear the "failed" status of the given paths. The special
value `*' causes all failed paths to be cleared. */
virtual void clearFailedPaths(const PathSet & paths) = 0;
+
+ /* Return a string representing information about the path that
+ can be loaded into the database using `nix-store --load-db' or
+ `nix-store --register-validity'. */
+ string makeValidityRegistration(const PathSet & paths,
+ bool showDerivers, bool showHash);
};
@@ -307,22 +331,6 @@ boost::shared_ptr<StoreAPI> openStore();
string showPaths(const PathSet & paths);
-string makeValidityRegistration(const PathSet & paths,
- bool showDerivers, bool showHash);
-
-struct ValidPathInfo
-{
- Path path;
- Path deriver;
- Hash hash;
- PathSet references;
- time_t registrationTime;
- unsigned long long id; // internal use only
- ValidPathInfo() : registrationTime(0) { }
-};
-
-typedef list<ValidPathInfo> ValidPathInfos;
-
ValidPathInfo decodeValidPathInfo(std::istream & str,
bool hashGiven = false);
diff --git a/src/libutil/hash.cc b/src/libutil/hash.cc
index bd7e33a48..b9e784699 100644
--- a/src/libutil/hash.cc
+++ b/src/libutil/hash.cc
@@ -286,12 +286,14 @@ Hash hashFile(HashType ht, const Path & path)
HashSink::HashSink(HashType ht) : ht(ht)
{
ctx = new Ctx;
+ bytes = 0;
start(ht, *ctx);
}
HashSink::HashSink(const HashSink & h)
{
ht = h.ht;
+ bytes = h.bytes;
ctx = new Ctx;
*ctx = *h.ctx;
}
@@ -304,18 +306,20 @@ HashSink::~HashSink()
void HashSink::operator ()
(const unsigned char * data, unsigned int len)
{
+ bytes += len;
update(ht, *ctx, data, len);
}
-Hash HashSink::finish()
+HashResult HashSink::finish()
{
Hash hash(ht);
nix::finish(ht, *ctx, hash.hash);
- return hash;
+ return HashResult(hash, bytes);
}
-Hash hashPath(HashType ht, const Path & path, PathFilter & filter)
+HashResult hashPath(
+ HashType ht, const Path & path, PathFilter & filter)
{
HashSink sink(ht);
dumpPath(path, sink, filter);
diff --git a/src/libutil/hash.hh b/src/libutil/hash.hh
index 81425b234..13740954d 100644
--- a/src/libutil/hash.hh
+++ b/src/libutil/hash.hh
@@ -40,7 +40,6 @@ struct Hash
/* For sorting. */
bool operator < (const Hash & h) const;
-
};
@@ -72,7 +71,8 @@ Hash hashFile(HashType ht, const Path & path);
(essentially) hashString(ht, dumpPath(path)). */
struct PathFilter;
extern PathFilter defaultPathFilter;
-Hash hashPath(HashType ht, const Path & path,
+typedef std::pair<Hash, unsigned long long> HashResult;
+HashResult hashPath(HashType ht, const Path & path,
PathFilter & filter = defaultPathFilter);
/* Compress a hash to the specified number of bytes by cyclically
@@ -93,13 +93,14 @@ class HashSink : public Sink
private:
HashType ht;
Ctx * ctx;
+ unsigned long long bytes;
public:
HashSink(HashType ht);
HashSink(const HashSink & h);
~HashSink();
virtual void operator () (const unsigned char * data, unsigned int len);
- Hash finish();
+ HashResult finish();
};
diff --git a/src/nix-hash/nix-hash.cc b/src/nix-hash/nix-hash.cc
index 8a6655403..4867234bf 100644
--- a/src/nix-hash/nix-hash.cc
+++ b/src/nix-hash/nix-hash.cc
@@ -44,7 +44,7 @@ void run(Strings args)
if (op == opHash) {
for (Strings::iterator i = ss.begin(); i != ss.end(); ++i) {
- Hash h = flat ? hashFile(ht, *i) : hashPath(ht, *i);
+ Hash h = flat ? hashFile(ht, *i) : hashPath(ht, *i).first;
if (truncate && h.hashSize > 20) h = compressHash(h, 20);
std::cout << format("%1%\n") %
(base32 ? printHash32(h) : printHash(h));
diff --git a/src/nix-store/nix-store.cc b/src/nix-store/nix-store.cc
index 695eb10dc..120f6ce72 100644
--- a/src/nix-store/nix-store.cc
+++ b/src/nix-store/nix-store.cc
@@ -393,9 +393,8 @@ static void opDumpDB(Strings opFlags, Strings opArgs)
if (!opArgs.empty())
throw UsageError("no arguments expected");
PathSet validPaths = store->queryValidPaths();
- foreach (PathSet::iterator, i, validPaths) {
- cout << makeValidityRegistration(singleton<PathSet>(*i), true, true);
- }
+ foreach (PathSet::iterator, i, validPaths)
+ cout << store->makeValidityRegistration(singleton<PathSet>(*i), true, true);
}
@@ -410,8 +409,11 @@ static void registerValidity(bool reregister, bool hashGiven, bool canonicalise)
/* !!! races */
if (canonicalise)
canonicalisePathMetaData(info.path);
- if (!hashGiven)
- info.hash = hashPath(htSHA256, info.path);
+ if (!hashGiven) {
+ HashResult hash = hashPath(htSHA256, info.path);
+ info.hash = hash.first;
+ info.narSize = hash.second;
+ }
infos.push_back(info);
}
}
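
Finally, a hedged sketch of how a client might read the size back through the queryPathInfo() method this patch adds to the StoreAPI interface. Note that RemoteStore::queryPathInfo still throws "not implemented" at this point, so this only makes sense against a LocalStore, and narSize may be 0 for paths registered before the schema change:

#include <iostream>

#include "store-api.hh"

using namespace nix;

static void showNarSize(StoreAPI & store, const Path & path)
{
    ValidPathInfo info = store.queryPathInfo(path);
    if (info.narSize != 0)
        std::cout << path << ": " << info.narSize << " bytes" << std::endl;
    else
        std::cout << path << ": NAR size unknown" << std::endl;   // 0 = unknown
}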