Diffstat (limited to 'src')
-rwxr-xr-x  src/aterm-helper.pl                      |   5
-rw-r--r--  src/bin2c/bin2c.c                        |   4
-rw-r--r--  src/libexpr/attr-path.cc                 |   1
-rw-r--r--  src/libexpr/common-opts.cc               |   1
-rw-r--r--  src/libexpr/eval.cc                      |   1
-rw-r--r--  src/libexpr/eval.hh                      |   3
-rw-r--r--  src/libexpr/expr-to-xml.hh               |   1
-rw-r--r--  src/libexpr/get-drvs.cc                  |   1
-rw-r--r--  src/libexpr/parser.y                     |   8
-rw-r--r--  src/libexpr/primops.cc                   |   1
-rw-r--r--  src/libstore/Makefile.am                 |  11
-rw-r--r--  src/libstore/build.cc                    |   2
-rw-r--r--  src/libstore/derivations.hh              |   2
-rw-r--r--  src/libstore/gc.cc                       |  73
-rw-r--r--  src/libstore/local-store.cc              | 947
-rw-r--r--  src/libstore/local-store.hh              |  90
-rw-r--r--  src/libstore/misc.cc                     |   8
-rw-r--r--  src/libstore/remote-store.cc             |  14
-rw-r--r--  src/libstore/remote-store.hh             |   2
-rw-r--r--  src/libstore/schema.sql                  |  43
-rw-r--r--  src/libstore/store-api.cc                |  13
-rw-r--r--  src/libstore/store-api.hh                |  28
-rw-r--r--  src/libstore/worker-protocol.hh          |   2
-rw-r--r--  src/libutil/Makefile.am                  |   2
-rw-r--r--  src/libutil/aterm-map.hh                 |   3
-rw-r--r--  src/libutil/hash.cc                      |   7
-rw-r--r--  src/libutil/hash.hh                      |   3
-rw-r--r--  src/libutil/util.cc                      |   3
-rw-r--r--  src/libutil/util.hh                      |   2
-rw-r--r--  src/nix-env/Makefile.am                  |   5
-rw-r--r--  src/nix-env/nix-env.cc                   |   3
-rw-r--r--  src/nix-hash/Makefile.am                 |   2
-rw-r--r--  src/nix-hash/nix-hash.cc                 |   2
-rw-r--r--  src/nix-instantiate/Makefile.am          |   2
-rw-r--r--  src/nix-instantiate/nix-instantiate.cc   |   3
-rw-r--r--  src/nix-log2xml/log2xml.cc               |  14
-rw-r--r--  src/nix-setuid-helper/Makefile.am        |   5
-rw-r--r--  src/nix-store/Makefile.am                |   4
-rw-r--r--  src/nix-store/nix-store.cc               |   2
-rw-r--r--  src/nix-worker/Makefile.am               |   4
-rw-r--r--  src/nix-worker/nix-worker.cc             |  16
41 files changed, 764 insertions, 579 deletions
diff --git a/src/aterm-helper.pl b/src/aterm-helper.pl
index 9b2bde700..f1eb77ee8 100755
--- a/src/aterm-helper.pl
+++ b/src/aterm-helper.pl
@@ -146,8 +146,9 @@ while (<STDIN>) {
my $value = $2;
print HEADER "extern ATerm $name;\n";
print IMPL "ATerm $name = 0;\n";
- $init .= " $name = $value;\n";
- }
+ $init .= " $name = $value;\n";
+ $init .= " ATprotect(&$name);\n";
+ }
elsif (/^\s*init\s+(\w+)\s*$/) {
$initFun = $1;
diff --git a/src/bin2c/bin2c.c b/src/bin2c/bin2c.c
index 18bf81d69..5ed8a5708 100644
--- a/src/bin2c/bin2c.c
+++ b/src/bin2c/bin2c.c
@@ -14,10 +14,10 @@ int main(int argc, char * * argv)
{
int c;
if (argc != 2) abort();
- print("static unsigned char %s[] = {", argv[1]);
+ print("static unsigned char %s[] = { ", argv[1]);
while ((c = getchar()) != EOF) {
print("0x%02x, ", (unsigned char) c);
}
- print("};\n");
+ print("0 };\n");
return 0;
}
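[Illustrative note, not part of the diff] The added trailing 0 NUL-terminates the generated array so its contents can be used directly as a C string: the libstore Makefile below runs `../bin2c/bin2c schema < schema.sql > schema.sql.hh', and LocalStore::openDB() includes that header and passes `(const char *) schema' straight to sqlite3_exec(). For a hypothetical two-byte input "ab", the tool would now emit:

static unsigned char schema[] = { 0x61, 0x62, 0 };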
diff --git a/src/libexpr/attr-path.cc b/src/libexpr/attr-path.cc
index e8e4c050c..092d9b1c2 100644
--- a/src/libexpr/attr-path.cc
+++ b/src/libexpr/attr-path.cc
@@ -1,6 +1,7 @@
#include "attr-path.hh"
#include "nixexpr-ast.hh"
#include "util.hh"
+#include "aterm.hh"
namespace nix {
diff --git a/src/libexpr/common-opts.cc b/src/libexpr/common-opts.cc
index 9e3f8f961..0ef488373 100644
--- a/src/libexpr/common-opts.cc
+++ b/src/libexpr/common-opts.cc
@@ -2,6 +2,7 @@
#include "../libmain/shared.hh"
#include "util.hh"
#include "parser.hh"
+#include "aterm.hh"
namespace nix {
diff --git a/src/libexpr/eval.cc b/src/libexpr/eval.cc
index cd9c64594..5a0e8bc27 100644
--- a/src/libexpr/eval.cc
+++ b/src/libexpr/eval.cc
@@ -6,6 +6,7 @@
#include "derivations.hh"
#include "nixexpr-ast.hh"
#include "globals.hh"
+#include "aterm.hh"
#define LocalNoInline(f) static f __attribute__((noinline)); f
diff --git a/src/libexpr/eval.hh b/src/libexpr/eval.hh
index fed6d3472..00d0e3564 100644
--- a/src/libexpr/eval.hh
+++ b/src/libexpr/eval.hh
@@ -3,9 +3,10 @@
#include <map>
-#include "aterm.hh"
#include "nixexpr.hh"
+typedef union _ATermList * ATermList;
+
namespace nix {
diff --git a/src/libexpr/expr-to-xml.hh b/src/libexpr/expr-to-xml.hh
index 36b8e4042..576a46fc3 100644
--- a/src/libexpr/expr-to-xml.hh
+++ b/src/libexpr/expr-to-xml.hh
@@ -5,7 +5,6 @@
#include <map>
#include "nixexpr.hh"
-#include "aterm.hh"
namespace nix {
diff --git a/src/libexpr/get-drvs.cc b/src/libexpr/get-drvs.cc
index 1442d7988..cd5a85e5b 100644
--- a/src/libexpr/get-drvs.cc
+++ b/src/libexpr/get-drvs.cc
@@ -1,6 +1,7 @@
#include "get-drvs.hh"
#include "nixexpr-ast.hh"
#include "util.hh"
+#include "aterm.hh"
namespace nix {
diff --git a/src/libexpr/parser.y b/src/libexpr/parser.y
index c4afb72ea..8706ce025 100644
--- a/src/libexpr/parser.y
+++ b/src/libexpr/parser.y
@@ -85,6 +85,10 @@ static Expr fixAttrs(bool recursive, ATermList as)
{
Tree attrs;
+ /* This ATermMap is needed to ensure that the `leaf' fields in the
+ Tree nodes are not garbage collected. */
+ ATermMap gcRoots;
+
for (ATermIterator i(as); i; ++i) {
ATermList names, attrPath; Expr src, e; ATerm name, pos;
@@ -95,7 +99,9 @@ static Expr fixAttrs(bool recursive, ATermList as)
throw ParseError(format("duplicate definition of attribute `%1%' at %2%")
% showAttrPath(ATmakeList1(*j)) % showPos(pos));
Tree & t(attrs.children[*j]);
- t.leaf = fromScope ? makeVar(*j) : makeSelect(src, *j);
+ Expr leaf = fromScope ? makeVar(*j) : makeSelect(src, *j);
+ gcRoots.set(leaf, leaf);
+ t.leaf = leaf;
t.pos = pos;
if (recursive && fromScope) t.recursive = false;
}
diff --git a/src/libexpr/primops.cc b/src/libexpr/primops.cc
index 7dddc91a8..bf2752d0d 100644
--- a/src/libexpr/primops.cc
+++ b/src/libexpr/primops.cc
@@ -8,6 +8,7 @@
#include "nixexpr-ast.hh"
#include "parser.hh"
#include "names.hh"
+#include "aterm.hh"
#include <sys/types.h>
#include <sys/stat.h>
diff --git a/src/libstore/Makefile.am b/src/libstore/Makefile.am
index 863871519..dd6760554 100644
--- a/src/libstore/Makefile.am
+++ b/src/libstore/Makefile.am
@@ -10,14 +10,19 @@ pkginclude_HEADERS = \
globals.hh references.hh pathlocks.hh \
worker-protocol.hh
-libstore_la_LIBADD = ../libutil/libutil.la ../boost/format/libformat.la @ADDITIONAL_NETWORK_LIBS@
+libstore_la_LIBADD = ../libutil/libutil.la ../boost/format/libformat.la ${aterm_lib} ${sqlite_lib}
BUILT_SOURCES = derivations-ast.cc derivations-ast.hh
-EXTRA_DIST = derivations-ast.def derivations-ast.cc
+EXTRA_DIST = derivations-ast.def derivations-ast.cc schema.sql
AM_CXXFLAGS = -Wall \
- -I$(srcdir)/.. ${aterm_include} -I$(srcdir)/../libutil
+ -I$(srcdir)/.. ${aterm_include} ${sqlite_include} -I$(srcdir)/../libutil -I${top_srcdir}/externals/sqlite-3.6.22/
+
+local-store.lo: schema.sql.hh
+
+%.sql.hh: %.sql
+ ../bin2c/bin2c schema < $< > $@ || (rm $@ && exit 1)
derivations-ast.cc derivations-ast.hh: ../aterm-helper.pl derivations-ast.def
$(perl) $(srcdir)/../aterm-helper.pl derivations-ast.hh derivations-ast.cc < $(srcdir)/derivations-ast.def
diff --git a/src/libstore/build.cc b/src/libstore/build.cc
index 4d542e31d..d0d051f08 100644
--- a/src/libstore/build.cc
+++ b/src/libstore/build.cc
@@ -1576,7 +1576,7 @@ void DerivationGoal::startBuilder()
"nixbld:x:%1%:%2%:Nix build user:/:/noshell\n"
"nobody:x:65534:65534:Nobody:/:/noshell\n")
% (buildUser.enabled() ? buildUser.getUID() : getuid())
- % (buildUser.enabled() ? buildUser.getGID() : getgid())).str());
+ % (buildUser.enabled() ? buildUser.getGID() : getgid())).str());
/* Bind-mount a user-configurable set of directories from the
host file system. The `/dev/pts' directory must be mounted
diff --git a/src/libstore/derivations.hh b/src/libstore/derivations.hh
index 042f4738d..c3f579bee 100644
--- a/src/libstore/derivations.hh
+++ b/src/libstore/derivations.hh
@@ -1,7 +1,7 @@
#ifndef __DERIVATIONS_H
#define __DERIVATIONS_H
-#include <aterm1.h>
+typedef union _ATerm * ATerm;
#include "hash.hh"
diff --git a/src/libstore/gc.cc b/src/libstore/gc.cc
index f58f691c9..87e0a05bc 100644
--- a/src/libstore/gc.cc
+++ b/src/libstore/gc.cc
@@ -416,12 +416,7 @@ struct LocalStore::GCState
PathSet busy;
bool gcKeepOutputs;
bool gcKeepDerivations;
-
- bool drvsIndexed;
- typedef std::multimap<string, Path> DrvsByName;
- DrvsByName drvsByName; // derivation paths hashed by name attribute
-
- GCState(GCResults & results_) : results(results_), drvsIndexed(false)
+ GCState(GCResults & results_) : results(results_)
{
}
};
@@ -441,42 +436,6 @@ bool LocalStore::isActiveTempFile(const GCState & state,
&& state.tempRoots.find(string(path, 0, path.size() - suffix.size())) != state.tempRoots.end();
}
-
-/* Return all the derivations in the Nix store that have `path' as an
- output. This function assumes that derivations have the same name
- as their outputs. */
-PathSet LocalStore::findDerivers(GCState & state, const Path & path)
-{
- PathSet derivers;
-
- Path deriver = queryDeriver(path);
- if (deriver != "") derivers.insert(deriver);
-
- if (!state.drvsIndexed) {
- Paths entries = readDirectory(nixStore);
- foreach (Paths::iterator, i, entries)
- if (isDerivation(*i))
- state.drvsByName.insert(std::pair<string, Path>(
- getNameOfStorePath(*i), nixStore + "/" + *i));
- state.drvsIndexed = true;
- }
-
- string name = getNameOfStorePath(path);
-
- // Urgh, I should have used Haskell...
- std::pair<GCState::DrvsByName::iterator, GCState::DrvsByName::iterator> range =
- state.drvsByName.equal_range(name);
-
- for (GCState::DrvsByName::iterator i = range.first; i != range.second; ++i)
- if (isValidPath(i->second)) {
- Derivation drv = derivationFromPath(i->second);
- foreach (DerivationOutputs::iterator, j, drv.outputs)
- if (j->second.path == path) derivers.insert(i->second);
- }
-
- return derivers;
-}
-
bool LocalStore::tryToDelete(GCState & state, const Path & path)
{
@@ -508,10 +467,10 @@ bool LocalStore::tryToDelete(GCState & state, const Path & path)
then don't delete the derivation if any of the outputs are
live. */
if (state.gcKeepDerivations && isDerivation(path)) {
- Derivation drv = derivationFromPath(path);
- foreach (DerivationOutputs::iterator, i, drv.outputs)
- if (!tryToDelete(state, i->second.path)) {
- printMsg(lvlDebug, format("cannot delete derivation `%1%' because its output is alive") % path);
+ PathSet outputs = queryDerivationOutputs(path);
+ foreach (PathSet::iterator, i, outputs)
+ if (!tryToDelete(state, *i)) {
+ printMsg(lvlDebug, format("cannot delete derivation `%1%' because its output `%2%' is alive") % path % *i);
goto isLive;
}
}
@@ -522,18 +481,9 @@ bool LocalStore::tryToDelete(GCState & state, const Path & path)
if (!pathExists(path)) return true;
/* If gc-keep-outputs is set, then don't delete this path if
- its deriver is not garbage. !!! Nix does not reliably
- store derivers, so we have to look at all derivations to
- determine which of them derive `path'. Since this makes
- the garbage collector very slow to start on large Nix
- stores, here we just look for all derivations that have the
- same name as `path' (where the name is the part of the
- filename after the hash, i.e. the `name' attribute of the
- derivation). This is somewhat hacky: currently, the
- deriver of a path always has the same name as the output,
- but this might change in the future. */
+ there are derivers of this path that are not garbage. */
if (state.gcKeepOutputs) {
- PathSet derivers = findDerivers(state, path);
+ PathSet derivers = queryValidDerivers(path);
foreach (PathSet::iterator, deriver, derivers) {
/* Break an infinite recursion if gc-keep-derivations
and gc-keep-outputs are both set by tentatively
@@ -613,6 +563,15 @@ void LocalStore::collectGarbage(const GCOptions & options, GCResults & results)
state.gcKeepOutputs = queryBoolSetting("gc-keep-outputs", false);
state.gcKeepDerivations = queryBoolSetting("gc-keep-derivations", true);
+
+ /* Using `--ignore-liveness' with `--delete' can have unintended
+ consequences if `gc-keep-outputs' or `gc-keep-derivations' are
+ true (the garbage collector will recurse into deleting the
+ outputs or derivers, respectively). So disable them. */
+ if (options.action == GCOptions::gcDeleteSpecific && options.ignoreLiveness) {
+ state.gcKeepOutputs = false;
+ state.gcKeepDerivations = false;
+ }
/* Acquire the global GC root. This prevents
a) New roots from being added.
diff --git a/src/libstore/local-store.cc b/src/libstore/local-store.cc
index a83ba55e2..4d82547c6 100644
--- a/src/libstore/local-store.cc
+++ b/src/libstore/local-store.cc
@@ -3,9 +3,9 @@
#include "globals.hh"
#include "archive.hh"
#include "pathlocks.hh"
-#include "aterm.hh"
#include "derivations-ast.hh"
#include "worker-protocol.hh"
+#include "derivations.hh"
#include <iostream>
#include <algorithm>
@@ -18,10 +18,137 @@
#include <errno.h>
#include <stdio.h>
+#include <sqlite3.h>
+
namespace nix {
+class SQLiteError : public Error
+{
+public:
+ SQLiteError(sqlite3 * db, const format & f)
+ : Error(format("%1%: %2%") % f.str() % sqlite3_errmsg(db))
+ {
+ }
+};
+
+
+SQLite::~SQLite()
+{
+ try {
+ if (db && sqlite3_close(db) != SQLITE_OK)
+ throw SQLiteError(db, "closing database");
+ } catch (...) {
+ ignoreException();
+ }
+}
+
+
+void SQLiteStmt::create(sqlite3 * db, const string & s)
+{
+ checkInterrupt();
+ assert(!stmt);
+ if (sqlite3_prepare_v2(db, s.c_str(), -1, &stmt, 0) != SQLITE_OK)
+ throw SQLiteError(db, "creating statement");
+ this->db = db;
+}
+
+
+void SQLiteStmt::reset()
+{
+ assert(stmt);
+ if (sqlite3_reset(stmt) != SQLITE_OK)
+ throw SQLiteError(db, "resetting statement");
+ curArg = 1;
+}
+
+
+SQLiteStmt::~SQLiteStmt()
+{
+ try {
+ if (stmt && sqlite3_finalize(stmt) != SQLITE_OK)
+ throw SQLiteError(db, "finalizing statement");
+ } catch (...) {
+ ignoreException();
+ }
+}
+
+
+void SQLiteStmt::bind(const string & value)
+{
+ if (sqlite3_bind_text(stmt, curArg++, value.c_str(), -1, SQLITE_TRANSIENT) != SQLITE_OK)
+ throw SQLiteError(db, "binding argument");
+}
+
+
+void SQLiteStmt::bind(int value)
+{
+ if (sqlite3_bind_int(stmt, curArg++, value) != SQLITE_OK)
+ throw SQLiteError(db, "binding argument");
+}
+
+
+void SQLiteStmt::bind()
+{
+ if (sqlite3_bind_null(stmt, curArg++) != SQLITE_OK)
+ throw SQLiteError(db, "binding argument");
+}
+
+
+/* Helper class to ensure that prepared statements are reset when
+ leaving the scope that uses them. Unfinished prepared statements
+ prevent transactions from being aborted, and can cause locks to be
+ kept when they should be released. */
+struct SQLiteStmtUse
+{
+ SQLiteStmt & stmt;
+ SQLiteStmtUse(SQLiteStmt & stmt) : stmt(stmt)
+ {
+ stmt.reset();
+ }
+ ~SQLiteStmtUse()
+ {
+ try {
+ stmt.reset();
+ } catch (...) {
+ ignoreException();
+ }
+ }
+};
+
+
+struct SQLiteTxn
+{
+ bool active;
+ sqlite3 * db;
+
+ SQLiteTxn(sqlite3 * db) : active(false) {
+ this->db = db;
+ if (sqlite3_exec(db, "begin;", 0, 0, 0) != SQLITE_OK)
+ throw SQLiteError(db, "starting transaction");
+ active = true;
+ }
+
+ void commit()
+ {
+ if (sqlite3_exec(db, "commit;", 0, 0, 0) != SQLITE_OK)
+ throw SQLiteError(db, "committing transaction");
+ active = false;
+ }
+
+ ~SQLiteTxn()
+ {
+ try {
+ if (active && sqlite3_exec(db, "rollback;", 0, 0, 0) != SQLITE_OK)
+ throw SQLiteError(db, "aborting transaction");
+ } catch (...) {
+ ignoreException();
+ }
+ }
+};
+
+
void checkStoreNotSymlink()
{
if (getEnv("NIX_IGNORE_SYMLINK_STORE") == "1") return;
@@ -46,13 +173,13 @@ LocalStore::LocalStore()
schemaPath = nixDBPath + "/schema";
- if (readOnlyMode) return;
+ if (readOnlyMode) {
+ openDB(false);
+ return;
+ }
/* Create missing state directories if they don't already exist. */
createDirs(nixStore);
- createDirs(nixDBPath + "/info");
- createDirs(nixDBPath + "/referrer");
- createDirs(nixDBPath + "/failed");
Path profilesDir = nixStateDir + "/profiles";
createDirs(nixStateDir + "/profiles");
createDirs(nixStateDir + "/temproots");
@@ -65,12 +192,15 @@ LocalStore::LocalStore()
checkStoreNotSymlink();
+ /* Acquire the big fat lock in shared mode to make sure that no
+ schema upgrade is in progress. */
try {
Path globalLockPath = nixDBPath + "/big-lock";
globalLock = openLockFile(globalLockPath.c_str(), true);
} catch (SysError & e) {
if (e.errNo != EACCES) throw;
readOnlyMode = true;
+ openDB(false);
return;
}
@@ -78,33 +208,55 @@ LocalStore::LocalStore()
printMsg(lvlError, "waiting for the big Nix store lock...");
lockFile(globalLock, ltRead, true);
}
-
+
+ /* Check the current database schema and if necessary do an
+ upgrade. */
int curSchema = getSchema();
if (curSchema > nixSchemaVersion)
throw Error(format("current Nix store schema is version %1%, but I only support %2%")
% curSchema % nixSchemaVersion);
- if (curSchema == 0) { /* new store */
- curSchema = nixSchemaVersion;
+
+ else if (curSchema == 0) { /* new store */
+ curSchema = nixSchemaVersion;
+ openDB(true);
writeFile(schemaPath, (format("%1%") % nixSchemaVersion).str());
}
- if (curSchema == 1) throw Error("your Nix store is no longer supported");
- if (curSchema < nixSchemaVersion) upgradeStore12();
+
+ else if (curSchema < nixSchemaVersion) {
+ if (curSchema < 5)
+ throw Error(
+ "Your Nix store has a database in Berkeley DB format,\n"
+ "which is no longer supported. To convert to the new format,\n"
+ "please upgrade Nix to version 0.12 first.");
+
+ if (!lockFile(globalLock, ltWrite, false)) {
+ printMsg(lvlError, "waiting for exclusive access to the Nix store...");
+ lockFile(globalLock, ltWrite, true);
+ }
+
+ /* Get the schema version again, because another process may
+ have performed the upgrade already. */
+ curSchema = getSchema();
+
+ if (curSchema < 6) upgradeStore6();
+
+ writeFile(schemaPath, (format("%1%") % nixSchemaVersion).str());
- doFsync = queryBoolSetting("fsync-metadata", false);
+ lockFile(globalLock, ltRead, true);
+ }
+
+ else openDB(false);
}
LocalStore::~LocalStore()
{
try {
- flushDelayedUpdates();
-
foreach (RunningSubstituters::iterator, i, runningSubstituters) {
i->second.to.close();
i->second.from.close();
i->second.pid.wait(true);
}
-
} catch (...) {
ignoreException();
}
@@ -123,6 +275,67 @@ int LocalStore::getSchema()
}
+void LocalStore::openDB(bool create)
+{
+ /* Open the Nix database. */
+ if (sqlite3_open_v2((nixDBPath + "/db.sqlite").c_str(), &db.db,
+ SQLITE_OPEN_READWRITE | (create ? SQLITE_OPEN_CREATE : 0), 0) != SQLITE_OK)
+ throw Error("cannot open SQLite database");
+
+ if (sqlite3_busy_timeout(db, 60 * 60 * 1000) != SQLITE_OK)
+ throw SQLiteError(db, "setting timeout");
+
+ if (sqlite3_exec(db, "pragma foreign_keys = 1;", 0, 0, 0) != SQLITE_OK)
+ throw SQLiteError(db, "enabling foreign keys");
+
+ /* !!! check whether sqlite has been built with foreign key
+ support */
+
+ /* Whether SQLite should fsync(). "Normal" synchronous mode
+ should be safe enough. If the user asks for it, don't sync at
+ all. This can cause database corruption if the system
+ crashes. */
+ string syncMode = queryBoolSetting("fsync-metadata", true) ? "normal" : "off";
+ if (sqlite3_exec(db, ("pragma synchronous = " + syncMode + ";").c_str(), 0, 0, 0) != SQLITE_OK)
+ throw SQLiteError(db, "setting synchronous mode");
+
+ /* Use `truncate' journal mode, which should be a bit faster. */
+ if (sqlite3_exec(db, "pragma main.journal_mode = truncate;", 0, 0, 0) != SQLITE_OK)
+ throw SQLiteError(db, "setting journal mode");
+
+ /* Initialise the database schema, if necessary. */
+ if (create) {
+#include "schema.sql.hh"
+ if (sqlite3_exec(db, (const char *) schema, 0, 0, 0) != SQLITE_OK)
+ throw SQLiteError(db, "initialising database schema");
+ }
+
+ /* Prepare SQL statements. */
+ stmtRegisterValidPath.create(db,
+ "insert or replace into ValidPaths (path, hash, registrationTime, deriver) values (?, ?, ?, ?);");
+ stmtAddReference.create(db,
+ "insert or replace into Refs (referrer, reference) values (?, ?);");
+ stmtQueryPathInfo.create(db,
+ "select id, hash, registrationTime, deriver from ValidPaths where path = ?;");
+ stmtQueryReferences.create(db,
+ "select path from Refs join ValidPaths on reference = id where referrer = ?;");
+ stmtQueryReferrers.create(db,
+ "select path from Refs join ValidPaths on referrer = id where reference = (select id from ValidPaths where path = ?);");
+ stmtInvalidatePath.create(db,
+ "delete from ValidPaths where path = ?;");
+ stmtRegisterFailedPath.create(db,
+ "insert into FailedPaths (path, time) values (?, ?);");
+ stmtHasPathFailed.create(db,
+ "select time from FailedPaths where path = ?;");
+ stmtAddDerivationOutput.create(db,
+ "insert or replace into DerivationOutputs (drv, id, path) values (?, ?, ?);");
+ stmtQueryValidDerivers.create(db,
+ "select v.id, v.path from DerivationOutputs d join ValidPaths v on d.drv = v.id where d.path = ?;");
+ stmtQueryDerivationOutputs.create(db,
+ "select id, path from DerivationOutputs where drv = ?;");
+}
+
+
void canonicalisePathMetaData(const Path & path, bool recurse)
{
checkInterrupt();
@@ -197,98 +410,6 @@ void canonicalisePathMetaData(const Path & path)
}
-static Path infoFileFor(const Path & path)
-{
- string baseName = baseNameOf(path);
- return (format("%1%/info/%2%") % nixDBPath % baseName).str();
-}
-
-
-static Path referrersFileFor(const Path & path)
-{
- string baseName = baseNameOf(path);
- return (format("%1%/referrer/%2%") % nixDBPath % baseName).str();
-}
-
-
-static Path failedFileFor(const Path & path)
-{
- string baseName = baseNameOf(path);
- return (format("%1%/failed/%2%") % nixDBPath % baseName).str();
-}
-
-
-static Path tmpFileForAtomicUpdate(const Path & path)
-{
- return (format("%1%/.%2%.%3%") % dirOf(path) % getpid() % baseNameOf(path)).str();
-}
-
-
-void LocalStore::appendReferrer(const Path & from, const Path & to, bool lock)
-{
- Path referrersFile = referrersFileFor(from);
-
- PathLocks referrersLock;
- if (lock) {
- referrersLock.lockPaths(singleton<PathSet, Path>(referrersFile));
- referrersLock.setDeletion(true);
- }
-
- AutoCloseFD fd = open(referrersFile.c_str(), O_WRONLY | O_APPEND | O_CREAT, 0666);
- if (fd == -1) throw SysError(format("opening file `%1%'") % referrersFile);
-
- string s = " " + to;
- writeFull(fd, (const unsigned char *) s.c_str(), s.size());
-
- if (doFsync) fsync(fd);
-}
-
-
-/* Atomically update the referrers file. If `purge' is true, the set
- of referrers is set to `referrers'. Otherwise, the current set of
- referrers is purged of invalid paths. */
-void LocalStore::rewriteReferrers(const Path & path, bool purge, PathSet referrers)
-{
- Path referrersFile = referrersFileFor(path);
-
- PathLocks referrersLock(singleton<PathSet, Path>(referrersFile));
- referrersLock.setDeletion(true);
-
- if (purge)
- /* queryReferrers() purges invalid paths, so that's all we
- need. */
- queryReferrers(path, referrers);
-
- Path tmpFile = tmpFileForAtomicUpdate(referrersFile);
-
- AutoCloseFD fd = open(tmpFile.c_str(), O_WRONLY | O_TRUNC | O_CREAT, 0666);
- if (fd == -1) throw SysError(format("opening file `%1%'") % referrersFile);
-
- string s;
- foreach (PathSet::const_iterator, i, referrers) {
- s += " "; s += *i;
- }
-
- writeFull(fd, (const unsigned char *) s.c_str(), s.size());
-
- if (doFsync) fsync(fd);
-
- fd.close(); /* for Windows; can't rename open file */
-
- if (rename(tmpFile.c_str(), referrersFile.c_str()) == -1)
- throw SysError(format("cannot rename `%1%' to `%2%'") % tmpFile % referrersFile);
-}
-
-
-void LocalStore::flushDelayedUpdates()
-{
- foreach (PathSet::iterator, i, delayedUpdates) {
- rewriteReferrers(*i, true, PathSet());
- }
- delayedUpdates.clear();
-}
-
-
void LocalStore::registerValidPath(const Path & path,
const Hash & hash, const PathSet & references,
const Path & deriver)
@@ -302,76 +423,88 @@ void LocalStore::registerValidPath(const Path & path,
}
-void LocalStore::registerValidPath(const ValidPathInfo & info, bool ignoreValidity)
+unsigned long long LocalStore::addValidPath(const ValidPathInfo & info)
{
- Path infoFile = infoFileFor(info.path);
-
- ValidPathInfo oldInfo;
- if (pathExists(infoFile)) oldInfo = queryPathInfo(info.path);
-
- /* Note that it's possible for infoFile to already exist. */
-
- /* Acquire a lock on each referrer file. This prevents those
- paths from being invalidated. (It would be a violation of the
- store invariants if we registered info.path as valid while some
- of its references are invalid.) NB: there can be no deadlock
- here since we're acquiring the locks in sorted order. */
- PathSet lockNames;
- foreach (PathSet::const_iterator, i, info.references)
- if (*i != info.path) lockNames.insert(referrersFileFor(*i));
- PathLocks referrerLocks(lockNames);
- referrerLocks.setDeletion(true);
-
- string refs;
- foreach (PathSet::const_iterator, i, info.references) {
- if (!refs.empty()) refs += " ";
- refs += *i;
-
- if (!ignoreValidity && *i != info.path && !isValidPath(*i))
- throw Error(format("cannot register `%1%' as valid, because its reference `%2%' isn't valid")
- % info.path % *i);
-
- /* Update the referrer mapping for *i. This must be done
- before the info file is written to maintain the invariant
- that if `path' is a valid path, then all its references
- have referrer mappings back to `path'. A " " is prefixed
- to separate it from the previous entry. It's not suffixed
- to deal with interrupted partial writes to this file. */
- if (oldInfo.references.find(*i) == oldInfo.references.end())
- appendReferrer(*i, info.path, false);
+ SQLiteStmtUse use(stmtRegisterValidPath);
+ stmtRegisterValidPath.bind(info.path);
+ stmtRegisterValidPath.bind("sha256:" + printHash(info.hash));
+ stmtRegisterValidPath.bind(info.registrationTime);
+ if (info.deriver != "")
+ stmtRegisterValidPath.bind(info.deriver);
+ else
+ stmtRegisterValidPath.bind(); // null
+ if (sqlite3_step(stmtRegisterValidPath) != SQLITE_DONE)
+ throw SQLiteError(db, format("registering valid path `%1%' in database") % info.path);
+ unsigned long long id = sqlite3_last_insert_rowid(db);
+
+ /* If this is a derivation, then store the derivation outputs in
+ the database. This is useful for the garbage collector: it can
+ efficiently query whether a path is an output of some
+ derivation. */
+ if (isDerivation(info.path)) {
+ ATerm t = ATreadFromNamedFile(info.path.c_str());
+ if (!t) throw Error(format("cannot read derivation `%1%'") % info.path);
+ Derivation drv = parseDerivation(t);
+ foreach (DerivationOutputs::iterator, i, drv.outputs) {
+ SQLiteStmtUse use(stmtAddDerivationOutput);
+ stmtAddDerivationOutput.bind(id);
+ stmtAddDerivationOutput.bind(i->first);
+ stmtAddDerivationOutput.bind(i->second.path);
+ if (sqlite3_step(stmtAddDerivationOutput) != SQLITE_DONE)
+ throw SQLiteError(db, format("adding derivation output for `%1%' in database") % info.path);
+ }
}
+ return id;
+}
+
+
+void LocalStore::addReference(unsigned long long referrer, unsigned long long reference)
+{
+ SQLiteStmtUse use(stmtAddReference);
+ stmtAddReference.bind(referrer);
+ stmtAddReference.bind(reference);
+ if (sqlite3_step(stmtAddReference) != SQLITE_DONE)
+ throw SQLiteError(db, "adding reference to database");
+}
+
+
+void LocalStore::registerValidPath(const ValidPathInfo & info)
+{
assert(info.hash.type == htSHA256);
+ ValidPathInfo info2(info);
+ if (info2.registrationTime == 0) info2.registrationTime = time(0);
+
+ SQLiteTxn txn(db);
+
+ unsigned long long id = addValidPath(info2);
- string s = (format(
- "Hash: sha256:%1%\n"
- "References: %2%\n"
- "Deriver: %3%\n"
- "Registered-At: %4%\n")
- % printHash(info.hash) % refs % info.deriver %
- (oldInfo.registrationTime ? oldInfo.registrationTime : time(0))).str();
-
- /* Atomically rewrite the info file. */
- Path tmpFile = tmpFileForAtomicUpdate(infoFile);
- writeFile(tmpFile, s, doFsync);
- if (rename(tmpFile.c_str(), infoFile.c_str()) == -1)
- throw SysError(format("cannot rename `%1%' to `%2%'") % tmpFile % infoFile);
-
- pathInfoCache[info.path] = info;
+ foreach (PathSet::const_iterator, i, info2.references)
+ addReference(id, queryValidPathId(*i));
+
+ txn.commit();
}
void LocalStore::registerFailedPath(const Path & path)
{
- /* Write an empty file in the .../failed directory to denote the
- failure of the builder for `path'. */
- writeFile(failedFileFor(path), "");
+ if (hasPathFailed(path)) return;
+ SQLiteStmtUse use(stmtRegisterFailedPath);
+ stmtRegisterFailedPath.bind(path);
+ stmtRegisterFailedPath.bind(time(0));
+ if (sqlite3_step(stmtRegisterFailedPath) != SQLITE_DONE)
+ throw SQLiteError(db, format("registering failed path `%1%'") % path);
}
bool LocalStore::hasPathFailed(const Path & path)
{
- return pathExists(failedFileFor(path));
+ SQLiteStmtUse use(stmtHasPathFailed);
+ stmtHasPathFailed.bind(path);
+ int res = sqlite3_step(stmtHasPathFailed);
+ if (res != SQLITE_DONE && res != SQLITE_ROW)
+ throw SQLiteError(db, "querying whether path failed");
+ return res == SQLITE_ROW;
}
@@ -389,91 +522,91 @@ Hash parseHashField(const Path & path, const string & s)
}
-ValidPathInfo LocalStore::queryPathInfo(const Path & path, bool ignoreErrors)
+ValidPathInfo LocalStore::queryPathInfo(const Path & path)
{
- ValidPathInfo res;
- res.path = path;
+ ValidPathInfo info;
+ info.path = path;
assertStorePath(path);
- if (!isValidPath(path))
- throw Error(format("path `%1%' is not valid") % path);
+ /* Get the path info. */
+ SQLiteStmtUse use1(stmtQueryPathInfo);
- std::map<Path, ValidPathInfo>::iterator lookup = pathInfoCache.find(path);
- if (lookup != pathInfoCache.end()) return lookup->second;
+ stmtQueryPathInfo.bind(path);
- /* Read the info file. */
- Path infoFile = infoFileFor(path);
- if (!pathExists(infoFile))
- throw Error(format("path `%1%' is not valid") % path);
- string info = readFile(infoFile);
+ int r = sqlite3_step(stmtQueryPathInfo);
+ if (r == SQLITE_DONE) throw Error(format("path `%1%' is not valid") % path);
+ if (r != SQLITE_ROW) throw SQLiteError(db, "querying path in database");
- /* Parse it. */
- Strings lines = tokenizeString(info, "\n");
+ info.id = sqlite3_column_int(stmtQueryPathInfo, 0);
- foreach (Strings::iterator, i, lines) {
- string::size_type p = i->find(':');
- if (p == string::npos) {
- if (!ignoreErrors)
- throw Error(format("corrupt line in `%1%': %2%") % infoFile % *i);
- continue; /* bad line */
- }
- string name(*i, 0, p);
- string value(*i, p + 2);
- if (name == "References") {
- Strings refs = tokenizeString(value, " ");
- res.references = PathSet(refs.begin(), refs.end());
- } else if (name == "Deriver") {
- res.deriver = value;
- } else if (name == "Hash") {
- try {
- res.hash = parseHashField(path, value);
- } catch (Error & e) {
- if (!ignoreErrors) throw;
- printMsg(lvlError, format("cannot parse hash field in `%1%': %2%") % infoFile % e.msg());
- }
- } else if (name == "Registered-At") {
- int n = 0;
- string2Int(value, n);
- res.registrationTime = n;
- }
+ const char * s = (const char *) sqlite3_column_text(stmtQueryPathInfo, 1);
+ assert(s);
+ info.hash = parseHashField(path, s);
+
+ info.registrationTime = sqlite3_column_int(stmtQueryPathInfo, 2);
+
+ s = (const char *) sqlite3_column_text(stmtQueryPathInfo, 3);
+ if (s) info.deriver = s;
+
+ /* Get the references. */
+ SQLiteStmtUse use2(stmtQueryReferences);
+
+ stmtQueryReferences.bind(info.id);
+
+ while ((r = sqlite3_step(stmtQueryReferences)) == SQLITE_ROW) {
+ s = (const char *) sqlite3_column_text(stmtQueryReferences, 0);
+ assert(s);
+ info.references.insert(s);
}
- return pathInfoCache[path] = res;
+ if (r != SQLITE_DONE)
+ throw SQLiteError(db, format("error getting references of `%1%'") % path);
+
+ return info;
}
-bool LocalStore::isValidPath(const Path & path)
+unsigned long long LocalStore::queryValidPathId(const Path & path)
{
- /* Files in the info directory starting with a `.' are temporary
- files. */
- if (baseNameOf(path).at(0) == '.') return false;
-
- /* A path is valid if its info file exists and has a non-zero
- size. (The non-zero size restriction is to be robust to
- certain kinds of filesystem corruption, particularly with
- ext4.) */
- Path infoFile = infoFileFor(path);
+ SQLiteStmtUse use(stmtQueryPathInfo);
+ stmtQueryPathInfo.bind(path);
+ int res = sqlite3_step(stmtQueryPathInfo);
+ if (res == SQLITE_ROW) return sqlite3_column_int(stmtQueryPathInfo, 0);
+ if (res == SQLITE_DONE) throw Error(format("path `%1%' is not valid") % path);
+ throw SQLiteError(db, "querying path in database");
+}
- struct stat st;
- if (lstat(infoFile.c_str(), &st)) {
- if (errno == ENOENT) return false;
- throw SysError(format("getting status of `%1%'") % infoFile);
- }
- if (st.st_size == 0) return false;
-
- return true;
+bool LocalStore::isValidPath(const Path & path)
+{
+ SQLiteStmtUse use(stmtQueryPathInfo);
+ stmtQueryPathInfo.bind(path);
+ int res = sqlite3_step(stmtQueryPathInfo);
+ if (res != SQLITE_DONE && res != SQLITE_ROW)
+ throw SQLiteError(db, "querying path in database");
+ return res == SQLITE_ROW;
}
PathSet LocalStore::queryValidPaths()
{
- PathSet paths;
- Strings entries = readDirectory(nixDBPath + "/info");
- foreach (Strings::iterator, i, entries)
- if (i->at(0) != '.') paths.insert(nixStore + "/" + *i);
- return paths;
+ SQLiteStmt stmt;
+ stmt.create(db, "select path from ValidPaths");
+
+ PathSet res;
+
+ int r;
+ while ((r = sqlite3_step(stmt)) == SQLITE_ROW) {
+ const char * s = (const char *) sqlite3_column_text(stmt, 0);
+ assert(s);
+ res.insert(s);
+ }
+
+ if (r != SQLITE_DONE)
+ throw SQLiteError(db, "error getting valid paths");
+
+ return res;
}
@@ -485,45 +618,73 @@ void LocalStore::queryReferences(const Path & path,
}
-bool LocalStore::queryReferrersInternal(const Path & path, PathSet & referrers)
+void LocalStore::queryReferrers(const Path & path, PathSet & referrers)
{
- bool allValid = true;
-
- if (!isValidPath(path))
- throw Error(format("path `%1%' is not valid") % path);
+ assertStorePath(path);
- /* No locking is necessary here: updates are only done by
- appending or by atomically replacing the file. When appending,
- there is a possibility that we see a partial entry, but it will
- just be filtered out below (the partially written path will not
- be valid, so it will be ignored). */
+ SQLiteStmtUse use(stmtQueryReferrers);
- Path referrersFile = referrersFileFor(path);
- if (!pathExists(referrersFile)) return true;
-
- AutoCloseFD fd = open(referrersFile.c_str(), O_RDONLY);
- if (fd == -1) throw SysError(format("opening file `%1%'") % referrersFile);
+ stmtQueryReferrers.bind(path);
- Paths refs = tokenizeString(readFile(fd), " ");
+ int r;
+ while ((r = sqlite3_step(stmtQueryReferrers)) == SQLITE_ROW) {
+ const char * s = (const char *) sqlite3_column_text(stmtQueryReferrers, 0);
+ assert(s);
+ referrers.insert(s);
+ }
+
+ if (r != SQLITE_DONE)
+ throw SQLiteError(db, format("error getting references of `%1%'") % path);
+}
- foreach (Paths::iterator, i, refs)
- /* Referrers can be invalid (see registerValidPath() for the
- invariant), so we only return one if it is valid. */
- if (isStorePath(*i) && isValidPath(*i)) referrers.insert(*i); else allValid = false;
- return allValid;
+Path LocalStore::queryDeriver(const Path & path)
+{
+ return queryPathInfo(path).deriver;
}
-void LocalStore::queryReferrers(const Path & path, PathSet & referrers)
+PathSet LocalStore::queryValidDerivers(const Path & path)
{
- queryReferrersInternal(path, referrers);
+ assertStorePath(path);
+
+ SQLiteStmtUse use(stmtQueryValidDerivers);
+ stmtQueryValidDerivers.bind(path);
+
+ PathSet derivers;
+ int r;
+ while ((r = sqlite3_step(stmtQueryValidDerivers)) == SQLITE_ROW) {
+ const char * s = (const char *) sqlite3_column_text(stmtQueryValidDerivers, 1);
+ assert(s);
+ derivers.insert(s);
+ }
+
+ if (r != SQLITE_DONE)
+ throw SQLiteError(db, format("error getting valid derivers of `%1%'") % path);
+
+ return derivers;
}
-Path LocalStore::queryDeriver(const Path & path)
+PathSet LocalStore::queryDerivationOutputs(const Path & path)
{
- return queryPathInfo(path).deriver;
+ SQLiteTxn txn(db);
+
+ SQLiteStmtUse use(stmtQueryDerivationOutputs);
+ stmtQueryDerivationOutputs.bind(queryValidPathId(path));
+
+ PathSet outputs;
+ int r;
+ while ((r = sqlite3_step(stmtQueryDerivationOutputs)) == SQLITE_ROW) {
+ const char * s = (const char *) sqlite3_column_text(stmtQueryDerivationOutputs, 1);
+ assert(s);
+ outputs.insert(s);
+ }
+
+ if (r != SQLITE_DONE)
+ throw SQLiteError(db, format("error getting outputs of `%1%'") % path);
+
+ return outputs;
}
@@ -637,39 +798,19 @@ Hash LocalStore::queryPathHash(const Path & path)
}
-static void dfsVisit(std::map<Path, ValidPathInfo> & infos,
- const Path & path, PathSet & visited, Paths & sorted)
+void LocalStore::registerValidPaths(const ValidPathInfos & infos)
{
- if (visited.find(path) != visited.end()) return;
- visited.insert(path);
-
- ValidPathInfo & info(infos[path]);
+ SQLiteTxn txn(db);
- foreach (PathSet::iterator, i, info.references)
- if (infos.find(*i) != infos.end())
- dfsVisit(infos, *i, visited, sorted);
-
- sorted.push_back(path);
-}
+ foreach (ValidPathInfos::const_iterator, i, infos) addValidPath(*i);
+ foreach (ValidPathInfos::const_iterator, i, infos) {
+ unsigned long long referrer = queryValidPathId(i->path);
+ foreach (PathSet::iterator, j, i->references)
+ addReference(referrer, queryValidPathId(*j));
+ }
-void LocalStore::registerValidPaths(const ValidPathInfos & infos)
-{
- std::map<Path, ValidPathInfo> infosMap;
-
- /* Sort the paths topologically under the references relation, so
- that if path A is referenced by B, then A is registered before
- B. */
- foreach (ValidPathInfos::const_iterator, i, infos)
- infosMap[i->path] = *i;
-
- PathSet visited;
- Paths sorted;
- foreach (ValidPathInfos::const_iterator, i, infos)
- dfsVisit(infosMap, i->path, visited, sorted);
-
- foreach (Paths::iterator, i, sorted)
- registerValidPath(infosMap[*i]);
+ txn.commit();
}
@@ -679,43 +820,15 @@ void LocalStore::invalidatePath(const Path & path)
{
debug(format("invalidating path `%1%'") % path);
- ValidPathInfo info;
+ SQLiteStmtUse use(stmtInvalidatePath);
- if (pathExists(infoFileFor(path))) {
- info = queryPathInfo(path);
+ stmtInvalidatePath.bind(path);
- /* Remove the info file. */
- Path p = infoFileFor(path);
- if (unlink(p.c_str()) == -1)
- throw SysError(format("unlinking `%1%'") % p);
- }
+ if (sqlite3_step(stmtInvalidatePath) != SQLITE_DONE)
+ throw SQLiteError(db, format("invalidating path `%1%' in database") % path);
- /* Remove the referrers file for `path'. */
- Path p = referrersFileFor(path);
- if (pathExists(p) && unlink(p.c_str()) == -1)
- throw SysError(format("unlinking `%1%'") % p);
-
- /* Clear `path' from the info cache. */
- pathInfoCache.erase(path);
- delayedUpdates.erase(path);
-
- /* Cause the referrer files for each path referenced by this one
- to be updated. This has to happen after removing the info file
- to preserve the invariant (see registerValidPath()).
-
- The referrer files are updated lazily in flushDelayedUpdates()
- to prevent quadratic performance in the garbage collector
- (i.e., when N referrers to some path X are deleted, we have to
- rewrite the referrers file for X N times, causing O(N^2) I/O).
-
- What happens if we die before the referrer file can be updated?
- That's not a problem, because stale (invalid) entries in the
- referrer file are ignored by queryReferrers(). Thus a referrer
- file is allowed to have stale entries; removing them is just an
- optimisation. verifyStore() gets rid of them eventually.
- */
- foreach (PathSet::iterator, i, info.references)
- if (*i != path) delayedUpdates.insert(*i);
+ /* Note that the foreign key constraints on the Refs table take
+ care of deleting the references entries for `path'. */
}
@@ -817,16 +930,19 @@ struct HashAndWriteSink : Sink
{
Sink & writeSink;
HashSink hashSink;
- bool hashing;
HashAndWriteSink(Sink & writeSink) : writeSink(writeSink), hashSink(htSHA256)
{
- hashing = true;
}
virtual void operator ()
(const unsigned char * data, unsigned int len)
{
writeSink(data, len);
- if (hashing) hashSink(data, len);
+ hashSink(data, len);
+ }
+ Hash currentHash()
+ {
+ HashSink hashSinkClone(hashSink);
+ return hashSinkClone.finish();
}
};
@@ -857,6 +973,14 @@ void LocalStore::exportPath(const Path & path, bool sign,
dumpPath(path, hashAndWriteSink);
+ /* Refuse to export paths that have changed. This prevents
+ filesystem corruption from spreading to other machines. */
+ Hash hash = hashAndWriteSink.currentHash();
+ Hash storedHash = queryPathHash(path);
+ if (hash != storedHash)
+ throw Error(format("hash of path `%1%' has changed from `%2%' to `%3%'!") % path
+ % printHash(storedHash) % printHash(hash));
+
writeInt(EXPORT_MAGIC, hashAndWriteSink);
writeString(path, hashAndWriteSink);
@@ -869,9 +993,8 @@ void LocalStore::exportPath(const Path & path, bool sign,
writeString(deriver, hashAndWriteSink);
if (sign) {
- Hash hash = hashAndWriteSink.hashSink.finish();
- hashAndWriteSink.hashing = false;
-
+ Hash hash = hashAndWriteSink.currentHash();
+
writeInt(1, hashAndWriteSink);
Path tmpDir = createTempDir();
@@ -1024,11 +1147,6 @@ void LocalStore::deleteFromStore(const Path & path, unsigned long long & bytesFr
assertStorePath(path);
if (isValidPath(path)) {
- /* Acquire a lock on the referrers file to prevent new
- referrers to this path from appearing while we're deleting
- it. */
- PathLocks referrersLock(singleton<PathSet, Path>(referrersFileFor(path)));
- referrersLock.setDeletion(true);
PathSet referrers; queryReferrers(path, referrers);
referrers.erase(path); /* ignore self-references */
if (!referrers.empty())
@@ -1050,68 +1168,26 @@ void LocalStore::verifyStore(bool checkContents)
foreach (PathSet::iterator, i, validPaths2) {
checkInterrupt();
+ /* !!! invalidatePath() will probably fail due to the foreign
+ key constraints on the Ref table. */
if (!isStorePath(*i)) {
printMsg(lvlError, format("path `%1%' is not in the Nix store") % *i);
invalidatePath(*i);
} else if (!pathExists(*i)) {
printMsg(lvlError, format("path `%1%' disappeared") % *i);
invalidatePath(*i);
- } else {
- Path infoFile = infoFileFor(*i);
- struct stat st;
- if (lstat(infoFile.c_str(), &st))
- throw SysError(format("getting status of `%1%'") % infoFile);
- if (st.st_size == 0) {
- printMsg(lvlError, format("removing corrupt info file `%1%'") % infoFile);
- if (unlink(infoFile.c_str()) == -1)
- throw SysError(format("unlinking `%1%'") % infoFile);
- }
- else validPaths.insert(*i);
- }
+ } else validPaths.insert(*i);
}
+ /* Optionally, check the content hashes (slow). */
+ if (checkContents) {
+ printMsg(lvlInfo, "checking hashes");
- /* Check the store path meta-information. */
- printMsg(lvlInfo, "checking path meta-information");
-
- std::map<Path, PathSet> referrersCache;
-
- foreach (PathSet::iterator, i, validPaths) {
- bool update = false;
- ValidPathInfo info = queryPathInfo(*i, true);
-
- /* Check the references: each reference should be valid, and
- it should have a matching referrer. */
- foreach (PathSet::iterator, j, info.references) {
- if (validPaths.find(*j) == validPaths.end()) {
- printMsg(lvlError, format("incomplete closure: `%1%' needs missing `%2%'")
- % *i % *j);
- /* nothing we can do about it... */
- } else {
- if (referrersCache.find(*j) == referrersCache.end())
- queryReferrers(*j, referrersCache[*j]);
- if (referrersCache[*j].find(*i) == referrersCache[*j].end()) {
- printMsg(lvlError, format("adding missing referrer mapping from `%1%' to `%2%'")
- % *j % *i);
- appendReferrer(*j, *i, true);
- }
- }
- }
-
- /* Check the deriver. (Note that the deriver doesn't have to
- be a valid path.) */
- if (!info.deriver.empty() && !isStorePath(info.deriver)) {
- info.deriver = "";
- update = true;
- }
+ foreach (PathSet::iterator, i, validPaths) {
+ ValidPathInfo info = queryPathInfo(*i);
- /* Check the content hash (optionally - slow). */
- if (info.hash.hashSize == 0) {
- printMsg(lvlError, format("re-hashing `%1%'") % *i);
- info.hash = hashPath(htSHA256, *i);
- update = true;
- } else if (checkContents) {
- debug(format("checking contents of `%1%'") % *i);
+ /* Check the content hash (optionally - slow). */
+ printMsg(lvlTalkative, format("checking contents of `%1%'") % *i);
Hash current = hashPath(info.hash.type, *i);
if (current != info.hash) {
printMsg(lvlError, format("path `%1%' was modified! "
@@ -1119,67 +1195,90 @@ void LocalStore::verifyStore(bool checkContents)
% *i % printHash(info.hash) % printHash(current));
}
}
-
- if (update) registerValidPath(info);
}
+}
- referrersCache.clear();
-
- /* Check the referrers. */
- printMsg(lvlInfo, "checking referrers");
+/* Functions for upgrading from the pre-SQLite database. */
- std::map<Path, PathSet> referencesCache;
-
- Strings entries = readDirectory(nixDBPath + "/referrer");
- foreach (Strings::iterator, i, entries) {
- Path from = nixStore + "/" + *i;
-
- if (validPaths.find(from) == validPaths.end()) {
- /* !!! This removes lock files as well. Need to check
- whether that's okay. */
- printMsg(lvlError, format("removing referrers file for invalid `%1%'") % from);
- Path p = referrersFileFor(from);
- if (unlink(p.c_str()) == -1)
- throw SysError(format("unlinking `%1%'") % p);
- continue;
- }
+PathSet LocalStore::queryValidPathsOld()
+{
+ PathSet paths;
+ Strings entries = readDirectory(nixDBPath + "/info");
+ foreach (Strings::iterator, i, entries)
+ if (i->at(0) != '.') paths.insert(nixStore + "/" + *i);
+ return paths;
+}
- PathSet referrers;
- bool allValid = queryReferrersInternal(from, referrers);
- bool update = false;
- if (!allValid) {
- printMsg(lvlError, format("removing some stale referrers for `%1%'") % from);
- update = true;
- }
+ValidPathInfo LocalStore::queryPathInfoOld(const Path & path)
+{
+ ValidPathInfo res;
+ res.path = path;
- /* Each referrer should have a matching reference. */
- PathSet referrersNew;
- foreach (PathSet::iterator, j, referrers) {
- if (referencesCache.find(*j) == referencesCache.end())
- queryReferences(*j, referencesCache[*j]);
- if (referencesCache[*j].find(from) == referencesCache[*j].end()) {
- printMsg(lvlError, format("removing unexpected referrer mapping from `%1%' to `%2%'")
- % from % *j);
- update = true;
- } else referrersNew.insert(*j);
- }
+ /* Read the info file. */
+ string baseName = baseNameOf(path);
+ Path infoFile = (format("%1%/info/%2%") % nixDBPath % baseName).str();
+ if (!pathExists(infoFile))
+ throw Error(format("path `%1%' is not valid") % path);
+ string info = readFile(infoFile);
+
+ /* Parse it. */
+ Strings lines = tokenizeString(info, "\n");
- if (update) rewriteReferrers(from, false, referrersNew);
+ foreach (Strings::iterator, i, lines) {
+ string::size_type p = i->find(':');
+ if (p == string::npos)
+ throw Error(format("corrupt line in `%1%': %2%") % infoFile % *i);
+ string name(*i, 0, p);
+ string value(*i, p + 2);
+ if (name == "References") {
+ Strings refs = tokenizeString(value, " ");
+ res.references = PathSet(refs.begin(), refs.end());
+ } else if (name == "Deriver") {
+ res.deriver = value;
+ } else if (name == "Hash") {
+ res.hash = parseHashField(path, value);
+ } else if (name == "Registered-At") {
+ int n = 0;
+ string2Int(value, n);
+ res.registrationTime = n;
+ }
}
+
+ return res;
}
-/* Upgrade from schema 4 (Nix 0.11) to schema 5 (Nix >= 0.12). The
- old schema uses Berkeley DB, the new one stores store path
- meta-information in files. */
-void LocalStore::upgradeStore12()
+/* Upgrade from schema 5 (Nix 0.12) to schema 6 (Nix >= 0.15). */
+void LocalStore::upgradeStore6()
{
- throw Error(
- "Your Nix store has a database in Berkeley DB format,\n"
- "which is no longer supported. To convert to the new format,\n"
- "please upgrade Nix to version 0.12 first.");
+ printMsg(lvlError, "upgrading Nix store to new schema (this may take a while)...");
+
+ openDB(true);
+
+ PathSet validPaths = queryValidPathsOld();
+
+ SQLiteTxn txn(db);
+
+ foreach (PathSet::iterator, i, validPaths) {
+ addValidPath(queryPathInfoOld(*i));
+ std::cerr << ".";
+ }
+
+ std::cerr << "|";
+
+ foreach (PathSet::iterator, i, validPaths) {
+ ValidPathInfo info = queryPathInfoOld(*i);
+ unsigned long long referrer = queryValidPathId(*i);
+ foreach (PathSet::iterator, j, info.references)
+ addReference(referrer, queryValidPathId(*j));
+ std::cerr << ".";
+ }
+
+ std::cerr << "\n";
+
+ txn.commit();
}
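[Illustrative note, not part of the diff] The RAII helpers introduced above carry most of the error handling: SQLiteTxn begins a transaction and rolls it back in its destructor unless commit() is called, and SQLiteStmtUse resets a prepared statement on entry and again on scope exit, so an exception cannot leave a half-stepped statement holding locks. The standalone sketch below (raw sqlite3 calls, hypothetical helper names) mirrors the begin/bind/step/commit-or-rollback pattern that the new registerValidPath() follows:

#include <sqlite3.h>
#include <stdexcept>
#include <string>

static void exec(sqlite3 * db, const std::string & sql)
{
    if (sqlite3_exec(db, sql.c_str(), 0, 0, 0) != SQLITE_OK)
        throw std::runtime_error(sqlite3_errmsg(db));
}

/* Hypothetical caller: register one valid path inside a transaction,
   following the same steps as LocalStore::registerValidPath() above. */
void registerOne(sqlite3 * db, sqlite3_stmt * stmtRegisterValidPath,
    const std::string & path, const std::string & hash, int regTime)
{
    exec(db, "begin;");                              /* SQLiteTxn constructor */
    try {
        sqlite3_reset(stmtRegisterValidPath);        /* SQLiteStmtUse constructor */
        sqlite3_bind_text(stmtRegisterValidPath, 1, path.c_str(), -1, SQLITE_TRANSIENT);
        sqlite3_bind_text(stmtRegisterValidPath, 2, hash.c_str(), -1, SQLITE_TRANSIENT);
        sqlite3_bind_int(stmtRegisterValidPath, 3, regTime);
        sqlite3_bind_null(stmtRegisterValidPath, 4); /* no deriver */
        if (sqlite3_step(stmtRegisterValidPath) != SQLITE_DONE)
            throw std::runtime_error(sqlite3_errmsg(db));
        exec(db, "commit;");                         /* SQLiteTxn::commit() */
    } catch (...) {
        exec(db, "rollback;");                       /* SQLiteTxn destructor on unwind */
        throw;
    }
}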
diff --git a/src/libstore/local-store.hh b/src/libstore/local-store.hh
index 31f8d9109..0c5f04158 100644
--- a/src/libstore/local-store.hh
+++ b/src/libstore/local-store.hh
@@ -7,13 +7,18 @@
#include "util.hh"
+class sqlite3;
+class sqlite3_stmt;
+
+
namespace nix {
/* Nix store and database schema version. Version 1 (or 0) was Nix <=
0.7. Version 2 was Nix 0.8 and 0.9. Version 3 is Nix 0.10.
- Version 4 is Nix 0.11. Version 5 is Nix 0.12*/
-const int nixSchemaVersion = 5;
+ Version 4 is Nix 0.11. Version 5 is Nix 0.12-0.14. Version 6 is
+ Nix 0.15. */
+const int nixSchemaVersion = 6;
extern string drvsLogDir;
@@ -41,6 +46,33 @@ struct RunningSubstituter
};
+/* Wrapper object to close the SQLite database automatically. */
+struct SQLite
+{
+ sqlite3 * db;
+ SQLite() { db = 0; }
+ ~SQLite();
+ operator sqlite3 * () { return db; }
+};
+
+
+/* Wrapper object to create and destroy SQLite prepared statements. */
+struct SQLiteStmt
+{
+ sqlite3 * db;
+ sqlite3_stmt * stmt;
+ unsigned int curArg;
+ SQLiteStmt() { stmt = 0; }
+ void create(sqlite3 * db, const string & s);
+ void reset();
+ ~SQLiteStmt();
+ operator sqlite3_stmt * () { return stmt; }
+ void bind(const string & value);
+ void bind(int value);
+ void bind();
+};
+
+
class LocalStore : public StoreAPI
{
private:
@@ -71,6 +103,14 @@ public:
void queryReferrers(const Path & path, PathSet & referrers);
Path queryDeriver(const Path & path);
+
+ /* Return all currently valid derivations that have `path' as an
+ output. (Note that the result of `queryDeriver()' is the
+ derivation that was actually used to produce `path', which may
+ not exist anymore.) */
+ PathSet queryValidDerivers(const Path & path);
+
+ PathSet queryDerivationOutputs(const Path & path);
PathSet querySubstitutablePaths();
@@ -151,40 +191,50 @@ private:
/* Lock file used for upgrading. */
AutoCloseFD globalLock;
- /* !!! The cache can grow very big. Maybe it should be pruned
- every once in a while. */
- std::map<Path, ValidPathInfo> pathInfoCache;
+ /* The SQLite database object. */
+ SQLite db;
+
+ /* Some precompiled SQLite statements. */
+ SQLiteStmt stmtRegisterValidPath;
+ SQLiteStmt stmtAddReference;
+ SQLiteStmt stmtQueryPathInfo;
+ SQLiteStmt stmtQueryReferences;
+ SQLiteStmt stmtQueryReferrers;
+ SQLiteStmt stmtInvalidatePath;
+ SQLiteStmt stmtRegisterFailedPath;
+ SQLiteStmt stmtHasPathFailed;
+ SQLiteStmt stmtAddDerivationOutput;
+ SQLiteStmt stmtQueryValidDerivers;
+ SQLiteStmt stmtQueryDerivationOutputs;
- /* Store paths for which the referrers file must be purged. */
- PathSet delayedUpdates;
+ int getSchema();
- /* Whether to do an fsync() after writing Nix metadata. */
- bool doFsync;
+ void openDB(bool create);
- int getSchema();
+ unsigned long long queryValidPathId(const Path & path);
- void registerValidPath(const ValidPathInfo & info, bool ignoreValidity = false);
+ unsigned long long addValidPath(const ValidPathInfo & info);
+
+ void addReference(unsigned long long referrer, unsigned long long reference);
+
+ void registerValidPath(const ValidPathInfo & info);
- ValidPathInfo queryPathInfo(const Path & path, bool ignoreErrors = false);
+ ValidPathInfo queryPathInfo(const Path & path);
void appendReferrer(const Path & from, const Path & to, bool lock);
void rewriteReferrers(const Path & path, bool purge, PathSet referrers);
- void flushDelayedUpdates();
-
- bool queryReferrersInternal(const Path & path, PathSet & referrers);
-
void invalidatePath(const Path & path);
-
- void upgradeStore12();
+
+ void upgradeStore6();
+ PathSet queryValidPathsOld();
+ ValidPathInfo queryPathInfoOld(const Path & path);
struct GCState;
bool tryToDelete(GCState & state, const Path & path);
- PathSet findDerivers(GCState & state, const Path & path);
-
bool isActiveTempFile(const GCState & state,
const Path & path, const string & suffix);
diff --git a/src/libstore/misc.cc b/src/libstore/misc.cc
index 2d7d13a0e..f79cb11cc 100644
--- a/src/libstore/misc.cc
+++ b/src/libstore/misc.cc
@@ -31,10 +31,10 @@ void computeFSClosure(const Path & storePath,
store->queryReferences(storePath, references);
if (includeOutputs && isDerivation(storePath)) {
- Derivation drv = derivationFromPath(storePath);
- foreach (DerivationOutputs::iterator, i, drv.outputs)
- if (store->isValidPath(i->second.path))
- computeFSClosure(i->second.path, paths, flipDirection, true);
+ PathSet outputs = store->queryDerivationOutputs(storePath);
+ foreach (PathSet::iterator, i, outputs)
+ if (store->isValidPath(*i))
+ computeFSClosure(*i, paths, flipDirection, true);
}
foreach (PathSet::iterator, i, references)
diff --git a/src/libstore/remote-store.cc b/src/libstore/remote-store.cc
index 5143143f5..c5d7975b5 100644
--- a/src/libstore/remote-store.cc
+++ b/src/libstore/remote-store.cc
@@ -214,7 +214,9 @@ bool RemoteStore::isValidPath(const Path & path)
PathSet RemoteStore::queryValidPaths()
{
openConnection();
- throw Error("not implemented");
+ writeInt(wopQueryValidPaths, to);
+ processStderr();
+ return readStorePaths(from);
}
@@ -294,6 +296,16 @@ Path RemoteStore::queryDeriver(const Path & path)
}
+PathSet RemoteStore::queryDerivationOutputs(const Path & path)
+{
+ openConnection();
+ writeInt(wopQueryDerivationOutputs, to);
+ writeString(path, to);
+ processStderr();
+ return readStorePaths(from);
+}
+
+
Path RemoteStore::addToStore(const Path & _srcPath,
bool recursive, HashType hashAlgo, PathFilter & filter)
{
diff --git a/src/libstore/remote-store.hh b/src/libstore/remote-store.hh
index 3d55d23d9..8bab1d8c4 100644
--- a/src/libstore/remote-store.hh
+++ b/src/libstore/remote-store.hh
@@ -37,6 +37,8 @@ public:
Path queryDeriver(const Path & path);
+ PathSet queryDerivationOutputs(const Path & path);
+
bool hasSubstitutes(const Path & path);
bool querySubstitutablePathInfo(const Path & path,
diff --git a/src/libstore/schema.sql b/src/libstore/schema.sql
new file mode 100644
index 000000000..7438632ed
--- /dev/null
+++ b/src/libstore/schema.sql
@@ -0,0 +1,43 @@
+create table if not exists ValidPaths (
+ id integer primary key autoincrement not null,
+ path text unique not null,
+ hash text not null,
+ registrationTime integer not null,
+ deriver text
+);
+
+create table if not exists Refs (
+ referrer integer not null,
+ reference integer not null,
+ primary key (referrer, reference),
+ foreign key (referrer) references ValidPaths(id) on delete cascade,
+ foreign key (reference) references ValidPaths(id) on delete restrict
+);
+
+create index if not exists IndexReferrer on Refs(referrer);
+create index if not exists IndexReference on Refs(reference);
+
+-- Paths can refer to themselves, causing a tuple (N, N) in the Refs
+-- table. This causes a deletion of the corresponding row in
+-- ValidPaths to cause a foreign key constraint violation (due to `on
+-- delete restrict' on the `reference' column). Therefore, explicitly
+-- get rid of self-references.
+create trigger if not exists DeleteSelfRefs before delete on ValidPaths
+ begin
+ delete from Refs where referrer = old.id and reference = old.id;
+ end;
+
+create table if not exists DerivationOutputs (
+ drv integer not null,
+ id text not null, -- symbolic output id, usually "out"
+ path text not null,
+ primary key (drv, id),
+ foreign key (drv) references ValidPaths(id) on delete cascade
+);
+
+create index if not exists IndexDerivationOutputs on DerivationOutputs(path);
+
+create table if not exists FailedPaths (
+ path text primary key not null,
+ time integer not null
+);
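[Illustrative note, not part of the diff] The self-reference comment above can be demonstrated with a small standalone program: it loads a trimmed copy of ValidPaths, Refs and the DeleteSelfRefs trigger into an in-memory database, inserts a path that references itself, and then deletes it. With the trigger the delete succeeds; without it, the (N, N) row in Refs would make the delete fail with SQLITE_CONSTRAINT because of the `on delete restrict' foreign key on the `reference' column.

#include <sqlite3.h>
#include <cstdio>

int main()
{
    sqlite3 * db;
    sqlite3_open(":memory:", &db);
    sqlite3_exec(db, "pragma foreign_keys = 1;", 0, 0, 0);

    /* Trimmed copy of the schema above: the two tables and the trigger. */
    sqlite3_exec(db,
        "create table ValidPaths (id integer primary key autoincrement not null, "
        "  path text unique not null, hash text not null, "
        "  registrationTime integer not null, deriver text); "
        "create table Refs (referrer integer not null, reference integer not null, "
        "  primary key (referrer, reference), "
        "  foreign key (referrer) references ValidPaths(id) on delete cascade, "
        "  foreign key (reference) references ValidPaths(id) on delete restrict); "
        "create trigger DeleteSelfRefs before delete on ValidPaths begin "
        "  delete from Refs where referrer = old.id and reference = old.id; end;",
        0, 0, 0);

    /* A self-referencing path: one ValidPaths row, one (N, N) row in Refs. */
    sqlite3_exec(db,
        "insert into ValidPaths (path, hash, registrationTime) "
        "  values ('/nix/store/aaaa-self', 'sha256:0000', 1); "
        "insert into Refs (referrer, reference) select id, id from ValidPaths;",
        0, 0, 0);

    /* Returns 0 (SQLITE_OK) thanks to the trigger; fails with
       SQLITE_CONSTRAINT if the trigger is dropped. */
    printf("delete returned %d\n",
        sqlite3_exec(db, "delete from ValidPaths;", 0, 0, 0));

    sqlite3_close(db);
    return 0;
}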
diff --git a/src/libstore/store-api.cc b/src/libstore/store-api.cc
index f0abe61ad..01dd51621 100644
--- a/src/libstore/store-api.cc
+++ b/src/libstore/store-api.cc
@@ -1,7 +1,6 @@
#include "store-api.hh"
#include "globals.hh"
#include "util.hh"
-#include "derivations.hh"
#include <limits.h>
@@ -53,18 +52,6 @@ Path toStorePath(const Path & path)
}
-string getNameOfStorePath(const Path & path)
-{
- Path::size_type slash = path.rfind('/');
- string p = slash == Path::npos ? path : string(path, slash + 1);
- Path::size_type dash = p.find('-');
- assert(dash != Path::npos);
- string p2 = string(p, dash + 1);
- if (isDerivation(p2)) p2 = string(p2, 0, p2.size() - 4);
- return p2;
-}
-
-
Path followLinksToStore(const Path & _path)
{
Path path = absPath(_path);
diff --git a/src/libstore/store-api.hh b/src/libstore/store-api.hh
index 8506d47e3..095fdd24b 100644
--- a/src/libstore/store-api.hh
+++ b/src/libstore/store-api.hh
@@ -112,33 +112,18 @@ public:
virtual void queryReferences(const Path & path,
PathSet & references) = 0;
- /* Like queryReferences, but with self-references filtered out. */
- PathSet queryReferencesNoSelf(const Path & path)
- {
- PathSet res;
- queryReferences(path, res);
- res.erase(path);
- return res;
- }
-
/* Queries the set of incoming FS references for a store path.
The result is not cleared. */
virtual void queryReferrers(const Path & path,
PathSet & referrers) = 0;
- /* Like queryReferrers, but with self-references filtered out. */
- PathSet queryReferrersNoSelf(const Path & path)
- {
- PathSet res;
- queryReferrers(path, res);
- res.erase(path);
- return res;
- }
-
/* Query the deriver of a store path. Return the empty string if
no deriver has been set. */
virtual Path queryDeriver(const Path & path) = 0;
+ /* Query the outputs of the derivation denoted by `path'. */
+ virtual PathSet queryDerivationOutputs(const Path & path) = 0;
+
/* Query whether a path has substitutes. */
virtual bool hasSubstitutes(const Path & path) = 0;
@@ -243,12 +228,6 @@ void checkStoreName(const string & name);
Path toStorePath(const Path & path);
-/* Get the "name" part of a store path, that is, the part after the
- hash and the dash, and with any ".drv" suffix removed
- (e.g. /nix/store/<hash>-foo-1.2.3.drv => foo-1.2.3). */
-string getNameOfStorePath(const Path & path);
-
-
/* Follow symlinks until we end up with a path in the Nix store. */
Path followLinksToStore(const Path & path);
@@ -333,6 +312,7 @@ struct ValidPathInfo
Hash hash;
PathSet references;
time_t registrationTime;
+ unsigned long long id; // internal use only
ValidPathInfo() : registrationTime(0) { }
};
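
With the *NoSelf convenience wrappers gone, a caller that wants the references of a path minus the path itself now filters inline, and queryDerivationOutputs returns the outputs recorded in the new DerivationOutputs table. A small caller-side sketch, assuming `store' and `path' are in scope:

// How a former user of queryReferencesNoSelf() can be rewritten
// against the slimmed-down interface.
PathSet refs;
store->queryReferences(path, refs);
refs.erase(path);                        // drop the self-reference, if any

// Outputs of a .drv path, as recorded in the DerivationOutputs table.
PathSet outputs = store->queryDerivationOutputs(path);
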
diff --git a/src/libstore/worker-protocol.hh b/src/libstore/worker-protocol.hh
index e44c1e36b..c3096010b 100644
--- a/src/libstore/worker-protocol.hh
+++ b/src/libstore/worker-protocol.hh
@@ -34,6 +34,8 @@ typedef enum {
wopSetOptions = 19,
wopCollectGarbage = 20,
wopQuerySubstitutablePathInfo = 21,
+ wopQueryDerivationOutputs = 22,
+ wopQueryValidPaths = 23,
} WorkerOp;
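
The two new opcodes need a client-side counterpart in RemoteStore (its changes are not shown in this section). A plausible sketch of the derivation-outputs call, assuming the writeInt/writeString/processStderr/readStringSet helpers that RemoteStore uses for the other worker ops:

// Plausible client side of wopQueryDerivationOutputs; the helper
// names are assumptions based on the rest of the protocol code.
PathSet RemoteStore::queryDerivationOutputs(const Path & path)
{
    writeInt(wopQueryDerivationOutputs, to);   // opcode 22
    writeString(path, to);
    processStderr();
    return readStringSet(from);                // matches writeStringSet() in the daemon
}
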
diff --git a/src/libutil/Makefile.am b/src/libutil/Makefile.am
index bd0996543..f17236d29 100644
--- a/src/libutil/Makefile.am
+++ b/src/libutil/Makefile.am
@@ -3,7 +3,7 @@ pkglib_LTLIBRARIES = libutil.la
libutil_la_SOURCES = util.cc hash.cc serialise.cc \
archive.cc aterm.cc aterm-map.cc xml-writer.cc
-libutil_la_LIBADD = ../boost/format/libformat.la
+libutil_la_LIBADD = ../boost/format/libformat.la ${aterm_lib} ${sqlite_lib}
pkginclude_HEADERS = util.hh hash.hh serialise.hh \
archive.hh aterm.hh aterm-map.hh xml-writer.hh types.hh
diff --git a/src/libutil/aterm-map.hh b/src/libutil/aterm-map.hh
index b732453a7..bcfefd9ee 100644
--- a/src/libutil/aterm-map.hh
+++ b/src/libutil/aterm-map.hh
@@ -1,7 +1,8 @@
#ifndef __ATERM_MAP_H
#define __ATERM_MAP_H
-#include <aterm1.h>
+typedef union _ATerm * ATerm;
+
#include <assert.h>
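
Forward-declaring ATerm as an opaque pointer keeps <aterm1.h> out of this widely included header; only the translation units that actually call ATerm functions pull in the real headers (cf. the `#include "aterm.hh"' additions elsewhere in this patch). The same technique in a hypothetical header of our own:

/* Hypothetical header: only the pointer type is needed, so the heavy
   <aterm1.h> include is deferred to the .cc files that call ATerm
   functions. */
typedef union _ATerm * ATerm;   /* opaque handle; layout not required here */

struct Binding {
    ATerm key;                  /* storing a pointer needs no definition */
    ATerm value;
};
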
diff --git a/src/libutil/hash.cc b/src/libutil/hash.cc
index eef01fe4d..bd7e33a48 100644
--- a/src/libutil/hash.cc
+++ b/src/libutil/hash.cc
@@ -289,6 +289,13 @@ HashSink::HashSink(HashType ht) : ht(ht)
start(ht, *ctx);
}
+HashSink::HashSink(const HashSink & h)
+{
+ ht = h.ht;
+ ctx = new Ctx;
+ *ctx = *h.ctx;
+}
+
HashSink::~HashSink()
{
delete ctx;
diff --git a/src/libutil/hash.hh b/src/libutil/hash.hh
index 062d97254..81425b234 100644
--- a/src/libutil/hash.hh
+++ b/src/libutil/hash.hh
@@ -96,6 +96,7 @@ private:
public:
HashSink(HashType ht);
+ HashSink(const HashSink & h);
~HashSink();
virtual void operator () (const unsigned char * data, unsigned int len);
Hash finish();
@@ -104,5 +105,5 @@ public:
}
-
+
#endif /* !__HASH_H */
diff --git a/src/libutil/util.cc b/src/libutil/util.cc
index eec60867d..3f76be670 100644
--- a/src/libutil/util.cc
+++ b/src/libutil/util.cc
@@ -227,13 +227,12 @@ string readFile(const Path & path)
}
-void writeFile(const Path & path, const string & s, bool doFsync)
+void writeFile(const Path & path, const string & s)
{
AutoCloseFD fd = open(path.c_str(), O_WRONLY | O_TRUNC | O_CREAT, 0666);
if (fd == -1)
throw SysError(format("opening file `%1%'") % path);
writeFull(fd, (unsigned char *) s.c_str(), s.size());
- if (doFsync) fsync(fd);
}
diff --git a/src/libutil/util.hh b/src/libutil/util.hh
index f609e8944..aa40c6177 100644
--- a/src/libutil/util.hh
+++ b/src/libutil/util.hh
@@ -60,7 +60,7 @@ string readFile(int fd);
string readFile(const Path & path);
/* Write a string to a file. */
-void writeFile(const Path & path, const string & s, bool doFsync = false);
+void writeFile(const Path & path, const string & s);
/* Read a line from a file descriptor. */
string readLine(int fd);
diff --git a/src/nix-env/Makefile.am b/src/nix-env/Makefile.am
index 900524f76..53fea3d9d 100644
--- a/src/nix-env/Makefile.am
+++ b/src/nix-env/Makefile.am
@@ -3,7 +3,7 @@ bin_PROGRAMS = nix-env
nix_env_SOURCES = nix-env.cc profiles.cc profiles.hh help.txt
nix_env_LDADD = ../libmain/libmain.la ../libexpr/libexpr.la \
../libstore/libstore.la ../libutil/libutil.la \
- ../boost/format/libformat.la ${aterm_lib} @ADDITIONAL_NETWORK_LIBS@
+ ../boost/format/libformat.la
nix-env.o: help.txt.hh
@@ -11,6 +11,7 @@ nix-env.o: help.txt.hh
../bin2c/bin2c helpText < $< > $@ || (rm $@ && exit 1)
AM_CXXFLAGS = \
- -I$(srcdir)/.. ${aterm_include} \
+ ${aterm_include} \
+ -I$(srcdir)/.. \
-I$(srcdir)/../libutil -I$(srcdir)/../libstore \
-I$(srcdir)/../libexpr -I$(srcdir)/../libmain -I../libexpr
diff --git a/src/nix-env/nix-env.cc b/src/nix-env/nix-env.cc
index 35caf687b..d3f74c7ed 100644
--- a/src/nix-env/nix-env.cc
+++ b/src/nix-env/nix-env.cc
@@ -14,6 +14,7 @@
#include "xml-writer.hh"
#include "store-api.hh"
#include "util.hh"
+#include "aterm.hh"
#include <cerrno>
#include <ctime>
@@ -70,7 +71,7 @@ typedef void (* Operation) (Globals & globals,
void printHelp()
{
- cout << string((char *) helpText, sizeof helpText);
+ cout << string((char *) helpText);
}
diff --git a/src/nix-hash/Makefile.am b/src/nix-hash/Makefile.am
index 350aa8ebd..a4fdb3246 100644
--- a/src/nix-hash/Makefile.am
+++ b/src/nix-hash/Makefile.am
@@ -2,7 +2,7 @@ bin_PROGRAMS = nix-hash
nix_hash_SOURCES = nix-hash.cc help.txt
nix_hash_LDADD = ../libmain/libmain.la ../libstore/libstore.la ../libutil/libutil.la \
- ../boost/format/libformat.la ${aterm_lib} @ADDITIONAL_NETWORK_LIBS@
+ ../boost/format/libformat.la
nix-hash.o: help.txt.hh
diff --git a/src/nix-hash/nix-hash.cc b/src/nix-hash/nix-hash.cc
index 1282af070..a70f59e0c 100644
--- a/src/nix-hash/nix-hash.cc
+++ b/src/nix-hash/nix-hash.cc
@@ -10,7 +10,7 @@ using namespace nix;
void printHelp()
{
- std::cout << string((char *) helpText, sizeof helpText);
+ std::cout << string((char *) helpText);
}
diff --git a/src/nix-instantiate/Makefile.am b/src/nix-instantiate/Makefile.am
index 3f6671719..1c6d30873 100644
--- a/src/nix-instantiate/Makefile.am
+++ b/src/nix-instantiate/Makefile.am
@@ -3,7 +3,7 @@ bin_PROGRAMS = nix-instantiate
nix_instantiate_SOURCES = nix-instantiate.cc help.txt
nix_instantiate_LDADD = ../libmain/libmain.la ../libexpr/libexpr.la \
../libstore/libstore.la ../libutil/libutil.la \
- ../boost/format/libformat.la ${aterm_lib} @ADDITIONAL_NETWORK_LIBS@
+ ../boost/format/libformat.la
nix-instantiate.o: help.txt.hh
diff --git a/src/nix-instantiate/nix-instantiate.cc b/src/nix-instantiate/nix-instantiate.cc
index 3822de5c6..d8b39eca2 100644
--- a/src/nix-instantiate/nix-instantiate.cc
+++ b/src/nix-instantiate/nix-instantiate.cc
@@ -11,6 +11,7 @@
#include "util.hh"
#include "store-api.hh"
#include "common-opts.hh"
+#include "aterm.hh"
#include "help.txt.hh"
@@ -19,7 +20,7 @@ using namespace nix;
void printHelp()
{
- std::cout << string((char *) helpText, sizeof helpText);
+ std::cout << string((char *) helpText);
}
diff --git a/src/nix-log2xml/log2xml.cc b/src/nix-log2xml/log2xml.cc
index b2a25eefa..6645dc500 100644
--- a/src/nix-log2xml/log2xml.cc
+++ b/src/nix-log2xml/log2xml.cc
@@ -18,6 +18,8 @@ struct Decoder
int priority;
bool ignoreLF;
int lineNo, charNo;
+ bool warning;
+ bool error;
Decoder()
{
@@ -29,6 +31,8 @@ struct Decoder
ignoreLF = false;
lineNo = 1;
charNo = 0;
+ warning = false;
+ error = false;
}
void pushChar(char c);
@@ -95,6 +99,12 @@ void Decoder::pushChar(char c)
case 'b':
ignoreLF = false;
break;
+ case 'e':
+ error = true;
+ break;
+ case 'w':
+ warning = true;
+ break;
}
} else if (c >= '0' && c <= '9') {
int n = 0;
@@ -118,6 +128,8 @@ void Decoder::finishLine()
string tag = inHeader ? "head" : "line";
cout << "<" << tag;
if (priority != 1) cout << " priority='" << priority << "'";
+ if (warning) cout << " warning='1'";
+ if (error) cout << " error='1'";
cout << ">";
for (unsigned int i = 0; i < line.size(); i++) {
@@ -158,6 +170,8 @@ void Decoder::finishLine()
line = "";
inHeader = false;
priority = 1;
+ warning = false;
+ error = false;
}
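
With the new flags, a log line whose escape stream contained the `e' (or `w') code is emitted with a matching attribute, so a failing build step comes out roughly like the hypothetical fragment below instead of a bare <line>, letting the log viewer highlight warnings and errors:

<head priority='3'>building /nix/store/...-foo</head>
<line error='1'>make: *** [all] Error 1</line>
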
diff --git a/src/nix-setuid-helper/Makefile.am b/src/nix-setuid-helper/Makefile.am
index a0fbdf39d..a04701636 100644
--- a/src/nix-setuid-helper/Makefile.am
+++ b/src/nix-setuid-helper/Makefile.am
@@ -2,7 +2,6 @@ libexec_PROGRAMS = nix-setuid-helper
nix_setuid_helper_SOURCES = nix-setuid-helper.cc
nix_setuid_helper_LDADD = ../libutil/libutil.la \
- ../boost/format/libformat.la ${aterm_lib}
+ ../boost/format/libformat.la
-AM_CXXFLAGS = \
- -I$(srcdir)/.. $(aterm_include) -I$(srcdir)/../libutil
+AM_CXXFLAGS = -I$(srcdir)/.. -I$(srcdir)/../libutil
diff --git a/src/nix-store/Makefile.am b/src/nix-store/Makefile.am
index ca0fec570..e0ba809dc 100644
--- a/src/nix-store/Makefile.am
+++ b/src/nix-store/Makefile.am
@@ -2,7 +2,7 @@ bin_PROGRAMS = nix-store
nix_store_SOURCES = nix-store.cc dotgraph.cc dotgraph.hh help.txt
nix_store_LDADD = ../libmain/libmain.la ../libstore/libstore.la ../libutil/libutil.la \
- ../boost/format/libformat.la ${aterm_lib} @ADDITIONAL_NETWORK_LIBS@
+ ../boost/format/libformat.la
nix-store.o: help.txt.hh
@@ -10,5 +10,5 @@ nix-store.o: help.txt.hh
../bin2c/bin2c helpText < $< > $@ || (rm $@ && exit 1)
AM_CXXFLAGS = \
- -I$(srcdir)/.. $(aterm_include) -I$(srcdir)/../libutil \
+ -I$(srcdir)/.. -I$(srcdir)/../libutil \
-I$(srcdir)/../libstore -I$(srcdir)/../libmain
diff --git a/src/nix-store/nix-store.cc b/src/nix-store/nix-store.cc
index 22effc65d..ddf2062c2 100644
--- a/src/nix-store/nix-store.cc
+++ b/src/nix-store/nix-store.cc
@@ -21,7 +21,7 @@ typedef void (* Operation) (Strings opFlags, Strings opArgs);
void printHelp()
{
- cout << string((char *) helpText, sizeof helpText);
+ cout << string((char *) helpText);
}
diff --git a/src/nix-worker/Makefile.am b/src/nix-worker/Makefile.am
index d1163ce37..b6094a2a0 100644
--- a/src/nix-worker/Makefile.am
+++ b/src/nix-worker/Makefile.am
@@ -2,7 +2,7 @@ bin_PROGRAMS = nix-worker
nix_worker_SOURCES = nix-worker.cc help.txt
nix_worker_LDADD = ../libmain/libmain.la ../libstore/libstore.la ../libutil/libutil.la \
- ../boost/format/libformat.la ${aterm_lib} @ADDITIONAL_NETWORK_LIBS@
+ ../boost/format/libformat.la
nix-worker.o: help.txt.hh
@@ -10,5 +10,5 @@ nix-worker.o: help.txt.hh
../bin2c/bin2c helpText < $< > $@ || (rm $@ && exit 1)
AM_CXXFLAGS = \
- -I$(srcdir)/.. $(aterm_include) -I$(srcdir)/../libutil \
+ -I$(srcdir)/.. -I$(srcdir)/../libutil \
-I$(srcdir)/../libstore -I$(srcdir)/../libmain
diff --git a/src/nix-worker/nix-worker.cc b/src/nix-worker/nix-worker.cc
index bd2209c6d..a41fb2e15 100644
--- a/src/nix-worker/nix-worker.cc
+++ b/src/nix-worker/nix-worker.cc
@@ -313,14 +313,16 @@ static void performOp(unsigned int clientVersion,
}
case wopQueryReferences:
- case wopQueryReferrers: {
+ case wopQueryReferrers:
+ case wopQueryDerivationOutputs: {
Path path = readStorePath(from);
startWork();
PathSet paths;
if (op == wopQueryReferences)
store->queryReferences(path, paths);
- else
+ else if (op == wopQueryReferrers)
store->queryReferrers(path, paths);
+ else paths = store->queryDerivationOutputs(path);
stopWork();
writeStringSet(paths, to);
break;
@@ -518,6 +520,14 @@ static void performOp(unsigned int clientVersion,
break;
}
+ case wopQueryValidPaths: {
+ startWork();
+ PathSet paths = store->queryValidPaths();
+ stopWork();
+ writeStringSet(paths, to);
+ break;
+ }
+
default:
throw Error(format("invalid operation %1%") % op);
}
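
wopQueryValidPaths carries no arguments, so its client side is just the opcode followed by reading the set back. A plausible RemoteStore counterpart of the daemon case above, under the same helper-name assumptions as the earlier sketch:

PathSet RemoteStore::queryValidPaths()
{
    writeInt(wopQueryValidPaths, to);   // opcode 23, no arguments
    processStderr();
    return readStringSet(from);
}
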
@@ -765,7 +775,7 @@ void run(Strings args)
void printHelp()
{
- std::cout << string((char *) helpText, sizeof helpText);
+ std::cout << string((char *) helpText);
}