author     John Ericson <John.Ericson@Obsidian.Systems>  2020-09-04 02:40:36 +0000
committer  John Ericson <John.Ericson@Obsidian.Systems>  2020-09-04 02:40:36 +0000
commit     25f7ff16fa47a6143a129fb56c9a4ecdb2259a8d (patch)
tree       03764f681fddc94e532dc7a74608f37a0a698cd2 /src
parent     e12bcabdcbddc228d7af157bb3c2090e324c59a7 (diff)
parent     8a945d6ddb0676b454458e6fe0e9ea6f8b4b5659 (diff)
Merge remote-tracking branch 'upstream/master' into fix-and-ci-static-builds
Diffstat (limited to 'src')
-rw-r--r--  src/build-remote/build-remote.cc | 18
-rw-r--r--  src/libexpr/common-eval-args.cc | 2
-rw-r--r--  src/libexpr/eval-cache.cc | 20
-rw-r--r--  src/libexpr/eval-cache.hh | 6
-rw-r--r--  src/libexpr/eval.cc | 86
-rw-r--r--  src/libexpr/eval.hh | 71
-rw-r--r--  src/libexpr/primops.cc | 1478
-rw-r--r--  src/libexpr/primops.hh | 8
-rw-r--r--  src/libexpr/primops/fetchTree.cc | 177
-rw-r--r--  src/libfetchers/fetchers.cc | 4
-rw-r--r--  src/libfetchers/git.cc | 4
-rw-r--r--  src/libfetchers/github.cc | 14
-rw-r--r--  src/libfetchers/registry.cc | 2
-rw-r--r--  src/libfetchers/tarball.cc | 6
-rw-r--r--  src/libmain/common-args.cc | 6
-rw-r--r--  src/libmain/progress-bar.cc | 2
-rw-r--r--  src/libmain/shared.cc | 2
-rw-r--r--  src/libstore/binary-cache-store.cc | 26
-rw-r--r--  src/libstore/build.cc | 113
-rw-r--r--  src/libstore/builtins/fetchurl.cc | 14
-rw-r--r--  src/libstore/daemon.cc | 70
-rw-r--r--  src/libstore/daemon.hh | 7
-rw-r--r--  src/libstore/derivations.cc | 133
-rw-r--r--  src/libstore/derivations.hh | 25
-rw-r--r--  src/libstore/dummy-store.cc | 59
-rw-r--r--  src/libstore/export-import.cc | 15
-rw-r--r--  src/libstore/filetransfer.hh | 29
-rw-r--r--  src/libstore/gc.cc | 50
-rw-r--r--  src/libstore/globals.cc | 7
-rw-r--r--  src/libstore/globals.hh | 786
-rw-r--r--  src/libstore/http-binary-cache-store.cc | 11
-rw-r--r--  src/libstore/legacy-ssh-store.cc | 74
-rw-r--r--  src/libstore/local-store.cc | 60
-rw-r--r--  src/libstore/local-store.hh | 5
-rw-r--r--  src/libstore/machines.cc | 24
-rw-r--r--  src/libstore/machines.hh | 4
-rw-r--r--  src/libstore/misc.cc | 6
-rw-r--r--  src/libstore/nar-accessor.cc | 3
-rw-r--r--  src/libstore/nar-info-disk-cache.cc | 7
-rw-r--r--  src/libstore/nar-info.cc | 14
-rw-r--r--  src/libstore/nar-info.hh | 6
-rw-r--r--  src/libstore/parsed-derivations.cc | 8
-rw-r--r--  src/libstore/parsed-derivations.hh | 4
-rw-r--r--  src/libstore/path-info.hh | 7
-rw-r--r--  src/libstore/profiles.cc | 6
-rw-r--r--  src/libstore/profiles.hh | 4
-rw-r--r--  src/libstore/remote-store.cc | 67
-rw-r--r--  src/libstore/remote-store.hh | 4
-rw-r--r--  src/libstore/store-api.cc | 82
-rw-r--r--  src/libstore/store-api.hh | 22
-rw-r--r--  src/libstore/worker-protocol.hh | 80
-rw-r--r--  src/libutil/archive.cc | 6
-rw-r--r--  src/libutil/args.cc | 75
-rw-r--r--  src/libutil/args.hh | 12
-rw-r--r--  src/libutil/config.cc | 44
-rw-r--r--  src/libutil/config.hh | 14
-rw-r--r--  src/libutil/hash.cc | 2
-rw-r--r--  src/libutil/hash.hh | 5
-rw-r--r--  src/libutil/logging.cc | 27
-rw-r--r--  src/libutil/logging.hh | 10
-rw-r--r--  src/libutil/serialise.hh | 17
-rw-r--r--  src/libutil/tests/config.cc | 33
-rw-r--r--  src/libutil/tests/logging.cc | 18
-rw-r--r--  src/libutil/util.cc | 41
-rw-r--r--  src/libutil/util.hh | 6
-rwxr-xr-x  src/nix-build/nix-build.cc | 2
-rw-r--r--  src/nix-daemon/nix-daemon.cc | 15
-rw-r--r--  src/nix-env/nix-env.cc | 4
-rw-r--r--  src/nix-env/user-env.cc | 3
-rw-r--r--  src/nix-instantiate/nix-instantiate.cc | 7
-rw-r--r--  src/nix-store/nix-store.cc | 36
-rw-r--r--  src/nix/add-to-store.cc | 14
-rw-r--r--  src/nix/app.cc | 2
-rw-r--r--  src/nix/build.cc | 4
-rw-r--r--  src/nix/bundle.cc | 2
-rw-r--r--  src/nix/command.cc | 15
-rw-r--r--  src/nix/command.hh | 7
-rw-r--r--  src/nix/develop.cc | 141
-rw-r--r--  src/nix/flake.cc | 18
-rw-r--r--  src/nix/get-env.sh | 17
-rw-r--r--  src/nix/installables.cc | 32
-rw-r--r--  src/nix/installables.hh | 9
-rw-r--r--  src/nix/local.mk | 2
-rw-r--r--  src/nix/main.cc | 30
-rw-r--r--  src/nix/make-content-addressable.cc | 8
-rw-r--r--  src/nix/markdown.cc | 50
-rw-r--r--  src/nix/markdown.hh | 7
-rw-r--r--  src/nix/profile.cc | 15
-rw-r--r--  src/nix/registry.cc | 7
-rw-r--r--  src/nix/repl.cc | 61
-rw-r--r--  src/nix/search.cc | 2
-rw-r--r--  src/nix/show-config.cc | 6
-rw-r--r--  src/nix/show-derivation.cc | 6
-rw-r--r--  src/nix/verify.cc | 8
94 files changed, 3578 insertions, 1000 deletions
diff --git a/src/build-remote/build-remote.cc b/src/build-remote/build-remote.cc
index 5247cefa6..ce5127113 100644
--- a/src/build-remote/build-remote.cc
+++ b/src/build-remote/build-remote.cc
@@ -38,9 +38,9 @@ static AutoCloseFD openSlotLock(const Machine & m, uint64_t slot)
return openLockFile(fmt("%s/%s-%d", currentLoad, escapeUri(m.storeUri), slot), true);
}
-static bool allSupportedLocally(const std::set<std::string>& requiredFeatures) {
+static bool allSupportedLocally(Store & store, const std::set<std::string>& requiredFeatures) {
for (auto & feature : requiredFeatures)
- if (!settings.systemFeatures.get().count(feature)) return false;
+ if (!store.systemFeatures.get().count(feature)) return false;
return true;
}
@@ -106,7 +106,7 @@ static int _main(int argc, char * * argv)
auto canBuildLocally = amWilling
&& ( neededSystem == settings.thisSystem
|| settings.extraPlatforms.get().count(neededSystem) > 0)
- && allSupportedLocally(requiredFeatures);
+ && allSupportedLocally(*store, requiredFeatures);
/* Error ignored here, will be caught later */
mkdir(currentLoad.c_str(), 0777);
@@ -201,7 +201,7 @@ static int _main(int argc, char * * argv)
% concatStringsSep<StringSet>(", ", m.mandatoryFeatures);
}
- logError({
+ logErrorInfo(lvlInfo, {
.name = "Remote build",
.description = "Failed to find a machine for remote build!",
.hint = hint
@@ -224,15 +224,7 @@ static int _main(int argc, char * * argv)
Activity act(*logger, lvlTalkative, actUnknown, fmt("connecting to '%s'", bestMachine->storeUri));
- Store::Params storeParams;
- if (hasPrefix(bestMachine->storeUri, "ssh://")) {
- storeParams["max-connections"] = "1";
- storeParams["log-fd"] = "4";
- if (bestMachine->sshKey != "")
- storeParams["ssh-key"] = bestMachine->sshKey;
- }
-
- sshStore = openStore(bestMachine->storeUri, storeParams);
+ sshStore = bestMachine->openStore();
sshStore->connect();
storeUri = bestMachine->storeUri;
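The removed hunk above built `Store::Params` for ssh machines by hand; that setup presumably now lives behind `Machine::openStore()`. A minimal sketch of what such a helper might look like, reconstructed only from the parameters the deleted code set (the exact signature and member layout are assumptions, not the actual implementation in machines.cc):

```cpp
// Sketch only -- not the real Machine::openStore(). It reproduces what the
// removed build-remote code did by hand: for ssh:// stores, limit to one
// connection, send build logs on fd 4, and pass the machine's SSH key.
ref<Store> Machine::openStore() const
{
    Store::Params storeParams;
    if (hasPrefix(storeUri, "ssh://")) {
        storeParams["max-connections"] = "1";
        storeParams["log-fd"] = "4";
        if (sshKey != "")
            storeParams["ssh-key"] = sshKey;
    }
    return nix::openStore(storeUri, storeParams);
}
```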
diff --git a/src/libexpr/common-eval-args.cc b/src/libexpr/common-eval-args.cc
index 6b48ead1f..10c1a6975 100644
--- a/src/libexpr/common-eval-args.cc
+++ b/src/libexpr/common-eval-args.cc
@@ -29,7 +29,7 @@ MixEvalArgs::MixEvalArgs()
addFlag({
.longName = "include",
.shortName = 'I',
- .description = "add a path to the list of locations used to look up <...> file names",
+ .description = "add a path to the list of locations used to look up `<...>` file names",
.labels = {"path"},
.handler = {[&](std::string s) { searchPath.push_back(s); }}
});
diff --git a/src/libexpr/eval-cache.cc b/src/libexpr/eval-cache.cc
index deb32484f..381344b40 100644
--- a/src/libexpr/eval-cache.cc
+++ b/src/libexpr/eval-cache.cc
@@ -391,7 +391,8 @@ Value & AttrCursor::forceValue()
if (root->db && (!cachedValue || std::get_if<placeholder_t>(&cachedValue->second))) {
if (v.type == tString)
- cachedValue = {root->db->setString(getKey(), v.string.s, v.string.context), v.string.s};
+ cachedValue = {root->db->setString(getKey(), v.string.s, v.string.context),
+ string_t{v.string.s, {}}};
else if (v.type == tPath)
cachedValue = {root->db->setString(getKey(), v.path), v.path};
else if (v.type == tBool)
@@ -405,7 +406,7 @@ Value & AttrCursor::forceValue()
return v;
}
-std::shared_ptr<AttrCursor> AttrCursor::maybeGetAttr(Symbol name)
+std::shared_ptr<AttrCursor> AttrCursor::maybeGetAttr(Symbol name, bool forceErrors)
{
if (root->db) {
if (!cachedValue)
@@ -422,9 +423,12 @@ std::shared_ptr<AttrCursor> AttrCursor::maybeGetAttr(Symbol name)
if (attr) {
if (std::get_if<missing_t>(&attr->second))
return nullptr;
- else if (std::get_if<failed_t>(&attr->second))
- throw EvalError("cached failure of attribute '%s'", getAttrPathStr(name));
- else
+ else if (std::get_if<failed_t>(&attr->second)) {
+ if (forceErrors)
+ debug("reevaluating failed cached attribute '%s'");
+ else
+ throw CachedEvalError("cached failure of attribute '%s'", getAttrPathStr(name));
+ } else
return std::make_shared<AttrCursor>(root,
std::make_pair(shared_from_this(), name), nullptr, std::move(attr));
}
@@ -469,9 +473,9 @@ std::shared_ptr<AttrCursor> AttrCursor::maybeGetAttr(std::string_view name)
return maybeGetAttr(root->state.symbols.create(name));
}
-std::shared_ptr<AttrCursor> AttrCursor::getAttr(Symbol name)
+std::shared_ptr<AttrCursor> AttrCursor::getAttr(Symbol name, bool forceErrors)
{
- auto p = maybeGetAttr(name);
+ auto p = maybeGetAttr(name, forceErrors);
if (!p)
throw Error("attribute '%s' does not exist", getAttrPathStr(name));
return p;
@@ -600,7 +604,7 @@ bool AttrCursor::isDerivation()
StorePath AttrCursor::forceDerivation()
{
- auto aDrvPath = getAttr(root->state.sDrvPath);
+ auto aDrvPath = getAttr(root->state.sDrvPath, true);
auto drvPath = root->state.store->parseStorePath(aDrvPath->getString());
if (!root->state.store->isValidPath(drvPath) && !settings.readOnlyMode) {
/* The eval cache contains 'drvPath', but the actual path has
diff --git a/src/libexpr/eval-cache.hh b/src/libexpr/eval-cache.hh
index afee85fa9..8ffffc0ed 100644
--- a/src/libexpr/eval-cache.hh
+++ b/src/libexpr/eval-cache.hh
@@ -9,6 +9,8 @@
namespace nix::eval_cache {
+MakeError(CachedEvalError, EvalError);
+
class AttrDb;
class AttrCursor;
@@ -92,11 +94,11 @@ public:
std::string getAttrPathStr(Symbol name) const;
- std::shared_ptr<AttrCursor> maybeGetAttr(Symbol name);
+ std::shared_ptr<AttrCursor> maybeGetAttr(Symbol name, bool forceErrors = false);
std::shared_ptr<AttrCursor> maybeGetAttr(std::string_view name);
- std::shared_ptr<AttrCursor> getAttr(Symbol name);
+ std::shared_ptr<AttrCursor> getAttr(Symbol name, bool forceErrors = false);
std::shared_ptr<AttrCursor> getAttr(std::string_view name);
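The new `forceErrors` flag pairs with `CachedEvalError`: a cached failure normally throws without re-running the evaluator, while passing `forceErrors = true` (as `forceDerivation()` now does) re-evaluates so the real error surfaces. A hedged sketch of that caller pattern, with hypothetical `state`/`cursor` names:

```cpp
#include "eval-cache.hh"

using namespace nix;
using namespace nix::eval_cache;

// Illustration only: retry a cached failure with forceErrors = true so the
// underlying evaluation error is reported instead of "cached failure of ...".
static std::string forcedDrvPath(EvalState & state, AttrCursor & cursor)
{
    try {
        // A previously failed attribute throws CachedEvalError straight from
        // the on-disk cache, without re-running the evaluator.
        return cursor.getAttr(state.sDrvPath)->getString();
    } catch (CachedEvalError &) {
        // forceErrors = true ignores the cached failure and re-evaluates.
        return cursor.getAttr(state.sDrvPath, true)->getString();
    }
}
```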
diff --git a/src/libexpr/eval.cc b/src/libexpr/eval.cc
index 0123070d1..8c97b3760 100644
--- a/src/libexpr/eval.cc
+++ b/src/libexpr/eval.cc
@@ -381,10 +381,14 @@ EvalState::EvalState(const Strings & _searchPath, ref<Store> store)
auto path = r.second;
if (store->isInStore(r.second)) {
- StorePathSet closure;
- store->computeFSClosure(store->toStorePath(r.second).first, closure);
- for (auto & path : closure)
- allowedPaths->insert(store->printStorePath(path));
+ try {
+ StorePathSet closure;
+ store->computeFSClosure(store->toStorePath(r.second).first, closure);
+ for (auto & path : closure)
+ allowedPaths->insert(store->printStorePath(path));
+ } catch (InvalidPath &) {
+ allowedPaths->insert(r.second);
+ }
} else
allowedPaths->insert(r.second);
}
@@ -509,7 +513,7 @@ Value * EvalState::addPrimOp(const string & name,
if (arity == 0) {
auto vPrimOp = allocValue();
vPrimOp->type = tPrimOp;
- vPrimOp->primOp = new PrimOp(primOp, 1, sym);
+ vPrimOp->primOp = new PrimOp { .fun = primOp, .arity = 1, .name = sym };
Value v;
mkApp(v, *vPrimOp, *vPrimOp);
return addConstant(name, v);
@@ -517,7 +521,7 @@ Value * EvalState::addPrimOp(const string & name,
Value * v = allocValue();
v->type = tPrimOp;
- v->primOp = new PrimOp(primOp, arity, sym);
+ v->primOp = new PrimOp { .fun = primOp, .arity = arity, .name = sym };
staticBaseEnv.vars[symbols.create(name)] = baseEnvDispl;
baseEnv.values[baseEnvDispl++] = v;
baseEnv.values[0]->attrs->push_back(Attr(sym, v));
@@ -525,12 +529,59 @@ Value * EvalState::addPrimOp(const string & name,
}
+Value * EvalState::addPrimOp(PrimOp && primOp)
+{
+ /* Hack to make constants lazy: turn them into an application of
+ the primop to a dummy value. */
+ if (primOp.arity == 0) {
+ primOp.arity = 1;
+ auto vPrimOp = allocValue();
+ vPrimOp->type = tPrimOp;
+ vPrimOp->primOp = new PrimOp(std::move(primOp));
+ Value v;
+ mkApp(v, *vPrimOp, *vPrimOp);
+ return addConstant(primOp.name, v);
+ }
+
+ Symbol envName = primOp.name;
+ if (hasPrefix(primOp.name, "__"))
+ primOp.name = symbols.create(std::string(primOp.name, 2));
+
+ Value * v = allocValue();
+ v->type = tPrimOp;
+ v->primOp = new PrimOp(std::move(primOp));
+ staticBaseEnv.vars[envName] = baseEnvDispl;
+ baseEnv.values[baseEnvDispl++] = v;
+ baseEnv.values[0]->attrs->push_back(Attr(primOp.name, v));
+ return v;
+}
+
+
Value & EvalState::getBuiltin(const string & name)
{
return *baseEnv.values[0]->attrs->find(symbols.create(name))->value;
}
+std::optional<EvalState::Doc> EvalState::getDoc(Value & v)
+{
+ if (v.type == tPrimOp || v.type == tPrimOpApp) {
+ auto v2 = &v;
+ while (v2->type == tPrimOpApp)
+ v2 = v2->primOpApp.left;
+ if (v2->primOp->doc)
+ return Doc {
+ .pos = noPos,
+ .name = v2->primOp->name,
+ .arity = v2->primOp->arity,
+ .args = v2->primOp->args,
+ .doc = v2->primOp->doc,
+ };
+ }
+ return {};
+}
+
+
/* Every "format" object (even temporary) takes up a few hundred bytes
of stack space, which is a real killer in the recursive
evaluator. So here are some helper functions for throwing
@@ -1299,12 +1350,23 @@ void EvalState::autoCallFunction(Bindings & args, Value & fun, Value & res)
Value * actualArgs = allocValue();
mkAttrs(*actualArgs, fun.lambda.fun->formals->formals.size());
- for (auto & i : fun.lambda.fun->formals->formals) {
- Bindings::iterator j = args.find(i.name);
- if (j != args.end())
- actualArgs->attrs->push_back(*j);
- else if (!i.def)
- throwTypeError("cannot auto-call a function that has an argument without a default value ('%1%')", i.name);
+ if (fun.lambda.fun->formals->ellipsis) {
+ // If the formals have an ellipsis (eg the function accepts extra args) pass
+ // all available automatic arguments (which includes arguments specified on
+ // the command line via --arg/--argstr)
+ for (auto& v : args) {
+ actualArgs->attrs->push_back(v);
+ }
+ } else {
+ // Otherwise, only pass the arguments that the function accepts
+ for (auto & i : fun.lambda.fun->formals->formals) {
+ Bindings::iterator j = args.find(i.name);
+ if (j != args.end()) {
+ actualArgs->attrs->push_back(*j);
+ } else if (!i.def) {
+ throwTypeError("cannot auto-call a function that has an argument without a default value ('%1%')", i.name);
+ }
+ }
}
actualArgs->attrs->sort();
diff --git a/src/libexpr/eval.hh b/src/libexpr/eval.hh
index 0382298b3..80078d8a5 100644
--- a/src/libexpr/eval.hh
+++ b/src/libexpr/eval.hh
@@ -30,8 +30,8 @@ struct PrimOp
PrimOpFun fun;
size_t arity;
Symbol name;
- PrimOp(PrimOpFun fun, size_t arity, Symbol name)
- : fun(fun), arity(arity), name(name) { }
+ std::vector<std::string> args;
+ const char * doc = nullptr;
};
@@ -242,10 +242,23 @@ private:
Value * addPrimOp(const string & name,
size_t arity, PrimOpFun primOp);
+ Value * addPrimOp(PrimOp && primOp);
+
public:
Value & getBuiltin(const string & name);
+ struct Doc
+ {
+ Pos pos;
+ std::optional<Symbol> name;
+ size_t arity;
+ std::vector<std::string> args;
+ const char * doc;
+ };
+
+ std::optional<Doc> getDoc(Value & v);
+
private:
inline Value * lookupVar(Env * env, const ExprVar & var, bool noEval);
@@ -357,24 +370,60 @@ struct EvalSettings : Config
Setting<bool> enableNativeCode{this, false, "allow-unsafe-native-code-during-evaluation",
"Whether builtin functions that allow executing native code should be enabled."};
- Setting<Strings> nixPath{this, getDefaultNixPath(), "nix-path",
- "List of directories to be searched for <...> file references."};
+ Setting<Strings> nixPath{
+ this, getDefaultNixPath(), "nix-path",
+ "List of directories to be searched for `<...>` file references."};
- Setting<bool> restrictEval{this, false, "restrict-eval",
- "Whether to restrict file system access to paths in $NIX_PATH, "
- "and network access to the URI prefixes listed in 'allowed-uris'."};
+ Setting<bool> restrictEval{
+ this, false, "restrict-eval",
+ R"(
+ If set to `true`, the Nix evaluator will not allow access to any
+ files outside of the Nix search path (as set via the `NIX_PATH`
+ environment variable or the `-I` option), or to URIs outside of
+ `allowed-uris`. The default is `false`.
+ )"};
Setting<bool> pureEval{this, false, "pure-eval",
"Whether to restrict file system and network access to files specified by cryptographic hash."};
- Setting<bool> enableImportFromDerivation{this, true, "allow-import-from-derivation",
- "Whether the evaluator allows importing the result of a derivation."};
+ Setting<bool> enableImportFromDerivation{
+ this, true, "allow-import-from-derivation",
+ R"(
+ By default, Nix allows you to `import` from a derivation, allowing
+ building at evaluation time. With this option set to false, Nix will
+ throw an error when evaluating an expression that uses this feature,
+ allowing users to ensure their evaluation will not require any
+ builds to take place.
+ )"};
Setting<Strings> allowedUris{this, {}, "allowed-uris",
- "Prefixes of URIs that builtin functions such as fetchurl and fetchGit are allowed to fetch."};
+ R"(
+ A list of URI prefixes to which access is allowed in restricted
+ evaluation mode. For example, when set to
+ `https://github.com/NixOS`, builtin functions such as `fetchGit` are
+ allowed to access `https://github.com/NixOS/patchelf.git`.
+ )"};
Setting<bool> traceFunctionCalls{this, false, "trace-function-calls",
- "Emit log messages for each function entry and exit at the 'vomit' log level (-vvvv)."};
+ R"(
+ If set to `true`, the Nix evaluator will trace every function call.
+ Nix will print a log message at the "vomit" level for every function
+ entrance and function exit.
+
+ function-trace entered undefined position at 1565795816999559622
+ function-trace exited undefined position at 1565795816999581277
+ function-trace entered /nix/store/.../example.nix:226:41 at 1565795253249935150
+ function-trace exited /nix/store/.../example.nix:226:41 at 1565795253249941684
+
+ The `undefined position` means the function call is a builtin.
+
+ Use the `contrib/stack-collapse.py` script distributed with the Nix
+ source code to convert the trace logs into a format suitable for
+ `flamegraph.pl`.
+ )"};
+
+ Setting<bool> useEvalCache{this, true, "eval-cache",
+ "Whether to use the flake evaluation cache."};
};
extern EvalSettings evalSettings;
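With `PrimOp` gaining `args`/`doc` fields and the new `addPrimOp(PrimOp &&)`, the primops in `primops.cc` below are registered with designated initializers and inline Markdown documentation, which `EvalState::getDoc()` can then surface. A small example of that registration pattern, using a made-up builtin purely for illustration:

```cpp
// Hypothetical builtin, shown only to illustrate the new registration style.
static void prim_greet(EvalState & state, const Pos & pos, Value * * args, Value & v)
{
    auto name = state.forceStringNoCtx(*args[0], pos);
    mkString(v, "hello, " + name);
}

static RegisterPrimOp primop_greet({
    .name = "__greet",
    .args = {"name"},
    .doc = R"(
      Return the string `"hello, "` concatenated with *name*.
      (Not a real builtin; it only demonstrates the `.args`/`.doc` fields.)
    )",
    .fun = prim_greet,
});
```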
diff --git a/src/libexpr/primops.cc b/src/libexpr/primops.cc
index 65d36ca0e..3350d05d4 100644
--- a/src/libexpr/primops.cc
+++ b/src/libexpr/primops.cc
@@ -74,10 +74,10 @@ void EvalState::realiseContext(const PathSet & context)
/* Load and evaluate an expression from path specified by the
argument. */
-static void prim_scopedImport(EvalState & state, const Pos & pos, Value * * args, Value & v)
+static void import(EvalState & state, const Pos & pos, Value & vPath, Value * vScope, Value & v)
{
PathSet context;
- Path path = state.coerceToPath(pos, *args[1], context);
+ Path path = state.coerceToPath(pos, vPath, context);
try {
state.realiseContext(context);
@@ -99,9 +99,10 @@ static void prim_scopedImport(EvalState & state, const Pos & pos, Value * * args
return std::nullopt;
return storePath;
};
+
if (auto optStorePath = isValidDerivationInStore()) {
auto storePath = *optStorePath;
- Derivation drv = readDerivation(*state.store, realPath, Derivation::nameFromPath(storePath));
+ Derivation drv = state.store->readDerivation(storePath);
Value & w = *state.allocValue();
state.mkAttrs(w, 3 + drv.outputs.size());
Value * v2 = state.allocAttr(w, state.sDrvPath);
@@ -113,9 +114,9 @@ static void prim_scopedImport(EvalState & state, const Pos & pos, Value * * args
state.mkList(*outputsVal, drv.outputs.size());
unsigned int outputs_index = 0;
- for (const auto & o : drv.outputs) {
+ for (const auto & o : drv.outputsAndPaths(*state.store)) {
v2 = state.allocAttr(w, state.symbols.create(o.first));
- mkString(*v2, state.store->printStorePath(o.second.path(*state.store, drv.name)), {"!" + o.first + "!" + path});
+ mkString(*v2, state.store->printStorePath(o.second.second), {"!" + o.first + "!" + path});
outputsVal->listElems()[outputs_index] = state.allocValue();
mkString(*(outputsVal->listElems()[outputs_index++]), o.first);
}
@@ -133,17 +134,18 @@ static void prim_scopedImport(EvalState & state, const Pos & pos, Value * * args
mkApp(v, **fun, w);
state.forceAttrs(v, pos);
} else {
- state.forceAttrs(*args[0]);
- if (args[0]->attrs->empty())
+ if (!vScope)
state.evalFile(realPath, v);
else {
- Env * env = &state.allocEnv(args[0]->attrs->size());
+ state.forceAttrs(*vScope);
+
+ Env * env = &state.allocEnv(vScope->attrs->size());
env->up = &state.baseEnv;
StaticEnv staticEnv(false, &state.staticBaseEnv);
unsigned int displ = 0;
- for (auto & attr : *args[0]->attrs) {
+ for (auto & attr : *vScope->attrs) {
staticEnv.vars[attr.name] = displ;
env->values[displ++] = attr.value;
}
@@ -156,6 +158,76 @@ static void prim_scopedImport(EvalState & state, const Pos & pos, Value * * args
}
}
+static RegisterPrimOp primop_scopedImport(RegisterPrimOp::Info {
+ .name = "scopedImport",
+ .arity = 2,
+ .fun = [](EvalState & state, const Pos & pos, Value * * args, Value & v)
+ {
+ import(state, pos, *args[1], args[0], v);
+ }
+});
+
+static RegisterPrimOp primop_import({
+ .name = "import",
+ .args = {"path"},
+ .doc = R"(
+ Load, parse and return the Nix expression in the file *path*. If
+ *path* is a directory, the file ` default.nix ` in that directory
+ is loaded. Evaluation aborts if the file doesn’t exist or contains
+ an incorrect Nix expression. `import` implements Nix’s module
+ system: you can put any Nix expression (such as a set or a
+ function) in a separate file, and use it from Nix expressions in
+ other files.
+
+ > **Note**
+ >
+ > Unlike some languages, `import` is a regular function in Nix.
+ > Paths using the angle bracket syntax (e.g., `import` *\<foo\>*)
+ > are [normal path values](language-values.md).
+
+ A Nix expression loaded by `import` must not contain any *free
+ variables* (identifiers that are not defined in the Nix expression
+ itself and are not built-in). Therefore, it cannot refer to
+ variables that are in scope at the call site. For instance, if you
+ have a calling expression
+
+ ```nix
+ rec {
+ x = 123;
+ y = import ./foo.nix;
+ }
+ ```
+
+ then the following `foo.nix` will give an error:
+
+ ```nix
+ x + 456
+ ```
+
+ since `x` is not in scope in `foo.nix`. If you want `x` to be
+ available in `foo.nix`, you should pass it as a function argument:
+
+ ```nix
+ rec {
+ x = 123;
+ y = import ./foo.nix x;
+ }
+ ```
+
+ and
+
+ ```nix
+ x: x + 456
+ ```
+
+ (The function argument doesn’t have to be called `x` in `foo.nix`;
+ any name would work.)
+ )",
+ .fun = [](EvalState & state, const Pos & pos, Value * * args, Value & v)
+ {
+ import(state, pos, *args[0], nullptr, v);
+ }
+});
/* Want reasonable symbol names, so extern C */
/* !!! Should we pass the Pos or the file name too? */
@@ -275,6 +347,16 @@ static void prim_typeOf(EvalState & state, const Pos & pos, Value * * args, Valu
mkString(v, state.symbols.create(t));
}
+static RegisterPrimOp primop_typeOf({
+ .name = "__typeOf",
+ .args = {"e"},
+ .doc = R"(
+ Return a string representing the type of the value *e*, namely
+ `"int"`, `"bool"`, `"string"`, `"path"`, `"null"`, `"set"`,
+ `"list"`, `"lambda"` or `"float"`.
+ )",
+ .fun = prim_typeOf,
+});
/* Determine whether the argument is the null value. */
static void prim_isNull(EvalState & state, const Pos & pos, Value * * args, Value & v)
@@ -283,6 +365,18 @@ static void prim_isNull(EvalState & state, const Pos & pos, Value * * args, Valu
mkBool(v, args[0]->type == tNull);
}
+static RegisterPrimOp primop_isNull({
+ .name = "isNull",
+ .args = {"e"},
+ .doc = R"(
+ Return `true` if *e* evaluates to `null`, and `false` otherwise.
+
+ > **Warning**
+ >
+ > This function is *deprecated*; just write `e == null` instead.
+ )",
+ .fun = prim_isNull,
+});
/* Determine whether the argument is a function. */
static void prim_isFunction(EvalState & state, const Pos & pos, Value * * args, Value & v)
@@ -302,6 +396,14 @@ static void prim_isFunction(EvalState & state, const Pos & pos, Value * * args,
mkBool(v, res);
}
+static RegisterPrimOp primop_isFunction({
+ .name = "__isFunction",
+ .args = {"e"},
+ .doc = R"(
+ Return `true` if *e* evaluates to a function, and `false` otherwise.
+ )",
+ .fun = prim_isFunction,
+});
/* Determine whether the argument is an integer. */
static void prim_isInt(EvalState & state, const Pos & pos, Value * * args, Value & v)
@@ -310,6 +412,15 @@ static void prim_isInt(EvalState & state, const Pos & pos, Value * * args, Value
mkBool(v, args[0]->type == tInt);
}
+static RegisterPrimOp primop_isInt({
+ .name = "__isInt",
+ .args = {"e"},
+ .doc = R"(
+ Return `true` if *e* evaluates to an integer, and `false` otherwise.
+ )",
+ .fun = prim_isInt,
+});
+
/* Determine whether the argument is a float. */
static void prim_isFloat(EvalState & state, const Pos & pos, Value * * args, Value & v)
{
@@ -317,6 +428,15 @@ static void prim_isFloat(EvalState & state, const Pos & pos, Value * * args, Val
mkBool(v, args[0]->type == tFloat);
}
+static RegisterPrimOp primop_isFloat({
+ .name = "__isFloat",
+ .args = {"e"},
+ .doc = R"(
+ Return `true` if *e* evaluates to a float, and `false` otherwise.
+ )",
+ .fun = prim_isFloat,
+});
+
/* Determine whether the argument is a string. */
static void prim_isString(EvalState & state, const Pos & pos, Value * * args, Value & v)
{
@@ -324,6 +444,14 @@ static void prim_isString(EvalState & state, const Pos & pos, Value * * args, Va
mkBool(v, args[0]->type == tString);
}
+static RegisterPrimOp primop_isString({
+ .name = "__isString",
+ .args = {"e"},
+ .doc = R"(
+ Return `true` if *e* evaluates to a string, and `false` otherwise.
+ )",
+ .fun = prim_isString,
+});
/* Determine whether the argument is a Boolean. */
static void prim_isBool(EvalState & state, const Pos & pos, Value * * args, Value & v)
@@ -332,6 +460,15 @@ static void prim_isBool(EvalState & state, const Pos & pos, Value * * args, Valu
mkBool(v, args[0]->type == tBool);
}
+static RegisterPrimOp primop_isBool({
+ .name = "__isBool",
+ .args = {"e"},
+ .doc = R"(
+ Return `true` if *e* evaluates to a bool, and `false` otherwise.
+ )",
+ .fun = prim_isBool,
+});
+
/* Determine whether the argument is a path. */
static void prim_isPath(EvalState & state, const Pos & pos, Value * * args, Value & v)
{
@@ -339,6 +476,15 @@ static void prim_isPath(EvalState & state, const Pos & pos, Value * * args, Valu
mkBool(v, args[0]->type == tPath);
}
+static RegisterPrimOp primop_isPath({
+ .name = "__isPath",
+ .args = {"e"},
+ .doc = R"(
+ Return `true` if *e* evaluates to a path, and `false` otherwise.
+ )",
+ .fun = prim_isPath,
+});
+
struct CompareValues
{
bool operator () (const Value * v1, const Value * v2) const
@@ -444,22 +590,43 @@ static void prim_genericClosure(EvalState & state, const Pos & pos, Value * * ar
v.listElems()[n++] = i;
}
+static RegisterPrimOp primop_genericClosure(RegisterPrimOp::Info {
+ .name = "__genericClosure",
+ .arity = 1,
+ .fun = prim_genericClosure,
+});
-static void prim_abort(EvalState & state, const Pos & pos, Value * * args, Value & v)
-{
- PathSet context;
- string s = state.coerceToString(pos, *args[0], context);
- throw Abort("evaluation aborted with the following error message: '%1%'", s);
-}
-
-
-static void prim_throw(EvalState & state, const Pos & pos, Value * * args, Value & v)
-{
- PathSet context;
- string s = state.coerceToString(pos, *args[0], context);
- throw ThrownError(s);
-}
-
+static RegisterPrimOp primop_abort({
+ .name = "abort",
+ .args = {"s"},
+ .doc = R"(
+ Abort Nix expression evaluation and print the error message *s*.
+ )",
+ .fun = [](EvalState & state, const Pos & pos, Value * * args, Value & v)
+ {
+ PathSet context;
+ string s = state.coerceToString(pos, *args[0], context);
+ throw Abort("evaluation aborted with the following error message: '%1%'", s);
+ }
+});
+
+static RegisterPrimOp primop_throw({
+ .name = "throw",
+ .args = {"s"},
+ .doc = R"(
+ Throw an error message *s*. This usually aborts Nix expression
+ evaluation, but in `nix-env -qa` and other commands that try to
+ evaluate a set of derivations to get information about those
+ derivations, a derivation that throws an error is silently skipped
+ (which is not the case for `abort`).
+ )",
+ .fun = [](EvalState & state, const Pos & pos, Value * * args, Value & v)
+ {
+ PathSet context;
+ string s = state.coerceToString(pos, *args[0], context);
+ throw ThrownError(s);
+ }
+});
static void prim_addErrorContext(EvalState & state, const Pos & pos, Value * * args, Value & v)
{
@@ -473,6 +640,11 @@ static void prim_addErrorContext(EvalState & state, const Pos & pos, Value * * a
}
}
+static RegisterPrimOp primop_addErrorContext(RegisterPrimOp::Info {
+ .name = "__addErrorContext",
+ .arity = 2,
+ .fun = prim_addErrorContext,
+});
/* Try evaluating the argument. Success => {success=true; value=something;},
* else => {success=false; value=false;} */
@@ -490,6 +662,22 @@ static void prim_tryEval(EvalState & state, const Pos & pos, Value * * args, Val
v.attrs->sort();
}
+static RegisterPrimOp primop_tryEval({
+ .name = "__tryEval",
+ .args = {"e"},
+ .doc = R"(
+ Try to shallowly evaluate *e*. Return a set containing the
+ attributes `success` (`true` if *e* evaluated successfully,
+ `false` if an error was thrown) and `value`, equalling *e* if
+ successful and `false` otherwise. Note that this doesn't evaluate
+ *e* deeply, so ` let e = { x = throw ""; }; in (builtins.tryEval
+ e).success ` will be `true`. Using ` builtins.deepSeq ` one can
+ get the expected result: `let e = { x = throw ""; }; in
+ (builtins.tryEval (builtins.deepSeq e e)).success` will be
+ `false`.
+ )",
+ .fun = prim_tryEval,
+});
/* Return an environment variable. Use with care. */
static void prim_getEnv(EvalState & state, const Pos & pos, Value * * args, Value & v)
@@ -498,6 +686,22 @@ static void prim_getEnv(EvalState & state, const Pos & pos, Value * * args, Valu
mkString(v, evalSettings.restrictEval || evalSettings.pureEval ? "" : getEnv(name).value_or(""));
}
+static RegisterPrimOp primop_getEnv({
+ .name = "__getEnv",
+ .args = {"s"},
+ .doc = R"(
+ `getEnv` returns the value of the environment variable *s*, or an
+ empty string if the variable doesn’t exist. This function should be
+ used with care, as it can introduce all sorts of nasty environment
+ dependencies in your Nix expression.
+
+ `getEnv` is used in Nix Packages to locate the file
+ `~/.nixpkgs/config.nix`, which contains user-local settings for Nix
+ Packages. (That is, it does a `getEnv "HOME"` to locate the user’s
+ home directory.)
+ )",
+ .fun = prim_getEnv,
+});
/* Evaluate the first argument, then return the second argument. */
static void prim_seq(EvalState & state, const Pos & pos, Value * * args, Value & v)
@@ -507,6 +711,15 @@ static void prim_seq(EvalState & state, const Pos & pos, Value * * args, Value &
v = *args[1];
}
+static RegisterPrimOp primop_seq({
+ .name = "__seq",
+ .args = {"e1", "e2"},
+ .doc = R"(
+ Evaluate *e1*, then evaluate and return *e2*. This ensures that a
+ computation is strict in the value of *e1*.
+ )",
+ .fun = prim_seq,
+});
/* Evaluate the first argument deeply (i.e. recursing into lists and
attrsets), then return the second argument. */
@@ -517,6 +730,16 @@ static void prim_deepSeq(EvalState & state, const Pos & pos, Value * * args, Val
v = *args[1];
}
+static RegisterPrimOp primop_deepSeq({
+ .name = "__deepSeq",
+ .args = {"e1", "e2"},
+ .doc = R"(
+ This is like `seq e1 e2`, except that *e1* is evaluated *deeply*:
+ if it’s a list or set, its elements or attributes are also
+ evaluated recursively.
+ )",
+ .fun = prim_deepSeq,
+});
/* Evaluate the first expression and print it on standard error. Then
return the second expression. Useful for debugging. */
@@ -531,6 +754,17 @@ static void prim_trace(EvalState & state, const Pos & pos, Value * * args, Value
v = *args[1];
}
+static RegisterPrimOp primop_trace({
+ .name = "__trace",
+ .args = {"e1", "e2"},
+ .doc = R"(
+ Evaluate *e1* and print its abstract syntax representation on
+ standard error. Then return *e2*. This function is useful for
+ debugging.
+ )",
+ .fun = prim_trace,
+});
+
/*************************************************************
* Derivations
@@ -781,7 +1015,7 @@ static void prim_derivationStrict(EvalState & state, const Pos & pos, Value * *
Hash h = newHashAllowEmpty(*outputHash, ht);
auto outPath = state.store->makeFixedOutputPath(ingestionMethod, h, drvName);
- if (!jsonObject) drv.env["out"] = state.store->printStorePath(outPath);
+ drv.env["out"] = state.store->printStorePath(outPath);
drv.outputs.insert_or_assign("out", DerivationOutput {
.output = DerivationOutputCAFixed {
.hash = FixedOutputHash {
@@ -795,7 +1029,7 @@ static void prim_derivationStrict(EvalState & state, const Pos & pos, Value * *
else if (contentAddressed) {
HashType ht = parseHashType(outputHashAlgo);
for (auto & i : outputs) {
- if (!jsonObject) drv.env[i] = hashPlaceholder(i);
+ drv.env[i] = hashPlaceholder(i);
drv.outputs.insert_or_assign(i, DerivationOutput {
.output = DerivationOutputCAFloating {
.method = ingestionMethod,
@@ -813,7 +1047,7 @@ static void prim_derivationStrict(EvalState & state, const Pos & pos, Value * *
that changes in the set of output names do get reflected in
the hash. */
for (auto & i : outputs) {
- if (!jsonObject) drv.env[i] = "";
+ drv.env[i] = "";
drv.outputs.insert_or_assign(i,
DerivationOutput {
.output = DerivationOutputInputAddressed {
@@ -828,7 +1062,7 @@ static void prim_derivationStrict(EvalState & state, const Pos & pos, Value * *
for (auto & i : outputs) {
auto outPath = state.store->makeOutputPath(i, h, drvName);
- if (!jsonObject) drv.env[i] = state.store->printStorePath(outPath);
+ drv.env[i] = state.store->printStorePath(outPath);
drv.outputs.insert_or_assign(i,
DerivationOutput {
.output = DerivationOutputInputAddressed {
@@ -839,7 +1073,7 @@ static void prim_derivationStrict(EvalState & state, const Pos & pos, Value * *
}
/* Write the resulting term into the Nix store directory. */
- auto drvPath = writeDerivation(state.store, drv, drvName, state.repair);
+ auto drvPath = writeDerivation(*state.store, drv, state.repair);
auto drvPathS = state.store->printStorePath(drvPath);
printMsg(lvlChatty, "instantiated '%1%' -> '%2%'", drvName, drvPathS);
@@ -852,13 +1086,18 @@ static void prim_derivationStrict(EvalState & state, const Pos & pos, Value * *
state.mkAttrs(v, 1 + drv.outputs.size());
mkString(*state.allocAttr(v, state.sDrvPath), drvPathS, {"=" + drvPathS});
- for (auto & i : drv.outputs) {
+ for (auto & i : drv.outputsAndPaths(*state.store)) {
mkString(*state.allocAttr(v, state.symbols.create(i.first)),
- state.store->printStorePath(i.second.path(*state.store, drv.name)), {"!" + i.first + "!" + drvPathS});
+ state.store->printStorePath(i.second.second), {"!" + i.first + "!" + drvPathS});
}
v.attrs->sort();
}
+static RegisterPrimOp primop_derivationStrict(RegisterPrimOp::Info {
+ .name = "derivationStrict",
+ .arity = 1,
+ .fun = prim_derivationStrict,
+});
/* Return a placeholder string for the specified output that will be
substituted by the corresponding output path at build time. For
@@ -872,6 +1111,17 @@ static void prim_placeholder(EvalState & state, const Pos & pos, Value * * args,
mkString(v, hashPlaceholder(state.forceStringNoCtx(*args[0], pos)));
}
+static RegisterPrimOp primop_placeholder({
+ .name = "placeholder",
+ .args = {"output"},
+ .doc = R"(
+ Return a placeholder string for the specified *output* that will be
+ substituted by the corresponding output path at build time. Typical
+ outputs would be `"out"`, `"bin"` or `"dev"`.
+ )",
+ .fun = prim_placeholder,
+});
+
/*************************************************************
* Paths
@@ -886,6 +1136,15 @@ static void prim_toPath(EvalState & state, const Pos & pos, Value * * args, Valu
mkString(v, canonPath(path), context);
}
+static RegisterPrimOp primop_toPath({
+ .name = "__toPath",
+ .args = {"s"},
+ .doc = R"(
+ **DEPRECATED.** Use `/. + "/path"` to convert a string into an absolute
+ path. For relative paths, use `./. + "/path"`.
+ )",
+ .fun = prim_toPath,
+});
/* Allow a valid store path to be used in an expression. This is
useful in some generated expressions such as in nix-push, which
@@ -897,6 +1156,9 @@ static void prim_toPath(EvalState & state, const Pos & pos, Value * * args, Valu
corner cases. */
static void prim_storePath(EvalState & state, const Pos & pos, Value * * args, Value & v)
{
+ if (evalSettings.pureEval)
+ throw EvalError("builtins.storePath' is not allowed in pure evaluation mode");
+
PathSet context;
Path path = state.checkSourcePath(state.coerceToPath(pos, *args[0], context));
/* Resolve symlinks in ‘path’, unless ‘path’ itself is a symlink
@@ -915,6 +1177,23 @@ static void prim_storePath(EvalState & state, const Pos & pos, Value * * args, V
mkString(v, path, context);
}
+static RegisterPrimOp primop_storePath({
+ .name = "__storePath",
+ .args = {"path"},
+ .doc = R"(
+ This function allows you to define a dependency on an already
+ existing store path. For example, the derivation attribute `src
+ = builtins.storePath /nix/store/f1d18v1y…-source` causes the
+ derivation to depend on the specified path, which must exist or
+ be substitutable. Note that this differs from a plain path
+ (e.g. `src = /nix/store/f1d18v1y…-source`) in that the latter
+ causes the path to be *copied* again to the Nix store, resulting
+ in a new path (e.g. `/nix/store/ld01dnzc…-source-source`).
+
+ This function is not available in pure evaluation mode.
+ )",
+ .fun = prim_storePath,
+});
static void prim_pathExists(EvalState & state, const Pos & pos, Value * * args, Value & v)
{
@@ -942,6 +1221,15 @@ static void prim_pathExists(EvalState & state, const Pos & pos, Value * * args,
}
}
+static RegisterPrimOp primop_pathExists({
+ .name = "__pathExists",
+ .args = {"path"},
+ .doc = R"(
+ Return `true` if the path *path* exists at evaluation time, and
+ `false` otherwise.
+ )",
+ .fun = prim_pathExists,
+});
/* Return the base name of the given string, i.e., everything
following the last slash. */
@@ -951,6 +1239,16 @@ static void prim_baseNameOf(EvalState & state, const Pos & pos, Value * * args,
mkString(v, baseNameOf(state.coerceToString(pos, *args[0], context, false, false)), context);
}
+static RegisterPrimOp primop_baseNameOf({
+ .name = "baseNameOf",
+ .args = {"s"},
+ .doc = R"(
+ Return the *base name* of the string *s*, that is, everything
+ following the final slash in the string. This is similar to the GNU
+ `basename` command.
+ )",
+ .fun = prim_baseNameOf,
+});
/* Return the directory of the given path, i.e., everything before the
last slash. Return either a path or a string depending on the type
@@ -962,6 +1260,16 @@ static void prim_dirOf(EvalState & state, const Pos & pos, Value * * args, Value
if (args[0]->type == tPath) mkPath(v, dir.c_str()); else mkString(v, dir, context);
}
+static RegisterPrimOp primop_dirOf({
+ .name = "dirOf",
+ .args = {"s"},
+ .doc = R"(
+ Return the directory part of the string *s*, that is, everything
+ before the final slash in the string. This is similar to the GNU
+ `dirname` command.
+ )",
+ .fun = prim_dirOf,
+});
/* Return the contents of a file as a string. */
static void prim_readFile(EvalState & state, const Pos & pos, Value * * args, Value & v)
@@ -982,6 +1290,14 @@ static void prim_readFile(EvalState & state, const Pos & pos, Value * * args, Va
mkString(v, s.c_str());
}
+static RegisterPrimOp primop_readFile({
+ .name = "__readFile",
+ .args = {"path"},
+ .doc = R"(
+ Return the contents of the file *path* as a string.
+ )",
+ .fun = prim_readFile,
+});
/* Find a file in the Nix search path. Used to implement <x> paths,
which are desugared to 'findFile __nixPath "x"'. */
@@ -1027,6 +1343,12 @@ static void prim_findFile(EvalState & state, const Pos & pos, Value * * args, Va
mkPath(v, state.checkSourcePath(state.findFile(searchPath, path, pos)).c_str());
}
+static RegisterPrimOp primop_findFile(RegisterPrimOp::Info {
+ .name = "__findFile",
+ .arity = 2,
+ .fun = prim_findFile,
+});
+
/* Return the cryptographic hash of a file in base-16. */
static void prim_hashFile(EvalState & state, const Pos & pos, Value * * args, Value & v)
{
@@ -1044,6 +1366,17 @@ static void prim_hashFile(EvalState & state, const Pos & pos, Value * * args, Va
mkString(v, hashFile(*ht, state.checkSourcePath(p)).to_string(Base16, false), context);
}
+static RegisterPrimOp primop_hashFile({
+ .name = "__hashFile",
+ .args = {"type", "p"},
+ .doc = R"(
+ Return a base-16 representation of the cryptographic hash of the
+ file at path *p*. The hash algorithm specified by *type* must be one
+ of `"md5"`, `"sha1"`, `"sha256"` or `"sha512"`.
+ )",
+ .fun = prim_hashFile,
+});
+
/* Read a directory (without . or ..) */
static void prim_readDir(EvalState & state, const Pos & pos, Value * * args, Value & v)
{
@@ -1075,6 +1408,25 @@ static void prim_readDir(EvalState & state, const Pos & pos, Value * * args, Val
v.attrs->sort();
}
+static RegisterPrimOp primop_readDir({
+ .name = "__readDir",
+ .args = {"path"},
+ .doc = R"(
+ Return the contents of the directory *path* as a set mapping
+ directory entries to the corresponding file type. For instance, if
+ directory `A` contains a regular file `B` and another directory
+ `C`, then `builtins.readDir ./A` will return the set
+
+ ```nix
+ { B = "regular"; C = "directory"; }
+ ```
+
+ The possible values for the file type are `"regular"`,
+ `"directory"`, `"symlink"` and `"unknown"`.
+ )",
+ .fun = prim_readDir,
+});
+
/*************************************************************
* Creating files
@@ -1092,6 +1444,102 @@ static void prim_toXML(EvalState & state, const Pos & pos, Value * * args, Value
mkString(v, out.str(), context);
}
+static RegisterPrimOp primop_toXML({
+ .name = "__toXML",
+ .args = {"e"},
+ .doc = R"(
+ Return a string containing an XML representation of *e*. The main
+ application for `toXML` is to communicate information with the
+ builder in a more structured format than plain environment
+ variables.
+
+ Here is an example where this is the case:
+
+ ```nix
+ { stdenv, fetchurl, libxslt, jira, uberwiki }:
+
+ stdenv.mkDerivation (rec {
+ name = "web-server";
+
+ buildInputs = [ libxslt ];
+
+ builder = builtins.toFile "builder.sh" "
+ source $stdenv/setup
+ mkdir $out
+ echo "$servlets" | xsltproc ${stylesheet} - > $out/server-conf.xml ①
+ ";
+
+ stylesheet = builtins.toFile "stylesheet.xsl" ②
+ "<?xml version='1.0' encoding='UTF-8'?>
+ <xsl:stylesheet xmlns:xsl='http://www.w3.org/1999/XSL/Transform' version='1.0'>
+ <xsl:template match='/'>
+ <Configure>
+ <xsl:for-each select='/expr/list/attrs'>
+ <Call name='addWebApplication'>
+ <Arg><xsl:value-of select=\"attr[@name = 'path']/string/@value\" /></Arg>
+ <Arg><xsl:value-of select=\"attr[@name = 'war']/path/@value\" /></Arg>
+ </Call>
+ </xsl:for-each>
+ </Configure>
+ </xsl:template>
+ </xsl:stylesheet>
+ ";
+
+ servlets = builtins.toXML [ ③
+ { path = "/bugtracker"; war = jira + "/lib/atlassian-jira.war"; }
+ { path = "/wiki"; war = uberwiki + "/uberwiki.war"; }
+ ];
+ })
+ ```
+
+ The builder is supposed to generate the configuration file for a
+ [Jetty servlet container](http://jetty.mortbay.org/). A servlet
+ container contains a number of servlets (`*.war` files) each
+ exported under a specific URI prefix. So the servlet configuration
+ is a list of sets containing the `path` and `war` of the servlet
+ (①). This kind of information is difficult to communicate with the
+ normal method of passing information through an environment
+ variable, which just concatenates everything together into a
+ string (which might just work in this case, but wouldn’t work if
+ fields are optional or contain lists themselves). Instead the Nix
+ expression is converted to an XML representation with `toXML`,
+ which is unambiguous and can easily be processed with the
+ appropriate tools. For instance, in the example an XSLT stylesheet
+ (at point ②) is applied to it (at point ①) to generate the XML
+ configuration file for the Jetty server. The XML representation
+ produced at point ③ by `toXML` is as follows:
+
+ ```xml
+ <?xml version='1.0' encoding='utf-8'?>
+ <expr>
+ <list>
+ <attrs>
+ <attr name="path">
+ <string value="/bugtracker" />
+ </attr>
+ <attr name="war">
+ <path value="/nix/store/d1jh9pasa7k2...-jira/lib/atlassian-jira.war" />
+ </attr>
+ </attrs>
+ <attrs>
+ <attr name="path">
+ <string value="/wiki" />
+ </attr>
+ <attr name="war">
+ <path value="/nix/store/y6423b1yi4sx...-uberwiki/uberwiki.war" />
+ </attr>
+ </attrs>
+ </list>
+ </expr>
+ ```
+
+ Note that we used the `toFile` built-in to write the builder and
+ the stylesheet “inline” in the Nix expression. The path of the
+ stylesheet is spliced into the builder using the syntax `xsltproc
+ ${stylesheet}`.
+ )",
+ .fun = prim_toXML,
+});
/* Convert the argument (which can be any Nix expression) to a JSON
string. Not all Nix expressions can be sensibly or completely
@@ -1104,6 +1552,19 @@ static void prim_toJSON(EvalState & state, const Pos & pos, Value * * args, Valu
mkString(v, out.str(), context);
}
+static RegisterPrimOp primop_toJSON({
+ .name = "__toJSON",
+ .args = {"e"},
+ .doc = R"(
+ Return a string containing a JSON representation of *e*. Strings,
+ integers, floats, booleans, nulls and lists are mapped to their JSON
+ equivalents. Sets (except derivations) are represented as objects.
+ Derivations are translated to a JSON string containing the
+ derivation’s output path. Paths are copied to the store and
+ represented as a JSON string of the resulting store path.
+ )",
+ .fun = prim_toJSON,
+});
/* Parse a JSON string to a value. */
static void prim_fromJSON(EvalState & state, const Pos & pos, Value * * args, Value & v)
@@ -1112,6 +1573,20 @@ static void prim_fromJSON(EvalState & state, const Pos & pos, Value * * args, Va
parseJSON(state, s, v);
}
+static RegisterPrimOp primop_fromJSON({
+ .name = "__fromJSON",
+ .args = {"e"},
+ .doc = R"(
+ Convert a JSON string to a Nix value. For example,
+
+ ```nix
+ builtins.fromJSON ''{"x": [1, 2, 3], "y": null}''
+ ```
+
+ returns the value `{ x = [ 1 2 3 ]; y = null; }`.
+ )",
+ .fun = prim_fromJSON,
+});
/* Store a string in the Nix store as a source file that can be used
as an input by derivations. */
@@ -1146,6 +1621,83 @@ static void prim_toFile(EvalState & state, const Pos & pos, Value * * args, Valu
mkString(v, storePath, {storePath});
}
+static RegisterPrimOp primop_toFile({
+ .name = "__toFile",
+ .args = {"name", "s"},
+ .doc = R"(
+ Store the string *s* in a file in the Nix store and return its
+ path. The file has suffix *name*. This file can be used as an
+ input to derivations. One application is to write builders
+ “inline”. For instance, the following Nix expression combines the
+ [Nix expression for GNU Hello](expression-syntax.md) and its
+ [build script](build-script.md) into one file:
+
+ ```nix
+ { stdenv, fetchurl, perl }:
+
+ stdenv.mkDerivation {
+ name = "hello-2.1.1";
+
+ builder = builtins.toFile "builder.sh" "
+ source $stdenv/setup
+
+ PATH=$perl/bin:$PATH
+
+ tar xvfz $src
+ cd hello-*
+ ./configure --prefix=$out
+ make
+ make install
+ ";
+
+ src = fetchurl {
+ url = "http://ftp.nluug.nl/pub/gnu/hello/hello-2.1.1.tar.gz";
+ sha256 = "1md7jsfd8pa45z73bz1kszpp01yw6x5ljkjk2hx7wl800any6465";
+ };
+ inherit perl;
+ }
+ ```
+
+ It is even possible for one file to refer to another, e.g.,
+
+ ```nix
+ builder = let
+ configFile = builtins.toFile "foo.conf" "
+ # This is some dummy configuration file.
+ ...
+ ";
+ in builtins.toFile "builder.sh" "
+ source $stdenv/setup
+ ...
+ cp ${configFile} $out/etc/foo.conf
+ ";
+ ```
+
+ Note that `${configFile}` is an
+ [antiquotation](language-values.md), so the result of the
+ expression `configFile`
+ (i.e., a path like `/nix/store/m7p7jfny445k...-foo.conf`) will be
+ spliced into the resulting string.
+
+ It is however *not* allowed to have files mutually referring to each
+ other, like so:
+
+ ```nix
+ let
+ foo = builtins.toFile "foo" "...${bar}...";
+ bar = builtins.toFile "bar" "...${foo}...";
+ in foo
+ ```
+
+ This is not allowed because it would cause a cyclic dependency in
+ the computation of the cryptographic hashes for `foo` and `bar`.
+
+ It is also not possible to reference the result of a derivation. If
+ you are using Nixpkgs, the `writeTextFile` function is able to do
+ that.
+ )",
+ .fun = prim_toFile,
+});
static void addPath(EvalState & state, const Pos & pos, const string & name, const Path & path_,
Value * filterFun, FileIngestionMethod method, const std::optional<Hash> expectedHash, Value & v)
@@ -1216,6 +1768,48 @@ static void prim_filterSource(EvalState & state, const Pos & pos, Value * * args
addPath(state, pos, std::string(baseNameOf(path)), path, args[0], FileIngestionMethod::Recursive, std::nullopt, v);
}
+static RegisterPrimOp primop_filterSource({
+ .name = "__filterSource",
+ .args = {"e1", "e2"},
+ .doc = R"(
+ This function allows you to copy sources into the Nix store while
+ filtering certain files. For instance, suppose that you want to use
+ the directory `source-dir` as an input to a Nix expression, e.g.
+
+ ```nix
+ stdenv.mkDerivation {
+ ...
+ src = ./source-dir;
+ }
+ ```
+
+ However, if `source-dir` is a Subversion working copy, then all
+ those annoying `.svn` subdirectories will also be copied to the
+ store. Worse, the contents of those directories may change a lot,
+ causing lots of spurious rebuilds. With `filterSource` you can
+ filter out the `.svn` directories:
+
+ ```nix
+ src = builtins.filterSource
+ (path: type: type != "directory" || baseNameOf path != ".svn")
+ ./source-dir;
+ ```
+
+ Thus, the first argument *e1* must be a predicate function that is
+ called for each regular file, directory or symlink in the source
+ tree *e2*. If the function returns `true`, the file is copied to the
+ Nix store, otherwise it is omitted. The function is called with two
+ arguments. The first is the full path of the file. The second is a
+ string that identifies the type of the file, which is either
+ `"regular"`, `"directory"`, `"symlink"` or `"unknown"` (for other
+ kinds of files such as device nodes or fifos — but note that those
+ cannot be copied to the Nix store, so if the predicate returns
+ `true` for them, the copy will fail). If you exclude a directory,
+ the entire corresponding subtree of *e2* will be excluded.
+ )",
+ .fun = prim_filterSource,
+});
+
static void prim_path(EvalState & state, const Pos & pos, Value * * args, Value & v)
{
state.forceAttrs(*args[0], pos);
@@ -1261,6 +1855,41 @@ static void prim_path(EvalState & state, const Pos & pos, Value * * args, Value
addPath(state, pos, name, path, filterFun, method, expectedHash, v);
}
+static RegisterPrimOp primop_path({
+ .name = "__path",
+ .args = {"args"},
+ .doc = R"(
+ An enrichment of the built-in path type, based on the attributes
+ present in *args*. All are optional except `path`:
+
+ - path
+ The underlying path.
+
+ - name
+ The name of the path when added to the store. This can be used to
+ reference paths that have nix-illegal characters in their names,
+ like `@`.
+
+ - filter
+ A function of the type expected by `builtins.filterSource`,
+ with the same semantics.
+
+ - recursive
+ When `false`, when `path` is added to the store it is with a
+ flat hash, rather than a hash of the NAR serialization of the
+ file. Thus, `path` must refer to a regular file, not a
+ directory. This allows similar behavior to `fetchurl`. Defaults
+ to `true`.
+
+ - sha256
+ When provided, this is the expected hash of the file at the
+ path. Evaluation will fail if the hash is incorrect, and
+ providing a hash allows `builtins.path` to be used even when the
+ `pure-eval` nix config option is on.
+ )",
+ .fun = prim_path,
+});
+
/*************************************************************
* Sets
@@ -1283,6 +1912,16 @@ static void prim_attrNames(EvalState & state, const Pos & pos, Value * * args, V
[](Value * v1, Value * v2) { return strcmp(v1->string.s, v2->string.s) < 0; });
}
+static RegisterPrimOp primop_attrNames({
+ .name = "__attrNames",
+ .args = {"set"},
+ .doc = R"(
+ Return the names of the attributes in the set *set* in an
+ alphabetically sorted list. For instance, `builtins.attrNames { y
+ = 1; x = "foo"; }` evaluates to `[ "x" "y" ]`.
+ )",
+ .fun = prim_attrNames,
+});
/* Return the values of the attributes in a set as a list, in the same
order as attrNames. */
@@ -1303,6 +1942,15 @@ static void prim_attrValues(EvalState & state, const Pos & pos, Value * * args,
v.listElems()[i] = ((Attr *) v.listElems()[i])->value;
}
+static RegisterPrimOp primop_attrValues({
+ .name = "__attrValues",
+ .args = {"set"},
+ .doc = R"(
+ Return the values of the attributes in the set *set* in the order
+ corresponding to the sorted attribute names.
+ )",
+ .fun = prim_attrValues,
+});
/* Dynamic version of the `.' operator. */
void prim_getAttr(EvalState & state, const Pos & pos, Value * * args, Value & v)
@@ -1322,9 +1970,20 @@ void prim_getAttr(EvalState & state, const Pos & pos, Value * * args, Value & v)
v = *i->value;
}
+static RegisterPrimOp primop_getAttr({
+ .name = "__getAttr",
+ .args = {"s", "set"},
+ .doc = R"(
+ `getAttr` returns the attribute named *s* from *set*. Evaluation
+ aborts if the attribute doesn’t exist. This is a dynamic version of
+ the `.` operator, since *s* is an expression rather than an
+ identifier.
+ )",
+ .fun = prim_getAttr,
+});
/* Return position information of the specified attribute. */
-void prim_unsafeGetAttrPos(EvalState & state, const Pos & pos, Value * * args, Value & v)
+static void prim_unsafeGetAttrPos(EvalState & state, const Pos & pos, Value * * args, Value & v)
{
string attr = state.forceStringNoCtx(*args[0], pos);
state.forceAttrs(*args[1], pos);
@@ -1335,6 +1994,11 @@ void prim_unsafeGetAttrPos(EvalState & state, const Pos & pos, Value * * args, V
state.mkPos(v, i->pos);
}
+static RegisterPrimOp primop_unsafeGetAttrPos(RegisterPrimOp::Info {
+ .name = "__unsafeGetAttrPos",
+ .arity = 2,
+ .fun = prim_unsafeGetAttrPos,
+});
/* Dynamic version of the `?' operator. */
static void prim_hasAttr(EvalState & state, const Pos & pos, Value * * args, Value & v)
@@ -1344,6 +2008,16 @@ static void prim_hasAttr(EvalState & state, const Pos & pos, Value * * args, Val
mkBool(v, args[1]->attrs->find(state.symbols.create(attr)) != args[1]->attrs->end());
}
+static RegisterPrimOp primop_hasAttr({
+ .name = "__hasAttr",
+ .args = {"s", "set"},
+ .doc = R"(
+ `hasAttr` returns `true` if *set* has an attribute named *s*, and
+ `false` otherwise. This is a dynamic version of the `?` operator,
+ since *s* is an expression rather than an identifier.
+ )",
+ .fun = prim_hasAttr,
+});
/* Determine whether the argument is a set. */
static void prim_isAttrs(EvalState & state, const Pos & pos, Value * * args, Value & v)
@@ -1352,6 +2026,14 @@ static void prim_isAttrs(EvalState & state, const Pos & pos, Value * * args, Val
mkBool(v, args[0]->type == tAttrs);
}
+static RegisterPrimOp primop_isAttrs({
+ .name = "__isAttrs",
+ .args = {"e"},
+ .doc = R"(
+ Return `true` if *e* evaluates to a set, and `false` otherwise.
+ )",
+ .fun = prim_isAttrs,
+});
static void prim_removeAttrs(EvalState & state, const Pos & pos, Value * * args, Value & v)
{
@@ -1375,6 +2057,21 @@ static void prim_removeAttrs(EvalState & state, const Pos & pos, Value * * args,
}
}
+static RegisterPrimOp primop_removeAttrs({
+ .name = "removeAttrs",
+ .args = {"set", "list"},
+ .doc = R"(
+ Remove the attributes listed in *list* from *set*. The attributes
+ don’t have to exist in *set*. For instance,
+
+ ```nix
+ removeAttrs { x = 1; y = 2; z = 3; } [ "a" "x" "z" ]
+ ```
+
+ evaluates to `{ y = 2; }`.
+ )",
+ .fun = prim_removeAttrs,
+});
/* Builds a set from a list specifying (name, value) pairs. To be
precise, a list [{name = "name1"; value = value1;} ... {name =
@@ -1416,10 +2113,31 @@ static void prim_listToAttrs(EvalState & state, const Pos & pos, Value * * args,
v.attrs->sort();
}
+static RegisterPrimOp primop_listToAttrs({
+ .name = "__listToAttrs",
+ .args = {"e"},
+ .doc = R"(
+ Construct a set from a list specifying the names and values of each
+ attribute. Each element of the list should be a set consisting of a
+ string-valued attribute `name` specifying the name of the attribute,
+ and an attribute `value` specifying its value. Example:
+
+ ```nix
+ builtins.listToAttrs
+ [ { name = "foo"; value = 123; }
+ { name = "bar"; value = 456; }
+ ]
+ ```
+
+ evaluates to
+
+ ```nix
+ { foo = 123; bar = 456; }
+ ```
+ )",
+ .fun = prim_listToAttrs,
+});
-/* Return the right-biased intersection of two sets as1 and as2,
- i.e. a set that contains every attribute from as2 that is also a
- member of as1. */
static void prim_intersectAttrs(EvalState & state, const Pos & pos, Value * * args, Value & v)
{
state.forceAttrs(*args[0], pos);
@@ -1434,14 +2152,16 @@ static void prim_intersectAttrs(EvalState & state, const Pos & pos, Value * * ar
}
}
+static RegisterPrimOp primop_intersectAttrs({
+ .name = "__intersectAttrs",
+ .args = {"e1", "e2"},
+ .doc = R"(
+ Return a set consisting of the attributes in the set *e2* that also
+ exist in the set *e1*.
+ )",
+ .fun = prim_intersectAttrs,
+});
-/* Collect each attribute named `attr' from a list of attribute sets.
- Sets that don't contain the named attribute are ignored.
-
- Example:
- catAttrs "a" [{a = 1;} {b = 0;} {a = 2;}]
- => [1 2]
-*/
static void prim_catAttrs(EvalState & state, const Pos & pos, Value * * args, Value & v)
{
Symbol attrName = state.symbols.create(state.forceStringNoCtx(*args[0], pos));
@@ -1463,20 +2183,23 @@ static void prim_catAttrs(EvalState & state, const Pos & pos, Value * * args, Va
v.listElems()[n] = res[n];
}
+static RegisterPrimOp primop_catAttrs({
+ .name = "__catAttrs",
+ .args = {"attr", "list"},
+ .doc = R"(
+ Collect each attribute named *attr* from a list of attribute
+ sets. Attrsets that don't contain the named attribute are
+ ignored. For example,
-/* Return a set containing the names of the formal arguments expected
- by the function `f'. The value of each attribute is a Boolean
- denoting whether the corresponding argument has a default value. For instance,
+ ```nix
+ builtins.catAttrs "a" [{a = 1;} {b = 0;} {a = 2;}]
+ ```
- functionArgs ({ x, y ? 123}: ...)
- => { x = false; y = true; }
+ evaluates to `[1 2]`.
+ )",
+ .fun = prim_catAttrs,
+});
- "Formal argument" here refers to the attributes pattern-matched by
- the function. Plain lambdas are not included, e.g.
-
- functionArgs (x: ...)
- => { }
-*/
static void prim_functionArgs(EvalState & state, const Pos & pos, Value * * args, Value & v)
{
state.forceValue(*args[0], pos);
@@ -1501,8 +2224,24 @@ static void prim_functionArgs(EvalState & state, const Pos & pos, Value * * args
v.attrs->sort();
}
-
-/* Apply a function to every element of an attribute set. */
+static RegisterPrimOp primop_functionArgs({
+ .name = "__functionArgs",
+ .args = {"f"},
+ .doc = R"(
+ Return a set containing the names of the formal arguments expected
+ by the function *f*. The value of each attribute is a Boolean
+ denoting whether the corresponding argument has a default value. For
+ instance, `functionArgs ({ x, y ? 123}: ...) = { x = false; y =
+ true; }`.
+
+ "Formal argument" here refers to the attributes pattern-matched by
+ the function. Plain lambdas are not included, e.g. `functionArgs (x:
+ ...) = { }`.
+ )",
+ .fun = prim_functionArgs,
+});
+
+/* */
static void prim_mapAttrs(EvalState & state, const Pos & pos, Value * * args, Value & v)
{
state.forceAttrs(*args[1], pos);
@@ -1518,6 +2257,20 @@ static void prim_mapAttrs(EvalState & state, const Pos & pos, Value * * args, Va
}
}
+static RegisterPrimOp primop_mapAttrs({
+ .name = "__mapAttrs",
+ .args = {"f", "attrset"},
+ .doc = R"(
+ Apply function *f* to every element of *attrset*. For example,
+
+ ```nix
+ builtins.mapAttrs (name: value: value * 10) { a = 1; b = 2; }
+ ```
+
+ evaluates to `{ a = 10; b = 20; }`.
+ )",
+ .fun = prim_mapAttrs,
+});
/*************************************************************
@@ -1532,6 +2285,14 @@ static void prim_isList(EvalState & state, const Pos & pos, Value * * args, Valu
mkBool(v, args[0]->isList());
}
+static RegisterPrimOp primop_isList({
+ .name = "__isList",
+ .args = {"e"},
+ .doc = R"(
+ Return `true` if *e* evaluates to a list, and `false` otherwise.
+ )",
+ .fun = prim_isList,
+});
static void elemAt(EvalState & state, const Pos & pos, Value & list, int n, Value & v)
{
@@ -1545,13 +2306,21 @@ static void elemAt(EvalState & state, const Pos & pos, Value & list, int n, Valu
v = *list.listElems()[n];
}
-
/* Return the n-1'th element of a list. */
static void prim_elemAt(EvalState & state, const Pos & pos, Value * * args, Value & v)
{
elemAt(state, pos, *args[0], state.forceInt(*args[1], pos), v);
}
+static RegisterPrimOp primop_elemAt({
+ .name = "__elemAt",
+ .args = {"xs", "n"},
+ .doc = R"(
+ Return element *n* from the list *xs*. Elements are counted starting
+ from 0. A fatal error occurs if the index is out of bounds.
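+
+ For example,
+
+ ```nix
+ builtins.elemAt [ 10 20 30 ] 1
+ ```
+
+ evaluates to `20`.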
+ )",
+ .fun = prim_elemAt,
+});
/* Return the first element of a list. */
static void prim_head(EvalState & state, const Pos & pos, Value * * args, Value & v)
@@ -1559,6 +2328,16 @@ static void prim_head(EvalState & state, const Pos & pos, Value * * args, Value
elemAt(state, pos, *args[0], 0, v);
}
+static RegisterPrimOp primop_head({
+ .name = "__head",
+ .args = {"list"},
+ .doc = R"(
+ Return the first element of a list; abort evaluation if the argument
+ isn’t a list or is an empty list. You can test whether a list is
+ empty by comparing it with `[]`.
+ )",
+ .fun = prim_head,
+});
/* Return a list consisting of everything but the first element of
a list. Warning: this function takes O(n) time, so you probably
@@ -1577,6 +2356,21 @@ static void prim_tail(EvalState & state, const Pos & pos, Value * * args, Value
v.listElems()[n] = args[0]->listElems()[n + 1];
}
+static RegisterPrimOp primop_tail({
+ .name = "__tail",
+ .args = {"list"},
+ .doc = R"(
+ Return the list without its first element; abort evaluation if
+ the argument isn’t a list or is an empty list.
+
+ > **Warning**
+ >
+ > This function should generally be avoided since it's inefficient:
+ > unlike Haskell's `tail`, it takes O(n) time, so recursing over a
+ > list by repeatedly calling `tail` takes O(n^2) time.
+ )",
+ .fun = prim_tail,
+});
/* Apply a function to every element of a list. */
static void prim_map(EvalState & state, const Pos & pos, Value * * args, Value & v)
@@ -1590,6 +2384,21 @@ static void prim_map(EvalState & state, const Pos & pos, Value * * args, Value &
*args[0], *args[1]->listElems()[n]);
}
+static RegisterPrimOp primop_map({
+ .name = "map",
+ .args = {"f", "list"},
+ .doc = R"(
+ Apply the function *f* to each element in the list *list*. For
+ example,
+
+ ```nix
+ map (x: "foo" + x) [ "bar" "bla" "abc" ]
+ ```
+
+ evaluates to `[ "foobar" "foobla" "fooabc" ]`.
+ )",
+ .fun = prim_map,
+});
/* Filter a list using a predicate; that is, return a list containing
every element from the list for which the predicate function
@@ -1621,6 +2430,15 @@ static void prim_filter(EvalState & state, const Pos & pos, Value * * args, Valu
}
}
+static RegisterPrimOp primop_filter({
+ .name = "__filter",
+ .args = {"f", "list"},
+ .doc = R"(
+ Return a list consisting of the elements of *list* for which the
+ function *f* returns `true`.
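+
+ For example,
+
+ ```nix
+ builtins.filter (x: x > 2) [ 1 2 3 4 ]
+ ```
+
+ evaluates to `[ 3 4 ]`.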
+ )",
+ .fun = prim_filter,
+});
/* Return true if a list contains a given element. */
static void prim_elem(EvalState & state, const Pos & pos, Value * * args, Value & v)
@@ -1635,6 +2453,15 @@ static void prim_elem(EvalState & state, const Pos & pos, Value * * args, Value
mkBool(v, res);
}
+static RegisterPrimOp primop_elem({
+ .name = "__elem",
+ .args = {"x", "xs"},
+ .doc = R"(
+ Return `true` if a value equal to *x* occurs in the list *xs*, and
+ `false` otherwise.
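+
+ For example,
+
+ ```nix
+ builtins.elem 3 [ 1 2 3 ]
+ ```
+
+ evaluates to `true`.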
+ )",
+ .fun = prim_elem,
+});
/* Concatenate a list of lists. */
static void prim_concatLists(EvalState & state, const Pos & pos, Value * * args, Value & v)
@@ -1643,6 +2470,14 @@ static void prim_concatLists(EvalState & state, const Pos & pos, Value * * args,
state.concatLists(v, args[0]->listSize(), args[0]->listElems(), pos);
}
+static RegisterPrimOp primop_concatLists({
+ .name = "__concatLists",
+ .args = {"lists"},
+ .doc = R"(
+ Concatenate a list of lists into a single list.
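+
+ For example,
+
+ ```nix
+ builtins.concatLists [ [ 1 2 ] [ 3 4 ] [ ] ]
+ ```
+
+ evaluates to `[ 1 2 3 4 ]`.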
+ )",
+ .fun = prim_concatLists,
+});
/* Return the length of a list. This is an O(1) time operation. */
static void prim_length(EvalState & state, const Pos & pos, Value * * args, Value & v)
@@ -1651,6 +2486,14 @@ static void prim_length(EvalState & state, const Pos & pos, Value * * args, Valu
mkInt(v, args[0]->listSize());
}
+static RegisterPrimOp primop_length({
+ .name = "__length",
+ .args = {"e"},
+ .doc = R"(
+ Return the length of the list *e*.
+ )",
+ .fun = prim_length,
+});
/* Reduce a list by applying a binary operator, from left to
right. The operator is applied strictly. */
@@ -1675,6 +2518,18 @@ static void prim_foldlStrict(EvalState & state, const Pos & pos, Value * * args,
}
}
+static RegisterPrimOp primop_foldlStrict({
+ .name = "__foldl'",
+ .args = {"op", "nul", "list"},
+ .doc = R"(
+ Reduce a list by applying a binary operator, from left to right,
+ e.g. `foldl' op nul [x0 x1 x2 ...] = op (op (op nul x0) x1) x2
+ ...`. The operator is applied strictly, i.e., its arguments are
+ evaluated first. For example, `foldl' (x: y: x + y) 0 [1 2 3]`
+ evaluates to 6.
+ )",
+ .fun = prim_foldlStrict,
+});
static void anyOrAll(bool any, EvalState & state, const Pos & pos, Value * * args, Value & v)
{
@@ -1700,12 +2555,30 @@ static void prim_any(EvalState & state, const Pos & pos, Value * * args, Value &
anyOrAll(true, state, pos, args, v);
}
+static RegisterPrimOp primop_any({
+ .name = "__any",
+ .args = {"pred", "list"},
+ .doc = R"(
+ Return `true` if the function *pred* returns `true` for at least one
+ element of *list*, and `false` otherwise.
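+
+ For example,
+
+ ```nix
+ builtins.any (x: x > 2) [ 1 2 3 ]
+ ```
+
+ evaluates to `true`.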
+ )",
+ .fun = prim_any,
+});
static void prim_all(EvalState & state, const Pos & pos, Value * * args, Value & v)
{
anyOrAll(false, state, pos, args, v);
}
+static RegisterPrimOp primop_all({
+ .name = "__all",
+ .args = {"pred", "list"},
+ .doc = R"(
+ Return `true` if the function *pred* returns `true` for all elements
+ of *list*, and `false` otherwise.
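+
+ For example,
+
+ ```nix
+ builtins.all (x: x > 1) [ 1 2 3 ]
+ ```
+
+ evaluates to `false`, since the predicate fails for `1`.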
+ )",
+ .fun = prim_all,
+});
static void prim_genList(EvalState & state, const Pos & pos, Value * * args, Value & v)
{
@@ -1726,6 +2599,21 @@ static void prim_genList(EvalState & state, const Pos & pos, Value * * args, Val
}
}
+static RegisterPrimOp primop_genList({
+ .name = "__genList",
+ .args = {"generator", "length"},
+ .doc = R"(
+ Generate a list of length *length*, in which the element at index
+ *i* is the value returned by *generator* applied to *i*. For example,
+
+ ```nix
+ builtins.genList (x: x * x) 5
+ ```
+
+ returns the list `[ 0 1 4 9 16 ]`.
+ )",
+ .fun = prim_genList,
+});
static void prim_lessThan(EvalState & state, const Pos & pos, Value * * args, Value & v);
@@ -1761,6 +2649,26 @@ static void prim_sort(EvalState & state, const Pos & pos, Value * * args, Value
std::stable_sort(v.listElems(), v.listElems() + len, comparator);
}
+static RegisterPrimOp primop_sort({
+ .name = "__sort",
+ .args = {"comparator", "list"},
+ .doc = R"(
+ Return *list* in sorted order. It repeatedly calls the function
+ *comparator* with two elements. The comparator should return `true`
+ if the first element is less than the second, and `false` otherwise.
+ For example,
+
+ ```nix
+ builtins.sort builtins.lessThan [ 483 249 526 147 42 77 ]
+ ```
+
+ produces the list `[ 42 77 147 249 483 526 ]`.
+
+ This is a stable sort: it preserves the relative order of elements
+ deemed equal by the comparator.
+ )",
+ .fun = prim_sort,
+});
static void prim_partition(EvalState & state, const Pos & pos, Value * * args, Value & v)
{
@@ -1799,9 +2707,29 @@ static void prim_partition(EvalState & state, const Pos & pos, Value * * args, V
v.attrs->sort();
}
+static RegisterPrimOp primop_partition({
+ .name = "__partition",
+ .args = {"pred", "list"},
+ .doc = R"(
+ Given a predicate function *pred*, this function returns an
+ attrset containing a list named `right`, containing the elements
+ in *list* for which *pred* returned `true`, and a list named
+ `wrong`, containing the elements for which it returned
+ `false`. For example,
+
+ ```nix
+ builtins.partition (x: x > 10) [1 23 9 3 42]
+ ```
+
+ evaluates to
+
+ ```nix
+ { right = [ 23 42 ]; wrong = [ 1 9 3 ]; }
+ ```
+ )",
+ .fun = prim_partition,
+});
-/* concatMap = f: list: concatLists (map f list); */
-/* C++-version is to avoid allocating `mkApp', call `f' eagerly */
static void prim_concatMap(EvalState & state, const Pos & pos, Value * * args, Value & v)
{
state.forceFunction(*args[0], pos);
@@ -1828,6 +2756,16 @@ static void prim_concatMap(EvalState & state, const Pos & pos, Value * * args, V
}
}
+static RegisterPrimOp primop_concatMap({
+ .name = "__concatMap",
+ .args = {"f", "list"},
+ .doc = R"(
+ This function is equivalent to `builtins.concatLists (map f list)`
+ but is more efficient.
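+
+ For example,
+
+ ```nix
+ builtins.concatMap (x: [ x x ]) [ 1 2 3 ]
+ ```
+
+ evaluates to `[ 1 1 2 2 3 3 ]`.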
+ )",
+ .fun = prim_concatMap,
+});
+
/*************************************************************
* Integer arithmetic
@@ -1844,6 +2782,14 @@ static void prim_add(EvalState & state, const Pos & pos, Value * * args, Value &
mkInt(v, state.forceInt(*args[0], pos) + state.forceInt(*args[1], pos));
}
+static RegisterPrimOp primop_add({
+ .name = "__add",
+ .args = {"e1", "e2"},
+ .doc = R"(
+ Return the sum of the numbers *e1* and *e2*.
+ )",
+ .fun = prim_add,
+});
static void prim_sub(EvalState & state, const Pos & pos, Value * * args, Value & v)
{
@@ -1855,6 +2801,14 @@ static void prim_sub(EvalState & state, const Pos & pos, Value * * args, Value &
mkInt(v, state.forceInt(*args[0], pos) - state.forceInt(*args[1], pos));
}
+static RegisterPrimOp primop_sub({
+ .name = "__sub",
+ .args = {"e1", "e2"},
+ .doc = R"(
+ Return the difference between the numbers *e1* and *e2*.
+ )",
+ .fun = prim_sub,
+});
static void prim_mul(EvalState & state, const Pos & pos, Value * * args, Value & v)
{
@@ -1866,6 +2820,14 @@ static void prim_mul(EvalState & state, const Pos & pos, Value * * args, Value &
mkInt(v, state.forceInt(*args[0], pos) * state.forceInt(*args[1], pos));
}
+static RegisterPrimOp primop_mul({
+ .name = "__mul",
+ .args = {"e1", "e2"},
+ .doc = R"(
+ Return the product of the numbers *e1* and *e2*.
+ )",
+ .fun = prim_mul,
+});
static void prim_div(EvalState & state, const Pos & pos, Value * * args, Value & v)
{
@@ -1895,21 +2857,57 @@ static void prim_div(EvalState & state, const Pos & pos, Value * * args, Value &
}
}
+static RegisterPrimOp primop_div({
+ .name = "__div",
+ .args = {"e1", "e2"},
+ .doc = R"(
+ Return the quotient of the numbers *e1* and *e2*.
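+
+ For example,
+
+ ```nix
+ builtins.div 7 2
+ ```
+
+ evaluates to `3`; when both operands are integers the result is
+ also an integer.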
+ )",
+ .fun = prim_div,
+});
+
static void prim_bitAnd(EvalState & state, const Pos & pos, Value * * args, Value & v)
{
mkInt(v, state.forceInt(*args[0], pos) & state.forceInt(*args[1], pos));
}
+static RegisterPrimOp primop_bitAnd({
+ .name = "__bitAnd",
+ .args = {"e1", "e2"},
+ .doc = R"(
+ Return the bitwise AND of the integers *e1* and *e2*.
+ )",
+ .fun = prim_bitAnd,
+});
+
static void prim_bitOr(EvalState & state, const Pos & pos, Value * * args, Value & v)
{
mkInt(v, state.forceInt(*args[0], pos) | state.forceInt(*args[1], pos));
}
+static RegisterPrimOp primop_bitOr({
+ .name = "__bitOr",
+ .args = {"e1", "e2"},
+ .doc = R"(
+ Return the bitwise OR of the integers *e1* and *e2*.
+ )",
+ .fun = prim_bitOr,
+});
+
static void prim_bitXor(EvalState & state, const Pos & pos, Value * * args, Value & v)
{
mkInt(v, state.forceInt(*args[0], pos) ^ state.forceInt(*args[1], pos));
}
+static RegisterPrimOp primop_bitXor({
+ .name = "__bitXor",
+ .args = {"e1", "e2"},
+ .doc = R"(
+ Return the bitwise XOR of the integers *e1* and *e2*.
+ )",
+ .fun = prim_bitXor,
+});
+
static void prim_lessThan(EvalState & state, const Pos & pos, Value * * args, Value & v)
{
state.forceValue(*args[0], pos);
@@ -1918,6 +2916,17 @@ static void prim_lessThan(EvalState & state, const Pos & pos, Value * * args, Va
mkBool(v, comp(args[0], args[1]));
}
+static RegisterPrimOp primop_lessThan({
+ .name = "__lessThan",
+ .args = {"e1", "e2"},
+ .doc = R"(
+ Return `true` if the number *e1* is less than the number *e2*, and
+ `false` otherwise. Evaluation aborts if either *e1* or *e2* does not
+ evaluate to a number.
+ )",
+ .fun = prim_lessThan,
+});
+
/*************************************************************
* String manipulation
@@ -1934,6 +2943,29 @@ static void prim_toString(EvalState & state, const Pos & pos, Value * * args, Va
mkString(v, s, context);
}
+static RegisterPrimOp primop_toString({
+ .name = "toString",
+ .args = {"e"},
+ .doc = R"(
+ Convert the expression *e* to a string. *e* can be:
+
+ - A string (in which case the string is returned unmodified).
+
+ - A path (e.g., `toString /foo/bar` yields `"/foo/bar"`).
+
+ - A set containing `{ __toString = self: ...; }`.
+
+ - An integer.
+
+ - A list, in which case the string representations of its elements
+ are joined with spaces.
+
+ - A Boolean (`false` yields `""`, `true` yields `"1"`).
+
+ - `null`, which yields the empty string.
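+
+ For example,
+
+ ```nix
+ toString [ 1 2 3 ]
+ ```
+
+ evaluates to `"1 2 3"`.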
+ )",
+ .fun = prim_toString,
+});
/* `substring start len str' returns the substring of `str' starting
at character position `min(start, stringLength str)' inclusive and
@@ -1955,6 +2987,25 @@ static void prim_substring(EvalState & state, const Pos & pos, Value * * args, V
mkString(v, (unsigned int) start >= s.size() ? "" : string(s, start, len), context);
}
+static RegisterPrimOp primop_substring({
+ .name = "__substring",
+ .args = {"start", "len", "s"},
+ .doc = R"(
+ Return the substring of *s* from character position *start*
+ (zero-based) up to but not including *start + len*. If *start* is
+ greater than the length of the string, an empty string is returned,
+ and if *start + len* lies beyond the end of the string, only the
+ substring up to the end of the string is returned. *start* must be
+ non-negative. For example,
+
+ ```nix
+ builtins.substring 0 3 "nixos"
+ ```
+
+ evaluates to `"nix"`.
+ )",
+ .fun = prim_substring,
+});
static void prim_stringLength(EvalState & state, const Pos & pos, Value * * args, Value & v)
{
@@ -1963,6 +3014,15 @@ static void prim_stringLength(EvalState & state, const Pos & pos, Value * * args
mkInt(v, s.size());
}
+static RegisterPrimOp primop_stringLength({
+ .name = "__stringLength",
+ .args = {"e"},
+ .doc = R"(
+ Return the length of the string *e*. If *e* is not a string,
+ evaluation is aborted.
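+
+ For example,
+
+ ```nix
+ builtins.stringLength "nixos"
+ ```
+
+ evaluates to `5`.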
+ )",
+ .fun = prim_stringLength,
+});
/* Return the cryptographic hash of a string in base-16. */
static void prim_hashString(EvalState & state, const Pos & pos, Value * * args, Value & v)
@@ -1981,6 +3041,16 @@ static void prim_hashString(EvalState & state, const Pos & pos, Value * * args,
mkString(v, hashString(*ht, s).to_string(Base16, false), context);
}
+static RegisterPrimOp primop_hashString({
+ .name = "__hashString",
+ .args = {"type", "s"},
+ .doc = R"(
+ Return a base-16 representation of the cryptographic hash of string
+ *s*. The hash algorithm specified by *type* must be one of `"md5"`,
+ `"sha1"`, `"sha256"` or `"sha512"`.
+ )",
+ .fun = prim_hashString,
+});
/* Match a regular expression against a string and return either
‘null’ or a list containing substring matches. */
@@ -2029,6 +3099,41 @@ void prim_match(EvalState & state, const Pos & pos, Value * * args, Value & v)
}
}
+static RegisterPrimOp primop_match({
+ .name = "__match",
+ .args = {"regex", "str"},
+ .doc = R"s(
+ Returns a list if the [extended POSIX regular
+ expression](http://pubs.opengroup.org/onlinepubs/9699919799/basedefs/V1_chap09.html#tag_09_04)
+ *regex* matches *str* precisely, otherwise returns `null`. Each item
+ in the list is a regex group.
+
+ ```nix
+ builtins.match "ab" "abc"
+ ```
+
+ Evaluates to `null`.
+
+ ```nix
+ builtins.match "abc" "abc"
+ ```
+
+ Evaluates to `[ ]`.
+
+ ```nix
+ builtins.match "a(b)(c)" "abc"
+ ```
+
+ Evaluates to `[ "b" "c" ]`.
+
+ ```nix
+ builtins.match "[[:space:]]+([[:upper:]]+)[[:space:]]+" " FOO "
+ ```
+
+ Evaluates to `[ "foo" ]`.
+ )s",
+ .fun = prim_match,
+});
/* Split a string with a regular expression, and return a list of the
non-matching parts interleaved by the lists of the matching groups. */
@@ -2102,8 +3207,44 @@ static void prim_split(EvalState & state, const Pos & pos, Value * * args, Value
}
}
+static RegisterPrimOp primop_split({
+ .name = "__split",
+ .args = {"regex", "str"},
+ .doc = R"s(
+ Returns a list composed of non-matched strings interleaved with the
+ lists of the [extended POSIX regular
+ expression](http://pubs.opengroup.org/onlinepubs/9699919799/basedefs/V1_chap09.html#tag_09_04)
+ *regex* matches of *str*. Each item in the lists of matched
+ sequences is a regex group.
+
+ ```nix
+ builtins.split "(a)b" "abc"
+ ```
+
+ Evaluates to `[ "" [ "a" ] "c" ]`.
+
+ ```nix
+ builtins.split "([ac])" "abc"
+ ```
-static void prim_concatStringSep(EvalState & state, const Pos & pos, Value * * args, Value & v)
+ Evaluates to `[ "" [ "a" ] "b" [ "c" ] "" ]`.
+
+ ```nix
+ builtins.split "(a)|(c)" "abc"
+ ```
+
+ Evaluates to `[ "" [ "a" null ] "b" [ null "c" ] "" ]`.
+
+ ```nix
+ builtins.split "([[:upper:]]+)" " FOO "
+ ```
+
+ Evaluates to `[ " " [ "FOO" ] " " ]`.
+ )s",
+ .fun = prim_split,
+});
+
+static void prim_concatStringsSep(EvalState & state, const Pos & pos, Value * * args, Value & v)
{
PathSet context;
@@ -2122,6 +3263,16 @@ static void prim_concatStringSep(EvalState & state, const Pos & pos, Value * * a
mkString(v, res, context);
}
+static RegisterPrimOp primop_concatStringsSep({
+ .name = "__concatStringsSep",
+ .args = {"separator", "list"},
+ .doc = R"(
+ Concatenate a list of strings with a separator between each
+ element, e.g. `concatStringsSep "/" ["usr" "local" "bin"] ==
+ "usr/local/bin"`.
+ )",
+ .fun = prim_concatStringsSep,
+});
static void prim_replaceStrings(EvalState & state, const Pos & pos, Value * * args, Value & v)
{
@@ -2181,6 +3332,22 @@ static void prim_replaceStrings(EvalState & state, const Pos & pos, Value * * ar
mkString(v, res, context);
}
+static RegisterPrimOp primop_replaceStrings({
+ .name = "__replaceStrings",
+ .args = {"from", "to", "s"},
+ .doc = R"(
+ Given string *s*, replace every occurrence of the strings in *from*
+ with the corresponding string in *to*. For example,
+
+ ```nix
+ builtins.replaceStrings ["oo" "a"] ["a" "i"] "foobar"
+ ```
+
+ evaluates to `"fabir"`.
+ )",
+ .fun = prim_replaceStrings,
+});
+
/*************************************************************
* Versions
@@ -2197,6 +3364,19 @@ static void prim_parseDrvName(EvalState & state, const Pos & pos, Value * * args
v.attrs->sort();
}
+static RegisterPrimOp primop_parseDrvName({
+ .name = "__parseDrvName",
+ .args = {"s"},
+ .doc = R"(
+ Split the string *s* into a package name and version. The package
+ name is everything up to but not including the first dash followed
+ by a digit, and the version is everything following that dash. The
+ result is returned in a set `{ name, version }`. Thus,
+ `builtins.parseDrvName "nix-0.12pre12876"` returns `{ name =
+ "nix"; version = "0.12pre12876"; }`.
+ )",
+ .fun = prim_parseDrvName,
+});
static void prim_compareVersions(EvalState & state, const Pos & pos, Value * * args, Value & v)
{
@@ -2205,6 +3385,18 @@ static void prim_compareVersions(EvalState & state, const Pos & pos, Value * * a
mkInt(v, compareVersions(version1, version2));
}
+static RegisterPrimOp primop_compareVersions({
+ .name = "__compareVersions",
+ .args = {"s1", "s2"},
+ .doc = R"(
+ Compare two strings representing versions and return `-1` if
+ version *s1* is older than version *s2*, `0` if they are the same,
+ and `1` if *s1* is newer than *s2*. The version comparison
+ algorithm is the same as the one used by [`nix-env
+ -u`](../command-ref/nix-env.md#operation---upgrade).
+ )",
+ .fun = prim_compareVersions,
+});
static void prim_splitVersion(EvalState & state, const Pos & pos, Value * * args, Value & v)
{
@@ -2225,6 +3417,17 @@ static void prim_splitVersion(EvalState & state, const Pos & pos, Value * * args
}
}
+static RegisterPrimOp primop_splitVersion({
+ .name = "__splitVersion",
+ .args = {"s"},
+ .doc = R"(
+ Split a string representing a version into its components, by the
+ same version splitting logic underlying the version comparison in
+ [`nix-env -u`](../command-ref/nix-env.md#operation---upgrade).
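+
+ For example,
+
+ ```nix
+ builtins.splitVersion "1.2.3"
+ ```
+
+ evaluates to `[ "1" "2" "3" ]`.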
+ )",
+ .fun = prim_splitVersion,
+});
+
/*************************************************************
* Primop registration
@@ -2238,7 +3441,20 @@ RegisterPrimOp::RegisterPrimOp(std::string name, size_t arity, PrimOpFun fun,
std::optional<std::string> requiredFeature)
{
if (!primOps) primOps = new PrimOps;
- primOps->push_back({name, arity, fun, requiredFeature});
+ primOps->push_back({
+ .name = name,
+ .args = {},
+ .arity = arity,
+ .requiredFeature = std::move(requiredFeature),
+ .fun = fun
+ });
+}
+
+
+RegisterPrimOp::RegisterPrimOp(Info && info)
+{
+ if (!primOps) primOps = new PrimOps;
+ primOps->push_back(std::move(info));
}
@@ -2262,15 +3478,6 @@ void EvalState::createBaseEnv()
mkNull(v);
addConstant("null", v);
- auto vThrow = addPrimOp("throw", 1, prim_throw);
-
- auto addPurityError = [&](const std::string & name) {
- Value * v2 = allocValue();
- mkString(*v2, fmt("'%s' is not allowed in pure evaluation mode", name));
- mkApp(v, *vThrow, *v2);
- addConstant(name, v);
- };
-
if (!evalSettings.pureEval) {
mkInt(v, time(0));
addConstant("__currentTime", v);
@@ -2295,132 +3502,16 @@ void EvalState::createBaseEnv()
addConstant("__langVersion", v);
// Miscellaneous
- auto vScopedImport = addPrimOp("scopedImport", 2, prim_scopedImport);
- Value * v2 = allocValue();
- mkAttrs(*v2, 0);
- mkApp(v, *vScopedImport, *v2);
- forceValue(v);
- addConstant("import", v);
if (evalSettings.enableNativeCode) {
addPrimOp("__importNative", 2, prim_importNative);
addPrimOp("__exec", 1, prim_exec);
}
- addPrimOp("__typeOf", 1, prim_typeOf);
- addPrimOp("isNull", 1, prim_isNull);
- addPrimOp("__isFunction", 1, prim_isFunction);
- addPrimOp("__isString", 1, prim_isString);
- addPrimOp("__isInt", 1, prim_isInt);
- addPrimOp("__isFloat", 1, prim_isFloat);
- addPrimOp("__isBool", 1, prim_isBool);
- addPrimOp("__isPath", 1, prim_isPath);
- addPrimOp("__genericClosure", 1, prim_genericClosure);
- addPrimOp("abort", 1, prim_abort);
- addPrimOp("__addErrorContext", 2, prim_addErrorContext);
- addPrimOp("__tryEval", 1, prim_tryEval);
- addPrimOp("__getEnv", 1, prim_getEnv);
-
- // Strictness
- addPrimOp("__seq", 2, prim_seq);
- addPrimOp("__deepSeq", 2, prim_deepSeq);
-
- // Debugging
- addPrimOp("__trace", 2, prim_trace);
-
- // Paths
- addPrimOp("__toPath", 1, prim_toPath);
- if (evalSettings.pureEval)
- addPurityError("__storePath");
- else
- addPrimOp("__storePath", 1, prim_storePath);
- addPrimOp("__pathExists", 1, prim_pathExists);
- addPrimOp("baseNameOf", 1, prim_baseNameOf);
- addPrimOp("dirOf", 1, prim_dirOf);
- addPrimOp("__readFile", 1, prim_readFile);
- addPrimOp("__readDir", 1, prim_readDir);
- addPrimOp("__findFile", 2, prim_findFile);
- addPrimOp("__hashFile", 2, prim_hashFile);
-
- // Creating files
- addPrimOp("__toXML", 1, prim_toXML);
- addPrimOp("__toJSON", 1, prim_toJSON);
- addPrimOp("__fromJSON", 1, prim_fromJSON);
- addPrimOp("__toFile", 2, prim_toFile);
- addPrimOp("__filterSource", 2, prim_filterSource);
- addPrimOp("__path", 1, prim_path);
-
- // Sets
- addPrimOp("__attrNames", 1, prim_attrNames);
- addPrimOp("__attrValues", 1, prim_attrValues);
- addPrimOp("__getAttr", 2, prim_getAttr);
- addPrimOp("__unsafeGetAttrPos", 2, prim_unsafeGetAttrPos);
- addPrimOp("__hasAttr", 2, prim_hasAttr);
- addPrimOp("__isAttrs", 1, prim_isAttrs);
- addPrimOp("removeAttrs", 2, prim_removeAttrs);
- addPrimOp("__listToAttrs", 1, prim_listToAttrs);
- addPrimOp("__intersectAttrs", 2, prim_intersectAttrs);
- addPrimOp("__catAttrs", 2, prim_catAttrs);
- addPrimOp("__functionArgs", 1, prim_functionArgs);
- addPrimOp("__mapAttrs", 2, prim_mapAttrs);
-
- // Lists
- addPrimOp("__isList", 1, prim_isList);
- addPrimOp("__elemAt", 2, prim_elemAt);
- addPrimOp("__head", 1, prim_head);
- addPrimOp("__tail", 1, prim_tail);
- addPrimOp("map", 2, prim_map);
- addPrimOp("__filter", 2, prim_filter);
- addPrimOp("__elem", 2, prim_elem);
- addPrimOp("__concatLists", 1, prim_concatLists);
- addPrimOp("__length", 1, prim_length);
- addPrimOp("__foldl'", 3, prim_foldlStrict);
- addPrimOp("__any", 2, prim_any);
- addPrimOp("__all", 2, prim_all);
- addPrimOp("__genList", 2, prim_genList);
- addPrimOp("__sort", 2, prim_sort);
- addPrimOp("__partition", 2, prim_partition);
- addPrimOp("__concatMap", 2, prim_concatMap);
-
- // Integer arithmetic
- addPrimOp("__add", 2, prim_add);
- addPrimOp("__sub", 2, prim_sub);
- addPrimOp("__mul", 2, prim_mul);
- addPrimOp("__div", 2, prim_div);
- addPrimOp("__bitAnd", 2, prim_bitAnd);
- addPrimOp("__bitOr", 2, prim_bitOr);
- addPrimOp("__bitXor", 2, prim_bitXor);
- addPrimOp("__lessThan", 2, prim_lessThan);
-
- // String manipulation
- addPrimOp("toString", 1, prim_toString);
- addPrimOp("__substring", 3, prim_substring);
- addPrimOp("__stringLength", 1, prim_stringLength);
- addPrimOp("__hashString", 2, prim_hashString);
- addPrimOp("__match", 2, prim_match);
- addPrimOp("__split", 2, prim_split);
- addPrimOp("__concatStringsSep", 2, prim_concatStringSep);
- addPrimOp("__replaceStrings", 3, prim_replaceStrings);
-
- // Versions
- addPrimOp("__parseDrvName", 1, prim_parseDrvName);
- addPrimOp("__compareVersions", 2, prim_compareVersions);
- addPrimOp("__splitVersion", 1, prim_splitVersion);
-
- // Derivations
- addPrimOp("derivationStrict", 1, prim_derivationStrict);
- addPrimOp("placeholder", 1, prim_placeholder);
-
- /* Add a wrapper around the derivation primop that computes the
- `drvPath' and `outPath' attributes lazily. */
- string path = canonPath(settings.nixDataDir + "/nix/corepkgs/derivation.nix", true);
- sDerivationNix = symbols.create(path);
- evalFile(path, v);
- addConstant("derivation", v);
/* Add a value containing the current Nix expression search path. */
mkList(v, searchPath.size());
int n = 0;
for (auto & i : searchPath) {
- v2 = v.listElems()[n++] = allocValue();
+ auto v2 = v.listElems()[n++] = allocValue();
mkAttrs(*v2, 2);
mkString(*allocAttr(*v2, symbols.create("path")), i.second);
mkString(*allocAttr(*v2, symbols.create("prefix")), i.first);
@@ -2431,7 +3522,20 @@ void EvalState::createBaseEnv()
if (RegisterPrimOp::primOps)
for (auto & primOp : *RegisterPrimOp::primOps)
if (!primOp.requiredFeature || settings.isExperimentalFeatureEnabled(*primOp.requiredFeature))
- addPrimOp(primOp.name, primOp.arity, primOp.primOp);
+ addPrimOp({
+ .fun = primOp.fun,
+ .arity = std::max(primOp.args.size(), primOp.arity),
+ .name = symbols.create(primOp.name),
+ .args = std::move(primOp.args),
+ .doc = primOp.doc,
+ });
+
+ /* Add a wrapper around the derivation primop that computes the
+ `drvPath' and `outPath' attributes lazily. */
+ string path = canonPath(settings.nixDataDir + "/nix/corepkgs/derivation.nix", true);
+ sDerivationNix = symbols.create(path);
+ evalFile(path, v);
+ addConstant("derivation", v);
/* Now that we've added all primops, sort the `builtins' set,
because attribute lookups expect it to be sorted. */
diff --git a/src/libexpr/primops.hh b/src/libexpr/primops.hh
index 75c460ecf..ed5e2ea58 100644
--- a/src/libexpr/primops.hh
+++ b/src/libexpr/primops.hh
@@ -10,9 +10,11 @@ struct RegisterPrimOp
struct Info
{
std::string name;
- size_t arity;
- PrimOpFun primOp;
+ std::vector<std::string> args;
+ size_t arity = 0;
+ const char * doc;
std::optional<std::string> requiredFeature;
+ PrimOpFun fun;
};
typedef std::vector<Info> PrimOps;
@@ -26,6 +28,8 @@ struct RegisterPrimOp
size_t arity,
PrimOpFun fun,
std::optional<std::string> requiredFeature = {});
+
+ RegisterPrimOp(Info && info);
};
/* These primops are disabled without enableNativeCode, but plugins
diff --git a/src/libexpr/primops/fetchTree.cc b/src/libexpr/primops/fetchTree.cc
index cddcf0e59..06e8304b8 100644
--- a/src/libexpr/primops/fetchTree.cc
+++ b/src/libexpr/primops/fetchTree.cc
@@ -212,7 +212,7 @@ static void fetch(EvalState & state, const Pos & pos, Value * * args, Value & v,
: hashFile(htSHA256, path);
if (hash != *expectedHash)
throw Error((unsigned int) 102, "hash mismatch in file downloaded from '%s':\n wanted: %s\n got: %s",
- *url, expectedHash->to_string(Base32, true), hash->to_string(Base32, true));
+ *url, expectedHash->to_string(Base32, true), hash.to_string(Base32, true));
}
if (state.allowedPaths)
@@ -226,18 +226,187 @@ static void prim_fetchurl(EvalState & state, const Pos & pos, Value * * args, Va
fetch(state, pos, args, v, "fetchurl", false, "");
}
+static RegisterPrimOp primop_fetchurl({
+ .name = "__fetchurl",
+ .args = {"url"},
+ .doc = R"(
+ Download the specified URL and return the path of the downloaded
+ file. This function is not available if [restricted evaluation
+ mode](../command-ref/conf-file.md) is enabled.
+ )",
+ .fun = prim_fetchurl,
+});
+
static void prim_fetchTarball(EvalState & state, const Pos & pos, Value * * args, Value & v)
{
fetch(state, pos, args, v, "fetchTarball", true, "source");
}
+static RegisterPrimOp primop_fetchTarball({
+ .name = "fetchTarball",
+ .args = {"args"},
+ .doc = R"(
+ Download the specified URL, unpack it and return the path of the
+ unpacked tree. The file must be a tape archive (`.tar`) compressed
+ with `gzip`, `bzip2` or `xz`. The top-level path component of the
+ files in the tarball is removed, so it is best if the tarball
+ contains a single directory at top level. The typical use of the
+ function is to obtain external Nix expression dependencies, such as
+ a particular version of Nixpkgs, e.g.
+
+ ```nix
+ with import (fetchTarball https://github.com/NixOS/nixpkgs/archive/nixos-14.12.tar.gz) {};
+
+ stdenv.mkDerivation { … }
+ ```
+
+ The fetched tarball is cached for a certain amount of time (1 hour
+ by default) in `~/.cache/nix/tarballs/`. You can change the cache
+ timeout either on the command line with `--option tarball-ttl
+ <number of seconds>` or by setting the `tarball-ttl` option in the
+ Nix configuration file.
+
+ Note that when obtaining the hash with `nix-prefetch-url` the
+ option `--unpack` is required.
+
+ This function can also verify the contents against a hash. In that
+ case, the function takes a set instead of a URL. The set requires
+ the attribute `url` and the attribute `sha256`, e.g.
+
+ ```nix
+ with import (fetchTarball {
+ url = "https://github.com/NixOS/nixpkgs/archive/nixos-14.12.tar.gz";
+ sha256 = "1jppksrfvbk5ypiqdz4cddxdl8z6zyzdb2srq8fcffr327ld5jj2";
+ }) {};
+
+ stdenv.mkDerivation { … }
+ ```
+
+ This function is not available if [restricted evaluation
+ mode](../command-ref/conf-file.md) is enabled.
+ )",
+ .fun = prim_fetchTarball,
+});
+
static void prim_fetchGit(EvalState &state, const Pos &pos, Value **args, Value &v)
{
fetchTree(state, pos, args, v, "git", true);
}
-static RegisterPrimOp r2("__fetchurl", 1, prim_fetchurl);
-static RegisterPrimOp r3("fetchTarball", 1, prim_fetchTarball);
-static RegisterPrimOp r4("fetchGit", 1, prim_fetchGit);
+static RegisterPrimOp primop_fetchGit({
+ .name = "fetchGit",
+ .args = {"args"},
+ .doc = R"(
+ Fetch a path from git. *args* can be a URL, in which case the HEAD
+ of the repo at that URL is fetched. Otherwise, it can be an
+ attribute set with the following attributes (all except `url` optional):
+
+ - url
+ The URL of the repo.
+
+ - name
+ The name of the directory the repo should be exported to in the
+ store. Defaults to the basename of the URL.
+
+ - rev
+ The git revision to fetch. Defaults to the tip of `ref`.
+
+ - ref
+ The git ref to look for the requested revision under. This is
+ often a branch or tag name. Defaults to `HEAD`.
+
+ By default, the `ref` value is prefixed with `refs/heads/`. As
+ of Nix 2.3.0, Nix will not add this prefix if `ref` already
+ starts with `refs/`.
+
+ - submodules
+ A Boolean parameter that specifies whether submodules should be
+ checked out. Defaults to `false`.
+
+ Here are some examples of how to use `fetchGit`.
+
+ - To fetch a private repository over SSH:
+
+ ```nix
+ builtins.fetchGit {
+ url = "git@github.com:my-secret/repository.git";
+ ref = "master";
+ rev = "adab8b916a45068c044658c4158d81878f9ed1c3";
+ }
+ ```
+
+ - To fetch an arbitrary reference:
+
+ ```nix
+ builtins.fetchGit {
+ url = "https://github.com/NixOS/nix.git";
+ ref = "refs/heads/0.5-release";
+ }
+ ```
+
+ - If the revision you're looking for is in the default branch of
+ the git repository, you don't strictly need to specify the branch
+ name in the `ref` attribute.
+
+ However, if the revision you're looking for is in a non-default
+ branch, you will need to specify the `ref` attribute as well.
+
+ ```nix
+ builtins.fetchGit {
+ url = "https://github.com/nixos/nix.git";
+ rev = "841fcbd04755c7a2865c51c1e2d3b045976b7452";
+ ref = "1.11-maintenance";
+ }
+ ```
+
+ > **Note**
+ >
+ > It is good practice to always specify the branch a revision
+ > belongs to. Without the branch being specified, the fetcher
+ > might fail if the default branch changes. Additionally, it can
+ > be confusing to try a commit from a non-default branch and see
+ > the fetch fail. If the branch is specified, the fault is much
+ > more obvious.
+
+ - If the revision you're looking for is in the default branch of
+ the git repository, you may omit the `ref` attribute.
+
+ ```nix
+ builtins.fetchGit {
+ url = "https://github.com/nixos/nix.git";
+ rev = "841fcbd04755c7a2865c51c1e2d3b045976b7452";
+ }
+ ```
+
+ - To fetch a specific tag:
+
+ ```nix
+ builtins.fetchGit {
+ url = "https://github.com/nixos/nix.git";
+ ref = "refs/tags/1.9";
+ }
+ ```
+
+ - To fetch the latest version of a remote branch:
+
+ ```nix
+ builtins.fetchGit {
+ url = "ssh://git@github.com/nixos/nix.git";
+ ref = "master";
+ }
+ ```
+
+ > **Note**
+ >
+ > Nix will refetch the branch in accordance with
+ > the option `tarball-ttl`.
+
+ > **Note**
+ >
+ > This behavior is disabled in *Pure evaluation mode*.
+ )",
+ .fun = prim_fetchGit,
+});
}
diff --git a/src/libfetchers/fetchers.cc b/src/libfetchers/fetchers.cc
index 9c69fc564..eaa635595 100644
--- a/src/libfetchers/fetchers.cc
+++ b/src/libfetchers/fetchers.cc
@@ -130,12 +130,12 @@ std::pair<Tree, Input> Input::fetch(ref<Store> store) const
tree.actualPath = store->toRealPath(tree.storePath);
auto narHash = store->queryPathInfo(tree.storePath)->narHash;
- input.attrs.insert_or_assign("narHash", narHash->to_string(SRI, true));
+ input.attrs.insert_or_assign("narHash", narHash.to_string(SRI, true));
if (auto prevNarHash = getNarHash()) {
if (narHash != *prevNarHash)
throw Error((unsigned int) 102, "NAR hash mismatch in input '%s' (%s), expected '%s', got '%s'",
- to_string(), tree.actualPath, prevNarHash->to_string(SRI, true), narHash->to_string(SRI, true));
+ to_string(), tree.actualPath, prevNarHash->to_string(SRI, true), narHash.to_string(SRI, true));
}
if (auto prevLastModified = getLastModified()) {
diff --git a/src/libfetchers/git.cc b/src/libfetchers/git.cc
index 8b6e047f1..5ca0f8521 100644
--- a/src/libfetchers/git.cc
+++ b/src/libfetchers/git.cc
@@ -269,7 +269,7 @@ struct GitInputScheme : InputScheme
// modified dirty file?
input.attrs.insert_or_assign(
"lastModified",
- haveCommits ? std::stoull(runProgram("git", true, { "-C", actualUrl, "log", "-1", "--format=%ct", "HEAD" })) : 0);
+ haveCommits ? std::stoull(runProgram("git", true, { "-C", actualUrl, "log", "-1", "--format=%ct", "--no-show-signature", "HEAD" })) : 0);
return {
Tree(store->printStorePath(storePath), std::move(storePath)),
@@ -421,7 +421,7 @@ struct GitInputScheme : InputScheme
auto storePath = store->addToStore(name, tmpDir, FileIngestionMethod::Recursive, htSHA256, filter);
- auto lastModified = std::stoull(runProgram("git", true, { "-C", repoDir, "log", "-1", "--format=%ct", input.getRev()->gitRev() }));
+ auto lastModified = std::stoull(runProgram("git", true, { "-C", repoDir, "log", "-1", "--format=%ct", "--no-show-signature", input.getRev()->gitRev() }));
Attrs infoAttrs({
{"rev", input.getRev()->gitRev()},
diff --git a/src/libfetchers/github.cc b/src/libfetchers/github.cc
index 9f84ffb68..1cc0c5e2e 100644
--- a/src/libfetchers/github.cc
+++ b/src/libfetchers/github.cc
@@ -182,11 +182,21 @@ struct GitHubInputScheme : GitArchiveInputScheme
{
std::string type() override { return "github"; }
+ void addAccessToken(std::string & url) const
+ {
+ std::string accessToken = settings.githubAccessToken.get();
+ if (accessToken != "")
+ url += "?access_token=" + accessToken;
+ }
+
Hash getRevFromRef(nix::ref<Store> store, const Input & input) const override
{
auto host_url = maybeGetStrAttr(input.attrs, "url").value_or("github.com");
auto url = fmt("https://api.%s/repos/%s/%s/commits/%s", // FIXME: check
host_url, getStrAttr(input.attrs, "owner"), getStrAttr(input.attrs, "repo"), *input.getRef());
+
+ addAccessToken(url);
+
auto json = nlohmann::json::parse(
readFile(
store->toRealPath(
@@ -205,9 +215,7 @@ struct GitHubInputScheme : GitArchiveInputScheme
host_url, getStrAttr(input.attrs, "owner"), getStrAttr(input.attrs, "repo"),
input.getRev()->to_string(Base16, false));
- std::string accessToken = settings.githubAccessToken.get();
- if (accessToken != "")
- url += "?access_token=" + accessToken;
+ addAccessToken(url);
return url;
}
diff --git a/src/libfetchers/registry.cc b/src/libfetchers/registry.cc
index d4134ce29..4367ee810 100644
--- a/src/libfetchers/registry.cc
+++ b/src/libfetchers/registry.cc
@@ -147,7 +147,7 @@ static std::shared_ptr<Registry> getGlobalRegistry(ref<Store> store)
if (!hasPrefix(path, "/")) {
auto storePath = downloadFile(store, path, "flake-registry.json", false).storePath;
if (auto store2 = store.dynamic_pointer_cast<LocalFSStore>())
- store2->addPermRoot(storePath, getCacheDir() + "/nix/flake-registry.json", true);
+ store2->addPermRoot(storePath, getCacheDir() + "/nix/flake-registry.json");
path = store->toRealPath(storePath);
}
diff --git a/src/libfetchers/tarball.cc b/src/libfetchers/tarball.cc
index 55158cece..a2d16365e 100644
--- a/src/libfetchers/tarball.cc
+++ b/src/libfetchers/tarball.cc
@@ -67,8 +67,10 @@ DownloadFileResult downloadFile(
StringSink sink;
dumpString(*res.data, sink);
auto hash = hashString(htSHA256, *res.data);
- ValidPathInfo info(store->makeFixedOutputPath(FileIngestionMethod::Flat, hash, name));
- info.narHash = hashString(htSHA256, *sink.s);
+ ValidPathInfo info {
+ store->makeFixedOutputPath(FileIngestionMethod::Flat, hash, name),
+ hashString(htSHA256, *sink.s),
+ };
info.narSize = sink.s->size();
info.ca = FixedOutputHash {
.method = FileIngestionMethod::Flat,
diff --git a/src/libmain/common-args.cc b/src/libmain/common-args.cc
index 09f4cd133..3411e2d7a 100644
--- a/src/libmain/common-args.cc
+++ b/src/libmain/common-args.cc
@@ -28,7 +28,7 @@ MixCommonArgs::MixCommonArgs(const string & programName)
addFlag({
.longName = "option",
- .description = "set a Nix configuration option (overriding nix.conf)",
+ .description = "set a Nix configuration option (overriding `nix.conf`)",
.labels = {"name", "value"},
.handler = {[](std::string name, std::string value) {
try {
@@ -51,8 +51,8 @@ MixCommonArgs::MixCommonArgs(const string & programName)
addFlag({
.longName = "log-format",
- .description = "format of log output; \"raw\", \"internal-json\", \"bar\" "
- "or \"bar-with-logs\"",
+ .description = "format of log output; `raw`, `internal-json`, `bar` "
+ "or `bar-with-logs`",
.labels = {"format"},
.handler = {[](std::string format) { setLogFormat(format); }},
});
diff --git a/src/libmain/progress-bar.cc b/src/libmain/progress-bar.cc
index 3f7d99a1d..be3c06a38 100644
--- a/src/libmain/progress-bar.cc
+++ b/src/libmain/progress-bar.cc
@@ -362,7 +362,7 @@ public:
auto width = getWindowSize().second;
if (width <= 0) width = std::numeric_limits<decltype(width)>::max();
- writeToStderr("\r" + filterANSIEscapes(line, false, width) + "\e[K");
+ writeToStderr("\r" + filterANSIEscapes(line, false, width) + ANSI_NORMAL + "\e[K");
}
std::string getStatus(State & state)
diff --git a/src/libmain/shared.cc b/src/libmain/shared.cc
index 2b1f25ca3..22ae51e47 100644
--- a/src/libmain/shared.cc
+++ b/src/libmain/shared.cc
@@ -277,6 +277,8 @@ void printVersion(const string & programName)
#if HAVE_SODIUM
cfg.push_back("signed-caches");
#endif
+ std::cout << "System type: " << settings.thisSystem << "\n";
+ std::cout << "Additional system types: " << concatStringsSep(", ", settings.extraPlatforms.get()) << "\n";
std::cout << "Features: " << concatStringsSep(", ", cfg) << "\n";
std::cout << "System configuration file: " << settings.nixConfDir + "/nix.conf" << "\n";
std::cout << "User configuration files: " <<
diff --git a/src/libstore/binary-cache-store.cc b/src/libstore/binary-cache-store.cc
index 9682db730..5433fe50d 100644
--- a/src/libstore/binary-cache-store.cc
+++ b/src/libstore/binary-cache-store.cc
@@ -143,7 +143,7 @@ struct FileSource : FdSource
void BinaryCacheStore::addToStore(const ValidPathInfo & info, Source & narSource,
RepairFlag repair, CheckSigsFlag checkSigs)
{
- assert(info.narHash && info.narSize);
+ assert(info.narSize);
if (!repair && isValidPath(info.path)) {
// FIXME: copyNAR -> null sink
@@ -219,7 +219,7 @@ void BinaryCacheStore::addToStore(const ValidPathInfo & info, Source & narSource
}
}
- upsertFile(std::string(info.path.to_string()) + ".ls", jsonOut.str(), "application/json");
+ upsertFile(std::string(info.path.hashPart()) + ".ls", jsonOut.str(), "application/json");
}
/* Optionally maintain an index of DWARF debug info files
@@ -312,14 +312,10 @@ void BinaryCacheStore::narFromPath(const StorePath & storePath, Sink & sink)
{
auto info = queryPathInfo(storePath).cast<const NarInfo>();
- uint64_t narSize = 0;
+ LengthSink narSize;
+ TeeSink tee { sink, narSize };
- LambdaSink wrapperSink([&](const unsigned char * data, size_t len) {
- sink(data, len);
- narSize += len;
- });
-
- auto decompressor = makeDecompressionSink(info->compression, wrapperSink);
+ auto decompressor = makeDecompressionSink(info->compression, tee);
try {
getFile(info->url, *decompressor);
@@ -331,7 +327,7 @@ void BinaryCacheStore::narFromPath(const StorePath & storePath, Sink & sink)
stats.narRead++;
//stats.narReadCompressedBytes += nar->size(); // FIXME
- stats.narReadBytes += narSize;
+ stats.narReadBytes += narSize.length;
}
void BinaryCacheStore::queryPathInfoUncached(const StorePath & storePath,
@@ -385,7 +381,10 @@ StorePath BinaryCacheStore::addToStore(const string & name, const Path & srcPath
h = hashString(hashAlgo, s);
}
- ValidPathInfo info(makeFixedOutputPath(method, *h, name));
+ ValidPathInfo info {
+ makeFixedOutputPath(method, *h, name),
+ Hash::dummy, // Will be fixed in addToStore, which recomputes nar hash
+ };
auto source = StringSource { *sink.s };
addToStore(info, source, repair, CheckSigs);
@@ -396,7 +395,10 @@ StorePath BinaryCacheStore::addToStore(const string & name, const Path & srcPath
StorePath BinaryCacheStore::addTextToStore(const string & name, const string & s,
const StorePathSet & references, RepairFlag repair)
{
- ValidPathInfo info(computeStorePathForText(name, s, references));
+ ValidPathInfo info {
+ computeStorePathForText(name, s, references),
+ Hash::dummy, // Will be fixed in addToStore, which recomputes nar hash
+ };
info.references = references;
if (repair || !isValidPath(info.path)) {
diff --git a/src/libstore/build.cc b/src/libstore/build.cc
index 76baa1a6e..dd932cee9 100644
--- a/src/libstore/build.cc
+++ b/src/libstore/build.cc
@@ -1181,8 +1181,8 @@ void DerivationGoal::haveDerivation()
retrySubstitution = false;
- for (auto & i : drv->outputs)
- worker.store.addTempRoot(i.second.path(worker.store, drv->name));
+ for (auto & i : drv->outputsAndPaths(worker.store))
+ worker.store.addTempRoot(i.second.second);
/* Check what outputs paths are not already valid. */
auto invalidOutputs = checkPathValidity(false, buildMode == bmRepair);
@@ -1288,14 +1288,14 @@ void DerivationGoal::repairClosure()
/* Get the output closure. */
StorePathSet outputClosure;
- for (auto & i : drv->outputs) {
+ for (auto & i : drv->outputsAndPaths(worker.store)) {
if (!wantOutput(i.first, wantedOutputs)) continue;
- worker.store.computeFSClosure(i.second.path(worker.store, drv->name), outputClosure);
+ worker.store.computeFSClosure(i.second.second, outputClosure);
}
/* Filter out our own outputs (which we have already checked). */
- for (auto & i : drv->outputs)
- outputClosure.erase(i.second.path(worker.store, drv->name));
+ for (auto & i : drv->outputsAndPaths(worker.store))
+ outputClosure.erase(i.second.second);
/* Get all dependencies of this derivation so that we know which
derivation is responsible for which path in the output
@@ -1306,8 +1306,8 @@ void DerivationGoal::repairClosure()
for (auto & i : inputClosure)
if (i.isDerivation()) {
Derivation drv = worker.store.derivationFromPath(i);
- for (auto & j : drv.outputs)
- outputsToDrv.insert_or_assign(j.second.path(worker.store, drv.name), i);
+ for (auto & j : drv.outputsAndPaths(worker.store))
+ outputsToDrv.insert_or_assign(j.second.second, i);
}
/* Check each path (slow!). */
@@ -1466,16 +1466,16 @@ void DerivationGoal::tryToBuild()
/* If any of the outputs already exist but are not valid, delete
them. */
- for (auto & i : drv->outputs) {
- if (worker.store.isValidPath(i.second.path(worker.store, drv->name))) continue;
- debug("removing invalid path '%s'", worker.store.printStorePath(i.second.path(worker.store, drv->name)));
- deletePath(worker.store.Store::toRealPath(i.second.path(worker.store, drv->name)));
+ for (auto & i : drv->outputsAndPaths(worker.store)) {
+ if (worker.store.isValidPath(i.second.second)) continue;
+ debug("removing invalid path '%s'", worker.store.printStorePath(i.second.second));
+ deletePath(worker.store.Store::toRealPath(i.second.second));
}
/* Don't do a remote build if the derivation has the attribute
`preferLocalBuild' set. Also, check and repair modes are only
supported for local builds. */
- bool buildLocally = buildMode != bmNormal || parsedDrv->willBuildLocally();
+ bool buildLocally = buildMode != bmNormal || parsedDrv->willBuildLocally(worker.store);
/* Is the build hook willing to accept this job? */
if (!buildLocally) {
@@ -1919,8 +1919,8 @@ StorePathSet DerivationGoal::exportReferences(const StorePathSet & storePaths)
for (auto & j : paths2) {
if (j.isDerivation()) {
Derivation drv = worker.store.derivationFromPath(j);
- for (auto & k : drv.outputs)
- worker.store.computeFSClosure(k.second.path(worker.store, drv.name), paths);
+ for (auto & k : drv.outputsAndPaths(worker.store))
+ worker.store.computeFSClosure(k.second.second, paths);
}
}
@@ -1964,13 +1964,13 @@ void linkOrCopy(const Path & from, const Path & to)
void DerivationGoal::startBuilder()
{
/* Right platform? */
- if (!parsedDrv->canBuildLocally())
+ if (!parsedDrv->canBuildLocally(worker.store))
throw Error("a '%s' with features {%s} is required to build '%s', but I am a '%s' with features {%s}",
drv->platform,
concatStringsSep(", ", parsedDrv->getRequiredSystemFeatures()),
worker.store.printStorePath(drvPath),
settings.thisSystem,
- concatStringsSep<StringSet>(", ", settings.systemFeatures));
+ concatStringsSep<StringSet>(", ", worker.store.systemFeatures));
if (drv->isBuiltin())
preloadNSS();
@@ -2014,8 +2014,8 @@ void DerivationGoal::startBuilder()
chownToBuilder(tmpDir);
/* Substitute output placeholders with the actual output paths. */
- for (auto & output : drv->outputs)
- inputRewrites[hashPlaceholder(output.first)] = worker.store.printStorePath(output.second.path(worker.store, drv->name));
+ for (auto & output : drv->outputsAndPaths(worker.store))
+ inputRewrites[hashPlaceholder(output.first)] = worker.store.printStorePath(output.second.second);
/* Construct the environment passed to the builder. */
initEnv();
@@ -2199,8 +2199,8 @@ void DerivationGoal::startBuilder()
rebuilding a path that is in settings.dirsInChroot
(typically the dependencies of /bin/sh). Throw them
out. */
- for (auto & i : drv->outputs)
- dirsInChroot.erase(worker.store.printStorePath(i.second.path(worker.store, drv->name)));
+ for (auto & i : drv->outputsAndPaths(worker.store))
+ dirsInChroot.erase(worker.store.printStorePath(i.second.second));
#elif __APPLE__
/* We don't really have any parent prep work to do (yet?)
@@ -2612,8 +2612,8 @@ void DerivationGoal::writeStructuredAttrs()
/* Add an "outputs" object containing the output paths. */
nlohmann::json outputs;
- for (auto & i : drv->outputs)
- outputs[i.first] = rewriteStrings(worker.store.printStorePath(i.second.path(worker.store, drv->name)), inputRewrites);
+ for (auto & i : drv->outputsAndPaths(worker.store))
+ outputs[i.first] = rewriteStrings(worker.store.printStorePath(i.second.second), inputRewrites);
json["outputs"] = outputs;
/* Handle exportReferencesGraph. */
@@ -2756,8 +2756,12 @@ struct RestrictedStore : public LocalFSStore
void queryReferrers(const StorePath & path, StorePathSet & referrers) override
{ }
- OutputPathMap queryDerivationOutputMap(const StorePath & path) override
- { throw Error("queryDerivationOutputMap"); }
+ std::map<std::string, std::optional<StorePath>> queryPartialDerivationOutputMap(const StorePath & path) override
+ {
+ if (!goal.isAllowed(path))
+ throw InvalidPath("cannot query output map for unknown path '%s' in recursive Nix", printStorePath(path));
+ return next->queryPartialDerivationOutputMap(path);
+ }
std::optional<StorePath> queryPathFromHashPart(const std::string & hashPart) override
{ throw Error("queryPathFromHashPart"); }
@@ -2815,9 +2819,9 @@ struct RestrictedStore : public LocalFSStore
if (!goal.isAllowed(path.path))
throw InvalidPath("cannot build unknown path '%s' in recursive Nix", printStorePath(path.path));
auto drv = derivationFromPath(path.path);
- for (auto & output : drv.outputs)
+ for (auto & output : drv.outputsAndPaths(*this))
if (wantOutput(output.first, path.outputs))
- newPaths.insert(output.second.path(*this, drv.name));
+ newPaths.insert(output.second.second);
} else if (!goal.isAllowed(path.path))
throw InvalidPath("cannot build unknown path '%s' in recursive Nix", printStorePath(path.path));
}
@@ -2920,7 +2924,8 @@ void DerivationGoal::startDaemon()
FdSink to(remote.get());
try {
daemon::processConnection(store, from, to,
- daemon::NotTrusted, daemon::Recursive, "nobody", 65535);
+ daemon::NotTrusted, daemon::Recursive,
+ [&](Store & store) { store.createUser("nobody", 65535); });
debug("terminated daemon connection");
} catch (SysError &) {
ignoreException();
@@ -3179,7 +3184,7 @@ void DerivationGoal::runChild()
createDirs(chrootRootDir + "/dev/shm");
createDirs(chrootRootDir + "/dev/pts");
ss.push_back("/dev/full");
- if (settings.systemFeatures.get().count("kvm") && pathExists("/dev/kvm"))
+ if (worker.store.systemFeatures.get().count("kvm") && pathExists("/dev/kvm"))
ss.push_back("/dev/kvm");
ss.push_back("/dev/null");
ss.push_back("/dev/random");
@@ -3616,8 +3621,8 @@ void DerivationGoal::registerOutputs()
to do anything here. */
if (hook) {
bool allValid = true;
- for (auto & i : drv->outputs)
- if (!worker.store.isValidPath(i.second.path(worker.store, drv->name))) allValid = false;
+ for (auto & i : drv->outputsAndPaths(worker.store))
+ if (!worker.store.isValidPath(i.second.second)) allValid = false;
if (allValid) return;
}
@@ -3638,23 +3643,23 @@ void DerivationGoal::registerOutputs()
Nix calls. */
StorePathSet referenceablePaths;
for (auto & p : inputPaths) referenceablePaths.insert(p);
- for (auto & i : drv->outputs) referenceablePaths.insert(i.second.path(worker.store, drv->name));
+ for (auto & i : drv->outputsAndPaths(worker.store)) referenceablePaths.insert(i.second.second);
for (auto & p : addedPaths) referenceablePaths.insert(p);
/* Check whether the output paths were created, and grep each
output path to determine what other paths it references. Also make all
output paths read-only. */
- for (auto & i : drv->outputs) {
- auto path = worker.store.printStorePath(i.second.path(worker.store, drv->name));
- if (!missingPaths.count(i.second.path(worker.store, drv->name))) continue;
+ for (auto & i : drv->outputsAndPaths(worker.store)) {
+ auto path = worker.store.printStorePath(i.second.second);
+ if (!missingPaths.count(i.second.second)) continue;
Path actualPath = path;
if (needsHashRewrite()) {
- auto r = redirectedOutputs.find(i.second.path(worker.store, drv->name));
+ auto r = redirectedOutputs.find(i.second.second);
if (r != redirectedOutputs.end()) {
auto redirected = worker.store.Store::toRealPath(r->second);
if (buildMode == bmRepair
- && redirectedBadOutputs.count(i.second.path(worker.store, drv->name))
+ && redirectedBadOutputs.count(i.second.second)
&& pathExists(redirected))
replaceValidPath(path, redirected);
if (buildMode == bmCheck)
@@ -3721,7 +3726,7 @@ void DerivationGoal::registerOutputs()
hash). */
std::optional<ContentAddress> ca;
- if (! std::holds_alternative<DerivationOutputInputAddressed>(i.second.output)) {
+ if (! std::holds_alternative<DerivationOutputInputAddressed>(i.second.first.output)) {
DerivationOutputCAFloating outputHash;
std::visit(overloaded {
[&](DerivationOutputInputAddressed doi) {
@@ -3736,7 +3741,7 @@ void DerivationGoal::registerOutputs()
[&](DerivationOutputCAFloating dof) {
outputHash = dof;
},
- }, i.second.output);
+ }, i.second.first.output);
if (outputHash.method == FileIngestionMethod::Flat) {
/* The output path should be a regular file without execute permission. */
@@ -3753,12 +3758,12 @@ void DerivationGoal::registerOutputs()
? hashPath(outputHash.hashType, actualPath).first
: hashFile(outputHash.hashType, actualPath);
- auto dest = worker.store.makeFixedOutputPath(outputHash.method, h2, i.second.path(worker.store, drv->name).name());
+ auto dest = worker.store.makeFixedOutputPath(outputHash.method, h2, i.second.second.name());
// true if either floating CA, or incorrect fixed hash.
bool needsMove = true;
- if (auto p = std::get_if<DerivationOutputCAFixed>(& i.second.output)) {
+ if (auto p = std::get_if<DerivationOutputCAFixed>(& i.second.first.output)) {
Hash & h = p->hash.hash;
if (h != h2) {
@@ -3864,8 +3869,10 @@ void DerivationGoal::registerOutputs()
worker.markContentsGood(worker.store.parseStorePath(path));
}
- ValidPathInfo info(worker.store.parseStorePath(path));
- info.narHash = hash.first;
+ ValidPathInfo info {
+ worker.store.parseStorePath(path),
+ hash.first,
+ };
info.narSize = hash.second;
info.references = std::move(references);
info.deriver = drvPath;
@@ -3921,8 +3928,8 @@ void DerivationGoal::registerOutputs()
/* If this is the first round of several, then move the output out of the way. */
if (nrRounds > 1 && curRound == 1 && curRound < nrRounds && keepPreviousRound) {
- for (auto & i : drv->outputs) {
- auto path = worker.store.printStorePath(i.second.path(worker.store, drv->name));
+ for (auto & i : drv->outputsAndPaths(worker.store)) {
+ auto path = worker.store.printStorePath(i.second.second);
Path prev = path + checkSuffix;
deletePath(prev);
Path dst = path + checkSuffix;
@@ -3939,8 +3946,8 @@ void DerivationGoal::registerOutputs()
/* Remove the .check directories if we're done. FIXME: keep them
if the result was not determistic? */
if (curRound == nrRounds) {
- for (auto & i : drv->outputs) {
- Path prev = worker.store.printStorePath(i.second.path(worker.store, drv->name)) + checkSuffix;
+ for (auto & i : drv->outputsAndPaths(worker.store)) {
+ Path prev = worker.store.printStorePath(i.second.second) + checkSuffix;
deletePath(prev);
}
}
@@ -4238,12 +4245,12 @@ void DerivationGoal::flushLine()
StorePathSet DerivationGoal::checkPathValidity(bool returnValid, bool checkHash)
{
StorePathSet result;
- for (auto & i : drv->outputs) {
+ for (auto & i : drv->outputsAndPaths(worker.store)) {
if (!wantOutput(i.first, wantedOutputs)) continue;
bool good =
- worker.store.isValidPath(i.second.path(worker.store, drv->name)) &&
- (!checkHash || worker.pathContentsGood(i.second.path(worker.store, drv->name)));
- if (good == returnValid) result.insert(i.second.path(worker.store, drv->name));
+ worker.store.isValidPath(i.second.second) &&
+ (!checkHash || worker.pathContentsGood(i.second.second));
+ if (good == returnValid) result.insert(i.second.second);
}
return result;
}
@@ -4980,7 +4987,7 @@ void Worker::waitForInput()
std::vector<unsigned char> buffer(4096);
for (auto & k : fds2) {
if (pollStatus.at(fdToPollStatus.at(k)).revents) {
- ssize_t rd = read(k, buffer.data(), buffer.size());
+ ssize_t rd = ::read(k, buffer.data(), buffer.size());
// FIXME: is there a cleaner way to handle pt close
// than EIO? Is this even standard?
if (rd == 0 || (rd == -1 && errno == EIO)) {
@@ -5070,7 +5077,7 @@ bool Worker::pathContentsGood(const StorePath & path)
if (!pathExists(store.printStorePath(path)))
res = false;
else {
- HashResult current = hashPath(info->narHash->type, store.printStorePath(path));
+ HashResult current = hashPath(info->narHash.type, store.printStorePath(path));
Hash nullHash(htSHA256);
res = info->narHash == nullHash || info->narHash == current.first;
}
diff --git a/src/libstore/builtins/fetchurl.cc b/src/libstore/builtins/fetchurl.cc
index 6585a480d..4fb5d8a06 100644
--- a/src/libstore/builtins/fetchurl.cc
+++ b/src/libstore/builtins/fetchurl.cc
@@ -58,6 +58,20 @@ void builtinFetchurl(const BasicDerivation & drv, const std::string & netrcData)
}
};
+ /* Try the hashed mirrors first. */
+ if (getAttr("outputHashMode") == "flat")
+ for (auto hashedMirror : settings.hashedMirrors.get())
+ try {
+ if (!hasSuffix(hashedMirror, "/")) hashedMirror += '/';
+ std::optional<HashType> ht = parseHashTypeOpt(getAttr("outputHashAlgo"));
+ Hash h = newHashAllowEmpty(getAttr("outputHash"), ht);
+ fetch(hashedMirror + printHashType(h.type) + "/" + h.to_string(Base16, false));
+ return;
+ } catch (Error & e) {
+ debug(e.what());
+ }
+
+ /* Otherwise try the specified URL. */
fetch(mainUrl);
}
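A minimal standalone sketch of the URL shape the new hashed-mirrors path tries before falling back to mainUrl; the mirror host and hash value below are placeholders, and only the "<mirror>/<hash type>/<base16 hash>" layout is taken from the code above.

    // Illustrative only: builds the same URL shape as builtinFetchurl's
    // hashed-mirror attempt for a flat, sha256 fixed-output download.
    #include <iostream>
    #include <string>

    int main()
    {
        std::string hashedMirror = "https://mirror.example.org"; // placeholder host
        if (hashedMirror.back() != '/') hashedMirror += '/';
        std::string hashType = "sha256";                  // from outputHashAlgo
        std::string hashBase16 = "<base16 output hash>";  // placeholder value
        std::cout << hashedMirror + hashType + "/" + hashBase16 << "\n";
        return 0;
    }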
diff --git a/src/libstore/daemon.cc b/src/libstore/daemon.cc
index 5e568fc94..f35ddb522 100644
--- a/src/libstore/daemon.cc
+++ b/src/libstore/daemon.cc
@@ -289,7 +289,7 @@ static void performOp(TunnelLogger * logger, ref<Store> store,
logger->startWork();
auto hash = store->queryPathInfo(path)->narHash;
logger->stopWork();
- to << hash->to_string(Base16, false);
+ to << hash.to_string(Base16, false);
break;
}
@@ -325,9 +325,9 @@ static void performOp(TunnelLogger * logger, ref<Store> store,
case wopQueryDerivationOutputMap: {
auto path = store->parseStorePath(readString(from));
logger->startWork();
- OutputPathMap outputs = store->queryDerivationOutputMap(path);
+ auto outputs = store->queryPartialDerivationOutputMap(path);
logger->stopWork();
- writeOutputPathMap(*store, to, outputs);
+ worker_proto::write(*store, to, outputs);
break;
}
@@ -454,8 +454,46 @@ static void performOp(TunnelLogger * logger, ref<Store> store,
readDerivation(from, *store, drv, Derivation::nameFromPath(drvPath));
BuildMode buildMode = (BuildMode) readInt(from);
logger->startWork();
- if (!trusted)
- throw Error("you are not privileged to build derivations");
+
+ /* Content-addressed derivations are trustless because their output paths
+ are verified by their content alone, so any derivation is free to
+ try to produce such a path.
+
+ Input-addressed derivation output paths, however, are calculated
+ from the derivation closure that produced them---even knowing the
+ root derivation is not enough. That the output data actually came
+ from those derivations is fundamentally unverifiable, but the daemon
+ trusts itself on that matter. The question instead is whether the
+ submitted plan has rights to the output paths it wants to fill, and
+ at least the derivation closure proves that.
+
+          It would have been nice if the input-addressing algorithm merely depended
+ on the build time closure, rather than depending on the derivation
+ closure. That would mean input-addressed paths used at build time
+ would just be trusted and not need their own evidence. This is in
+ fact fine as the same guarantees would hold *inductively*: either
+ the remote builder has those paths and already trusts them, or it
+ needs to build them too and thus their evidence must be provided in
+ turn. The advantage of this variant algorithm is that the evidence
+ for input-addressed paths which the remote builder already has
+ doesn't need to be sent again.
+
+ That said, now that we have floating CA derivations, it is better
+ that people just migrate to those which also solve this problem, and
+ others. It's the same migration difficulty with strictly more
+ benefit.
+
+ Lastly, do note that when we parse fixed-output content-addressed
+ derivations, we throw out the precomputed output paths and just
+ store the hashes, so there aren't two competing sources of truth an
+ attacker could exploit. */
+ if (drv.type() == DerivationType::InputAddressed && !trusted)
+ throw Error("you are not privileged to build input-addressed derivations");
+
+ /* Make sure that the non-input-addressed derivations that got this far
+ are in fact content-addressed if we don't trust them. */
+ assert(derivationIsCA(drv.type()) || trusted);
+
auto res = store->buildDerivation(drvPath, drv, buildMode);
logger->stopWork();
to << res.status << res.errorMsg;
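The long comment above reduces to a simple predicate: a build request is accepted when the client is trusted or the derivation is content-addressed. A hedged sketch rather than code from this change; DerivationType and derivationIsCA are the helpers from derivations.hh used in the assert above, while the wrapper function itself is illustrative only.

    #include "derivations.hh"

    // Illustrative only: the acceptance policy enforced by the new check in
    // the wopBuildDerivation handler.
    static bool acceptBuildRequest(nix::DerivationType type, bool trusted)
    {
        return trusted || nix::derivationIsCA(type);
    }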
@@ -638,7 +676,7 @@ static void performOp(TunnelLogger * logger, ref<Store> store,
if (GET_PROTOCOL_MINOR(clientVersion) >= 17)
to << 1;
to << (info->deriver ? store->printStorePath(*info->deriver) : "")
- << info->narHash->to_string(Base16, false);
+ << info->narHash.to_string(Base16, false);
writeStorePaths(*store, to, info->references);
to << info->registrationTime << info->narSize;
if (GET_PROTOCOL_MINOR(clientVersion) >= 16) {
@@ -688,17 +726,18 @@ static void performOp(TunnelLogger * logger, ref<Store> store,
auto path = store->parseStorePath(readString(from));
logger->startWork();
logger->stopWork();
- dumpPath(store->printStorePath(path), to);
+ dumpPath(store->toRealPath(path), to);
break;
}
case wopAddToStoreNar: {
bool repair, dontCheckSigs;
- ValidPathInfo info(store->parseStorePath(readString(from)));
+ auto path = store->parseStorePath(readString(from));
auto deriver = readString(from);
+ auto narHash = Hash::parseAny(readString(from), htSHA256);
+ ValidPathInfo info { path, narHash };
if (deriver != "")
info.deriver = store->parseStorePath(deriver);
- info.narHash = Hash::parseAny(readString(from), htSHA256);
info.references = readStorePaths<StorePathSet>(*store, from);
from >> info.registrationTime >> info.narSize >> info.ultimate;
info.sigs = readStrings<StringSet>(from);
@@ -817,8 +856,7 @@ void processConnection(
FdSink & to,
TrustedFlag trusted,
RecursiveFlag recursive,
- const std::string & userName,
- uid_t userId)
+ std::function<void(Store &)> authHook)
{
auto monitor = !recursive ? std::make_unique<MonitorFdHup>(from.fd) : nullptr;
@@ -859,15 +897,7 @@ void processConnection(
/* If we can't accept clientVersion, then throw an error
*here* (not above). */
-
-#if 0
- /* Prevent users from doing something very dangerous. */
- if (geteuid() == 0 &&
- querySetting("build-users-group", "") == "")
- throw Error("if you run 'nix-daemon' as root, then you MUST set 'build-users-group'!");
-#endif
-
- store->createUser(userName, userId);
+ authHook(*store);
tunnelLogger->stopWork();
to.flush();
diff --git a/src/libstore/daemon.hh b/src/libstore/daemon.hh
index 266932013..841ace316 100644
--- a/src/libstore/daemon.hh
+++ b/src/libstore/daemon.hh
@@ -12,7 +12,10 @@ void processConnection(
FdSink & to,
TrustedFlag trusted,
RecursiveFlag recursive,
- const std::string & userName,
- uid_t userId);
+ /* Arbitrary hook to check authorization / initialize user data / whatever
+ after the protocol has been negotiated. The idea is that this function
+      and everything it calls need not know about these concerns; the
+      `nix-daemon` handles them instead. */
+ std::function<void(Store &)> authHook);
}
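A hedged sketch of the kind of callback a caller such as nix-daemon can now supply for authHook; userName and userId stand in for whatever identity the daemon derives from the client connection, and only Store::createUser is taken from the code removed above.

    #include <functional>
    #include <string>
    #include <sys/types.h>
    #include "store-api.hh"

    // Illustrative only: replaces the createUser() call that used to live
    // inside processConnection() itself.
    std::function<void(nix::Store &)> makeAuthHook(std::string userName, uid_t userId)
    {
        return [=](nix::Store & store) {
            store.createUser(userName, userId);
        };
    }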
diff --git a/src/libstore/derivations.cc b/src/libstore/derivations.cc
index 68b081058..d263cf0c5 100644
--- a/src/libstore/derivations.cc
+++ b/src/libstore/derivations.cc
@@ -61,8 +61,8 @@ bool BasicDerivation::isBuiltin() const
}
-StorePath writeDerivation(ref<Store> store,
- const Derivation & drv, std::string_view name, RepairFlag repair)
+StorePath writeDerivation(Store & store,
+ const Derivation & drv, RepairFlag repair)
{
auto references = drv.inputSrcs;
for (auto & i : drv.inputDrvs)
@@ -70,11 +70,11 @@ StorePath writeDerivation(ref<Store> store,
/* Note that the outputs of a derivation are *not* references
(that can be missing (of course) and should not necessarily be
held during a garbage collection). */
- auto suffix = std::string(name) + drvExtension;
- auto contents = drv.unparse(*store, false);
+ auto suffix = std::string(drv.name) + drvExtension;
+ auto contents = drv.unparse(store, false);
return settings.readOnlyMode
- ? store->computeStorePathForText(suffix, contents, references)
- : store->addTextToStore(suffix, contents, references, repair);
+ ? store.computeStorePathForText(suffix, contents, references)
+ : store.addTextToStore(suffix, contents, references, repair);
}
@@ -139,18 +139,14 @@ static StringSet parseStrings(std::istream & str, bool arePaths)
}
-static DerivationOutput parseDerivationOutput(const Store & store, std::istringstream & str)
+static DerivationOutput parseDerivationOutput(const Store & store,
+ StorePath path, std::string_view hashAlgo, std::string_view hash)
{
- expect(str, ","); auto path = store.parseStorePath(parsePath(str));
- expect(str, ","); auto hashAlgo = parseString(str);
- expect(str, ","); const auto hash = parseString(str);
- expect(str, ")");
-
if (hashAlgo != "") {
auto method = FileIngestionMethod::Flat;
if (string(hashAlgo, 0, 2) == "r:") {
method = FileIngestionMethod::Recursive;
- hashAlgo = string(hashAlgo, 2);
+ hashAlgo = hashAlgo.substr(2);
}
const HashType hashType = parseHashType(hashAlgo);
@@ -178,8 +174,18 @@ static DerivationOutput parseDerivationOutput(const Store & store, std::istrings
};
}
+static DerivationOutput parseDerivationOutput(const Store & store, std::istringstream & str)
+{
+ expect(str, ","); auto path = store.parseStorePath(parsePath(str));
+ expect(str, ","); const auto hashAlgo = parseString(str);
+ expect(str, ","); const auto hash = parseString(str);
+ expect(str, ")");
+
+ return parseDerivationOutput(store, std::move(path), hashAlgo, hash);
+}
-static Derivation parseDerivation(const Store & store, std::string && s, std::string_view name)
+
+Derivation parseDerivation(const Store & store, std::string && s, std::string_view name)
{
Derivation drv;
drv.name = name;
@@ -227,34 +233,6 @@ static Derivation parseDerivation(const Store & store, std::string && s, std::st
}
-Derivation readDerivation(const Store & store, const Path & drvPath, std::string_view name)
-{
- try {
- return parseDerivation(store, readFile(drvPath), name);
- } catch (FormatError & e) {
- throw Error("error parsing derivation '%1%': %2%", drvPath, e.msg());
- }
-}
-
-
-Derivation Store::derivationFromPath(const StorePath & drvPath)
-{
- ensurePath(drvPath);
- return readDerivation(drvPath);
-}
-
-
-Derivation Store::readDerivation(const StorePath & drvPath)
-{
- auto accessor = getFSAccessor();
- try {
- return parseDerivation(*this, accessor->readFile(printStorePath(drvPath)), Derivation::nameFromPath(drvPath));
- } catch (FormatError & e) {
- throw Error("error parsing derivation '%s': %s", printStorePath(drvPath), e.msg());
- }
-}
-
-
static void printString(string & res, std::string_view s)
{
char buf[s.size() * 2 + 2];
@@ -474,12 +452,12 @@ DrvHashModulo hashDerivationModulo(Store & store, const Derivation & drv, bool m
throw Error("Regular input-addressed derivations are not yet allowed to depend on CA derivations");
case DerivationType::CAFixed: {
std::map<std::string, Hash> outputHashes;
- for (const auto & i : drv.outputs) {
- auto & dof = std::get<DerivationOutputCAFixed>(i.second.output);
+ for (const auto & i : drv.outputsAndPaths(store)) {
+ auto & dof = std::get<DerivationOutputCAFixed>(i.second.first.output);
auto hash = hashString(htSHA256, "fixed:out:"
+ dof.hash.printMethodAlgo() + ":"
+ dof.hash.hash.to_string(Base16, false) + ":"
- + store.printStorePath(i.second.path(store, drv.name)));
+ + store.printStorePath(i.second.second));
outputHashes.insert_or_assign(i.first, std::move(hash));
}
return outputHashes;
@@ -533,46 +511,18 @@ bool wantOutput(const string & output, const std::set<string> & wanted)
StorePathSet BasicDerivation::outputPaths(const Store & store) const
{
StorePathSet paths;
- for (auto & i : outputs)
- paths.insert(i.second.path(store, name));
+ for (auto & i : outputsAndPaths(store))
+ paths.insert(i.second.second);
return paths;
}
static DerivationOutput readDerivationOutput(Source & in, const Store & store)
{
auto path = store.parseStorePath(readString(in));
- auto hashAlgo = readString(in);
- auto hash = readString(in);
+ const auto hashAlgo = readString(in);
+ const auto hash = readString(in);
- if (hashAlgo != "") {
- auto method = FileIngestionMethod::Flat;
- if (string(hashAlgo, 0, 2) == "r:") {
- method = FileIngestionMethod::Recursive;
- hashAlgo = string(hashAlgo, 2);
- }
- auto hashType = parseHashType(hashAlgo);
- return hash != ""
- ? DerivationOutput {
- .output = DerivationOutputCAFixed {
- .hash = FixedOutputHash {
- .method = std::move(method),
- .hash = Hash::parseNonSRIUnprefixed(hash, hashType),
- },
- }
- }
- : (settings.requireExperimentalFeature("ca-derivations"),
- DerivationOutput {
- .output = DerivationOutputCAFloating {
- .method = std::move(method),
- .hashType = std::move(hashType),
- },
- });
- } else
- return DerivationOutput {
- .output = DerivationOutputInputAddressed {
- .path = std::move(path),
- }
- };
+ return parseDerivationOutput(store, std::move(path), hashAlgo, hash);
}
StringSet BasicDerivation::outputNames() const
@@ -583,6 +533,27 @@ StringSet BasicDerivation::outputNames() const
return names;
}
+DerivationOutputsAndPaths BasicDerivation::outputsAndPaths(const Store & store) const {
+ DerivationOutputsAndPaths outsAndPaths;
+ for (auto output : outputs)
+ outsAndPaths.insert(std::make_pair(
+ output.first,
+ std::make_pair(output.second, output.second.path(store, name))
+ )
+ );
+ return outsAndPaths;
+}
+
+DerivationOutputsAndOptPaths BasicDerivation::outputsAndOptPaths(const Store & store) const {
+ DerivationOutputsAndOptPaths outsAndOptPaths;
+ for (auto output : outputs)
+ outsAndOptPaths.insert(std::make_pair(
+ output.first,
+ std::make_pair(output.second, output.second.pathOpt(store, output.first))
+ )
+ );
+ return outsAndOptPaths;
+}
std::string_view BasicDerivation::nameFromPath(const StorePath & drvPath) {
auto nameWithSuffix = drvPath.name();
@@ -623,9 +594,9 @@ Source & readDerivation(Source & in, const Store & store, BasicDerivation & drv,
void writeDerivation(Sink & out, const Store & store, const BasicDerivation & drv)
{
out << drv.outputs.size();
- for (auto & i : drv.outputs) {
+ for (auto & i : drv.outputsAndPaths(store)) {
out << i.first
- << store.printStorePath(i.second.path(store, drv.name));
+ << store.printStorePath(i.second.second);
std::visit(overloaded {
[&](DerivationOutputInputAddressed doi) {
out << "" << "";
@@ -638,7 +609,7 @@ void writeDerivation(Sink & out, const Store & store, const BasicDerivation & dr
out << (makeFileIngestionPrefix(dof.method) + printHashType(dof.hashType))
<< "";
},
- }, i.second.output);
+ }, i.second.first.output);
}
writeStorePaths(store, out, drv.inputSrcs);
out << drv.platform << drv.builder << drv.args;
diff --git a/src/libstore/derivations.hh b/src/libstore/derivations.hh
index 14e0e947a..573502c90 100644
--- a/src/libstore/derivations.hh
+++ b/src/libstore/derivations.hh
@@ -47,6 +47,9 @@ struct DerivationOutput
DerivationOutputCAFloating
> output;
std::optional<HashType> hashAlgoOpt(const Store & store) const;
+ /* Note, when you use this function you should make sure that you're passing
+ the right derivation name. When in doubt, you should use the safer
+ interface provided by BasicDerivation::outputsAndPaths */
std::optional<StorePath> pathOpt(const Store & store, std::string_view drvName) const;
/* DEPRECATED: Remove after CA drvs are fully implemented */
StorePath path(const Store & store, std::string_view drvName) const {
@@ -58,6 +61,15 @@ struct DerivationOutput
typedef std::map<string, DerivationOutput> DerivationOutputs;
+/* These are analogues to the previous DerivationOutputs data type, but they
+   also contain, for each output, the (optional) store path in which it would
+ be written. To calculate values of these types, see the corresponding
+ functions in BasicDerivation */
+typedef std::map<string, std::pair<DerivationOutput, StorePath>>
+ DerivationOutputsAndPaths;
+typedef std::map<string, std::pair<DerivationOutput, std::optional<StorePath>>>
+ DerivationOutputsAndOptPaths;
+
/* For inputs that are sub-derivations, we specify exactly which
output IDs we are interested in. */
typedef std::map<StorePath, StringSet> DerivationInputs;
@@ -107,6 +119,13 @@ struct BasicDerivation
/* Return the output names of a derivation. */
StringSet outputNames() const;
+  /* Calculate the maps that contain all the DerivationOutputs, but
+ augmented with knowledge of the Store paths they would be written into.
+ The first one of these functions will be removed when the CA work is
+ completed */
+ DerivationOutputsAndPaths outputsAndPaths(const Store & store) const;
+ DerivationOutputsAndOptPaths outputsAndOptPaths(const Store & store) const;
+
static std::string_view nameFromPath(const StorePath & storePath);
};
@@ -127,11 +146,11 @@ class Store;
enum RepairFlag : bool { NoRepair = false, Repair = true };
/* Write a derivation to the Nix store, and return its path. */
-StorePath writeDerivation(ref<Store> store,
- const Derivation & drv, std::string_view name, RepairFlag repair = NoRepair);
+StorePath writeDerivation(Store & store,
+ const Derivation & drv, RepairFlag repair = NoRepair);
/* Read a derivation from a file. */
-Derivation readDerivation(const Store & store, const Path & drvPath, std::string_view name);
+Derivation parseDerivation(const Store & store, std::string && s, std::string_view name);
// FIXME: remove
bool isDerivation(const string & fileName);
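A minimal usage sketch of the new helper, assuming an in-scope derivation and store; the i.second.first / i.second.second access pattern matches the call sites rewritten throughout this change, while the function name and printing are illustrative only.

    #include <iostream>
    #include "derivations.hh"
    #include "store-api.hh"

    // Illustrative only: iterate outputs together with their precomputed
    // store paths instead of calling DerivationOutput::path() per output.
    void printOutputPaths(const nix::BasicDerivation & drv, const nix::Store & store)
    {
        for (auto & i : drv.outputsAndPaths(store))
            std::cout << i.first << " -> "
                      << store.printStorePath(i.second.second) << "\n";
    }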
diff --git a/src/libstore/dummy-store.cc b/src/libstore/dummy-store.cc
new file mode 100644
index 000000000..7a5744bc1
--- /dev/null
+++ b/src/libstore/dummy-store.cc
@@ -0,0 +1,59 @@
+#include "store-api.hh"
+
+namespace nix {
+
+static std::string uriScheme = "dummy://";
+
+struct DummyStore : public Store
+{
+ DummyStore(const Params & params)
+ : Store(params)
+ { }
+
+ string getUri() override
+ {
+ return uriScheme;
+ }
+
+ void queryPathInfoUncached(const StorePath & path,
+ Callback<std::shared_ptr<const ValidPathInfo>> callback) noexcept override
+ {
+ callback(nullptr);
+ }
+
+ std::optional<StorePath> queryPathFromHashPart(const std::string & hashPart) override
+ { unsupported("queryPathFromHashPart"); }
+
+ void addToStore(const ValidPathInfo & info, Source & source,
+ RepairFlag repair, CheckSigsFlag checkSigs) override
+ { unsupported("addToStore"); }
+
+ StorePath addToStore(const string & name, const Path & srcPath,
+ FileIngestionMethod method, HashType hashAlgo,
+ PathFilter & filter, RepairFlag repair) override
+ { unsupported("addToStore"); }
+
+ StorePath addTextToStore(const string & name, const string & s,
+ const StorePathSet & references, RepairFlag repair) override
+ { unsupported("addTextToStore"); }
+
+ void narFromPath(const StorePath & path, Sink & sink) override
+ { unsupported("narFromPath"); }
+
+ void ensurePath(const StorePath & path) override
+ { unsupported("ensurePath"); }
+
+ BuildResult buildDerivation(const StorePath & drvPath, const BasicDerivation & drv,
+ BuildMode buildMode) override
+ { unsupported("buildDerivation"); }
+};
+
+static RegisterStoreImplementation regStore([](
+ const std::string & uri, const Store::Params & params)
+ -> std::shared_ptr<Store>
+{
+ if (uri != uriScheme) return nullptr;
+ return std::make_shared<DummyStore>(params);
+});
+
+}
diff --git a/src/libstore/export-import.cc b/src/libstore/export-import.cc
index a0fc22264..ccd466d09 100644
--- a/src/libstore/export-import.cc
+++ b/src/libstore/export-import.cc
@@ -38,9 +38,9 @@ void Store::exportPath(const StorePath & path, Sink & sink)
filesystem corruption from spreading to other machines.
Don't complain if the stored hash is zero (unknown). */
Hash hash = hashSink.currentHash().first;
- if (hash != info->narHash && info->narHash != Hash(info->narHash->type))
+ if (hash != info->narHash && info->narHash != Hash(info->narHash.type))
throw Error("hash of path '%s' has changed from '%s' to '%s'!",
- printStorePath(path), info->narHash->to_string(Base32, true), hash.to_string(Base32, true));
+ printStorePath(path), info->narHash.to_string(Base32, true), hash.to_string(Base32, true));
teeSink
<< exportMagic
@@ -69,17 +69,18 @@ StorePaths Store::importPaths(Source & source, CheckSigsFlag checkSigs)
if (magic != exportMagic)
throw Error("Nix archive cannot be imported; wrong format");
- ValidPathInfo info(parseStorePath(readString(source)));
+ auto path = parseStorePath(readString(source));
//Activity act(*logger, lvlInfo, format("importing path '%s'") % info.path);
- info.references = readStorePaths<StorePathSet>(*this, source);
-
+ auto references = readStorePaths<StorePathSet>(*this, source);
auto deriver = readString(source);
+ auto narHash = hashString(htSHA256, *saved.s);
+
+ ValidPathInfo info { path, narHash };
if (deriver != "")
info.deriver = parseStorePath(deriver);
-
- info.narHash = hashString(htSHA256, *saved.s);
+ info.references = references;
info.narSize = saved.s->size();
// Ignore optional legacy signature.
diff --git a/src/libstore/filetransfer.hh b/src/libstore/filetransfer.hh
index 25ade0add..0d608c8d8 100644
--- a/src/libstore/filetransfer.hh
+++ b/src/libstore/filetransfer.hh
@@ -17,15 +17,30 @@ struct FileTransferSettings : Config
Setting<std::string> userAgentSuffix{this, "", "user-agent-suffix",
"String appended to the user agent in HTTP requests."};
- Setting<size_t> httpConnections{this, 25, "http-connections",
- "Number of parallel HTTP connections.",
+ Setting<size_t> httpConnections{
+ this, 25, "http-connections",
+ R"(
+ The maximum number of parallel TCP connections used to fetch
+ files from binary caches and by other downloads. It defaults
+ to 25. 0 means no limit.
+ )",
{"binary-caches-parallel-connections"}};
- Setting<unsigned long> connectTimeout{this, 0, "connect-timeout",
- "Timeout for connecting to servers during downloads. 0 means use curl's builtin default."};
-
- Setting<unsigned long> stalledDownloadTimeout{this, 300, "stalled-download-timeout",
- "Timeout (in seconds) for receiving data from servers during download. Nix cancels idle downloads after this timeout's duration."};
+ Setting<unsigned long> connectTimeout{
+ this, 0, "connect-timeout",
+ R"(
+ The timeout (in seconds) for establishing connections in the
+ binary cache substituter. It corresponds to `curl`’s
+ `--connect-timeout` option.
+ )"};
+
+ Setting<unsigned long> stalledDownloadTimeout{
+ this, 300, "stalled-download-timeout",
+ R"(
+ The timeout (in seconds) for receiving data from servers
+ during download. Nix cancels idle downloads after this
+ timeout's duration.
+ )"};
Setting<unsigned int> tries{this, 5, "download-attempts",
"How often Nix will attempt to download a file before giving up."};
diff --git a/src/libstore/gc.cc b/src/libstore/gc.cc
index e74382ed2..e6cbc525d 100644
--- a/src/libstore/gc.cc
+++ b/src/libstore/gc.cc
@@ -85,8 +85,7 @@ void LocalStore::addIndirectRoot(const Path & path)
}
-Path LocalFSStore::addPermRoot(const StorePath & storePath,
- const Path & _gcRoot, bool indirect, bool allowOutsideRootsDir)
+Path LocalFSStore::addPermRoot(const StorePath & storePath, const Path & _gcRoot)
{
Path gcRoot(canonPath(_gcRoot));
@@ -95,47 +94,12 @@ Path LocalFSStore::addPermRoot(const StorePath & storePath,
"creating a garbage collector root (%1%) in the Nix store is forbidden "
"(are you running nix-build inside the store?)", gcRoot);
- if (indirect) {
- /* Don't clobber the link if it already exists and doesn't
- point to the Nix store. */
- if (pathExists(gcRoot) && (!isLink(gcRoot) || !isInStore(readLink(gcRoot))))
- throw Error("cannot create symlink '%1%'; already exists", gcRoot);
- makeSymlink(gcRoot, printStorePath(storePath));
- addIndirectRoot(gcRoot);
- }
-
- else {
- if (!allowOutsideRootsDir) {
- Path rootsDir = canonPath((format("%1%/%2%") % stateDir % gcRootsDir).str());
-
- if (string(gcRoot, 0, rootsDir.size() + 1) != rootsDir + "/")
- throw Error(
- "path '%1%' is not a valid garbage collector root; "
- "it's not in the directory '%2%'",
- gcRoot, rootsDir);
- }
-
- if (baseNameOf(gcRoot) == std::string(storePath.to_string()))
- writeFile(gcRoot, "");
- else
- makeSymlink(gcRoot, printStorePath(storePath));
- }
-
- /* Check that the root can be found by the garbage collector.
- !!! This can be very slow on machines that have many roots.
- Instead of reading all the roots, it would be more efficient to
- check if the root is in a directory in or linked from the
- gcroots directory. */
- if (settings.checkRootReachability) {
- auto roots = findRoots(false);
- if (roots[storePath].count(gcRoot) == 0)
- logWarning({
- .name = "GC root",
- .hint = hintfmt("warning: '%1%' is not in a directory where the garbage collector looks for roots; "
- "therefore, '%2%' might be removed by the garbage collector",
- gcRoot, printStorePath(storePath))
- });
- }
+ /* Don't clobber the link if it already exists and doesn't
+ point to the Nix store. */
+ if (pathExists(gcRoot) && (!isLink(gcRoot) || !isInStore(readLink(gcRoot))))
+ throw Error("cannot create symlink '%1%'; already exists", gcRoot);
+ makeSymlink(gcRoot, printStorePath(storePath));
+ addIndirectRoot(gcRoot);
/* Grab the global GC root, causing us to block while a GC is in
progress. This prevents the set of permanent roots from
diff --git a/src/libstore/globals.cc b/src/libstore/globals.cc
index 7aec6d1c0..04c78405e 100644
--- a/src/libstore/globals.cc
+++ b/src/libstore/globals.cc
@@ -10,6 +10,9 @@
#include <sys/utsname.h>
#include <unordered_set>
+#include <nlohmann/json.hpp>
+
+
namespace nix {
@@ -160,9 +163,9 @@ template<> std::string BaseSetting<SandboxMode>::to_string() const
else abort();
}
-template<> void BaseSetting<SandboxMode>::toJSON(JSONPlaceholder & out)
+template<> nlohmann::json BaseSetting<SandboxMode>::toJSON()
{
- AbstractSetting::toJSON(out);
+ return AbstractSetting::toJSON();
}
template<> void BaseSetting<SandboxMode>::convertToArg(Args & args, const std::string & category)
diff --git a/src/libstore/globals.hh b/src/libstore/globals.hh
index 3406a9331..8a2d3ff75 100644
--- a/src/libstore/globals.hh
+++ b/src/libstore/globals.hh
@@ -80,89 +80,209 @@ public:
Setting<bool> keepGoing{this, false, "keep-going",
"Whether to keep building derivations when another build fails."};
- Setting<bool> tryFallback{this, false, "fallback",
- "Whether to fall back to building when substitution fails.",
+ Setting<bool> tryFallback{
+ this, false, "fallback",
+ R"(
+ If set to `true`, Nix will fall back to building from source if a
+ binary substitute fails. This is equivalent to the `--fallback`
+ flag. The default is `false`.
+ )",
{"build-fallback"}};
/* Whether to show build log output in real time. */
bool verboseBuild = true;
Setting<size_t> logLines{this, 10, "log-lines",
- "If verbose-build is false, the number of lines of the tail of "
+ "If `verbose-build` is false, the number of lines of the tail of "
"the log to show if a build fails."};
- MaxBuildJobsSetting maxBuildJobs{this, 1, "max-jobs",
- "Maximum number of parallel build jobs. \"auto\" means use number of cores.",
+ MaxBuildJobsSetting maxBuildJobs{
+ this, 1, "max-jobs",
+ R"(
+ This option defines the maximum number of jobs that Nix will try to
+ build in parallel. The default is `1`. The special value `auto`
+ causes Nix to use the number of CPUs in your system. `0` is useful
+ when using remote builders to prevent any local builds (except for
+ `preferLocalBuild` derivation attribute which executes locally
+ regardless). It can be overridden using the `--max-jobs` (`-j`)
+ command line switch.
+ )",
{"build-max-jobs"}};
- Setting<unsigned int> buildCores{this, getDefaultCores(), "cores",
- "Number of CPU cores to utilize in parallel within a build, "
- "i.e. by passing this number to Make via '-j'. 0 means that the "
- "number of actual CPU cores on the local host ought to be "
- "auto-detected.", {"build-cores"}};
+ Setting<unsigned int> buildCores{
+ this, getDefaultCores(), "cores",
+ R"(
+ Sets the value of the `NIX_BUILD_CORES` environment variable in the
+ invocation of builders. Builders can use this variable at their
+ discretion to control the maximum amount of parallelism. For
+ instance, in Nixpkgs, if the derivation attribute
+ `enableParallelBuilding` is set to `true`, the builder passes the
+ `-jN` flag to GNU Make. It can be overridden using the `--cores`
+ command line switch and defaults to `1`. The value `0` means that
+ the builder should use all available CPU cores in the system.
+ )",
+ {"build-cores"}};
/* Read-only mode. Don't copy stuff to the store, don't change
the database. */
bool readOnlyMode = false;
- Setting<std::string> thisSystem{this, SYSTEM, "system",
- "The canonical Nix system name."};
-
- Setting<time_t> maxSilentTime{this, 0, "max-silent-time",
- "The maximum time in seconds that a builer can go without "
- "producing any output on stdout/stderr before it is killed. "
- "0 means infinity.",
+ Setting<std::string> thisSystem{
+ this, SYSTEM, "system",
+ R"(
+ This option specifies the canonical Nix system name of the current
+ installation, such as `i686-linux` or `x86_64-darwin`. Nix can only
+ build derivations whose `system` attribute equals the value
+ specified here. In general, it never makes sense to modify this
+ value from its default, since you can use it to ‘lie’ about the
+ platform you are building on (e.g., perform a Mac OS build on a
+ Linux machine; the result would obviously be wrong). It only makes
+ sense if the Nix binaries can run on multiple platforms, e.g.,
+ ‘universal binaries’ that run on `x86_64-linux` and `i686-linux`.
+
+ It defaults to the canonical Nix system name detected by `configure`
+ at build time.
+ )"};
+
+ Setting<time_t> maxSilentTime{
+ this, 0, "max-silent-time",
+ R"(
+ This option defines the maximum number of seconds that a builder can
+ go without producing any data on standard output or standard error.
+ This is useful (for instance in an automated build system) to catch
+ builds that are stuck in an infinite loop, or to catch remote builds
+ that are hanging due to network problems. It can be overridden using
+ the `--max-silent-time` command line switch.
+
+ The value `0` means that there is no timeout. This is also the
+ default.
+ )",
{"build-max-silent-time"}};
- Setting<time_t> buildTimeout{this, 0, "timeout",
- "The maximum duration in seconds that a builder can run. "
- "0 means infinity.", {"build-timeout"}};
+ Setting<time_t> buildTimeout{
+ this, 0, "timeout",
+ R"(
+ This option defines the maximum number of seconds that a builder can
+ run. This is useful (for instance in an automated build system) to
+ catch builds that are stuck in an infinite loop but keep writing to
+ their standard output or standard error. It can be overridden using
+ the `--timeout` command line switch.
+
+ The value `0` means that there is no timeout. This is also the
+ default.
+ )",
+ {"build-timeout"}};
PathSetting buildHook{this, true, nixLibexecDir + "/nix/build-remote", "build-hook",
"The path of the helper program that executes builds to remote machines."};
- Setting<std::string> builders{this, "@" + nixConfDir + "/machines", "builders",
- "A semicolon-separated list of build machines, in the format of nix.machines."};
-
- Setting<bool> buildersUseSubstitutes{this, false, "builders-use-substitutes",
- "Whether build machines should use their own substitutes for obtaining "
- "build dependencies if possible, rather than waiting for this host to "
- "upload them."};
+ Setting<std::string> builders{
+ this, "@" + nixConfDir + "/machines", "builders",
+ "A semicolon-separated list of build machines, in the format of `nix.machines`."};
+
+ Setting<bool> buildersUseSubstitutes{
+ this, false, "builders-use-substitutes",
+ R"(
+ If set to `true`, Nix will instruct remote build machines to use
+ their own binary substitutes if available. In practical terms, this
+ means that remote hosts will fetch as many build dependencies as
+          possible from their own substitutes (e.g., from `cache.nixos.org`),
+ instead of waiting for this host to upload them all. This can
+ drastically reduce build times if the network connection between
+ this computer and the remote build host is slow.
+ )"};
Setting<off_t> reservedSize{this, 8 * 1024 * 1024, "gc-reserved-space",
"Amount of reserved disk space for the garbage collector."};
- Setting<bool> fsyncMetadata{this, true, "fsync-metadata",
- "Whether SQLite should use fsync()."};
+ Setting<bool> fsyncMetadata{
+ this, true, "fsync-metadata",
+ R"(
+ If set to `true`, changes to the Nix store metadata (in
+ `/nix/var/nix/db`) are synchronously flushed to disk. This improves
+ robustness in case of system crashes, but reduces performance. The
+ default is `true`.
+ )"};
Setting<bool> useSQLiteWAL{this, !isWSL1(), "use-sqlite-wal",
"Whether SQLite should use WAL mode."};
Setting<bool> syncBeforeRegistering{this, false, "sync-before-registering",
- "Whether to call sync() before registering a path as valid."};
-
- Setting<bool> useSubstitutes{this, true, "substitute",
- "Whether to use substitutes.",
+ "Whether to call `sync()` before registering a path as valid."};
+
+ Setting<bool> useSubstitutes{
+ this, true, "substitute",
+ R"(
+ If set to `true` (default), Nix will use binary substitutes if
+ available. This option can be disabled to force building from
+ source.
+ )",
{"build-use-substitutes"}};
- Setting<std::string> buildUsersGroup{this, "", "build-users-group",
- "The Unix group that contains the build users."};
+ Setting<std::string> buildUsersGroup{
+ this, "", "build-users-group",
+ R"(
+        This option specifies the Unix group containing the Nix build user
+ accounts. In multi-user Nix installations, builds should not be
+ performed by the Nix account since that would allow users to
+ arbitrarily modify the Nix store and database by supplying specially
+ crafted builders; and they cannot be performed by the calling user
+ since that would allow him/her to influence the build result.
+
+ Therefore, if this option is non-empty and specifies a valid group,
+ builds will be performed under the user accounts that are a member
+ of the group specified here (as listed in `/etc/group`). Those user
+        accounts should not be used for any other purpose!
+
+ Nix will never run two builds under the same user account at the
+ same time. This is to prevent an obvious security hole: a malicious
+ user writing a Nix expression that modifies the build result of a
+ legitimate Nix expression being built by another user. Therefore it
+ is good to have as many Nix build user accounts as you can spare.
+ (Remember: uids are cheap.)
+
+ The build users should have permission to create files in the Nix
+ store, but not delete them. Therefore, `/nix/store` should be owned
+ by the Nix account, its group should be the group specified here,
+ and its mode should be `1775`.
+
+ If the build users group is empty, builds will be performed under
+ the uid of the Nix process (that is, the uid of the caller if
+        `NIX_REMOTE` is empty, or the uid under which the Nix daemon runs if
+ `NIX_REMOTE` is `daemon`). Obviously, this should not be used in
+ multi-user settings with untrusted users.
+ )"};
Setting<bool> impersonateLinux26{this, false, "impersonate-linux-26",
"Whether to impersonate a Linux 2.6 machine on newer kernels.",
{"build-impersonate-linux-26"}};
- Setting<bool> keepLog{this, true, "keep-build-log",
- "Whether to store build logs.",
+ Setting<bool> keepLog{
+ this, true, "keep-build-log",
+ R"(
+ If set to `true` (the default), Nix will write the build log of a
+ derivation (i.e. the standard output and error of its builder) to
+ the directory `/nix/var/log/nix/drvs`. The build log can be
+ retrieved using the command `nix-store -l path`.
+ )",
{"build-keep-log"}};
- Setting<bool> compressLog{this, true, "compress-build-log",
- "Whether to compress logs.",
+ Setting<bool> compressLog{
+ this, true, "compress-build-log",
+ R"(
+ If set to `true` (the default), build logs written to
+ `/nix/var/log/nix/drvs` will be compressed on the fly using bzip2.
+ Otherwise, they will not be compressed.
+ )",
{"build-compress-log"}};
- Setting<unsigned long> maxLogSize{this, 0, "max-build-log-size",
- "Maximum number of bytes a builder can write to stdout/stderr "
- "before being killed (0 means no limit).",
+ Setting<unsigned long> maxLogSize{
+ this, 0, "max-build-log-size",
+ R"(
+ This option defines the maximum number of bytes that a builder can
+ write to its stdout/stderr. If the builder exceeds this limit, it’s
+ killed. A value of `0` (the default) means that there is no limit.
+ )",
{"build-max-log-size"}};
/* When buildRepeat > 0 and verboseBuild == true, whether to print
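These Setting<> members are read elsewhere through the global settings object (compare the systemFeatures.get() call rewritten in build.cc above). A small hedged sketch of the consumption side; only settings.buildCores and .get() come from this header, the surrounding function is illustrative.

    #include <thread>
    #include "globals.hh"

    // Illustrative only: read a documented setting; a value of 0 for `cores`
    // is documented above as "use all available CPU cores".
    unsigned int effectiveBuildCores()
    {
        auto cores = nix::settings.buildCores.get();
        return cores != 0 ? cores : std::thread::hardware_concurrency();
    }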
@@ -173,57 +293,156 @@ public:
Setting<unsigned int> pollInterval{this, 5, "build-poll-interval",
"How often (in seconds) to poll for locks."};
- Setting<bool> checkRootReachability{this, false, "gc-check-reachability",
- "Whether to check if new GC roots can in fact be found by the "
- "garbage collector."};
-
- Setting<bool> gcKeepOutputs{this, false, "keep-outputs",
- "Whether the garbage collector should keep outputs of live derivations.",
+ Setting<bool> gcKeepOutputs{
+ this, false, "keep-outputs",
+ R"(
+ If `true`, the garbage collector will keep the outputs of
+ non-garbage derivations. If `false` (default), outputs will be
+ deleted unless they are GC roots themselves (or reachable from other
+ roots).
+
+ In general, outputs must be registered as roots separately. However,
+ even if the output of a derivation is registered as a root, the
+ collector will still delete store paths that are used only at build
+ time (e.g., the C compiler, or source tarballs downloaded from the
+ network). To prevent it from doing so, set this option to `true`.
+ )",
{"gc-keep-outputs"}};
- Setting<bool> gcKeepDerivations{this, true, "keep-derivations",
- "Whether the garbage collector should keep derivers of live paths.",
+ Setting<bool> gcKeepDerivations{
+ this, true, "keep-derivations",
+ R"(
+ If `true` (default), the garbage collector will keep the derivations
+ from which non-garbage store paths were built. If `false`, they will
+ be deleted unless explicitly registered as a root (or reachable from
+ other roots).
+
+      Keeping derivations around is useful for querying and traceability
+ (e.g., it allows you to ask with what dependencies or options a
+ store path was built), so by default this option is on. Turn it off
+ to save a bit of disk space (or a lot if `keep-outputs` is also
+ turned on).
+ )",
{"gc-keep-derivations"}};
- Setting<bool> autoOptimiseStore{this, false, "auto-optimise-store",
- "Whether to automatically replace files with identical contents with hard links."};
-
- Setting<bool> envKeepDerivations{this, false, "keep-env-derivations",
- "Whether to add derivations as a dependency of user environments "
- "(to prevent them from being GCed).",
+ Setting<bool> autoOptimiseStore{
+ this, false, "auto-optimise-store",
+ R"(
+ If set to `true`, Nix automatically detects files in the store
+ that have identical contents, and replaces them with hard links to
+ a single copy. This saves disk space. If set to `false` (the
+ default), you can still run `nix-store --optimise` to get rid of
+ duplicate files.
+ )"};
+
+ Setting<bool> envKeepDerivations{
+ this, false, "keep-env-derivations",
+ R"(
+ If `false` (default), derivations are not stored in Nix user
+ environments. That is, the derivations of any build-time-only
+ dependencies may be garbage-collected.
+
+ If `true`, when you add a Nix derivation to a user environment, the
+ path of the derivation is stored in the user environment. Thus, the
+ derivation will not be garbage-collected until the user environment
+ generation is deleted (`nix-env --delete-generations`). To prevent
+ build-time-only dependencies from being collected, you should also
+ turn on `keep-outputs`.
+
+ The difference between this option and `keep-derivations` is that
+ this one is “sticky”: it applies to any user environment created
+ while this option was enabled, while `keep-derivations` only applies
+ at the moment the garbage collector is run.
+ )",
{"env-keep-derivations"}};
/* Whether to lock the Nix client and worker to the same CPU. */
bool lockCPU;
- Setting<SandboxMode> sandboxMode{this,
+ Setting<SandboxMode> sandboxMode{
+ this,
#if __linux__
smEnabled
#else
smDisabled
#endif
, "sandbox",
- "Whether to enable sandboxed builds. Can be \"true\", \"false\" or \"relaxed\".",
+ R"(
+ If set to `true`, builds will be performed in a *sandboxed
+ environment*, i.e., they’re isolated from the normal file system
+ hierarchy and will only see their dependencies in the Nix store,
+ the temporary build directory, private versions of `/proc`,
+ `/dev`, `/dev/shm` and `/dev/pts` (on Linux), and the paths
+ configured with the `sandbox-paths` option. This is useful to
+ prevent undeclared dependencies on files in directories such as
+ `/usr/bin`. In addition, on Linux, builds run in private PID,
+ mount, network, IPC and UTS namespaces to isolate them from other
+ processes in the system (except that fixed-output derivations do
+ not run in private network namespace to ensure they can access the
+ network).
+
+      Currently, sandboxing only works on Linux and macOS. The use of a
+ sandbox requires that Nix is run as root (so you should use the
+ “build users” feature to perform the actual builds under different
+ users than root).
+
+ If this option is set to `relaxed`, then fixed-output derivations
+ and derivations that have the `__noChroot` attribute set to `true`
+ do not run in sandboxes.
+
+ The default is `true` on Linux and `false` on all other platforms.
+ )",
{"build-use-chroot", "build-use-sandbox"}};
- Setting<PathSet> sandboxPaths{this, {}, "sandbox-paths",
- "The paths to make available inside the build sandbox.",
+ Setting<PathSet> sandboxPaths{
+ this, {}, "sandbox-paths",
+ R"(
+ A list of paths bind-mounted into Nix sandbox environments. You can
+ use the syntax `target=source` to mount a path in a different
+ location in the sandbox; for instance, `/bin=/nix-bin` will mount
+ the path `/nix-bin` as `/bin` inside the sandbox. If *source* is
+ followed by `?`, then it is not an error if *source* does not exist;
+ for example, `/dev/nvidiactl?` specifies that `/dev/nvidiactl` will
+ only be mounted in the sandbox if it exists in the host filesystem.
+
+ Depending on how Nix was built, the default value for this option
+ may be empty or provide `/bin/sh` as a bind-mount of `bash`.
+ )",
{"build-chroot-dirs", "build-sandbox-paths"}};
Setting<bool> sandboxFallback{this, true, "sandbox-fallback",
"Whether to disable sandboxing when the kernel doesn't allow it."};
- Setting<PathSet> extraSandboxPaths{this, {}, "extra-sandbox-paths",
- "Additional paths to make available inside the build sandbox.",
+ Setting<PathSet> extraSandboxPaths{
+ this, {}, "extra-sandbox-paths",
+ R"(
+ A list of additional paths appended to `sandbox-paths`. Useful if
+ you want to extend its default value.
+ )",
{"build-extra-chroot-dirs", "build-extra-sandbox-paths"}};
- Setting<size_t> buildRepeat{this, 0, "repeat",
- "The number of times to repeat a build in order to verify determinism.",
+ Setting<size_t> buildRepeat{
+ this, 0, "repeat",
+ R"(
+ How many times to repeat builds to check whether they are
+ deterministic. The default value is 0. If the value is non-zero,
+ every build is repeated the specified number of times. If the
+        contents of any of the runs differ from the previous ones and
+ `enforce-determinism` is true, the build is rejected and the
+ resulting store paths are not registered as “valid” in Nix’s
+ database.
+ )",
{"build-repeat"}};
#if __linux__
- Setting<std::string> sandboxShmSize{this, "50%", "sandbox-dev-shm-size",
- "The size of /dev/shm in the build sandbox."};
+ Setting<std::string> sandboxShmSize{
+ this, "50%", "sandbox-dev-shm-size",
+ R"(
+ This option determines the maximum size of the `tmpfs` filesystem
+ mounted on `/dev/shm` in Linux sandboxes. For the format, see the
+        description of the `size` option of `tmpfs` in `mount(8)`. The default
+ is `50%`.
+ )"};
Setting<Path> sandboxBuildDir{this, "/build", "sandbox-build-dir",
"The build directory inside the sandbox."};
@@ -237,118 +456,411 @@ public:
"Whether to log Darwin sandbox access violations to the system log."};
#endif
- Setting<bool> runDiffHook{this, false, "run-diff-hook",
- "Whether to run the program specified by the diff-hook setting "
- "repeated builds produce a different result. Typically used to "
- "plug in diffoscope."};
+ Setting<bool> runDiffHook{
+ this, false, "run-diff-hook",
+ R"(
+ If true, enable the execution of the `diff-hook` program.
- PathSetting diffHook{this, true, "", "diff-hook",
- "A program that prints out the differences between the two paths "
- "specified on its command line."};
+ When using the Nix daemon, `run-diff-hook` must be set in the
+ `nix.conf` configuration file, and cannot be passed at the command
+ line.
+ )"};
- Setting<bool> enforceDeterminism{this, true, "enforce-determinism",
- "Whether to fail if repeated builds produce different output."};
+ PathSetting diffHook{
+ this, true, "", "diff-hook",
+ R"(
+ Absolute path to an executable capable of diffing build
+ results. The hook is executed if `run-diff-hook` is true, and the
+ output of a build is known to not be the same. This program is not
+ executed to determine if two results are the same.
- Setting<Strings> trustedPublicKeys{this,
- {"cache.nixos.org-1:6NCHdD59X431o0gWypbMrAURkbJ16ZPMQFGspcDShjY="},
- "trusted-public-keys",
- "Trusted public keys for secure substitution.",
- {"binary-cache-public-keys"}};
+ The diff hook is executed by the same user and group who ran the
+ build. However, the diff hook does not have write access to the
+ store path just built.
+
+ The diff hook program receives three parameters:
+
+ 1. A path to the previous build's results
- Setting<Strings> secretKeyFiles{this, {}, "secret-key-files",
- "Secret keys with which to sign local builds."};
+ 2. A path to the current build's results
- Setting<unsigned int> tarballTtl{this, 60 * 60, "tarball-ttl",
- "How long downloaded files are considered up-to-date."};
+ 3. The path to the build's derivation
- Setting<bool> requireSigs{this, true, "require-sigs",
- "Whether to check that any non-content-addressed path added to the "
- "Nix store has a valid signature (that is, one signed using a key "
- "listed in 'trusted-public-keys'."};
+ 4. The path to the build's scratch directory. This directory will
+ exist only if the build was run with `--keep-failed`.
- Setting<StringSet> extraPlatforms{this,
+ The stderr and stdout output from the diff hook will not be
+ displayed to the user. Instead, it will print to the nix-daemon's
+ log.
+
+ When using the Nix daemon, `diff-hook` must be set in the `nix.conf`
+ configuration file, and cannot be passed at the command line.
+ )"};
+
+ Setting<bool> enforceDeterminism{
+ this, true, "enforce-determinism",
+ "Whether to fail if repeated builds produce different output. See `repeat`."};
+
+ Setting<Strings> trustedPublicKeys{
+ this,
+ {"cache.nixos.org-1:6NCHdD59X431o0gWypbMrAURkbJ16ZPMQFGspcDShjY="},
+ "trusted-public-keys",
+ R"(
+ A whitespace-separated list of public keys. When paths are copied
+ from another Nix store (such as a binary cache), they must be
+ signed with one of these keys. For example:
+ `cache.nixos.org-1:6NCHdD59X431o0gWypbMrAURkbJ16ZPMQFGspcDShjY=
+ hydra.nixos.org-1:CNHJZBh9K4tP3EKF6FkkgeVYsS3ohTl+oS0Qa8bezVs=`.
+ )",
+ {"binary-cache-public-keys"}};
+
+ Setting<Strings> secretKeyFiles{
+ this, {}, "secret-key-files",
+ R"(
+ A whitespace-separated list of files containing secret (private)
+ keys. These are used to sign locally-built paths. They can be
+ generated using `nix-store --generate-binary-cache-key`. The
+ corresponding public key can be distributed to other users, who
+ can add it to `trusted-public-keys` in their `nix.conf`.
+ )"};
+
+ Setting<unsigned int> tarballTtl{
+ this, 60 * 60, "tarball-ttl",
+ R"(
+ The number of seconds a downloaded tarball is considered fresh. If
+ the cached tarball is stale, Nix will check whether it is still up
+ to date using the ETag header. Nix will download a new version if
+ the ETag header is unsupported, or the cached ETag doesn't match.
+
+ Setting the TTL to `0` forces Nix to always check if the tarball is
+ up to date.
+
+ Nix caches tarballs in `$XDG_CACHE_HOME/nix/tarballs`.
+
+ Files fetched via `NIX_PATH`, `fetchGit`, `fetchMercurial`,
+ `fetchTarball`, and `fetchurl` respect this TTL.
+ )"};
+
+ Setting<bool> requireSigs{
+ this, true, "require-sigs",
+ R"(
+ If set to `true` (the default), any non-content-addressed path added
+ or copied to the Nix store (e.g. when substituting from a binary
+ cache) must have a valid signature, that is, be signed using one of
+ the keys listed in `trusted-public-keys` or `secret-key-files`. Set
+ to `false` to disable signature checking.
+ )"};
+
+ Setting<StringSet> extraPlatforms{
+ this,
std::string{SYSTEM} == "x86_64-linux" && !isWSL1() ? StringSet{"i686-linux"} : StringSet{},
"extra-platforms",
- "Additional platforms that can be built on the local system. "
- "These may be supported natively (e.g. armv7 on some aarch64 CPUs "
- "or using hacks like qemu-user."};
-
- Setting<StringSet> systemFeatures{this, getDefaultSystemFeatures(),
+ R"(
+ Platforms other than the native one which this machine is capable of
+ building for. This can be useful for supporting additional
+ architectures on compatible machines: i686-linux can be built on
+ x86\_64-linux machines (and the default for this setting reflects
+ this); armv7 is backwards-compatible with armv6 and armv5tel; some
+ aarch64 machines can also natively run 32-bit ARM code; and
+ qemu-user may be used to support non-native platforms (though this
+ may be slow and buggy). Most values for this are not enabled by
+ default because build systems will often misdetect the target
+ platform and generate incompatible code, so you may wish to
+ cross-check the results of using this option against proper
+ natively-built versions of your derivations.
+ )"};
+
+ Setting<StringSet> systemFeatures{
+ this, getDefaultSystemFeatures(),
"system-features",
- "Optional features that this system implements (like \"kvm\")."};
+ R"(
+ A set of system “features” supported by this machine, e.g. `kvm`.
+ Derivations can express a dependency on such features through the
+ derivation attribute `requiredSystemFeatures`. For example, the
+ attribute
+
+ requiredSystemFeatures = [ "kvm" ];
+
+ ensures that the derivation can only be built on a machine with the
+ `kvm` feature.
+
+ This setting by default includes `kvm` if `/dev/kvm` is accessible,
+ and the pseudo-features `nixos-test`, `benchmark` and `big-parallel`
+ that are used in Nixpkgs to route builds to specific machines.
+ )"};
- Setting<Strings> substituters{this,
+ Setting<Strings> substituters{
+ this,
nixStore == "/nix/store" ? Strings{"https://cache.nixos.org/"} : Strings(),
"substituters",
- "The URIs of substituters (such as https://cache.nixos.org/).",
+ R"(
+ A list of URLs of substituters, separated by whitespace. The default
+ is `https://cache.nixos.org`.
+ )",
{"binary-caches"}};
// FIXME: provide a way to add to option values.
- Setting<Strings> extraSubstituters{this, {}, "extra-substituters",
- "Additional URIs of substituters.",
+ Setting<Strings> extraSubstituters{
+ this, {}, "extra-substituters",
+ R"(
+ Additional binary caches appended to those specified in
+ `substituters`. When used by unprivileged users, untrusted
+ substituters (i.e. those not listed in `trusted-substituters`) are
+ silently ignored.
+ )",
{"extra-binary-caches"}};
- Setting<StringSet> trustedSubstituters{this, {}, "trusted-substituters",
- "Disabled substituters that may be enabled via the substituters option by untrusted users.",
+ Setting<StringSet> trustedSubstituters{
+ this, {}, "trusted-substituters",
+ R"(
+ A list of URLs of substituters, separated by whitespace. These are
+ not used by default, but can be enabled by users of the Nix daemon
+ by specifying `--option substituters urls` on the command
+ line. Unprivileged users are only allowed to pass a subset of the
+ URLs listed in `substituters` and `trusted-substituters`.
+ )",
{"trusted-binary-caches"}};
- Setting<Strings> trustedUsers{this, {"root"}, "trusted-users",
- "Which users or groups are trusted to ask the daemon to do unsafe things."};
-
- Setting<unsigned int> ttlNegativeNarInfoCache{this, 3600, "narinfo-cache-negative-ttl",
- "The TTL in seconds for negative lookups in the disk cache i.e binary cache lookups that "
- "return an invalid path result"};
-
- Setting<unsigned int> ttlPositiveNarInfoCache{this, 30 * 24 * 3600, "narinfo-cache-positive-ttl",
- "The TTL in seconds for positive lookups in the disk cache i.e binary cache lookups that "
- "return a valid path result."};
+ Setting<Strings> trustedUsers{
+ this, {"root"}, "trusted-users",
+ R"(
+ A list of names of users (separated by whitespace) that have
+ additional rights when connecting to the Nix daemon, such as the
+ ability to specify additional binary caches, or to import unsigned
+ NARs. You can also specify groups by prefixing them with `@`; for
+ instance, `@wheel` means all users in the `wheel` group. The default
+ is `root`.
+
+ > **Warning**
+ >
+ > Adding a user to `trusted-users` is essentially equivalent to
+ > giving that user root access to the system. For example, the user
+ > can set `sandbox-paths` and thereby obtain read access to
+      > directories that are otherwise inaccessible to them.
+ )"};
+
+ Setting<unsigned int> ttlNegativeNarInfoCache{
+ this, 3600, "narinfo-cache-negative-ttl",
+ R"(
+ The TTL in seconds for negative lookups. If a store path is queried
+ from a substituter but was not found, there will be a negative
+ lookup cached in the local disk cache database for the specified
+ duration.
+ )"};
+
+ Setting<unsigned int> ttlPositiveNarInfoCache{
+ this, 30 * 24 * 3600, "narinfo-cache-positive-ttl",
+ R"(
+ The TTL in seconds for positive lookups. If a store path is queried
+ from a substituter, the result of the query will be cached in the
+        local disk cache database, including some of the NAR metadata. The
+        default TTL is a month. Setting a shorter TTL for positive lookups
+        can be useful for binary caches that have frequent garbage
+        collection, since more frequent cache invalidation avoids
+        repeatedly trying to pull a path and failing with a hash mismatch
+        when the build isn't reproducible.
+ )"};
    /* Who we trust to use the daemon in safe ways */
- Setting<Strings> allowedUsers{this, {"*"}, "allowed-users",
- "Which users or groups are allowed to connect to the daemon."};
+ Setting<Strings> allowedUsers{
+ this, {"*"}, "allowed-users",
+ R"(
+ A list of names of users (separated by whitespace) that are allowed
+ to connect to the Nix daemon. As with the `trusted-users` option,
+ you can specify groups by prefixing them with `@`. Also, you can
+ allow all users by specifying `*`. The default is `*`.
+
+ Note that trusted users are always allowed to connect.
+ )"};
Setting<bool> printMissing{this, true, "print-missing",
"Whether to print what paths need to be built or downloaded."};
- Setting<std::string> preBuildHook{this, "",
- "pre-build-hook",
- "A program to run just before a build to set derivation-specific build settings."};
+ Setting<std::string> preBuildHook{
+ this, "", "pre-build-hook",
+ R"(
+ If set, the path to a program that can set extra derivation-specific
+ settings for this system. This is used for settings that can't be
+ captured by the derivation model itself and are too variable between
+      different versions of the same system to be hard-coded into Nix.
+
+ The hook is passed the derivation path and, if sandboxes are
+ enabled, the sandbox directory. It can then modify the sandbox and
+ send a series of commands to modify various settings to stdout. The
+ currently recognized commands are:
+
+ - `extra-sandbox-paths`
+ Pass a list of files and directories to be included in the
+ sandbox for this build. One entry per line, terminated by an
+ empty line. Entries have the same format as `sandbox-paths`.
+ )"};
+
+ Setting<std::string> postBuildHook{
+ this, "", "post-build-hook",
+ R"(
+ Optional. The path to a program to execute after each build.
+
+ This option is only settable in the global `nix.conf`, or on the
+ command line by trusted users.
+
+ When using the nix-daemon, the daemon executes the hook as `root`.
+ If the nix-daemon is not involved, the hook runs as the user
+ executing the nix-build.
+
+ - The hook executes after an evaluation-time build.
+
+ - The hook does not execute on substituted paths.
+
+ - The hook's output always goes to the user's terminal.
- Setting<std::string> postBuildHook{this, "", "post-build-hook",
- "A program to run just after each successful build."};
+ - If the hook fails, the build succeeds but no further builds
+ execute.
- Setting<std::string> netrcFile{this, fmt("%s/%s", nixConfDir, "netrc"), "netrc-file",
- "Path to the netrc file used to obtain usernames/passwords for downloads."};
+ - The hook executes synchronously, and blocks other builds from
+ progressing while it runs.
+
+ The program executes with no arguments. The program's environment
+ contains the following environment variables:
+
+ - `DRV_PATH`
+ The derivation for the built paths.
+
+ Example:
+ `/nix/store/5nihn1a7pa8b25l9zafqaqibznlvvp3f-bash-4.4-p23.drv`
+
+ - `OUT_PATHS`
+ Output paths of the built derivation, separated by a space
+ character.
+
+ Example:
+ `/nix/store/zf5lbh336mnzf1nlswdn11g4n2m8zh3g-bash-4.4-p23-dev
+ /nix/store/rjxwxwv1fpn9wa2x5ssk5phzwlcv4mna-bash-4.4-p23-doc
+ /nix/store/6bqvbzjkcp9695dq0dpl5y43nvy37pq1-bash-4.4-p23-info
+ /nix/store/r7fng3kk3vlpdlh2idnrbn37vh4imlj2-bash-4.4-p23-man
+ /nix/store/xfghy8ixrhz3kyy6p724iv3cxji088dx-bash-4.4-p23`.
+ )"};
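To make the hook's environment contract concrete, here is a minimal, hypothetical hook written as a small C++ program (any executable works; a shell script is more typical). It only reads the documented `DRV_PATH` and `OUT_PATHS` variables; the upload command mentioned in the comment is an assumption, not part of the interface.

```cpp
// Minimal sketch of a post-build-hook consumer (hypothetical).
// The hook receives no arguments; it only reads DRV_PATH and OUT_PATHS.
#include <cstdlib>
#include <iostream>
#include <sstream>
#include <string>

int main()
{
    const char * drv = std::getenv("DRV_PATH");
    const char * outs = std::getenv("OUT_PATHS");
    if (!drv || !outs) return 1; // not invoked by Nix

    std::cerr << "built " << drv << "\n";

    // OUT_PATHS is a single space-separated string of store paths.
    std::istringstream iss(outs);
    std::string path;
    while (iss >> path) {
        // A real hook might copy each path to a binary cache here
        // (e.g. via `nix copy`); that part is an assumption.
        std::cerr << "output: " << path << "\n";
    }

    return 0;
}
```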
+
+ Setting<std::string> netrcFile{
+ this, fmt("%s/%s", nixConfDir, "netrc"), "netrc-file",
+ R"(
+ If set to an absolute path to a `netrc` file, Nix will use the HTTP
+ authentication credentials in this file when trying to download from
+ a remote host through HTTP or HTTPS. Defaults to
+ `$NIX_CONF_DIR/netrc`.
+
+ The `netrc` file consists of a list of accounts in the following
+ format:
+
+ machine my-machine
+ login my-username
+ password my-password
+
+ For the exact syntax, see [the `curl`
+ documentation](https://ec.haxx.se/usingcurl-netrc.html).
+
+ > **Note**
+ >
+ > This must be an absolute path, and `~` is not resolved. For
+ > example, `~/.netrc` won't resolve to your home directory's
+ > `.netrc`.
+ )"};
/* Path to the SSL CA file used */
Path caFile;
#if __linux__
- Setting<bool> filterSyscalls{this, true, "filter-syscalls",
- "Whether to prevent certain dangerous system calls, such as "
- "creation of setuid/setgid files or adding ACLs or extended "
- "attributes. Only disable this if you're aware of the "
- "security implications."};
-
- Setting<bool> allowNewPrivileges{this, false, "allow-new-privileges",
- "Whether builders can acquire new privileges by calling programs with "
- "setuid/setgid bits or with file capabilities."};
+ Setting<bool> filterSyscalls{
+ this, true, "filter-syscalls",
+ R"(
+ Whether to prevent certain dangerous system calls, such as
+ creation of setuid/setgid files or adding ACLs or extended
+ attributes. Only disable this if you're aware of the
+ security implications.
+ )"};
+
+ Setting<bool> allowNewPrivileges{
+ this, false, "allow-new-privileges",
+ R"(
+ (Linux-specific.) By default, builders on Linux cannot acquire new
+ privileges by calling setuid/setgid programs or programs that have
+ file capabilities. For example, programs such as `sudo` or `ping`
+ will fail. (Note that in sandbox builds, no such programs are
+ available unless you bind-mount them into the sandbox via the
+ `sandbox-paths` option.) You can allow the use of such programs by
+ enabling this option. This is impure and usually undesirable, but
+ may be useful in certain scenarios (e.g. to spin up containers or
+ set up userspace network interfaces in tests).
+ )"};
#endif
- Setting<uint64_t> minFree{this, 0, "min-free",
- "Automatically run the garbage collector when free disk space drops below the specified amount."};
-
- Setting<uint64_t> maxFree{this, std::numeric_limits<uint64_t>::max(), "max-free",
- "Stop deleting garbage when free disk space is above the specified amount."};
+ Setting<Strings> hashedMirrors{
+ this, {}, "hashed-mirrors",
+ R"(
+ A list of web servers used by `builtins.fetchurl` to obtain files by
+ hash. The default is `http://tarballs.nixos.org/`. Given a hash type
+ *ht* and a base-16 hash *h*, Nix will try to download the file from
+ *hashed-mirror*/*ht*/*h*. This allows files to be downloaded even if
+ they have disappeared from their original URI. For example, given
+ the default mirror `http://tarballs.nixos.org/`, when building the
+ derivation
+
+ ```nix
+ builtins.fetchurl {
+ url = "https://example.org/foo-1.2.3.tar.xz";
+ sha256 = "2c26b46b68ffc68ff99b453c1d30413413422d706483bfa0f98a5e886266e7ae";
+ }
+ ```
+
+ Nix will attempt to download this file from
+ `http://tarballs.nixos.org/sha256/2c26b46b68ffc68ff99b453c1d30413413422d706483bfa0f98a5e886266e7ae`
+      first. If it is not available there, it will try the original URI.
+ )"};
+
+ Setting<uint64_t> minFree{
+ this, 0, "min-free",
+ R"(
+ When free disk space in `/nix/store` drops below `min-free` during a
+        build, Nix performs a garbage collection until `max-free` bytes are
+ available or there is no more garbage. A value of `0` (the default)
+ disables this feature.
+ )"};
+
+ Setting<uint64_t> maxFree{
+ this, std::numeric_limits<uint64_t>::max(), "max-free",
+ R"(
+ When a garbage collection is triggered by the `min-free` option, it
+ stops as soon as `max-free` bytes are available. The default is
+ infinity (i.e. delete all garbage).
+ )"};
Setting<uint64_t> minFreeCheckInterval{this, 5, "min-free-check-interval",
"Number of seconds between checking free disk space."};
- Setting<Paths> pluginFiles{this, {}, "plugin-files",
- "Plugins to dynamically load at nix initialization time."};
+ Setting<Paths> pluginFiles{
+ this, {}, "plugin-files",
+ R"(
+ A list of plugin files to be loaded by Nix. Each of these files will
+ be dlopened by Nix, allowing them to affect execution through static
+      initialization. In particular, these plugins may construct static
+      instances of `RegisterPrimOp` to add new primops or constants to the
+      expression language, `RegisterStoreImplementation` to add new store
+      implementations, `RegisterCommand` to add new subcommands to the `nix`
+      command, and `RegisterSetting` to add new Nix configuration settings.
+      See the constructors for those types for more details.
+
+ Since these files are loaded into the same address space as Nix
+ itself, they must be DSOs compatible with the instance of Nix
+ running at the time (i.e. compiled against the same headers, not
+ linked to any incompatible libraries). They should not be linked to
+ any Nix libs directly, as those will be available already at load
+ time.
+
+ If an entry in the list is a directory, all files in the directory
+ are loaded as plugins (non-recursively).
+ )"};
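Since the description above names the registration hooks only in prose, a minimal sketch of such a plugin may help. It assumes the Nix headers are on the include path and that settings are registered via `GlobalConfig::Register`, as done elsewhere in this tree; the option name `example-option` is made up.

```cpp
// Hypothetical Nix plugin, built as a shared object and listed in plugin-files.
// It contributes one extra setting via static initialization; it should not be
// linked against the Nix libraries, only compiled against their headers.
#include "config.hh"

using namespace nix;

struct MyPluginSettings : Config
{
    Setting<std::string> greeting{this, "hello", "example-option",
        "An option contributed by a plugin (illustrative only)."};
};

static MyPluginSettings myPluginSettings;

// Static initialization runs when Nix dlopens the plugin.
static GlobalConfig::Register rMyPluginSettings(&myPluginSettings);
```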
Setting<std::string> githubAccessToken{this, "", "github-access-token",
- "GitHub access token to get access to GitHub data through the GitHub API for github:<..> flakes."};
+ "GitHub access token to get access to GitHub data through the GitHub API for `github:<..>` flakes."};
Setting<Strings> experimentalFeatures{this, {}, "experimental-features",
"Experimental Nix features to enable."};
diff --git a/src/libstore/http-binary-cache-store.cc b/src/libstore/http-binary-cache-store.cc
index c1ceb08cf..1733239fb 100644
--- a/src/libstore/http-binary-cache-store.cc
+++ b/src/libstore/http-binary-cache-store.cc
@@ -85,7 +85,7 @@ protected:
checkEnabled();
try {
- FileTransferRequest request(cacheUri + "/" + path);
+ FileTransferRequest request(makeRequest(path));
request.head = true;
getFileTransfer()->download(request);
return true;
@@ -103,7 +103,7 @@ protected:
std::shared_ptr<std::basic_iostream<char>> istream,
const std::string & mimeType) override
{
- auto req = FileTransferRequest(cacheUri + "/" + path);
+ auto req = makeRequest(path);
req.data = std::make_shared<string>(StreamToSourceAdapter(istream).drain());
req.mimeType = mimeType;
try {
@@ -115,8 +115,11 @@ protected:
FileTransferRequest makeRequest(const std::string & path)
{
- FileTransferRequest request(cacheUri + "/" + path);
- return request;
+ return FileTransferRequest(
+ hasPrefix(path, "https://") || hasPrefix(path, "http://") || hasPrefix(path, "file://")
+ ? path
+ : cacheUri + "/" + path);
+
}
void getFile(const std::string & path, Sink & sink) override
diff --git a/src/libstore/legacy-ssh-store.cc b/src/libstore/legacy-ssh-store.cc
index c6eeab548..dc03313f0 100644
--- a/src/libstore/legacy-ssh-store.cc
+++ b/src/libstore/legacy-ssh-store.cc
@@ -93,6 +93,9 @@ struct LegacySSHStore : public Store
try {
auto conn(connections->get());
+ /* No longer support missing NAR hash */
+ assert(GET_PROTOCOL_MINOR(conn->remoteVersion) >= 4);
+
debug("querying remote host '%s' for info on '%s'", host, printStorePath(path));
conn->to << cmdQueryPathInfos << PathSet{printStorePath(path)};
@@ -100,8 +103,10 @@ struct LegacySSHStore : public Store
auto p = readString(conn->from);
if (p.empty()) return callback(nullptr);
- auto info = std::make_shared<ValidPathInfo>(parseStorePath(p));
- assert(path == info->path);
+ auto path2 = parseStorePath(p);
+ assert(path == path2);
+ /* Hash will be set below. FIXME construct ValidPathInfo at end. */
+ auto info = std::make_shared<ValidPathInfo>(path, Hash::dummy);
PathSet references;
auto deriver = readString(conn->from);
@@ -111,12 +116,14 @@ struct LegacySSHStore : public Store
readLongLong(conn->from); // download size
info->narSize = readLongLong(conn->from);
- if (GET_PROTOCOL_MINOR(conn->remoteVersion) >= 4) {
+ {
auto s = readString(conn->from);
- info->narHash = s.empty() ? std::optional<Hash>{} : Hash::parseAnyPrefixed(s);
- info->ca = parseContentAddressOpt(readString(conn->from));
- info->sigs = readStrings<StringSet>(conn->from);
+ if (s == "")
+ throw Error("NAR hash is now mandatory");
+ info->narHash = Hash::parseAnyPrefixed(s);
}
+ info->ca = parseContentAddressOpt(readString(conn->from));
+ info->sigs = readStrings<StringSet>(conn->from);
auto s = readString(conn->from);
assert(s == "");
@@ -138,7 +145,7 @@ struct LegacySSHStore : public Store
<< cmdAddToStoreNar
<< printStorePath(info.path)
<< (info.deriver ? printStorePath(*info.deriver) : "")
- << info.narHash->to_string(Base16, false);
+ << info.narHash.to_string(Base16, false);
writeStorePaths(*this, conn->to, info.references);
conn->to
<< info.registrationTime
@@ -202,6 +209,24 @@ struct LegacySSHStore : public Store
const StorePathSet & references, RepairFlag repair) override
{ unsupported("addTextToStore"); }
+private:
+
+ void putBuildSettings(Connection & conn)
+ {
+ conn.to
+ << settings.maxSilentTime
+ << settings.buildTimeout;
+ if (GET_PROTOCOL_MINOR(conn.remoteVersion) >= 2)
+ conn.to
+ << settings.maxLogSize;
+ if (GET_PROTOCOL_MINOR(conn.remoteVersion) >= 3)
+ conn.to
+ << settings.buildRepeat
+ << settings.enforceDeterminism;
+ }
+
+public:
+
BuildResult buildDerivation(const StorePath & drvPath, const BasicDerivation & drv,
BuildMode buildMode) override
{
@@ -211,16 +236,8 @@ struct LegacySSHStore : public Store
<< cmdBuildDerivation
<< printStorePath(drvPath);
writeDerivation(conn->to, *this, drv);
- conn->to
- << settings.maxSilentTime
- << settings.buildTimeout;
- if (GET_PROTOCOL_MINOR(conn->remoteVersion) >= 2)
- conn->to
- << settings.maxLogSize;
- if (GET_PROTOCOL_MINOR(conn->remoteVersion) >= 3)
- conn->to
- << settings.buildRepeat
- << settings.enforceDeterminism;
+
+ putBuildSettings(*conn);
conn->to.flush();
@@ -234,6 +251,29 @@ struct LegacySSHStore : public Store
return status;
}
+ void buildPaths(const std::vector<StorePathWithOutputs> & drvPaths, BuildMode buildMode) override
+ {
+ auto conn(connections->get());
+
+ conn->to << cmdBuildPaths;
+ Strings ss;
+ for (auto & p : drvPaths)
+ ss.push_back(p.to_string(*this));
+ conn->to << ss;
+
+ putBuildSettings(*conn);
+
+ conn->to.flush();
+
+ BuildResult result;
+ result.status = (BuildResult::Status) readInt(conn->from);
+
+ if (!result.success()) {
+ conn->from >> result.errorMsg;
+ throw Error(result.status, result.errorMsg);
+ }
+ }
+
void ensurePath(const StorePath & path) override
{ unsupported("ensurePath"); }
diff --git a/src/libstore/local-store.cc b/src/libstore/local-store.cc
index 3c66a4dfd..a79b2da44 100644
--- a/src/libstore/local-store.cc
+++ b/src/libstore/local-store.cc
@@ -594,7 +594,7 @@ uint64_t LocalStore::addValidPath(State & state,
state.stmtRegisterValidPath.use()
(printStorePath(info.path))
- (info.narHash->to_string(Base16, true))
+ (info.narHash.to_string(Base16, true))
(info.registrationTime == 0 ? time(0) : info.registrationTime)
(info.deriver ? printStorePath(*info.deriver) : "", (bool) info.deriver)
(info.narSize, info.narSize != 0)
@@ -618,11 +618,11 @@ uint64_t LocalStore::addValidPath(State & state,
registration above is undone. */
if (checkOutputs) checkDerivationOutputs(info.path, drv);
- for (auto & i : drv.outputs) {
+ for (auto & i : drv.outputsAndPaths(*this)) {
state.stmtAddDerivationOutput.use()
(id)
(i.first)
- (printStorePath(i.second.path(*this, drv.name)))
+ (printStorePath(i.second.second))
.exec();
}
}
@@ -641,25 +641,28 @@ void LocalStore::queryPathInfoUncached(const StorePath & path,
Callback<std::shared_ptr<const ValidPathInfo>> callback) noexcept
{
try {
- auto info = std::make_shared<ValidPathInfo>(path);
-
callback(retrySQLite<std::shared_ptr<ValidPathInfo>>([&]() {
auto state(_state.lock());
/* Get the path info. */
- auto useQueryPathInfo(state->stmtQueryPathInfo.use()(printStorePath(info->path)));
+ auto useQueryPathInfo(state->stmtQueryPathInfo.use()(printStorePath(path)));
if (!useQueryPathInfo.next())
return std::shared_ptr<ValidPathInfo>();
- info->id = useQueryPathInfo.getInt(0);
+ auto id = useQueryPathInfo.getInt(0);
+ auto narHash = Hash::dummy;
try {
- info->narHash = Hash::parseAnyPrefixed(useQueryPathInfo.getStr(1));
+ narHash = Hash::parseAnyPrefixed(useQueryPathInfo.getStr(1));
} catch (BadHash & e) {
- throw Error("in valid-path entry for '%s': %s", printStorePath(path), e.what());
+ throw Error("invalid-path entry for '%s': %s", printStorePath(path), e.what());
}
+ auto info = std::make_shared<ValidPathInfo>(path, narHash);
+
+ info->id = id;
+
info->registrationTime = useQueryPathInfo.getInt(2);
auto s = (const char *) sqlite3_column_text(state->stmtQueryPathInfo, 3);
@@ -694,7 +697,7 @@ void LocalStore::updatePathInfo(State & state, const ValidPathInfo & info)
{
state.stmtUpdatePathInfo.use()
(info.narSize, info.narSize != 0)
- (info.narHash->to_string(Base16, true))
+ (info.narHash.to_string(Base16, true))
(info.ultimate ? 1 : 0, info.ultimate)
(concatStringsSep(" ", info.sigs), !info.sigs.empty())
(renderContentAddress(info.ca), (bool) info.ca)
@@ -782,17 +785,21 @@ StorePathSet LocalStore::queryValidDerivers(const StorePath & path)
}
-OutputPathMap LocalStore::queryDerivationOutputMap(const StorePath & path)
+std::map<std::string, std::optional<StorePath>> LocalStore::queryPartialDerivationOutputMap(const StorePath & path)
{
- return retrySQLite<OutputPathMap>([&]() {
+ std::map<std::string, std::optional<StorePath>> outputs;
+ BasicDerivation drv = readDerivation(path);
+ for (auto & [outName, _] : drv.outputs) {
+ outputs.insert_or_assign(outName, std::nullopt);
+ }
+ return retrySQLite<std::map<std::string, std::optional<StorePath>>>([&]() {
auto state(_state.lock());
auto useQueryDerivationOutputs(state->stmtQueryDerivationOutputs.use()
(queryValidPathId(*state, path)));
- OutputPathMap outputs;
while (useQueryDerivationOutputs.next())
- outputs.emplace(
+ outputs.insert_or_assign(
useQueryDerivationOutputs.getStr(0),
parseStorePath(useQueryDerivationOutputs.getStr(1))
);
@@ -920,7 +927,7 @@ void LocalStore::registerValidPaths(const ValidPathInfos & infos)
StorePathSet paths;
for (auto & i : infos) {
- assert(i.narHash && i.narHash->type == htSHA256);
+ assert(i.narHash.type == htSHA256);
if (isValidPath_(*state, i.path))
updatePathInfo(*state, i);
else
@@ -984,9 +991,6 @@ const PublicKeys & LocalStore::getPublicKeys()
void LocalStore::addToStore(const ValidPathInfo & info, Source & source,
RepairFlag repair, CheckSigsFlag checkSigs)
{
- if (!info.narHash)
- throw Error("cannot add path '%s' because it lacks a hash", printStorePath(info.path));
-
if (requireSigs && checkSigs && !info.checkSignatures(*this, getPublicKeys()))
throw Error("cannot add path '%s' because it lacks a valid signature", printStorePath(info.path));
@@ -1021,11 +1025,7 @@ void LocalStore::addToStore(const ValidPathInfo & info, Source & source,
else
hashSink = std::make_unique<HashModuloSink>(htSHA256, std::string(info.path.hashPart()));
- LambdaSource wrapperSource([&](unsigned char * data, size_t len) -> size_t {
- size_t n = source.read(data, len);
- (*hashSink)(data, n);
- return n;
- });
+ TeeSource wrapperSource { source, *hashSink };
restorePath(realPath, wrapperSource);
@@ -1033,7 +1033,7 @@ void LocalStore::addToStore(const ValidPathInfo & info, Source & source,
if (hashResult.first != info.narHash)
throw Error("hash mismatch importing path '%s';\n wanted: %s\n got: %s",
- printStorePath(info.path), info.narHash->to_string(Base32, true), hashResult.first.to_string(Base32, true));
+ printStorePath(info.path), info.narHash.to_string(Base32, true), hashResult.first.to_string(Base32, true));
if (hashResult.second != info.narSize)
throw Error("size mismatch importing path '%s';\n wanted: %s\n got: %s",
@@ -1155,8 +1155,7 @@ StorePath LocalStore::addToStoreFromDump(Source & source0, const string & name,
optimisePath(realPath);
- ValidPathInfo info(dstPath);
- info.narHash = narHash.first;
+ ValidPathInfo info { dstPath, narHash.first };
info.narSize = narHash.second;
info.ca = FixedOutputHash { .method = method, .hash = hash };
registerValidPath(info);
@@ -1199,8 +1198,7 @@ StorePath LocalStore::addTextToStore(const string & name, const string & s,
optimisePath(realPath);
- ValidPathInfo info(dstPath);
- info.narHash = narHash;
+ ValidPathInfo info { dstPath, narHash };
info.narSize = sink.s->size();
info.references = references;
info.ca = TextHash { .hash = hash };
@@ -1315,9 +1313,9 @@ bool LocalStore::verifyStore(bool checkContents, RepairFlag repair)
std::unique_ptr<AbstractHashSink> hashSink;
if (!info->ca || !info->references.count(info->path))
- hashSink = std::make_unique<HashSink>(info->narHash->type);
+ hashSink = std::make_unique<HashSink>(info->narHash.type);
else
- hashSink = std::make_unique<HashModuloSink>(info->narHash->type, std::string(info->path.hashPart()));
+ hashSink = std::make_unique<HashModuloSink>(info->narHash.type, std::string(info->path.hashPart()));
dumpPath(Store::toRealPath(i), *hashSink);
auto current = hashSink->finish();
@@ -1326,7 +1324,7 @@ bool LocalStore::verifyStore(bool checkContents, RepairFlag repair)
logError({
.name = "Invalid hash - path modified",
.hint = hintfmt("path '%s' was modified! expected hash '%s', got '%s'",
- printStorePath(i), info->narHash->to_string(Base32, true), current.first.to_string(Base32, true))
+ printStorePath(i), info->narHash.to_string(Base32, true), current.first.to_string(Base32, true))
});
if (repair) repairPath(i); else errors = true;
} else {
diff --git a/src/libstore/local-store.hh b/src/libstore/local-store.hh
index 31e6587ac..bb4ed9b19 100644
--- a/src/libstore/local-store.hh
+++ b/src/libstore/local-store.hh
@@ -23,9 +23,6 @@ namespace nix {
const int nixSchemaVersion = 10;
-struct Derivation;
-
-
struct OptimiseStats
{
unsigned long filesLinked = 0;
@@ -133,7 +130,7 @@ public:
StorePathSet queryValidDerivers(const StorePath & path) override;
- OutputPathMap queryDerivationOutputMap(const StorePath & path) override;
+ std::map<std::string, std::optional<StorePath>> queryPartialDerivationOutputMap(const StorePath & path) override;
std::optional<StorePath> queryPathFromHashPart(const std::string & hashPart) override;
diff --git a/src/libstore/machines.cc b/src/libstore/machines.cc
index f848582da..7db2556f4 100644
--- a/src/libstore/machines.cc
+++ b/src/libstore/machines.cc
@@ -1,6 +1,7 @@
#include "machines.hh"
#include "util.hh"
#include "globals.hh"
+#include "store-api.hh"
#include <algorithm>
@@ -48,6 +49,29 @@ bool Machine::mandatoryMet(const std::set<string> & features) const {
});
}
+ref<Store> Machine::openStore() const {
+ Store::Params storeParams;
+ if (hasPrefix(storeUri, "ssh://")) {
+ storeParams["max-connections"] = "1";
+ storeParams["log-fd"] = "4";
+ if (sshKey != "")
+ storeParams["ssh-key"] = sshKey;
+ }
+ {
+ auto & fs = storeParams["system-features"];
+ auto append = [&](auto feats) {
+ for (auto & f : feats) {
+ if (fs.size() > 0) fs += ' ';
+ fs += f;
+ }
+ };
+ append(supportedFeatures);
+ append(mandatoryFeatures);
+ }
+
+ return nix::openStore(storeUri, storeParams);
+}
+
void parseMachines(const std::string & s, Machines & machines)
{
for (auto line : tokenizeString<std::vector<string>>(s, "\n;")) {
diff --git a/src/libstore/machines.hh b/src/libstore/machines.hh
index de92eb924..341d9bd97 100644
--- a/src/libstore/machines.hh
+++ b/src/libstore/machines.hh
@@ -4,6 +4,8 @@
namespace nix {
+class Store;
+
struct Machine {
const string storeUri;
@@ -28,6 +30,8 @@ struct Machine {
decltype(supportedFeatures) supportedFeatures,
decltype(mandatoryFeatures) mandatoryFeatures,
decltype(sshPublicHostKey) sshPublicHostKey);
+
+ ref<Store> openStore() const;
};
typedef std::vector<Machine> Machines;
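A brief sketch of how a caller such as `build-remote` might use the new `Machine::openStore()` helper; it assumes `getMachines()` from `machines.hh` and omits error handling, so it is illustrative rather than the actual build-remote code.

```cpp
// Sketch: open a store connection to each configured build machine.
// Assumes the Nix headers; mirrors, but is not, the build-remote logic.
#include "machines.hh"
#include "store-api.hh"

using namespace nix;

void pingMachines()
{
    for (auto & machine : getMachines()) {
        // openStore() fills in ssh-key, max-connections, log-fd and
        // system-features from the machine specification.
        ref<Store> store = machine.openStore();
        store->connect();   // throws if the remote store is unreachable
    }
}
```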
diff --git a/src/libstore/misc.cc b/src/libstore/misc.cc
index 0ae1ceaad..f6aa570bb 100644
--- a/src/libstore/misc.cc
+++ b/src/libstore/misc.cc
@@ -207,10 +207,10 @@ void Store::queryMissing(const std::vector<StorePathWithOutputs> & targets,
ParsedDerivation parsedDrv(StorePath(path.path), *drv);
PathSet invalid;
- for (auto & j : drv->outputs)
+ for (auto & j : drv->outputsAndPaths(*this))
if (wantOutput(j.first, path.outputs)
- && !isValidPath(j.second.path(*this, drv->name)))
- invalid.insert(printStorePath(j.second.path(*this, drv->name)));
+ && !isValidPath(j.second.second))
+ invalid.insert(printStorePath(j.second.second));
if (invalid.empty()) return;
if (settings.useSubstitutes && parsedDrv.substitutesAllowed()) {
diff --git a/src/libstore/nar-accessor.cc b/src/libstore/nar-accessor.cc
index 59ec164b6..a9efdd0b6 100644
--- a/src/libstore/nar-accessor.cc
+++ b/src/libstore/nar-accessor.cc
@@ -49,7 +49,8 @@ struct NarAccessor : public FSAccessor
: acc(acc), source(source)
{ }
- void createMember(const Path & path, NarMember member) {
+ void createMember(const Path & path, NarMember member)
+ {
size_t level = std::count(path.begin(), path.end(), '/');
while (parents.size() > level) parents.pop();
diff --git a/src/libstore/nar-info-disk-cache.cc b/src/libstore/nar-info-disk-cache.cc
index 92da14e23..8541cc51f 100644
--- a/src/libstore/nar-info-disk-cache.cc
+++ b/src/libstore/nar-info-disk-cache.cc
@@ -189,13 +189,14 @@ public:
return {oInvalid, 0};
auto namePart = queryNAR.getStr(1);
- auto narInfo = make_ref<NarInfo>(StorePath(hashPart + "-" + namePart));
+ auto narInfo = make_ref<NarInfo>(
+ StorePath(hashPart + "-" + namePart),
+ Hash::parseAnyPrefixed(queryNAR.getStr(6)));
narInfo->url = queryNAR.getStr(2);
narInfo->compression = queryNAR.getStr(3);
if (!queryNAR.isNull(4))
narInfo->fileHash = Hash::parseAnyPrefixed(queryNAR.getStr(4));
narInfo->fileSize = queryNAR.getInt(5);
- narInfo->narHash = Hash::parseAnyPrefixed(queryNAR.getStr(6));
narInfo->narSize = queryNAR.getInt(7);
for (auto & r : tokenizeString<Strings>(queryNAR.getStr(8), " "))
narInfo->references.insert(StorePath(r));
@@ -232,7 +233,7 @@ public:
(narInfo ? narInfo->compression : "", narInfo != 0)
(narInfo && narInfo->fileHash ? narInfo->fileHash->to_string(Base32, true) : "", narInfo && narInfo->fileHash)
(narInfo ? narInfo->fileSize : 0, narInfo != 0 && narInfo->fileSize)
- (info->narHash->to_string(Base32, true))
+ (info->narHash.to_string(Base32, true))
(info->narSize)
(concatStringsSep(" ", info->shortRefs()))
(info->deriver ? std::string(info->deriver->to_string()) : "", (bool) info->deriver)
diff --git a/src/libstore/nar-info.cc b/src/libstore/nar-info.cc
index 5812aa4ac..3454f34bb 100644
--- a/src/libstore/nar-info.cc
+++ b/src/libstore/nar-info.cc
@@ -1,10 +1,11 @@
#include "globals.hh"
#include "nar-info.hh"
+#include "store-api.hh"
namespace nix {
NarInfo::NarInfo(const Store & store, const std::string & s, const std::string & whence)
- : ValidPathInfo(StorePath(StorePath::dummy)) // FIXME: hack
+ : ValidPathInfo(StorePath(StorePath::dummy), Hash(Hash::dummy)) // FIXME: hack
{
auto corrupt = [&]() {
return Error("NAR info file '%1%' is corrupt", whence);
@@ -19,6 +20,7 @@ NarInfo::NarInfo(const Store & store, const std::string & s, const std::string &
};
bool havePath = false;
+ bool haveNarHash = false;
size_t pos = 0;
while (pos < s.size()) {
@@ -46,8 +48,10 @@ NarInfo::NarInfo(const Store & store, const std::string & s, const std::string &
else if (name == "FileSize") {
if (!string2Int(value, fileSize)) throw corrupt();
}
- else if (name == "NarHash")
+ else if (name == "NarHash") {
narHash = parseHashField(value);
+ haveNarHash = true;
+ }
else if (name == "NarSize") {
if (!string2Int(value, narSize)) throw corrupt();
}
@@ -76,7 +80,7 @@ NarInfo::NarInfo(const Store & store, const std::string & s, const std::string &
if (compression == "") compression = "bzip2";
- if (!havePath || url.empty() || narSize == 0 || !narHash) throw corrupt();
+ if (!havePath || !haveNarHash || url.empty() || narSize == 0) throw corrupt();
}
std::string NarInfo::to_string(const Store & store) const
@@ -89,8 +93,8 @@ std::string NarInfo::to_string(const Store & store) const
assert(fileHash && fileHash->type == htSHA256);
res += "FileHash: " + fileHash->to_string(Base32, true) + "\n";
res += "FileSize: " + std::to_string(fileSize) + "\n";
- assert(narHash && narHash->type == htSHA256);
- res += "NarHash: " + narHash->to_string(Base32, true) + "\n";
+ assert(narHash.type == htSHA256);
+ res += "NarHash: " + narHash.to_string(Base32, true) + "\n";
res += "NarSize: " + std::to_string(narSize) + "\n";
res += "References: " + concatStringsSep(" ", shortRefs()) + "\n";
diff --git a/src/libstore/nar-info.hh b/src/libstore/nar-info.hh
index eff19f0ef..39ced76e5 100644
--- a/src/libstore/nar-info.hh
+++ b/src/libstore/nar-info.hh
@@ -2,10 +2,12 @@
#include "types.hh"
#include "hash.hh"
-#include "store-api.hh"
+#include "path-info.hh"
namespace nix {
+class Store;
+
struct NarInfo : ValidPathInfo
{
std::string url;
@@ -15,7 +17,7 @@ struct NarInfo : ValidPathInfo
std::string system;
NarInfo() = delete;
- NarInfo(StorePath && path) : ValidPathInfo(std::move(path)) { }
+ NarInfo(StorePath && path, Hash narHash) : ValidPathInfo(std::move(path), narHash) { }
NarInfo(const ValidPathInfo & info) : ValidPathInfo(info) { }
NarInfo(const Store & store, const std::string & s, const std::string & whence);
diff --git a/src/libstore/parsed-derivations.cc b/src/libstore/parsed-derivations.cc
index 24f848e46..e7b7202d4 100644
--- a/src/libstore/parsed-derivations.cc
+++ b/src/libstore/parsed-derivations.cc
@@ -94,7 +94,7 @@ StringSet ParsedDerivation::getRequiredSystemFeatures() const
return res;
}
-bool ParsedDerivation::canBuildLocally() const
+bool ParsedDerivation::canBuildLocally(Store & localStore) const
{
if (drv.platform != settings.thisSystem.get()
&& !settings.extraPlatforms.get().count(drv.platform)
@@ -102,14 +102,14 @@ bool ParsedDerivation::canBuildLocally() const
return false;
for (auto & feature : getRequiredSystemFeatures())
- if (!settings.systemFeatures.get().count(feature)) return false;
+ if (!localStore.systemFeatures.get().count(feature)) return false;
return true;
}
-bool ParsedDerivation::willBuildLocally() const
+bool ParsedDerivation::willBuildLocally(Store & localStore) const
{
- return getBoolAttr("preferLocalBuild") && canBuildLocally();
+ return getBoolAttr("preferLocalBuild") && canBuildLocally(localStore);
}
bool ParsedDerivation::substitutesAllowed() const
diff --git a/src/libstore/parsed-derivations.hh b/src/libstore/parsed-derivations.hh
index 6ee172d81..3fa09f34f 100644
--- a/src/libstore/parsed-derivations.hh
+++ b/src/libstore/parsed-derivations.hh
@@ -29,9 +29,9 @@ public:
StringSet getRequiredSystemFeatures() const;
- bool canBuildLocally() const;
+ bool canBuildLocally(Store & localStore) const;
- bool willBuildLocally() const;
+ bool willBuildLocally(Store & localStore) const;
bool substitutesAllowed() const;
};
diff --git a/src/libstore/path-info.hh b/src/libstore/path-info.hh
index 2a015ea3c..8ff5c466e 100644
--- a/src/libstore/path-info.hh
+++ b/src/libstore/path-info.hh
@@ -1,5 +1,6 @@
#pragma once
+#include "crypto.hh"
#include "path.hh"
#include "hash.hh"
#include "content-address.hh"
@@ -29,7 +30,7 @@ struct ValidPathInfo
StorePath path;
std::optional<StorePath> deriver;
// TODO document this
- std::optional<Hash> narHash;
+ Hash narHash;
StorePathSet references;
time_t registrationTime = 0;
uint64_t narSize = 0; // 0 = unknown
@@ -100,8 +101,8 @@ struct ValidPathInfo
ValidPathInfo(const ValidPathInfo & other) = default;
- ValidPathInfo(StorePath && path) : path(std::move(path)) { };
- ValidPathInfo(const StorePath & path) : path(path) { };
+ ValidPathInfo(StorePath && path, Hash narHash) : path(std::move(path)), narHash(narHash) { };
+ ValidPathInfo(const StorePath & path, Hash narHash) : path(path), narHash(narHash) { };
virtual ~ValidPathInfo() { }
};
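With `narHash` no longer `std::optional`, a `ValidPathInfo` must be constructed with its hash up front. A minimal sketch of the new construction pattern (the helper function below is hypothetical):

```cpp
// Sketch: constructing a ValidPathInfo now that narHash is mandatory.
// Assumes the Nix headers; the caller supplies the path, hash and size.
#include "path-info.hh"
#include "hash.hh"

using namespace nix;

ValidPathInfo makeInfo(StorePath path, Hash narHash, uint64_t narSize)
{
    // Before this change: ValidPathInfo info(path); info.narHash = narHash;
    ValidPathInfo info { std::move(path), narHash };
    info.narSize = narSize;
    return info;
}
```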
diff --git a/src/libstore/profiles.cc b/src/libstore/profiles.cc
index 6862b42f0..c20386e2b 100644
--- a/src/libstore/profiles.cc
+++ b/src/libstore/profiles.cc
@@ -72,7 +72,7 @@ static void makeName(const Path & profile, GenerationNumber num,
}
-Path createGeneration(ref<LocalFSStore> store, Path profile, Path outPath)
+Path createGeneration(ref<LocalFSStore> store, Path profile, StorePath outPath)
{
    /* The new generation number should be higher than the
       previous ones. */
@@ -82,7 +82,7 @@ Path createGeneration(ref<LocalFSStore> store, Path profile, Path outPath)
if (gens.size() > 0) {
Generation last = gens.back();
- if (readLink(last.path) == outPath) {
+ if (readLink(last.path) == store->printStorePath(outPath)) {
/* We only create a new generation symlink if it differs
from the last one.
@@ -105,7 +105,7 @@ Path createGeneration(ref<LocalFSStore> store, Path profile, Path outPath)
user environment etc. we've just built. */
Path generation;
makeName(profile, num + 1, generation);
- store->addPermRoot(store->parseStorePath(outPath), generation, false, true);
+ store->addPermRoot(outPath, generation);
return generation;
}
diff --git a/src/libstore/profiles.hh b/src/libstore/profiles.hh
index abe507f0e..be55a65d4 100644
--- a/src/libstore/profiles.hh
+++ b/src/libstore/profiles.hh
@@ -8,6 +8,8 @@
namespace nix {
+class StorePath;
+
typedef unsigned int GenerationNumber;
@@ -28,7 +30,7 @@ std::pair<Generations, std::optional<GenerationNumber>> findGenerations(Path pro
class LocalFSStore;
-Path createGeneration(ref<LocalFSStore> store, Path profile, Path outPath);
+Path createGeneration(ref<LocalFSStore> store, Path profile, StorePath outPath);
void deleteGeneration(const Path & profile, GenerationNumber gen);
diff --git a/src/libstore/remote-store.cc b/src/libstore/remote-store.cc
index 33d1e431b..e4a4ef5af 100644
--- a/src/libstore/remote-store.cc
+++ b/src/libstore/remote-store.cc
@@ -31,7 +31,6 @@ template<> StorePathSet readStorePaths(const Store & store, Source & from)
return paths;
}
-
void writeStorePaths(const Store & store, Sink & out, const StorePathSet & paths)
{
out << paths.size();
@@ -39,6 +38,7 @@ void writeStorePaths(const Store & store, Sink & out, const StorePathSet & paths
out << store.printStorePath(i);
}
+
StorePathCAMap readStorePathCAMap(const Store & store, Source & from)
{
StorePathCAMap paths;
@@ -57,30 +57,36 @@ void writeStorePathCAMap(const Store & store, Sink & out, const StorePathCAMap &
}
}
-std::map<string, StorePath> readOutputPathMap(const Store & store, Source & from)
+
+namespace worker_proto {
+
+StorePath read(const Store & store, Source & from, Phantom<StorePath> _)
{
- std::map<string, StorePath> pathMap;
- auto rawInput = readStrings<Strings>(from);
- if (rawInput.size() % 2)
- throw Error("got an odd number of elements from the daemon when trying to read a output path map");
- auto curInput = rawInput.begin();
- while (curInput != rawInput.end()) {
- auto thisKey = *curInput++;
- auto thisValue = *curInput++;
- pathMap.emplace(thisKey, store.parseStorePath(thisValue));
- }
- return pathMap;
+ return store.parseStorePath(readString(from));
}
-void writeOutputPathMap(const Store & store, Sink & out, const std::map<string, StorePath> & pathMap)
+void write(const Store & store, Sink & out, const StorePath & storePath)
{
- out << 2*pathMap.size();
- for (auto & i : pathMap) {
- out << i.first;
- out << store.printStorePath(i.second);
- }
+ out << store.printStorePath(storePath);
+}
+
+
+template<>
+std::optional<StorePath> read(const Store & store, Source & from, Phantom<std::optional<StorePath>> _)
+{
+ auto s = readString(from);
+ return s == "" ? std::optional<StorePath> {} : store.parseStorePath(s);
}
+template<>
+void write(const Store & store, Sink & out, const std::optional<StorePath> & storePathOpt)
+{
+ out << (storePathOpt ? store.printStorePath(*storePathOpt) : "");
+}
+
+}
+
+
/* TODO: Separate these store impls into different files, give them better names */
RemoteStore::RemoteStore(const Params & params)
: Store(params)
@@ -278,9 +284,9 @@ struct ConnectionHandle
RemoteStore::Connection * operator -> () { return &*handle; }
- void processStderr(Sink * sink = 0, Source * source = 0)
+ void processStderr(Sink * sink = 0, Source * source = 0, bool flush = true)
{
- auto ex = handle->processStderr(sink, source);
+ auto ex = handle->processStderr(sink, source, flush);
if (ex) {
daemonException = true;
std::rethrow_exception(ex);
@@ -419,10 +425,10 @@ void RemoteStore::queryPathInfoUncached(const StorePath & path,
bool valid; conn->from >> valid;
if (!valid) throw InvalidPath("path '%s' is not valid", printStorePath(path));
}
- info = std::make_shared<ValidPathInfo>(StorePath(path));
auto deriver = readString(conn->from);
+ auto narHash = Hash::parseAny(readString(conn->from), htSHA256);
+ info = std::make_shared<ValidPathInfo>(path, narHash);
if (deriver != "") info->deriver = parseStorePath(deriver);
- info->narHash = Hash::parseAny(readString(conn->from), htSHA256);
info->references = readStorePaths<StorePathSet>(*this, conn->from);
conn->from >> info->registrationTime >> info->narSize;
if (GET_PROTOCOL_MINOR(conn->daemonVersion) >= 16) {
@@ -468,12 +474,12 @@ StorePathSet RemoteStore::queryDerivationOutputs(const StorePath & path)
}
-OutputPathMap RemoteStore::queryDerivationOutputMap(const StorePath & path)
+std::map<std::string, std::optional<StorePath>> RemoteStore::queryPartialDerivationOutputMap(const StorePath & path)
{
auto conn(getConnection());
conn->to << wopQueryDerivationOutputMap << printStorePath(path);
conn.processStderr();
- return readOutputPathMap(*this, conn->from);
+ return worker_proto::read(*this, conn->from, Phantom<std::map<std::string, std::optional<StorePath>>> {});
}
@@ -521,7 +527,7 @@ void RemoteStore::addToStore(const ValidPathInfo & info, Source & source,
conn->to << wopAddToStoreNar
<< printStorePath(info.path)
<< (info.deriver ? printStorePath(*info.deriver) : "")
- << info.narHash->to_string(Base16, false);
+ << info.narHash.to_string(Base16, false);
writeStorePaths(*this, conn->to, info.references);
conn->to << info.registrationTime << info.narSize
<< info.ultimate << info.sigs << renderContentAddress(info.ca)
@@ -529,6 +535,8 @@ void RemoteStore::addToStore(const ValidPathInfo & info, Source & source,
if (GET_PROTOCOL_MINOR(conn->daemonVersion) >= 23) {
+ conn->to.flush();
+
std::exception_ptr ex;
struct FramedSink : BufferedSink
@@ -568,7 +576,7 @@ void RemoteStore::addToStore(const ValidPathInfo & info, Source & source,
std::thread stderrThread([&]()
{
try {
- conn.processStderr();
+ conn.processStderr(nullptr, nullptr, false);
} catch (...) {
ex = std::current_exception();
}
@@ -878,9 +886,10 @@ static Logger::Fields readFields(Source & from)
}
-std::exception_ptr RemoteStore::Connection::processStderr(Sink * sink, Source * source)
+std::exception_ptr RemoteStore::Connection::processStderr(Sink * sink, Source * source, bool flush)
{
- to.flush();
+ if (flush)
+ to.flush();
while (true) {
diff --git a/src/libstore/remote-store.hh b/src/libstore/remote-store.hh
index 72d2a6689..7cf4c4d12 100644
--- a/src/libstore/remote-store.hh
+++ b/src/libstore/remote-store.hh
@@ -51,7 +51,7 @@ public:
StorePathSet queryDerivationOutputs(const StorePath & path) override;
- OutputPathMap queryDerivationOutputMap(const StorePath & path) override;
+ std::map<std::string, std::optional<StorePath>> queryPartialDerivationOutputMap(const StorePath & path) override;
std::optional<StorePath> queryPathFromHashPart(const std::string & hashPart) override;
StorePathSet querySubstitutablePaths(const StorePathSet & paths) override;
@@ -114,7 +114,7 @@ protected:
virtual ~Connection();
- std::exception_ptr processStderr(Sink * sink = 0, Source * source = 0);
+ std::exception_ptr processStderr(Sink * sink = 0, Source * source = 0, bool flush = true);
};
ref<Connection> openConnectionWrapper();
diff --git a/src/libstore/store-api.cc b/src/libstore/store-api.cc
index 2837910b9..a41441079 100644
--- a/src/libstore/store-api.cc
+++ b/src/libstore/store-api.cc
@@ -1,11 +1,11 @@
#include "crypto.hh"
+#include "fs-accessor.hh"
#include "globals.hh"
#include "store-api.hh"
#include "util.hh"
#include "nar-info-disk-cache.hh"
#include "thread-pool.hh"
#include "json.hh"
-#include "derivations.hh"
#include "url.hh"
#include "archive.hh"
@@ -320,8 +320,10 @@ ValidPathInfo Store::addToStoreSlow(std::string_view name, const Path & srcPath,
if (expectedCAHash && expectedCAHash != hash)
throw Error("hash mismatch for '%s'", srcPath);
- ValidPathInfo info(makeFixedOutputPath(method, hash, name));
- info.narHash = narHash;
+ ValidPathInfo info {
+ makeFixedOutputPath(method, hash, name),
+ narHash,
+ };
info.narSize = narSize;
info.ca = FixedOutputHash { .method = method, .hash = hash };
@@ -357,6 +359,17 @@ bool Store::PathInfoCacheValue::isKnownNow()
return std::chrono::steady_clock::now() < time_point + ttl;
}
+OutputPathMap Store::queryDerivationOutputMap(const StorePath & path) {
+ auto resp = queryPartialDerivationOutputMap(path);
+ OutputPathMap result;
+ for (auto & [outName, optOutPath] : resp) {
+ if (!optOutPath)
+ throw Error("output '%s' has no store path mapped to it", outName);
+ result.insert_or_assign(outName, *optOutPath);
+ }
+ return result;
+}
+
StorePathSet Store::queryDerivationOutputs(const StorePath & path)
{
auto outputMap = this->queryDerivationOutputMap(path);
@@ -565,7 +578,7 @@ string Store::makeValidityRegistration(const StorePathSet & paths,
auto info = queryPathInfo(i);
if (showHash) {
- s += info->narHash->to_string(Base16, false) + "\n";
+ s += info->narHash.to_string(Base16, false) + "\n";
s += (format("%1%\n") % info->narSize).str();
}
@@ -597,7 +610,7 @@ void Store::pathInfoToJSON(JSONPlaceholder & jsonOut, const StorePathSet & store
auto info = queryPathInfo(storePath);
jsonPath
- .attr("narHash", info->narHash->to_string(hashBase, true))
+ .attr("narHash", info->narHash.to_string(hashBase, true))
.attr("narSize", info->narSize);
{
@@ -725,20 +738,6 @@ void copyStorePath(ref<Store> srcStore, ref<Store> dstStore,
info = info2;
}
- if (!info->narHash) {
- StringSink sink;
- srcStore->narFromPath({storePath}, sink);
- auto info2 = make_ref<ValidPathInfo>(*info);
- info2->narHash = hashString(htSHA256, *sink.s);
- if (!info->narSize) info2->narSize = sink.s->size();
- if (info->ultimate) info2->ultimate = false;
- info = info2;
-
- StringSource source(*sink.s);
- dstStore->addToStore(*info, source, repair, checkSigs);
- return;
- }
-
if (info->ultimate) {
auto info2 = make_ref<ValidPathInfo>(*info);
info2->ultimate = false;
@@ -746,12 +745,12 @@ void copyStorePath(ref<Store> srcStore, ref<Store> dstStore,
}
auto source = sinkToSource([&](Sink & sink) {
- LambdaSink wrapperSink([&](const unsigned char * data, size_t len) {
- sink(data, len);
+ LambdaSink progressSink([&](const unsigned char * data, size_t len) {
total += len;
act.progress(total, info->narSize);
});
- srcStore->narFromPath(storePath, wrapperSink);
+ TeeSink tee { sink, progressSink };
+ srcStore->narFromPath(storePath, tee);
}, [&]() {
throw EndOfFile("NAR for '%s' fetched from '%s' is incomplete", srcStore->printStorePath(storePath), srcStore->getUri());
});
@@ -863,19 +862,22 @@ void copyClosure(ref<Store> srcStore, ref<Store> dstStore,
}
-std::optional<ValidPathInfo> decodeValidPathInfo(const Store & store, std::istream & str, bool hashGiven)
+std::optional<ValidPathInfo> decodeValidPathInfo(const Store & store, std::istream & str, std::optional<HashResult> hashGiven)
{
std::string path;
getline(str, path);
if (str.eof()) { return {}; }
- ValidPathInfo info(store.parseStorePath(path));
- if (hashGiven) {
+ if (!hashGiven) {
string s;
getline(str, s);
- info.narHash = Hash::parseAny(s, htSHA256);
+ auto narHash = Hash::parseAny(s, htSHA256);
getline(str, s);
- if (!string2Int(s, info.narSize)) throw Error("number expected");
+ uint64_t narSize;
+ if (!string2Int(s, narSize)) throw Error("number expected");
+ hashGiven = { narHash, narSize };
}
+ ValidPathInfo info(store.parseStorePath(path), hashGiven->first);
+ info.narSize = hashGiven->second;
std::string deriver;
getline(str, deriver);
if (deriver != "") info.deriver = store.parseStorePath(deriver);
@@ -910,12 +912,12 @@ string showPaths(const PathSet & paths)
std::string ValidPathInfo::fingerprint(const Store & store) const
{
- if (narSize == 0 || !narHash)
- throw Error("cannot calculate fingerprint of path '%s' because its size/hash is not known",
+ if (narSize == 0)
+ throw Error("cannot calculate fingerprint of path '%s' because its size is not known",
store.printStorePath(path));
return
"1;" + store.printStorePath(path) + ";"
- + narHash->to_string(Base32, true) + ";"
+ + narHash.to_string(Base32, true) + ";"
+ std::to_string(narSize) + ";"
+ concatStringsSep(",", store.printStorePathSet(references));
}
@@ -981,6 +983,26 @@ Strings ValidPathInfo::shortRefs() const
}
+Derivation Store::derivationFromPath(const StorePath & drvPath)
+{
+ ensurePath(drvPath);
+ return readDerivation(drvPath);
+}
+
+
+Derivation Store::readDerivation(const StorePath & drvPath)
+{
+ auto accessor = getFSAccessor();
+ try {
+ return parseDerivation(*this,
+ accessor->readFile(printStorePath(drvPath)),
+ Derivation::nameFromPath(drvPath));
+ } catch (FormatError & e) {
+ throw Error("error parsing derivation '%s': %s", printStorePath(drvPath), e.msg());
+ }
+}
+
+
}
diff --git a/src/libstore/store-api.hh b/src/libstore/store-api.hh
index e94d975c5..61aa3ba7e 100644
--- a/src/libstore/store-api.hh
+++ b/src/libstore/store-api.hh
@@ -4,7 +4,6 @@
#include "hash.hh"
#include "content-address.hh"
#include "serialise.hh"
-#include "crypto.hh"
#include "lru-cache.hh"
#include "sync.hh"
#include "globals.hh"
@@ -164,6 +163,10 @@ public:
Setting<bool> wantMassQuery{this, false, "want-mass-query", "whether this substituter can be queried efficiently for path validity"};
+ Setting<StringSet> systemFeatures{this, settings.systemFeatures,
+ "system-features",
+ "Optional features that the system this store builds on implements (like \"kvm\")."};
+
protected:
struct PathInfoCacheValue {
@@ -340,9 +343,15 @@ public:
/* Query the outputs of the derivation denoted by `path'. */
virtual StorePathSet queryDerivationOutputs(const StorePath & path);
- /* Query the mapping outputName=>outputPath for the given derivation */
- virtual OutputPathMap queryDerivationOutputMap(const StorePath & path)
- { unsupported("queryDerivationOutputMap"); }
+    /* Query the mapping outputName => outputPath for the given derivation. All
+       outputs are mentioned, so those missing a mapping are mapped to
+       `std::nullopt`. */
+ virtual std::map<std::string, std::optional<StorePath>> queryPartialDerivationOutputMap(const StorePath & path)
+ { unsupported("queryPartialDerivationOutputMap"); }
+
+ /* Query the mapping outputName=>outputPath for the given derivation.
+ Assume every output has a mapping and throw an exception otherwise. */
+ OutputPathMap queryDerivationOutputMap(const StorePath & path);
/* Query the full store path given the hash part of a valid store
path, or empty if the path doesn't exist. */
@@ -640,8 +649,7 @@ public:
ref<FSAccessor> getFSAccessor() override;
/* Register a permanent GC root. */
- Path addPermRoot(const StorePath & storePath,
- const Path & gcRoot, bool indirect, bool allowOutsideRootsDir = false);
+ Path addPermRoot(const StorePath & storePath, const Path & gcRoot);
virtual Path getRealStoreDir() { return storeDir; }
@@ -761,7 +769,7 @@ string showPaths(const PathSet & paths);
std::optional<ValidPathInfo> decodeValidPathInfo(
const Store & store,
std::istream & str,
- bool hashGiven = false);
+ std::optional<HashResult> hashGiven = std::nullopt);
/* Split URI into protocol+hierarchy part and its parameter set. */
std::pair<std::string, Store::Params> splitUriAndParams(const std::string & uri);
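A short sketch contrasting the two queries declared above: the partial variant may map an output to `std::nullopt`, while `queryDerivationOutputMap` throws if any output lacks a path. It assumes the Nix headers; the function name is made up.

```cpp
// Sketch: the two derivation output-map queries.
// Assumes the Nix headers; drvPath is a .drv path already in the store.
#include "store-api.hh"
#include <iostream>

using namespace nix;

void showOutputs(Store & store, const StorePath & drvPath)
{
    // Outputs whose store path is not known map to std::nullopt.
    for (auto & [name, maybePath] : store.queryPartialDerivationOutputMap(drvPath))
        std::cout << name << " -> "
                  << (maybePath ? store.printStorePath(*maybePath) : "(unknown)")
                  << "\n";

    // The non-partial wrapper assumes every output is mapped and throws otherwise.
    OutputPathMap all = store.queryDerivationOutputMap(drvPath);
    std::cout << all.size() << " outputs in total\n";
}
```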
diff --git a/src/libstore/worker-protocol.hh b/src/libstore/worker-protocol.hh
index f76b13fb4..13cf8d4ab 100644
--- a/src/libstore/worker-protocol.hh
+++ b/src/libstore/worker-protocol.hh
@@ -6,7 +6,7 @@ namespace nix {
#define WORKER_MAGIC_1 0x6e697863
#define WORKER_MAGIC_2 0x6478696f
-#define PROTOCOL_VERSION 0x117
+#define PROTOCOL_VERSION 0x118
#define GET_PROTOCOL_MAJOR(x) ((x) & 0xff00)
#define GET_PROTOCOL_MINOR(x) ((x) & 0x00ff)
@@ -70,10 +70,84 @@ template<class T> T readStorePaths(const Store & store, Source & from);
void writeStorePaths(const Store & store, Sink & out, const StorePathSet & paths);
+/* To guide overloading */
+template<typename T>
+struct Phantom {};
+
+
+namespace worker_proto {
+/* FIXME maybe move more stuff inside here */
+
+StorePath read(const Store & store, Source & from, Phantom<StorePath> _);
+void write(const Store & store, Sink & out, const StorePath & storePath);
+
+template<typename T>
+std::map<std::string, T> read(const Store & store, Source & from, Phantom<std::map<std::string, T>> _);
+template<typename T>
+void write(const Store & store, Sink & out, const std::map<string, T> & resMap);
+template<typename T>
+std::optional<T> read(const Store & store, Source & from, Phantom<std::optional<T>> _);
+template<typename T>
+void write(const Store & store, Sink & out, const std::optional<T> & optVal);
+
+/* Specialization which uses an empty string for the empty case, taking
+   advantage of the fact that StorePaths always serialize to a non-empty string.
+   This is done primarily for backwards compatibility, so that StorePath <=
+   std::optional<StorePath>, where <= is the compatibility partial order.
+ */
+template<>
+void write(const Store & store, Sink & out, const std::optional<StorePath> & optVal);
+
+template<typename T>
+std::map<std::string, T> read(const Store & store, Source & from, Phantom<std::map<std::string, T>> _)
+{
+ std::map<string, T> resMap;
+ auto size = (size_t)readInt(from);
+ while (size--) {
+ auto thisKey = readString(from);
+ resMap.insert_or_assign(std::move(thisKey), nix::worker_proto::read(store, from, Phantom<T> {}));
+ }
+ return resMap;
+}
+
+template<typename T>
+void write(const Store & store, Sink & out, const std::map<string, T> & resMap)
+{
+ out << resMap.size();
+ for (auto & i : resMap) {
+ out << i.first;
+ nix::worker_proto::write(store, out, i.second);
+ }
+}
+
+template<typename T>
+std::optional<T> read(const Store & store, Source & from, Phantom<std::optional<T>> _)
+{
+ auto tag = readNum<uint8_t>(from);
+ switch (tag) {
+ case 0:
+ return std::nullopt;
+ case 1:
+ return nix::worker_proto::read(store, from, Phantom<T> {});
+ default:
+ throw Error("got an invalid tag bit for std::optional: %#04x", tag);
+ }
+}
+
+template<typename T>
+void write(const Store & store, Sink & out, const std::optional<T> & optVal)
+{
+ out << (optVal ? 1 : 0);
+ if (optVal)
+ nix::worker_proto::write(store, out, *optVal);
+}
+
+
+}
+
+
StorePathCAMap readStorePathCAMap(const Store & store, Source & from);
void writeStorePathCAMap(const Store & store, Sink & out, const StorePathCAMap & paths);
-void writeOutputPathMap(const Store & store, Sink & out, const OutputPathMap & paths);
-
}
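To illustrate the `worker_proto` helpers declared above, here is a sketch that round-trips a `std::map<std::string, StorePath>` through an in-memory buffer; optional values are handled by further overloads (an empty string for `StorePath`, a tag byte in the generic case). The function is hypothetical and assumes the Nix headers.

```cpp
// Sketch: round-tripping a value with the new worker_proto helpers.
// Assumes the Nix headers; uses an in-memory buffer instead of a socket.
#include "worker-protocol.hh"
#include "store-api.hh"
#include "serialise.hh"
#include <cassert>

using namespace nix;

void roundTrip(const Store & store,
    const std::map<std::string, StorePath> & outputs)
{
    StringSink sink;
    // Writes the map size, then each name followed by its printed store path.
    worker_proto::write(store, sink, outputs);

    StringSource source(*sink.s);
    auto decoded = worker_proto::read(store, source,
        Phantom<std::map<std::string, StorePath>> {});
    assert(decoded == outputs);
}
```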
diff --git a/src/libutil/archive.cc b/src/libutil/archive.cc
index ce7cf9754..14399dea3 100644
--- a/src/libutil/archive.cc
+++ b/src/libutil/archive.cc
@@ -366,11 +366,7 @@ void copyNAR(Source & source, Sink & sink)
ParseSink parseSink; /* null sink; just parse the NAR */
- LambdaSource wrapper([&](unsigned char * data, size_t len) {
- auto n = source.read(data, len);
- sink(data, n);
- return n;
- });
+ TeeSource wrapper { source, sink };
parseDump(parseSink, wrapper);
}
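The `TeeSource` that replaces the hand-rolled `LambdaSource` wrappers forwards each read while feeding the same bytes to a sink. A sketch, assuming the Nix headers, that hashes a NAR while copying it:

```cpp
// Sketch: TeeSource forwards data read from `inner` and mirrors it into a sink.
// Assumes the Nix headers; here the tee'd sink computes a SHA-256 hash.
#include "serialise.hh"
#include "hash.hh"
#include "archive.hh"

using namespace nix;

Hash hashWhileCopying(Source & inner, Sink & dest)
{
    HashSink hashSink(htSHA256);
    TeeSource tee { inner, hashSink };  // every read is mirrored into hashSink
    copyNAR(tee, dest);                 // consume the NAR, forwarding it to dest
    return hashSink.finish().first;     // finish() returns (hash, size)
}
```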
diff --git a/src/libutil/args.cc b/src/libutil/args.cc
index 986c5d1cd..147602415 100644
--- a/src/libutil/args.cc
+++ b/src/libutil/args.cc
@@ -3,6 +3,8 @@
#include <glob.h>
+#include <nlohmann/json.hpp>
+
namespace nix {
void Args::addFlag(Flag && flag_)
@@ -205,6 +207,43 @@ bool Args::processArgs(const Strings & args, bool finish)
return res;
}
+nlohmann::json Args::toJSON()
+{
+ auto flags = nlohmann::json::object();
+
+ for (auto & [name, flag] : longFlags) {
+ auto j = nlohmann::json::object();
+ if (flag->shortName)
+ j["shortName"] = std::string(1, flag->shortName);
+ if (flag->description != "")
+ j["description"] = flag->description;
+ if (flag->category != "")
+ j["category"] = flag->category;
+ if (flag->handler.arity != ArityAny)
+ j["arity"] = flag->handler.arity;
+ if (!flag->labels.empty())
+ j["labels"] = flag->labels;
+ flags[name] = std::move(j);
+ }
+
+ auto args = nlohmann::json::array();
+
+ for (auto & arg : expectedArgs) {
+ auto j = nlohmann::json::object();
+ j["label"] = arg.label;
+ j["optional"] = arg.optional;
+ if (arg.handler.arity != ArityAny)
+ j["arity"] = arg.handler.arity;
+ args.push_back(std::move(j));
+ }
+
+ auto res = nlohmann::json::object();
+ res["description"] = description();
+ res["flags"] = std::move(flags);
+ res["args"] = std::move(args);
+ return res;
+}
+
static void hashTypeCompleter(size_t index, std::string_view prefix)
{
for (auto & type : hashTypes)
@@ -313,11 +352,29 @@ void Command::printHelp(const string & programName, std::ostream & out)
}
}
+nlohmann::json Command::toJSON()
+{
+ auto exs = nlohmann::json::array();
+
+ for (auto & example : examples()) {
+ auto ex = nlohmann::json::object();
+ ex["description"] = example.description;
+ ex["command"] = chomp(stripIndentation(example.command));
+ exs.push_back(std::move(ex));
+ }
+
+ auto res = Args::toJSON();
+ res["examples"] = std::move(exs);
+ auto s = doc();
+ if (s != "") res.emplace("doc", stripIndentation(s));
+ return res;
+}
+
MultiCommand::MultiCommand(const Commands & commands)
: commands(commands)
{
expectArgs({
- .label = "command",
+ .label = "subcommand",
.optional = true,
.handler = {[=](std::string s) {
assert(!command);
@@ -387,4 +444,20 @@ bool MultiCommand::processArgs(const Strings & args, bool finish)
return Args::processArgs(args, finish);
}
+nlohmann::json MultiCommand::toJSON()
+{
+ auto cmds = nlohmann::json::object();
+
+ for (auto & [name, commandFun] : commands) {
+ auto command = commandFun();
+ auto j = command->toJSON();
+ j["category"] = categories[command->category()];
+ cmds[name] = std::move(j);
+ }
+
+ auto res = Args::toJSON();
+ res["commands"] = std::move(cmds);
+ return res;
+}
+
}
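A sketch of how the new `toJSON()` introspection might be driven; `MyArgs` and its single flag are made up for illustration, and the output shape follows `Args::toJSON()` above.

```cpp
// Sketch: dumping CLI metadata via the new toJSON() method.
// Assumes the Nix headers; MyArgs and its --path flag are illustrative only.
#include "args.hh"
#include <nlohmann/json.hpp>
#include <iostream>

using namespace nix;

struct MyArgs : Args
{
    std::string path;

    MyArgs()
    {
        addFlag({
            .longName = "path",
            .description = "a path to operate on",
            .labels = {"path"},
            .handler = {&path},
        });
    }

    std::string description() override { return "demo argument parser"; }
};

int main()
{
    MyArgs args;
    // Emits {"description": ..., "flags": {...}, "args": [...]}.
    std::cout << args.toJSON().dump(2) << std::endl;
}
```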
diff --git a/src/libutil/args.hh b/src/libutil/args.hh
index 97a517344..3c1f87f7e 100644
--- a/src/libutil/args.hh
+++ b/src/libutil/args.hh
@@ -4,6 +4,8 @@
#include <map>
#include <memory>
+#include <nlohmann/json_fwd.hpp>
+
#include "util.hh"
namespace nix {
@@ -20,6 +22,7 @@ public:
virtual void printHelp(const string & programName, std::ostream & out);
+ /* Return a short one-line description of the command. */
virtual std::string description() { return ""; }
protected:
@@ -203,6 +206,8 @@ public:
});
}
+ virtual nlohmann::json toJSON();
+
friend class MultiCommand;
};
@@ -217,6 +222,9 @@ struct Command : virtual Args
virtual void prepare() { };
virtual void run() = 0;
+ /* Return documentation about this command, in Markdown format. */
+ virtual std::string doc() { return ""; }
+
struct Example
{
std::string description;
@@ -234,6 +242,8 @@ struct Command : virtual Args
virtual Category category() { return catDefault; }
void printHelp(const string & programName, std::ostream & out) override;
+
+ nlohmann::json toJSON() override;
};
typedef std::map<std::string, std::function<ref<Command>()>> Commands;
@@ -259,6 +269,8 @@ public:
bool processFlag(Strings::iterator & pos, Strings::iterator end) override;
bool processArgs(const Strings & args, bool finish) override;
+
+ nlohmann::json toJSON() override;
};
Strings argvToStrings(int argc, char * * argv);
diff --git a/src/libutil/config.cc b/src/libutil/config.cc
index 8fc700a2b..3cf720bce 100644
--- a/src/libutil/config.cc
+++ b/src/libutil/config.cc
@@ -1,6 +1,7 @@
#include "config.hh"
#include "args.hh"
-#include "json.hh"
+
+#include <nlohmann/json.hpp>
namespace nix {
@@ -131,15 +132,18 @@ void Config::resetOverriden()
s.second.setting->overriden = false;
}
-void Config::toJSON(JSONObject & out)
+nlohmann::json Config::toJSON()
{
+ auto res = nlohmann::json::object();
for (auto & s : _settings)
if (!s.second.isAlias) {
- JSONObject out2(out.object(s.first));
- out2.attr("description", s.second.setting->description);
- JSONPlaceholder out3(out2.placeholder("value"));
- s.second.setting->toJSON(out3);
+ auto obj = nlohmann::json::object();
+ obj.emplace("description", s.second.setting->description);
+ obj.emplace("aliases", s.second.setting->aliases);
+ obj.emplace("value", s.second.setting->toJSON());
+ res.emplace(s.first, obj);
}
+ return res;
}
void Config::convertToArgs(Args & args, const std::string & category)
@@ -153,7 +157,7 @@ AbstractSetting::AbstractSetting(
const std::string & name,
const std::string & description,
const std::set<std::string> & aliases)
- : name(name), description(description), aliases(aliases)
+ : name(name), description(stripIndentation(description)), aliases(aliases)
{
}
@@ -162,9 +166,9 @@ void AbstractSetting::setDefault(const std::string & str)
if (!overriden) set(str);
}
-void AbstractSetting::toJSON(JSONPlaceholder & out)
+nlohmann::json AbstractSetting::toJSON()
{
- out.write(to_string());
+ return to_string();
}
void AbstractSetting::convertToArg(Args & args, const std::string & category)
@@ -172,9 +176,9 @@ void AbstractSetting::convertToArg(Args & args, const std::string & category)
}
template<typename T>
-void BaseSetting<T>::toJSON(JSONPlaceholder & out)
+nlohmann::json BaseSetting<T>::toJSON()
{
- out.write(value);
+ return value;
}
template<typename T>
@@ -255,11 +259,9 @@ template<> std::string BaseSetting<Strings>::to_string() const
return concatStringsSep(" ", value);
}
-template<> void BaseSetting<Strings>::toJSON(JSONPlaceholder & out)
+template<> nlohmann::json BaseSetting<Strings>::toJSON()
{
- JSONList list(out.list());
- for (auto & s : value)
- list.elem(s);
+ return value;
}
template<> void BaseSetting<StringSet>::set(const std::string & str)
@@ -272,11 +274,9 @@ template<> std::string BaseSetting<StringSet>::to_string() const
return concatStringsSep(" ", value);
}
-template<> void BaseSetting<StringSet>::toJSON(JSONPlaceholder & out)
+template<> nlohmann::json BaseSetting<StringSet>::toJSON()
{
- JSONList list(out.list());
- for (auto & s : value)
- list.elem(s);
+ return value;
}
template class BaseSetting<int>;
@@ -323,10 +323,12 @@ void GlobalConfig::resetOverriden()
config->resetOverriden();
}
-void GlobalConfig::toJSON(JSONObject & out)
+nlohmann::json GlobalConfig::toJSON()
{
+ auto res = nlohmann::json::object();
for (auto & config : *configRegistrations)
- config->toJSON(out);
+ res.update(config->toJSON());
+ return res;
}
void GlobalConfig::convertToArgs(Args & args, const std::string & category)
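
A hedged usage sketch of the return-value based Config::toJSON(): each non-alias setting now serialises to an object carrying its description, aliases and value. The setting name and default below are invented; the Setting constructor signature matches the one exercised in the unit tests further down.

    #include <iostream>
    #include <nlohmann/json.hpp>
    #include "config.hh"

    using namespace nix;

    int main()
    {
        Config config;
        Setting<std::string> setting{&config, "default", "example-setting", "An example setting."};
        std::cout << config.toJSON().dump() << "\n";
        // {"example-setting":{"aliases":[],"description":"An example setting.\n","value":"default"}}
    }
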
diff --git a/src/libutil/config.hh b/src/libutil/config.hh
index 66073546e..2b4265806 100644
--- a/src/libutil/config.hh
+++ b/src/libutil/config.hh
@@ -4,6 +4,8 @@
#include "types.hh"
+#include <nlohmann/json_fwd.hpp>
+
#pragma once
namespace nix {
@@ -42,8 +44,6 @@ namespace nix {
class Args;
class AbstractSetting;
-class JSONPlaceholder;
-class JSONObject;
class AbstractConfig
{
@@ -97,7 +97,7 @@ public:
* Outputs all settings to JSON
* - out: JSONObject to write the configuration to
*/
- virtual void toJSON(JSONObject & out) = 0;
+ virtual nlohmann::json toJSON() = 0;
/**
* Converts settings to `Args` to be used on the command line interface
@@ -167,7 +167,7 @@ public:
void resetOverriden() override;
- void toJSON(JSONObject & out) override;
+ nlohmann::json toJSON() override;
void convertToArgs(Args & args, const std::string & category) override;
};
@@ -206,7 +206,7 @@ protected:
virtual std::string to_string() const = 0;
- virtual void toJSON(JSONPlaceholder & out);
+ virtual nlohmann::json toJSON();
virtual void convertToArg(Args & args, const std::string & category);
@@ -251,7 +251,7 @@ public:
void convertToArg(Args & args, const std::string & category) override;
- void toJSON(JSONPlaceholder & out) override;
+ nlohmann::json toJSON() override;
};
template<typename T>
@@ -319,7 +319,7 @@ struct GlobalConfig : public AbstractConfig
void resetOverriden() override;
- void toJSON(JSONObject & out) override;
+ nlohmann::json toJSON() override;
void convertToArgs(Args & args, const std::string & category) override;
diff --git a/src/libutil/hash.cc b/src/libutil/hash.cc
index dfb3668f1..4a94f0dfd 100644
--- a/src/libutil/hash.cc
+++ b/src/libutil/hash.cc
@@ -136,6 +136,8 @@ std::string Hash::to_string(Base base, bool includeType) const
return s;
}
+Hash Hash::dummy(htSHA256);
+
Hash Hash::parseSRI(std::string_view original) {
auto rest = original;
diff --git a/src/libutil/hash.hh b/src/libutil/hash.hh
index 00ce7bb6f..6d6eb70ca 100644
--- a/src/libutil/hash.hh
+++ b/src/libutil/hash.hh
@@ -59,9 +59,6 @@ private:
Hash(std::string_view s, HashType type, bool isSRI);
public:
- /* Check whether a hash is set. */
- operator bool () const { return (bool) type; }
-
/* Check whether two hash are equal. */
bool operator == (const Hash & h2) const;
@@ -105,6 +102,8 @@ public:
assert(type == htSHA1);
return std::string(to_string(Base16, false), 0, 7);
}
+
+ static Hash dummy;
};
/* Helper that defaults empty hashes to the 0 hash. */
diff --git a/src/libutil/logging.cc b/src/libutil/logging.cc
index 832aee783..cbbf64395 100644
--- a/src/libutil/logging.cc
+++ b/src/libutil/logging.cc
@@ -184,6 +184,33 @@ struct JSONLogger : Logger {
json["action"] = "msg";
json["level"] = ei.level;
json["msg"] = oss.str();
+ json["raw_msg"] = ei.hint->str();
+
+ if (ei.errPos.has_value() && (*ei.errPos)) {
+ json["line"] = ei.errPos->line;
+ json["column"] = ei.errPos->column;
+ json["file"] = ei.errPos->file;
+ } else {
+ json["line"] = nullptr;
+ json["column"] = nullptr;
+ json["file"] = nullptr;
+ }
+
+ if (loggerSettings.showTrace.get() && !ei.traces.empty()) {
+ nlohmann::json traces = nlohmann::json::array();
+ for (auto iter = ei.traces.rbegin(); iter != ei.traces.rend(); ++iter) {
+ nlohmann::json stackFrame;
+ stackFrame["raw_msg"] = iter->hint.str();
+ if (iter->pos.has_value() && (*iter->pos)) {
+ stackFrame["line"] = iter->pos->line;
+ stackFrame["column"] = iter->pos->column;
+ stackFrame["file"] = iter->pos->file;
+ }
+ traces.push_back(stackFrame);
+ }
+
+ json["trace"] = traces;
+ }
write(json);
}
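
For reference, a hand-built sketch (all values invented) of the record the JSONLogger now emits for an error: the new raw_msg field, the nullable position fields, and, when show-trace is enabled, a trace array of stack frames.

    #include <iostream>
    #include <nlohmann/json.hpp>

    int main()
    {
        nlohmann::json json;
        json["action"] = "msg";
        json["level"] = 0;
        json["msg"] = "rendered, colourised error message";
        json["raw_msg"] = "the un-rendered hint text";
        json["file"] = "foo.nix";
        json["line"] = 3;
        json["column"] = 5;

        nlohmann::json frame = { {"raw_msg", "while evaluating x"},
                                 {"file", "foo.nix"}, {"line", 1}, {"column", 1} };
        json["trace"] = nlohmann::json::array({ frame });

        std::cout << json.dump() << "\n";
    }
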
diff --git a/src/libutil/logging.hh b/src/libutil/logging.hh
index b75b87f80..77b92fb51 100644
--- a/src/libutil/logging.hh
+++ b/src/libutil/logging.hh
@@ -37,10 +37,12 @@ typedef uint64_t ActivityId;
struct LoggerSettings : Config
{
- Setting<bool> showTrace{this,
- false,
- "show-trace",
- "Whether to show a stack trace on evaluation errors."};
+ Setting<bool> showTrace{
+ this, false, "show-trace",
+ R"(
+ Whether Nix should print out a stack trace in case of Nix
+ expression evaluation errors.
+ )"};
};
extern LoggerSettings loggerSettings;
diff --git a/src/libutil/serialise.hh b/src/libutil/serialise.hh
index c29c6b29b..a6f1c42e9 100644
--- a/src/libutil/serialise.hh
+++ b/src/libutil/serialise.hh
@@ -23,7 +23,8 @@ struct Sink
};
-/* A buffered abstract sink. */
+/* A buffered abstract sink. Warning: a BufferedSink should not be
+ used from multiple threads concurrently. */
struct BufferedSink : virtual Sink
{
size_t bufSize, bufPos;
@@ -66,7 +67,8 @@ struct Source
};
-/* A buffered abstract source. */
+/* A buffered abstract source. Warning: a BufferedSource should not be
+ used from multiple threads concurrently. */
struct BufferedSource : Source
{
size_t bufSize, bufPosIn, bufPosOut;
@@ -225,6 +227,17 @@ struct SizedSource : Source
}
};
+/* A sink that just counts the number of bytes given to it. */
+struct LengthSink : Sink
+{
+ uint64_t length = 0;
+
+ virtual void operator () (const unsigned char * _, size_t len)
+ {
+ length += len;
+ }
+};
+
/* Convert a function into a sink. */
struct LambdaSink : Sink
{
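
A hedged sketch of what the new LengthSink is for: counting the bytes of a serialisation without buffering them, for instance to learn a NAR's size. The data fed to it below is made up.

    #include <iostream>
    #include "serialise.hh"

    using namespace nix;

    int main()
    {
        LengthSink sink;
        std::string data = "some serialised bytes";
        sink((const unsigned char *) data.data(), data.size());
        std::cout << sink.length << " bytes\n";   // 21 bytes
    }
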
diff --git a/src/libutil/tests/config.cc b/src/libutil/tests/config.cc
index 74c59fd31..c5abefe11 100644
--- a/src/libutil/tests/config.cc
+++ b/src/libutil/tests/config.cc
@@ -1,9 +1,9 @@
-#include "json.hh"
#include "config.hh"
#include "args.hh"
#include <sstream>
#include <gtest/gtest.h>
+#include <nlohmann/json.hpp>
namespace nix {
@@ -33,7 +33,7 @@ namespace nix {
const auto iter = settings.find("name-of-the-setting");
ASSERT_NE(iter, settings.end());
ASSERT_EQ(iter->second.value, "");
- ASSERT_EQ(iter->second.description, "description");
+ ASSERT_EQ(iter->second.description, "description\n");
}
TEST(Config, getDefinedOverridenSettingNotSet) {
@@ -59,7 +59,7 @@ namespace nix {
const auto iter = settings.find("name-of-the-setting");
ASSERT_NE(iter, settings.end());
ASSERT_EQ(iter->second.value, "value");
- ASSERT_EQ(iter->second.description, "description");
+ ASSERT_EQ(iter->second.description, "description\n");
}
TEST(Config, getDefinedSettingSet2) {
@@ -73,7 +73,7 @@ namespace nix {
const auto e = settings.find("name-of-the-setting");
ASSERT_NE(e, settings.end());
ASSERT_EQ(e->second.value, "value");
- ASSERT_EQ(e->second.description, "description");
+ ASSERT_EQ(e->second.description, "description\n");
}
TEST(Config, addSetting) {
@@ -152,29 +152,16 @@ namespace nix {
}
TEST(Config, toJSONOnEmptyConfig) {
- std::stringstream out;
- { // Scoped to force the destructor of JSONObject to write the final `}`
- JSONObject obj(out);
- Config config;
- config.toJSON(obj);
- }
-
- ASSERT_EQ(out.str(), "{}");
+ ASSERT_EQ(Config().toJSON().dump(), "{}");
}
TEST(Config, toJSONOnNonEmptyConfig) {
- std::stringstream out;
- { // Scoped to force the destructor of JSONObject to write the final `}`
- JSONObject obj(out);
-
- Config config;
- std::map<std::string, Config::SettingInfo> settings;
- Setting<std::string> setting{&config, "", "name-of-the-setting", "description"};
- setting.assign("value");
+ Config config;
+ std::map<std::string, Config::SettingInfo> settings;
+ Setting<std::string> setting{&config, "", "name-of-the-setting", "description"};
+ setting.assign("value");
- config.toJSON(obj);
- }
- ASSERT_EQ(out.str(), R"#({"name-of-the-setting":{"description":"description","value":"value"}})#");
+ ASSERT_EQ(config.toJSON().dump(), R"#({"name-of-the-setting":{"aliases":[],"description":"description\n","value":"value"}})#");
}
TEST(Config, setSettingAlias) {
diff --git a/src/libutil/tests/logging.cc b/src/libutil/tests/logging.cc
index ad588055f..7e53f17c6 100644
--- a/src/libutil/tests/logging.cc
+++ b/src/libutil/tests/logging.cc
@@ -34,6 +34,24 @@ namespace nix {
}
}
+ TEST(logEI, jsonOutput) {
+ SymbolTable testTable;
+ auto problem_file = testTable.create("random.nix");
+ testing::internal::CaptureStderr();
+
+ makeJSONLogger(*logger)->logEI({
+ .name = "error name",
+ .description = "error without any code lines.",
+ .hint = hintfmt("this hint has %1% templated %2%!!",
+ "yellow",
+ "values"),
+ .errPos = Pos(foFile, problem_file, 02, 13)
+ });
+
+ auto str = testing::internal::GetCapturedStderr();
+ ASSERT_STREQ(str.c_str(), "\x1B[31;1merror:\x1B[0m\x1B[34;1m --- SysError --- error-unit-test\x1B[0m\nopening file '\x1B[33;1mrandom.nix\x1B[0m': \x1B[33;1mNo such file or directory\x1B[0m\n@nix {\"action\":\"msg\",\"column\":13,\"file\":\"random.nix\",\"level\":0,\"line\":2,\"msg\":\"\\u001b[31;1merror:\\u001b[0m\\u001b[34;1m --- error name --- error-unit-test\\u001b[0m\\n\\u001b[34;1mat: \\u001b[33;1m(2:13)\\u001b[34;1m in file: \\u001b[0mrandom.nix\\n\\nerror without any code lines.\\n\\nthis hint has \\u001b[33;1myellow\\u001b[0m templated \\u001b[33;1mvalues\\u001b[0m!!\",\"raw_msg\":\"this hint has \\u001b[33;1myellow\\u001b[0m templated \\u001b[33;1mvalues\\u001b[0m!!\"}\n");
+ }
+
TEST(logEI, appendingHintsToPreviousError) {
MakeError(TestError, Error);
diff --git a/src/libutil/util.cc b/src/libutil/util.cc
index 44b5361dd..16cfa654f 100644
--- a/src/libutil/util.cc
+++ b/src/libutil/util.cc
@@ -1464,6 +1464,47 @@ string base64Decode(std::string_view s)
}
+std::string stripIndentation(std::string_view s)
+{
+ size_t minIndent = 10000;
+ size_t curIndent = 0;
+ bool atStartOfLine = true;
+
+ for (auto & c : s) {
+ if (atStartOfLine && c == ' ')
+ curIndent++;
+ else if (c == '\n') {
+ if (atStartOfLine)
+ minIndent = std::max(minIndent, curIndent);
+ curIndent = 0;
+ atStartOfLine = true;
+ } else {
+ if (atStartOfLine) {
+ minIndent = std::min(minIndent, curIndent);
+ atStartOfLine = false;
+ }
+ }
+ }
+
+ std::string res;
+
+ size_t pos = 0;
+ while (pos < s.size()) {
+ auto eol = s.find('\n', pos);
+ if (eol == s.npos) eol = s.size();
+ if (eol - pos > minIndent)
+ res.append(s.substr(pos + minIndent, eol - pos - minIndent));
+ res.push_back('\n');
+ pos = eol + 1;
+ }
+
+ return res;
+}
+
+
+//////////////////////////////////////////////////////////////////////
+
+
static Sync<std::pair<unsigned short, unsigned short>> windowSize{{0, 0}};
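
A hedged example of stripIndentation() at work; it is what lets the multi-line R"( ... )" setting descriptions introduced above be written indented in the source.

    #include <cassert>
    #include "util.hh"

    using namespace nix;

    int main()
    {
        // Two lines indented by 4 and 6 spaces; the common 4 are stripped.
        auto s = stripIndentation("\n    first line\n      second line\n");
        assert(s == "\nfirst line\n  second line\n");
        return 0;
    }
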
diff --git a/src/libutil/util.hh b/src/libutil/util.hh
index 3a20679a8..082e26375 100644
--- a/src/libutil/util.hh
+++ b/src/libutil/util.hh
@@ -464,6 +464,12 @@ string base64Encode(std::string_view s);
string base64Decode(std::string_view s);
+/* Remove common leading whitespace from the lines in the string
+ 's'. For example, if every line is indented by at least 3 spaces,
+ then we remove 3 spaces from the start of every line. */
+std::string stripIndentation(std::string_view s);
+
+
/* Get a value for the specified key from an associate container. */
template <class T>
std::optional<typename T::mapped_type> get(const T & map, const typename T::key_type & key)
diff --git a/src/nix-build/nix-build.cc b/src/nix-build/nix-build.cc
index 94412042f..471bcc10d 100755
--- a/src/nix-build/nix-build.cc
+++ b/src/nix-build/nix-build.cc
@@ -525,7 +525,7 @@ static void _main(int argc, char * * argv)
for (auto & symlink : resultSymlinks)
if (auto store2 = store.dynamic_pointer_cast<LocalFSStore>())
- store2->addPermRoot(store->parseStorePath(symlink.second), absPath(symlink.first), true);
+ store2->addPermRoot(store->parseStorePath(symlink.second), absPath(symlink.first));
logger->stop();
diff --git a/src/nix-daemon/nix-daemon.cc b/src/nix-daemon/nix-daemon.cc
index bcb86cbce..cfa634a44 100644
--- a/src/nix-daemon/nix-daemon.cc
+++ b/src/nix-daemon/nix-daemon.cc
@@ -239,7 +239,15 @@ static void daemonLoop(char * * argv)
// Handle the connection.
FdSource from(remote.get());
FdSink to(remote.get());
- processConnection(openUncachedStore(), from, to, trusted, NotRecursive, user, peer.uid);
+ processConnection(openUncachedStore(), from, to, trusted, NotRecursive, [&](Store & store) {
+#if 0
+ /* Prevent users from doing something very dangerous. */
+ if (geteuid() == 0 &&
+ querySetting("build-users-group", "") == "")
+ throw Error("if you run 'nix-daemon' as root, then you MUST set 'build-users-group'!");
+#endif
+ store.createUser(user, peer.uid);
+ });
exit(0);
}, options);
@@ -324,7 +332,10 @@ static int _main(int argc, char * * argv)
} else {
FdSource from(STDIN_FILENO);
FdSink to(STDOUT_FILENO);
- processConnection(openUncachedStore(), from, to, Trusted, NotRecursive, "root", 0);
+ /* Auth hook is empty because in this mode we blindly trust the
+ standard streams. Limiting access to those is explicitly
+ not `nix-daemon`'s responsibility. */
+ processConnection(openUncachedStore(), from, to, Trusted, NotRecursive, [&](Store & _){});
}
} else {
daemonLoop(argv);
diff --git a/src/nix-env/nix-env.cc b/src/nix-env/nix-env.cc
index ddd036070..e5a433ac0 100644
--- a/src/nix-env/nix-env.cc
+++ b/src/nix-env/nix-env.cc
@@ -708,7 +708,9 @@ static void opSet(Globals & globals, Strings opFlags, Strings opArgs)
}
debug(format("switching to new user environment"));
- Path generation = createGeneration(ref<LocalFSStore>(store2), globals.profile, drv.queryOutPath());
+ Path generation = createGeneration(
+ ref<LocalFSStore>(store2), globals.profile,
+ store2->parseStorePath(drv.queryOutPath()));
switchLink(globals.profile, generation);
}
diff --git a/src/nix-env/user-env.cc b/src/nix-env/user-env.cc
index 8e7f09e12..8c6c8af05 100644
--- a/src/nix-env/user-env.cc
+++ b/src/nix-env/user-env.cc
@@ -151,7 +151,8 @@ bool createUserEnv(EvalState & state, DrvInfos & elems,
}
debug(format("switching to new user environment"));
- Path generation = createGeneration(ref<LocalFSStore>(store2), profile, topLevelOut);
+ Path generation = createGeneration(ref<LocalFSStore>(store2), profile,
+ store2->parseStorePath(topLevelOut));
switchLink(profile, generation);
}
diff --git a/src/nix-instantiate/nix-instantiate.cc b/src/nix-instantiate/nix-instantiate.cc
index bf353677a..539092cbe 100644
--- a/src/nix-instantiate/nix-instantiate.cc
+++ b/src/nix-instantiate/nix-instantiate.cc
@@ -20,7 +20,6 @@ using namespace nix;
static Path gcRoot;
static int rootNr = 0;
-static bool indirectRoot = false;
enum OutputKind { okPlain, okXML, okJSON };
@@ -71,11 +70,11 @@ void processExpr(EvalState & state, const Strings & attrPaths,
if (gcRoot == "")
printGCWarning();
else {
- Path rootName = indirectRoot ? absPath(gcRoot) : gcRoot;
+ Path rootName = absPath(gcRoot);
if (++rootNr > 1) rootName += "-" + std::to_string(rootNr);
auto store2 = state.store.dynamic_pointer_cast<LocalFSStore>();
if (store2)
- drvPath = store2->addPermRoot(store2->parseStorePath(drvPath), rootName, indirectRoot);
+ drvPath = store2->addPermRoot(store2->parseStorePath(drvPath), rootName);
}
std::cout << fmt("%s%s\n", drvPath, (outputName != "out" ? "!" + outputName : ""));
}
@@ -127,7 +126,7 @@ static int _main(int argc, char * * argv)
else if (*arg == "--add-root")
gcRoot = getArg(*arg, arg, end);
else if (*arg == "--indirect")
- indirectRoot = true;
+ ;
else if (*arg == "--xml")
outputKind = okXML;
else if (*arg == "--json")
diff --git a/src/nix-store/nix-store.cc b/src/nix-store/nix-store.cc
index 2996e36c4..3f2594712 100644
--- a/src/nix-store/nix-store.cc
+++ b/src/nix-store/nix-store.cc
@@ -34,7 +34,6 @@ typedef void (* Operation) (Strings opFlags, Strings opArgs);
static Path gcRoot;
static int rootNr = 0;
-static bool indirectRoot = false;
static bool noOutput = false;
static std::shared_ptr<Store> store;
@@ -85,7 +84,7 @@ static PathSet realisePath(StorePathWithOutputs path, bool build = true)
Path rootName = gcRoot;
if (rootNr > 1) rootName += "-" + std::to_string(rootNr);
if (i->first != "out") rootName += "-" + i->first;
- outPath = store2->addPermRoot(store->parseStorePath(outPath), rootName, indirectRoot);
+ outPath = store2->addPermRoot(store->parseStorePath(outPath), rootName);
}
}
outputs.insert(outPath);
@@ -104,7 +103,7 @@ static PathSet realisePath(StorePathWithOutputs path, bool build = true)
Path rootName = gcRoot;
rootNr++;
if (rootNr > 1) rootName += "-" + std::to_string(rootNr);
- return {store2->addPermRoot(path.path, rootName, indirectRoot)};
+ return {store2->addPermRoot(path.path, rootName)};
}
}
return {store->printStorePath(path.path)};
@@ -218,8 +217,8 @@ static StorePathSet maybeUseOutputs(const StorePath & storePath, bool useOutput,
if (useOutput && storePath.isDerivation()) {
auto drv = store->derivationFromPath(storePath);
StorePathSet outputs;
- for (auto & i : drv.outputs)
- outputs.insert(i.second.path(*store, drv.name));
+ for (auto & i : drv.outputsAndPaths(*store))
+ outputs.insert(i.second.second);
return outputs;
}
else return {storePath};
@@ -312,8 +311,8 @@ static void opQuery(Strings opFlags, Strings opArgs)
auto i2 = store->followLinksToStorePath(i);
if (forceRealise) realisePath({i2});
Derivation drv = store->derivationFromPath(i2);
- for (auto & j : drv.outputs)
- cout << fmt("%1%\n", store->printStorePath(j.second.path(*store, drv.name)));
+ for (auto & j : drv.outputsAndPaths(*store))
+ cout << fmt("%1%\n", store->printStorePath(j.second.second));
}
break;
}
@@ -372,8 +371,8 @@ static void opQuery(Strings opFlags, Strings opArgs)
for (auto & j : maybeUseOutputs(store->followLinksToStorePath(i), useOutput, forceRealise)) {
auto info = store->queryPathInfo(j);
if (query == qHash) {
- assert(info->narHash && info->narHash->type == htSHA256);
- cout << fmt("%s\n", info->narHash->to_string(Base32, true));
+ assert(info->narHash.type == htSHA256);
+ cout << fmt("%s\n", info->narHash.to_string(Base32, true));
} else if (query == qSize)
cout << fmt("%d\n", info->narSize);
}
@@ -495,7 +494,10 @@ static void registerValidity(bool reregister, bool hashGiven, bool canonicalise)
ValidPathInfos infos;
while (1) {
- auto info = decodeValidPathInfo(*store, cin, hashGiven);
+ // We use a dummy value because we'll set it below. FIXME: be correct by
+ // construction and avoid the dummy value.
+ auto hashResultOpt = !hashGiven ? std::optional<HashResult> { {Hash::dummy, -1} } : std::nullopt;
+ auto info = decodeValidPathInfo(*store, cin, hashResultOpt);
if (!info) break;
if (!store->isValidPath(info->path) || reregister) {
/* !!! races */
@@ -723,7 +725,7 @@ static void opVerifyPath(Strings opFlags, Strings opArgs)
auto path = store->followLinksToStorePath(i);
printMsg(lvlTalkative, "checking path '%s'...", store->printStorePath(path));
auto info = store->queryPathInfo(path);
- HashSink sink(info->narHash->type);
+ HashSink sink(info->narHash.type);
store->narFromPath(path, sink);
auto current = sink.finish();
if (current.first != info->narHash) {
@@ -732,7 +734,7 @@ static void opVerifyPath(Strings opFlags, Strings opArgs)
.hint = hintfmt(
"path '%s' was modified! expected hash '%s', got '%s'",
store->printStorePath(path),
- info->narHash->to_string(Base32, true),
+ info->narHash.to_string(Base32, true),
current.first.to_string(Base32, true))
});
status = 1;
@@ -862,7 +864,7 @@ static void opServe(Strings opFlags, Strings opArgs)
out << info->narSize // downloadSize
<< info->narSize;
if (GET_PROTOCOL_MINOR(clientVersion) >= 4)
- out << (info->narHash ? info->narHash->to_string(Base32, true) : "")
+ out << info->narHash.to_string(Base32, true)
<< renderContentAddress(info->ca)
<< info->sigs;
} catch (InvalidPath &) {
@@ -944,11 +946,13 @@ static void opServe(Strings opFlags, Strings opArgs)
if (!writeAllowed) throw Error("importing paths is not allowed");
auto path = readString(in);
- ValidPathInfo info(store->parseStorePath(path));
auto deriver = readString(in);
+ ValidPathInfo info {
+ store->parseStorePath(path),
+ Hash::parseAny(readString(in), htSHA256),
+ };
if (deriver != "")
info.deriver = store->parseStorePath(deriver);
- info.narHash = Hash::parseAny(readString(in), htSHA256);
info.references = readStorePaths<StorePathSet>(*store, in);
in >> info.registrationTime >> info.narSize >> info.ultimate;
info.sigs = readStrings<StringSet>(in);
@@ -1080,7 +1084,7 @@ static int _main(int argc, char * * argv)
else if (*arg == "--add-root")
gcRoot = absPath(getArg(*arg, arg, end));
else if (*arg == "--indirect")
- indirectRoot = true;
+ ;
else if (*arg == "--no-output")
noOutput = true;
else if (*arg != "" && arg->at(0) == '-') {
diff --git a/src/nix/add-to-store.cc b/src/nix/add-to-store.cc
index eaec63349..df55d1bc4 100644
--- a/src/nix/add-to-store.cc
+++ b/src/nix/add-to-store.cc
@@ -36,6 +36,14 @@ struct CmdAddToStore : MixDryRun, StoreCommand
return "add a path to the Nix store";
}
+ std::string doc() override
+ {
+ return R"(
+ Copy the file or directory *path* to the Nix store, and
+ print the resulting store path on standard output.
+ )";
+ }
+
Examples examples() override
{
return {
@@ -60,8 +68,10 @@ struct CmdAddToStore : MixDryRun, StoreCommand
hash = hsink.finish().first;
}
- ValidPathInfo info(store->makeFixedOutputPath(ingestionMethod, hash, *namePart));
- info.narHash = narHash;
+ ValidPathInfo info {
+ store->makeFixedOutputPath(ingestionMethod, hash, *namePart),
+ narHash,
+ };
info.narSize = sink.s->size();
info.ca = std::optional { FixedOutputHash {
.method = ingestionMethod,
diff --git a/src/nix/app.cc b/src/nix/app.cc
index 3935297cf..80acbf658 100644
--- a/src/nix/app.cc
+++ b/src/nix/app.cc
@@ -8,7 +8,7 @@ namespace nix {
App Installable::toApp(EvalState & state)
{
- auto [cursor, attrPath] = getCursor(state, true);
+ auto [cursor, attrPath] = getCursor(state);
auto type = cursor->getAttr("type")->getString();
diff --git a/src/nix/build.cc b/src/nix/build.cc
index 13d14a7fb..75a42ac55 100644
--- a/src/nix/build.cc
+++ b/src/nix/build.cc
@@ -71,14 +71,14 @@ struct CmdBuild : InstallablesCommand, MixDryRun, MixProfile
[&](BuildableOpaque bo) {
std::string symlink = outLink;
if (i) symlink += fmt("-%d", i);
- store2->addPermRoot(bo.path, absPath(symlink), true);
+ store2->addPermRoot(bo.path, absPath(symlink));
},
[&](BuildableFromDrv bfd) {
for (auto & output : bfd.outputs) {
std::string symlink = outLink;
if (i) symlink += fmt("-%d", i);
if (output.first != "out") symlink += fmt("-%s", output.first);
- store2->addPermRoot(output.second, absPath(symlink), true);
+ store2->addPermRoot(output.second, absPath(symlink));
}
},
}, buildables[i]);
diff --git a/src/nix/bundle.cc b/src/nix/bundle.cc
index eb3339f5d..241c8699b 100644
--- a/src/nix/bundle.cc
+++ b/src/nix/bundle.cc
@@ -122,7 +122,7 @@ struct CmdBundle : InstallableCommand
if (!outLink)
outLink = baseNameOf(app.program);
- store.dynamic_pointer_cast<LocalFSStore>()->addPermRoot(outPath, absPath(*outLink), true);
+ store.dynamic_pointer_cast<LocalFSStore>()->addPermRoot(outPath, absPath(*outLink));
}
};
diff --git a/src/nix/command.cc b/src/nix/command.cc
index da32819da..efac230bd 100644
--- a/src/nix/command.cc
+++ b/src/nix/command.cc
@@ -4,12 +4,25 @@
#include "nixexpr.hh"
#include "profiles.hh"
+#include <nlohmann/json.hpp>
+
extern char * * environ __attribute__((weak));
namespace nix {
Commands * RegisterCommand::commands = nullptr;
+void NixMultiCommand::printHelp(const string & programName, std::ostream & out)
+{
+ MultiCommand::printHelp(programName, out);
+}
+
+nlohmann::json NixMultiCommand::toJSON()
+{
+ // FIXME: use Command::toJSON() as well.
+ return MultiCommand::toJSON();
+}
+
StoreCommand::StoreCommand()
{
}
@@ -121,7 +134,7 @@ void MixProfile::updateProfile(const StorePath & storePath)
switchLink(profile2,
createGeneration(
ref<LocalFSStore>(store),
- profile2, store->printStorePath(storePath)));
+ profile2, storePath));
}
void MixProfile::updateProfile(const Buildables & buildables)
diff --git a/src/nix/command.hh b/src/nix/command.hh
index bc46a2028..d60c8aeb6 100644
--- a/src/nix/command.hh
+++ b/src/nix/command.hh
@@ -21,6 +21,13 @@ static constexpr Command::Category catSecondary = 100;
static constexpr Command::Category catUtility = 101;
static constexpr Command::Category catNixInstallation = 102;
+struct NixMultiCommand : virtual MultiCommand, virtual Command
+{
+ void printHelp(const string & programName, std::ostream & out) override;
+
+ nlohmann::json toJSON() override;
+};
+
/* A command that requires a Nix store. */
struct StoreCommand : virtual Command
{
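
A hypothetical sketch of a command group built on the new NixMultiCommand base; the real conversions in this change are CmdFlake, CmdProfile and CmdRegistry, which drop their hand-written printHelp overrides in favour of the shared one.

    #include "command.hh"

    using namespace nix;

    struct CmdThing : NixMultiCommand   // hypothetical example, not in the tree
    {
        CmdThing() : MultiCommand({ /* {"sub", []() { return make_ref<CmdThingSub>(); }} */ }) { }

        std::string description() override { return "manage things"; }

        void run() override
        {
            if (!command)
                throw UsageError("'nix thing' requires a sub-command.");
            command->second->prepare();
            command->second->run();
        }
    };
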
diff --git a/src/nix/develop.cc b/src/nix/develop.cc
index 12658078a..516e7bda9 100644
--- a/src/nix/develop.cc
+++ b/src/nix/develop.cc
@@ -15,7 +15,7 @@ struct Var
{
bool exported = true;
bool associative = false;
- std::string value; // quoted string or array
+ std::string quoted; // quoted string or array
};
struct BuildEnvironment
@@ -75,12 +75,12 @@ BuildEnvironment readEnvironment(const Path & path)
else if (std::regex_search(pos, file.cend(), match, varRegex, std::regex_constants::match_continuous)) {
pos = match[0].second;
- res.env.insert({match[1], Var { .exported = exported.count(match[1]) > 0, .value = match[2] }});
+ res.env.insert({match[1], Var { .exported = exported.count(match[1]) > 0, .quoted = match[2] }});
}
else if (std::regex_search(pos, file.cend(), match, assocArrayRegex, std::regex_constants::match_continuous)) {
pos = match[0].second;
- res.env.insert({match[1], Var { .associative = true, .value = match[2] }});
+ res.env.insert({match[1], Var { .associative = true, .quoted = match[2] }});
}
else if (std::regex_search(pos, file.cend(), match, functionRegex, std::regex_constants::match_continuous)) {
@@ -92,6 +92,8 @@ BuildEnvironment readEnvironment(const Path & path)
path, file.substr(pos - file.cbegin(), 60));
}
+ res.env.erase("__output");
+
return res;
}
@@ -124,31 +126,33 @@ StorePath getDerivationEnvironment(ref<Store> store, const StorePath & drvPath)
/* Rehash and write the derivation. FIXME: would be nice to use
'buildDerivation', but that's privileged. */
- auto drvName = std::string(drvPath.name());
- assert(hasSuffix(drvName, ".drv"));
- drvName.resize(drvName.size() - 4);
- drvName += "-env";
- for (auto & output : drv.outputs)
- drv.env.erase(output.first);
- drv.outputs = {{"out", DerivationOutput { .output = DerivationOutputInputAddressed { .path = StorePath::dummy }}}};
- drv.env["out"] = "";
- drv.env["_outputs_saved"] = drv.env["outputs"];
- drv.env["outputs"] = "out";
+ drv.name += "-env";
+ for (auto & output : drv.outputs) {
+ output.second = { .output = DerivationOutputInputAddressed { .path = StorePath::dummy } };
+ drv.env[output.first] = "";
+ }
drv.inputSrcs.insert(std::move(getEnvShPath));
Hash h = std::get<0>(hashDerivationModulo(*store, drv, true));
- auto shellOutPath = store->makeOutputPath("out", h, drvName);
- drv.outputs.insert_or_assign("out", DerivationOutput { .output = DerivationOutputInputAddressed {
- .path = shellOutPath
- } });
- drv.env["out"] = store->printStorePath(shellOutPath);
- auto shellDrvPath2 = writeDerivation(store, drv, drvName);
+
+ for (auto & output : drv.outputs) {
+ auto outPath = store->makeOutputPath(output.first, h, drv.name);
+ output.second = { .output = DerivationOutputInputAddressed { .path = outPath } };
+ drv.env[output.first] = store->printStorePath(outPath);
+ }
+
+ auto shellDrvPath = writeDerivation(*store, drv);
/* Build the derivation. */
- store->buildPaths({{shellDrvPath2}});
+ store->buildPaths({{shellDrvPath}});
- assert(store->isValidPath(shellOutPath));
+ for (auto & outPath : drv.outputPaths(*store)) {
+ assert(store->isValidPath(outPath));
+ auto outPathS = store->toRealPath(outPath);
+ if (lstat(outPathS).st_size)
+ return outPath;
+ }
- return shellOutPath;
+ throw Error("get-env.sh failed to produce an environment");
}
struct Common : InstallableCommand, MixProfile
@@ -174,8 +178,12 @@ struct Common : InstallableCommand, MixProfile
"UID",
};
- void makeRcScript(const BuildEnvironment & buildEnvironment, std::ostream & out)
+ std::string makeRcScript(
+ const BuildEnvironment & buildEnvironment,
+ const Path & outputsDir = absPath(".") + "/outputs")
{
+ std::ostringstream out;
+
out << "unset shellHook\n";
out << "nix_saved_PATH=\"$PATH\"\n";
@@ -183,9 +191,9 @@ struct Common : InstallableCommand, MixProfile
for (auto & i : buildEnvironment.env) {
if (!ignoreVars.count(i.first) && !hasPrefix(i.first, "BASH_")) {
if (i.second.associative)
- out << fmt("declare -A %s=(%s)\n", i.first, i.second.value);
+ out << fmt("declare -A %s=(%s)\n", i.first, i.second.quoted);
else {
- out << fmt("%s=%s\n", i.first, i.second.value);
+ out << fmt("%s=%s\n", i.first, i.second.quoted);
if (i.second.exported)
out << fmt("export %s\n", i.first);
}
@@ -196,13 +204,26 @@ struct Common : InstallableCommand, MixProfile
out << buildEnvironment.bashFunctions << "\n";
- // FIXME: set outputs
-
out << "export NIX_BUILD_TOP=\"$(mktemp -d --tmpdir nix-shell.XXXXXX)\"\n";
for (auto & i : {"TMP", "TMPDIR", "TEMP", "TEMPDIR"})
out << fmt("export %s=\"$NIX_BUILD_TOP\"\n", i);
out << "eval \"$shellHook\"\n";
+
+ /* Substitute occurrences of output paths. */
+ auto outputs = buildEnvironment.env.find("outputs");
+ assert(outputs != buildEnvironment.env.end());
+
+ // FIXME: properly unquote 'outputs'.
+ StringMap rewrites;
+ for (auto & outputName : tokenizeString<std::vector<std::string>>(replaceStrings(outputs->second.quoted, "'", ""))) {
+ auto from = buildEnvironment.env.find(outputName);
+ assert(from != buildEnvironment.env.end());
+ // FIXME: unquote
+ rewrites.insert({from->second.quoted, outputsDir + "/" + outputName});
+ }
+
+ return rewriteStrings(out.str(), rewrites);
}
Strings getDefaultFlakeAttrPaths() override
@@ -243,19 +264,57 @@ struct Common : InstallableCommand, MixProfile
struct CmdDevelop : Common, MixEnvironment
{
std::vector<std::string> command;
+ std::optional<std::string> phase;
CmdDevelop()
{
addFlag({
.longName = "command",
.shortName = 'c',
- .description = "command and arguments to be executed insted of an interactive shell",
+ .description = "command and arguments to be executed instead of an interactive shell",
.labels = {"command", "args"},
.handler = {[&](std::vector<std::string> ss) {
if (ss.empty()) throw UsageError("--command requires at least one argument");
command = ss;
}}
});
+
+ addFlag({
+ .longName = "phase",
+ .description = "phase to run (e.g. `build` or `configure`)",
+ .labels = {"phase-name"},
+ .handler = {&phase},
+ });
+
+ addFlag({
+ .longName = "configure",
+ .description = "run the configure phase",
+ .handler = {&phase, {"configure"}},
+ });
+
+ addFlag({
+ .longName = "build",
+ .description = "run the build phase",
+ .handler = {&phase, {"build"}},
+ });
+
+ addFlag({
+ .longName = "check",
+ .description = "run the check phase",
+ .handler = {&phase, {"check"}},
+ });
+
+ addFlag({
+ .longName = "install",
+ .description = "run the install phase",
+ .handler = {&phase, {"install"}},
+ });
+
+ addFlag({
+ .longName = "installcheck",
+ .description = "run the installcheck phase",
+ .handler = {&phase, {"installCheck"}},
+ });
}
std::string description() override
@@ -291,19 +350,31 @@ struct CmdDevelop : Common, MixEnvironment
auto [rcFileFd, rcFilePath] = createTempFile("nix-shell");
- std::ostringstream ss;
- makeRcScript(buildEnvironment, ss);
+ auto script = makeRcScript(buildEnvironment);
+
+ if (verbosity >= lvlDebug)
+ script += "set -x\n";
- ss << fmt("rm -f '%s'\n", rcFilePath);
+ script += fmt("rm -f '%s'\n", rcFilePath);
+
+ if (phase) {
+ if (!command.empty())
+ throw UsageError("you cannot use both '--command' and '--phase'");
+ // FIXME: foundMakefile is set by buildPhase, need to get
+ // rid of that.
+ script += fmt("foundMakefile=1\n");
+ script += fmt("runHook %1%Phase\n", *phase);
+ script += fmt("exit 0\n", *phase);
+ }
- if (!command.empty()) {
+ else if (!command.empty()) {
std::vector<std::string> args;
for (auto s : command)
args.push_back(shellEscape(s));
- ss << fmt("exec %s\n", concatStringsSep(" ", args));
+ script += fmt("exec %s\n", concatStringsSep(" ", args));
}
- writeFull(rcFileFd.get(), ss.str());
+ writeFull(rcFileFd.get(), script);
stopProgressBar();
@@ -365,7 +436,7 @@ struct CmdPrintDevEnv : Common
stopProgressBar();
- makeRcScript(buildEnvironment, std::cout);
+ std::cout << makeRcScript(buildEnvironment);
}
};
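
A hedged sketch of the rewriting step makeRcScript() now performs: occurrences of the quoted output store paths in the captured environment are redirected to ./outputs/<name>. The store path and target directory below are invented; rewriteStrings() is the libutil helper the diff calls above.

    #include <iostream>
    #include "util.hh"

    using namespace nix;

    int main()
    {
        StringMap rewrites;
        rewrites.insert({"'/nix/store/aaaa-example'", "/home/user/src/outputs/out"});
        std::cout << rewriteStrings("out='/nix/store/aaaa-example'\n", rewrites);
        // prints: out=/home/user/src/outputs/out
    }
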
diff --git a/src/nix/flake.cc b/src/nix/flake.cc
index a90668307..64fc896d9 100644
--- a/src/nix/flake.cc
+++ b/src/nix/flake.cc
@@ -572,7 +572,7 @@ struct CmdFlakeInitCommon : virtual Args, EvalCommand
Strings{templateName == "" ? "defaultTemplate" : templateName},
Strings(attrsPathPrefixes), lockFlags);
- auto [cursor, attrPath] = installable.getCursor(*evalState, true);
+ auto [cursor, attrPath] = installable.getCursor(*evalState);
auto templateDir = cursor->getAttr("path")->getString();
@@ -782,7 +782,6 @@ struct CmdFlakeArchive : FlakeCommand, MixJSON, MixDryRun
struct CmdFlakeShow : FlakeCommand
{
bool showLegacy = false;
- bool useEvalCache = true;
CmdFlakeShow()
{
@@ -791,12 +790,6 @@ struct CmdFlakeShow : FlakeCommand
.description = "show the contents of the 'legacyPackages' output",
.handler = {&showLegacy, true}
});
-
- addFlag({
- .longName = "no-eval-cache",
- .description = "do not use the flake evaluation cache",
- .handler = {[&]() { useEvalCache = false; }}
- });
}
std::string description() override
@@ -934,13 +927,13 @@ struct CmdFlakeShow : FlakeCommand
}
};
- auto cache = openEvalCache(*state, flake, useEvalCache);
+ auto cache = openEvalCache(*state, flake);
visit(*cache->getRoot(), {}, fmt(ANSI_BOLD "%s" ANSI_NORMAL, flake->flake.lockedRef), "");
}
};
-struct CmdFlake : virtual MultiCommand, virtual Command
+struct CmdFlake : NixMultiCommand
{
CmdFlake()
: MultiCommand({
@@ -970,11 +963,6 @@ struct CmdFlake : virtual MultiCommand, virtual Command
command->second->prepare();
command->second->run();
}
-
- void printHelp(const string & programName, std::ostream & out) override
- {
- MultiCommand::printHelp(programName, out);
- }
};
static auto r1 = registerCommand<CmdFlake>("flake");
diff --git a/src/nix/get-env.sh b/src/nix/get-env.sh
index 2e0e83561..091c0f573 100644
--- a/src/nix/get-env.sh
+++ b/src/nix/get-env.sh
@@ -1,12 +1,6 @@
set -e
if [ -e .attrs.sh ]; then source .attrs.sh; fi
-outputs=$_outputs_saved
-for __output in $_outputs_saved; do
- declare "$__output"="$out"
-done
-unset _outputs_saved __output
-
export IN_NIX_SHELL=impure
export dontAddDisableDepTrack=1
@@ -14,5 +8,12 @@ if [[ -n $stdenv ]]; then
source $stdenv/setup
fi
-export > $out
-set >> $out
+for __output in $outputs; do
+ if [[ -z $__done ]]; then
+ export > ${!__output}
+ set >> ${!__output}
+ __done=1
+ else
+ echo -n >> ${!__output}
+ fi
+done
diff --git a/src/nix/installables.cc b/src/nix/installables.cc
index 06e61380b..1f1ed680f 100644
--- a/src/nix/installables.cc
+++ b/src/nix/installables.cc
@@ -76,7 +76,7 @@ MixFlakeOptions::MixFlakeOptions()
addFlag({
.longName = "override-input",
- .description = "override a specific flake input (e.g. 'dwarffs/nixpkgs')",
+ .description = "override a specific flake input (e.g. `dwarffs/nixpkgs`)",
.labels = {"input-path", "flake-url"},
.handler = {[&](std::string inputPath, std::string flakeRef) {
lockFlags.inputOverrides.insert_or_assign(
@@ -116,7 +116,7 @@ SourceExprCommand::SourceExprCommand()
addFlag({
.longName = "file",
.shortName = 'f',
- .description = "evaluate FILE rather than the default",
+ .description = "evaluate *file* rather than the default",
.labels = {"file"},
.handler = {&file},
.completer = completePath
@@ -124,7 +124,7 @@ SourceExprCommand::SourceExprCommand()
addFlag({
.longName ="expr",
- .description = "evaluate attributes from EXPR",
+ .description = "evaluate attributes from *expr*",
.labels = {"expr"},
.handler = {&expr}
});
@@ -183,8 +183,7 @@ void completeFlakeRefWithFragment(
auto flakeRef = parseFlakeRef(flakeRefS, absPath("."));
auto evalCache = openEvalCache(*evalState,
- std::make_shared<flake::LockedFlake>(lockFlake(*evalState, flakeRef, lockFlags)),
- true);
+ std::make_shared<flake::LockedFlake>(lockFlake(*evalState, flakeRef, lockFlags)));
auto root = evalCache->getRoot();
@@ -273,7 +272,7 @@ Buildable Installable::toBuildable()
}
std::vector<std::pair<std::shared_ptr<eval_cache::AttrCursor>, std::string>>
-Installable::getCursors(EvalState & state, bool useEvalCache)
+Installable::getCursors(EvalState & state)
{
auto evalCache =
std::make_shared<nix::eval_cache::EvalCache>(std::nullopt, state,
@@ -282,9 +281,9 @@ Installable::getCursors(EvalState & state, bool useEvalCache)
}
std::pair<std::shared_ptr<eval_cache::AttrCursor>, std::string>
-Installable::getCursor(EvalState & state, bool useEvalCache)
+Installable::getCursor(EvalState & state)
{
- auto cursors = getCursors(state, useEvalCache);
+ auto cursors = getCursors(state);
if (cursors.empty())
throw Error("cannot find flake attribute '%s'", what());
return cursors[0];
@@ -305,8 +304,8 @@ struct InstallableStorePath : Installable
if (storePath.isDerivation()) {
std::map<std::string, StorePath> outputs;
auto drv = store->readDerivation(storePath);
- for (auto & [name, output] : drv.outputs)
- outputs.emplace(name, output.path(*store, drv.name));
+ for (auto & i : drv.outputsAndPaths(*store))
+ outputs.emplace(i.first, i.second.second);
return {
BuildableFromDrv {
.drvPath = storePath,
@@ -420,12 +419,11 @@ Value * InstallableFlake::getFlakeOutputs(EvalState & state, const flake::Locked
ref<eval_cache::EvalCache> openEvalCache(
EvalState & state,
- std::shared_ptr<flake::LockedFlake> lockedFlake,
- bool useEvalCache)
+ std::shared_ptr<flake::LockedFlake> lockedFlake)
{
auto fingerprint = lockedFlake->getFingerprint();
return make_ref<nix::eval_cache::EvalCache>(
- useEvalCache && evalSettings.pureEval
+ evalSettings.useEvalCache && evalSettings.pureEval
? std::optional { std::cref(fingerprint) }
: std::nullopt,
state,
@@ -460,10 +458,9 @@ static std::string showAttrPaths(const std::vector<std::string> & paths)
std::tuple<std::string, FlakeRef, InstallableValue::DerivationInfo> InstallableFlake::toDerivation()
{
-
auto lockedFlake = getLockedFlake();
- auto cache = openEvalCache(*state, lockedFlake, true);
+ auto cache = openEvalCache(*state, lockedFlake);
auto root = cache->getRoot();
for (auto & attrPath : getActualAttrPaths()) {
@@ -517,11 +514,10 @@ std::pair<Value *, Pos> InstallableFlake::toValue(EvalState & state)
}
std::vector<std::pair<std::shared_ptr<eval_cache::AttrCursor>, std::string>>
-InstallableFlake::getCursors(EvalState & state, bool useEvalCache)
+InstallableFlake::getCursors(EvalState & state)
{
auto evalCache = openEvalCache(state,
- std::make_shared<flake::LockedFlake>(lockFlake(state, flakeRef, lockFlags)),
- useEvalCache);
+ std::make_shared<flake::LockedFlake>(lockFlake(state, flakeRef, lockFlags)));
auto root = evalCache->getRoot();
diff --git a/src/nix/installables.hh b/src/nix/installables.hh
index 9edff3331..26e87ee3a 100644
--- a/src/nix/installables.hh
+++ b/src/nix/installables.hh
@@ -62,10 +62,10 @@ struct Installable
}
virtual std::vector<std::pair<std::shared_ptr<eval_cache::AttrCursor>, std::string>>
- getCursors(EvalState & state, bool useEvalCache);
+ getCursors(EvalState & state);
std::pair<std::shared_ptr<eval_cache::AttrCursor>, std::string>
- getCursor(EvalState & state, bool useEvalCache);
+ getCursor(EvalState & state);
virtual FlakeRef nixpkgsFlakeRef() const
{
@@ -118,7 +118,7 @@ struct InstallableFlake : InstallableValue
std::pair<Value *, Pos> toValue(EvalState & state) override;
std::vector<std::pair<std::shared_ptr<eval_cache::AttrCursor>, std::string>>
- getCursors(EvalState & state, bool useEvalCache) override;
+ getCursors(EvalState & state) override;
std::shared_ptr<flake::LockedFlake> getLockedFlake() const;
@@ -127,7 +127,6 @@ struct InstallableFlake : InstallableValue
ref<eval_cache::EvalCache> openEvalCache(
EvalState & state,
- std::shared_ptr<flake::LockedFlake> lockedFlake,
- bool useEvalCache);
+ std::shared_ptr<flake::LockedFlake> lockedFlake);
}
diff --git a/src/nix/local.mk b/src/nix/local.mk
index b057b7cc6..e96200685 100644
--- a/src/nix/local.mk
+++ b/src/nix/local.mk
@@ -19,7 +19,7 @@ nix_CXXFLAGS += -I src/libutil -I src/libstore -I src/libfetchers -I src/libexpr
nix_LIBS = libexpr libmain libfetchers libstore libutil
-nix_LDFLAGS = -pthread $(SODIUM_LIBS) $(EDITLINE_LIBS) $(BOOST_LDFLAGS) -lboost_context -lboost_thread -lboost_system
+nix_LDFLAGS = -pthread $(SODIUM_LIBS) $(EDITLINE_LIBS) $(BOOST_LDFLAGS) -lboost_context -lboost_thread -lboost_system -llowdown
$(foreach name, \
nix-build nix-channel nix-collect-garbage nix-copy-closure nix-daemon nix-env nix-hash nix-instantiate nix-prefetch-url nix-shell nix-store, \
diff --git a/src/nix/main.cc b/src/nix/main.cc
index e62657e95..e9479f564 100644
--- a/src/nix/main.cc
+++ b/src/nix/main.cc
@@ -17,6 +17,8 @@
#include <netdb.h>
#include <netinet/in.h>
+#include <nlohmann/json.hpp>
+
extern std::string chrootHelperName;
void chrootHelper(int argc, char * * argv);
@@ -140,6 +142,11 @@ struct NixArgs : virtual MultiCommand, virtual MixCommonArgs
printHelp(programName, std::cout);
throw Exit();
}
+
+ std::string description() override
+ {
+ return "a tool for reproducible and declarative configuration management";
+ }
};
void mainWrapped(int argc, char * * argv)
@@ -172,6 +179,29 @@ void mainWrapped(int argc, char * * argv)
NixArgs args;
+ if (argc == 2 && std::string(argv[1]) == "__dump-args") {
+ std::cout << args.toJSON().dump() << "\n";
+ return;
+ }
+
+ if (argc == 2 && std::string(argv[1]) == "__dump-builtins") {
+ EvalState state({}, openStore("dummy://"));
+ auto res = nlohmann::json::object();
+ auto builtins = state.baseEnv.values[0]->attrs;
+ for (auto & builtin : *builtins) {
+ auto b = nlohmann::json::object();
+ if (builtin.value->type != tPrimOp) continue;
+ auto primOp = builtin.value->primOp;
+ if (!primOp->doc) continue;
+ b["arity"] = primOp->arity;
+ b["args"] = primOp->args;
+ b["doc"] = trim(stripIndentation(primOp->doc));
+ res[(std::string) builtin.name] = std::move(b);
+ }
+ std::cout << res.dump() << "\n";
+ return;
+ }
+
Finally printCompletions([&]()
{
if (completions) {
diff --git a/src/nix/make-content-addressable.cc b/src/nix/make-content-addressable.cc
index 2fe2e2fb2..38b60fc38 100644
--- a/src/nix/make-content-addressable.cc
+++ b/src/nix/make-content-addressable.cc
@@ -77,14 +77,16 @@ struct CmdMakeContentAddressable : StorePathsCommand, MixJSON
auto narHash = hashModuloSink.finish().first;
- ValidPathInfo info(store->makeFixedOutputPath(FileIngestionMethod::Recursive, narHash, path.name(), references, hasSelfReference));
+ ValidPathInfo info {
+ store->makeFixedOutputPath(FileIngestionMethod::Recursive, narHash, path.name(), references, hasSelfReference),
+ narHash,
+ };
info.references = std::move(references);
if (hasSelfReference) info.references.insert(info.path);
- info.narHash = narHash;
info.narSize = sink.s->size();
info.ca = FixedOutputHash {
.method = FileIngestionMethod::Recursive,
- .hash = *info.narHash,
+ .hash = info.narHash,
};
if (!json)
diff --git a/src/nix/markdown.cc b/src/nix/markdown.cc
new file mode 100644
index 000000000..40788a42f
--- /dev/null
+++ b/src/nix/markdown.cc
@@ -0,0 +1,50 @@
+#include "markdown.hh"
+#include "util.hh"
+#include "finally.hh"
+
+#include <sys/queue.h>
+extern "C" {
+#include <lowdown.h>
+}
+
+namespace nix {
+
+std::string renderMarkdownToTerminal(std::string_view markdown)
+{
+ struct lowdown_opts opts {
+ .type = LOWDOWN_TERM,
+ .maxdepth = 20,
+ .cols = std::min(getWindowSize().second, (unsigned short) 80),
+ .hmargin = 0,
+ .vmargin = 0,
+ .feat = LOWDOWN_COMMONMARK | LOWDOWN_FENCED | LOWDOWN_DEFLIST | LOWDOWN_TABLES,
+ .oflags = 0,
+ };
+
+ auto doc = lowdown_doc_new(&opts);
+ if (!doc)
+ throw Error("cannot allocate Markdown document");
+ Finally freeDoc([&]() { lowdown_doc_free(doc); });
+
+ size_t maxn = 0;
+ auto node = lowdown_doc_parse(doc, &maxn, markdown.data(), markdown.size());
+ if (!node)
+ throw Error("cannot parse Markdown document");
+ Finally freeNode([&]() { lowdown_node_free(node); });
+
+ auto renderer = lowdown_term_new(&opts);
+ if (!renderer)
+ throw Error("cannot allocate Markdown renderer");
+ Finally freeRenderer([&]() { lowdown_term_free(renderer); });
+
+ auto buf = lowdown_buf_new(16384);
+ if (!buf)
+ throw Error("cannot allocate Markdown output buffer");
+ Finally freeBuffer([&]() { lowdown_buf_free(buf); });
+
+ lowdown_term_rndr(buf, nullptr, renderer, node);
+
+ return std::string(buf->data, buf->size);
+}
+
+}
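
A hedged usage sketch of the new renderMarkdownToTerminal() helper; the Markdown string is invented. In this change its actual caller is the repl's new :doc command.

    #include <iostream>
    #include "markdown.hh"

    int main()
    {
        std::string md = "**Synopsis:** `builtins.map` *f* *list*\n\nApply *f* to every element of *list*.\n";
        std::cout << nix::renderMarkdownToTerminal(md);
    }
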
diff --git a/src/nix/markdown.hh b/src/nix/markdown.hh
new file mode 100644
index 000000000..78320fcf5
--- /dev/null
+++ b/src/nix/markdown.hh
@@ -0,0 +1,7 @@
+#include "types.hh"
+
+namespace nix {
+
+std::string renderMarkdownToTerminal(std::string_view markdown);
+
+}
diff --git a/src/nix/profile.cc b/src/nix/profile.cc
index d42d16f7e..f97df4d9e 100644
--- a/src/nix/profile.cc
+++ b/src/nix/profile.cc
@@ -129,11 +129,13 @@ struct ProfileManifest
auto narHash = hashString(htSHA256, *sink.s);
- ValidPathInfo info(store->makeFixedOutputPath(FileIngestionMethod::Recursive, narHash, "profile", references));
+ ValidPathInfo info {
+ store->makeFixedOutputPath(FileIngestionMethod::Recursive, narHash, "profile", references),
+ narHash,
+ };
info.references = std::move(references);
- info.narHash = narHash;
info.narSize = sink.s->size();
- info.ca = FixedOutputHash { .method = FileIngestionMethod::Recursive, .hash = *info.narHash };
+ info.ca = FixedOutputHash { .method = FileIngestionMethod::Recursive, .hash = info.narHash };
auto source = StringSource { *sink.s };
store->addToStore(info, source);
@@ -435,7 +437,7 @@ struct CmdProfileDiffClosures : virtual StoreCommand, MixDefaultProfile
}
};
-struct CmdProfile : virtual MultiCommand, virtual Command
+struct CmdProfile : NixMultiCommand
{
CmdProfile()
: MultiCommand({
@@ -459,11 +461,6 @@ struct CmdProfile : virtual MultiCommand, virtual Command
command->second->prepare();
command->second->run();
}
-
- void printHelp(const string & programName, std::ostream & out) override
- {
- MultiCommand::printHelp(programName, out);
- }
};
static auto r1 = registerCommand<CmdProfile>("profile");
diff --git a/src/nix/registry.cc b/src/nix/registry.cc
index 54ee9152b..afa3503b8 100644
--- a/src/nix/registry.cc
+++ b/src/nix/registry.cc
@@ -115,7 +115,7 @@ struct CmdRegistryPin : virtual Args, EvalCommand
}
};
-struct CmdRegistry : virtual MultiCommand, virtual Command
+struct CmdRegistry : virtual NixMultiCommand
{
CmdRegistry()
: MultiCommand({
@@ -141,11 +141,6 @@ struct CmdRegistry : virtual MultiCommand, virtual Command
command->second->prepare();
command->second->run();
}
-
- void printHelp(const string & programName, std::ostream & out) override
- {
- MultiCommand::printHelp(programName, out);
- }
};
static auto r1 = registerCommand<CmdRegistry>("registry");
diff --git a/src/nix/repl.cc b/src/nix/repl.cc
index c3c9e54a8..50d9b2568 100644
--- a/src/nix/repl.cc
+++ b/src/nix/repl.cc
@@ -32,6 +32,7 @@ extern "C" {
#include "globals.hh"
#include "command.hh"
#include "finally.hh"
+#include "markdown.hh"
#if HAVE_BOEHMGC
#define GC_INCLUDE_NEW
@@ -64,7 +65,7 @@ struct NixRepl
void mainLoop(const std::vector<std::string> & files);
StringSet completePrefix(string prefix);
bool getLine(string & input, const std::string &prompt);
- Path getDerivationPath(Value & v);
+ StorePath getDerivationPath(Value & v);
bool processLine(string line);
void loadFile(const Path & path);
void initEnv();
@@ -375,13 +376,16 @@ bool isVarName(const string & s)
}
-Path NixRepl::getDerivationPath(Value & v) {
+StorePath NixRepl::getDerivationPath(Value & v) {
auto drvInfo = getDerivation(*state, v, false);
if (!drvInfo)
throw Error("expression does not evaluate to a derivation, so I can't build it");
- Path drvPath = drvInfo->queryDrvPath();
- if (drvPath == "" || !state->store->isValidPath(state->store->parseStorePath(drvPath)))
- throw Error("expression did not evaluate to a valid derivation");
+ Path drvPathRaw = drvInfo->queryDrvPath();
+ if (drvPathRaw == "")
+ throw Error("expression did not evaluate to a valid derivation (no drv path)");
+ StorePath drvPath = state->store->parseStorePath(drvPathRaw);
+ if (!state->store->isValidPath(drvPath))
+ throw Error("expression did not evaluate to a valid derivation (invalid drv path)");
return drvPath;
}
@@ -416,7 +420,8 @@ bool NixRepl::processLine(string line)
<< " :r Reload all files\n"
<< " :s <expr> Build dependencies of derivation, then start nix-shell\n"
<< " :t <expr> Describe result of evaluation\n"
- << " :u <expr> Build derivation, then start nix-shell\n";
+ << " :u <expr> Build derivation, then start nix-shell\n"
+ << " :doc <expr> Show documentation of a builtin function\n";
}
else if (command == ":a" || command == ":add") {
@@ -474,29 +479,30 @@ bool NixRepl::processLine(string line)
evalString("drv: (import <nixpkgs> {}).runCommand \"shell\" { buildInputs = [ drv ]; } \"\"", f);
state->callFunction(f, v, result, Pos());
- Path drvPath = getDerivationPath(result);
- runProgram(settings.nixBinDir + "/nix-shell", Strings{drvPath});
+ StorePath drvPath = getDerivationPath(result);
+ runProgram(settings.nixBinDir + "/nix-shell", Strings{state->store->printStorePath(drvPath)});
}
else if (command == ":b" || command == ":i" || command == ":s") {
Value v;
evalString(arg, v);
- Path drvPath = getDerivationPath(v);
+ StorePath drvPath = getDerivationPath(v);
+ Path drvPathRaw = state->store->printStorePath(drvPath);
if (command == ":b") {
/* We could do the build in this process using buildPaths(),
but doing it in a child makes it easier to recover from
problems / SIGINT. */
- if (runProgram(settings.nixBinDir + "/nix", Strings{"build", "--no-link", drvPath}) == 0) {
- auto drv = readDerivation(*state->store, drvPath, Derivation::nameFromPath(state->store->parseStorePath(drvPath)));
+ if (runProgram(settings.nixBinDir + "/nix", Strings{"build", "--no-link", drvPathRaw}) == 0) {
+ auto drv = state->store->readDerivation(drvPath);
std::cout << std::endl << "this derivation produced the following outputs:" << std::endl;
- for (auto & i : drv.outputs)
- std::cout << fmt(" %s -> %s\n", i.first, state->store->printStorePath(i.second.path(*state->store, drv.name)));
+ for (auto & i : drv.outputsAndPaths(*state->store))
+ std::cout << fmt(" %s -> %s\n", i.first, state->store->printStorePath(i.second.second));
}
} else if (command == ":i") {
- runProgram(settings.nixBinDir + "/nix-env", Strings{"-i", drvPath});
+ runProgram(settings.nixBinDir + "/nix-env", Strings{"-i", drvPathRaw});
} else {
- runProgram(settings.nixBinDir + "/nix-shell", Strings{drvPath});
+ runProgram(settings.nixBinDir + "/nix-shell", Strings{drvPathRaw});
}
}
@@ -509,6 +515,29 @@ bool NixRepl::processLine(string line)
else if (command == ":q" || command == ":quit")
return false;
+ else if (command == ":doc") {
+ Value v;
+ evalString(arg, v);
+ if (auto doc = state->getDoc(v)) {
+ std::string markdown;
+
+ if (!doc->args.empty() && doc->name) {
+ auto args = doc->args;
+ for (auto & arg : args)
+ arg = "*" + arg + "*";
+
+ markdown +=
+ "**Synopsis:** `builtins." + (std::string) (*doc->name) + "` "
+ + concatStringsSep(" ", args) + "\n\n";
+ }
+
+ markdown += trim(stripIndentation(doc->doc));
+
+ std::cout << renderMarkdownToTerminal(markdown);
+ } else
+ throw Error("value does not have documentation");
+ }
+
else if (command != "")
throw Error("unknown command '%1%'", command);
@@ -782,7 +811,7 @@ struct CmdRepl : StoreCommand, MixEvalArgs
return {
Example{
"Display all special commands within the REPL:",
- "nix repl\n nix-repl> :?"
+ "nix repl\nnix-repl> :?"
}
};
}
diff --git a/src/nix/search.cc b/src/nix/search.cc
index f052cc699..88815efdb 100644
--- a/src/nix/search.cc
+++ b/src/nix/search.cc
@@ -177,7 +177,7 @@ struct CmdSearch : InstallableCommand, MixJSON
}
};
- for (auto & [cursor, prefix] : installable->getCursors(*state, true))
+ for (auto & [cursor, prefix] : installable->getCursors(*state))
visit(*cursor, parseAttrPath(*state, prefix));
if (!json && !results)
diff --git a/src/nix/show-config.cc b/src/nix/show-config.cc
index a97dc42f9..01a49f107 100644
--- a/src/nix/show-config.cc
+++ b/src/nix/show-config.cc
@@ -2,7 +2,8 @@
#include "common-args.hh"
#include "shared.hh"
#include "store-api.hh"
-#include "json.hh"
+
+#include <nlohmann/json.hpp>
using namespace nix;
@@ -19,8 +20,7 @@ struct CmdShowConfig : Command, MixJSON
{
if (json) {
// FIXME: use appropriate JSON types (bool, ints, etc).
- JSONObject jsonObj(std::cout);
- globalConfig.toJSON(jsonObj);
+ logger->stdout_("%s", globalConfig.toJSON().dump());
} else {
std::map<std::string, Config::SettingInfo> settings;
globalConfig.getSettings(settings);
diff --git a/src/nix/show-derivation.cc b/src/nix/show-derivation.cc
index 1b51d114f..8c4bfb03e 100644
--- a/src/nix/show-derivation.cc
+++ b/src/nix/show-derivation.cc
@@ -67,9 +67,9 @@ struct CmdShowDerivation : InstallablesCommand
{
auto outputsObj(drvObj.object("outputs"));
- for (auto & output : drv.outputs) {
+ for (auto & output : drv.outputsAndPaths(*store)) {
auto outputObj(outputsObj.object(output.first));
- outputObj.attr("path", store->printStorePath(output.second.path(*store, drv.name)));
+ outputObj.attr("path", store->printStorePath(output.second.second));
std::visit(overloaded {
[&](DerivationOutputInputAddressed doi) {
@@ -81,7 +81,7 @@ struct CmdShowDerivation : InstallablesCommand
[&](DerivationOutputCAFloating dof) {
outputObj.attr("hashAlgo", makeFileIngestionPrefix(dof.method) + printHashType(dof.hashType));
},
- }, output.second.output);
+ }, output.second.first.output);
}
}
diff --git a/src/nix/verify.cc b/src/nix/verify.cc
index fc7a9765c..26f755fd9 100644
--- a/src/nix/verify.cc
+++ b/src/nix/verify.cc
@@ -91,15 +91,15 @@ struct CmdVerify : StorePathsCommand
std::unique_ptr<AbstractHashSink> hashSink;
if (!info->ca)
- hashSink = std::make_unique<HashSink>(info->narHash->type);
+ hashSink = std::make_unique<HashSink>(info->narHash.type);
else
- hashSink = std::make_unique<HashModuloSink>(info->narHash->type, std::string(info->path.hashPart()));
+ hashSink = std::make_unique<HashModuloSink>(info->narHash.type, std::string(info->path.hashPart()));
store->narFromPath(info->path, *hashSink);
auto hash = hashSink->finish();
- if (hash.first != *info->narHash) {
+ if (hash.first != info->narHash) {
corrupted++;
act2.result(resCorruptedPath, store->printStorePath(info->path));
logError({
@@ -107,7 +107,7 @@ struct CmdVerify : StorePathsCommand
.hint = hintfmt(
"path '%s' was modified! expected hash '%s', got '%s'",
store->printStorePath(info->path),
- info->narHash->to_string(Base32, true),
+ info->narHash.to_string(Base32, true),
hash.first.to_string(Base32, true))
});
}