author     John Ericson <John.Ericson@Obsidian.Systems>  2020-09-22 00:21:37 +0000
committer  John Ericson <John.Ericson@Obsidian.Systems>  2020-09-22 00:21:37 +0000
commit     6a0e0db349117f68647edf2e3acdb394f40c85b2 (patch)
tree       9fbbe373d66b9d1d073666afc2abd285e11faa40 /src
parent     c08514c589b637ecf8ff8ba7e8767c7d1977dae0 (diff)
parent     ecc8672aa007af045d77434b495ca09541e9fee3 (diff)
Merge remote-tracking branch 'upstream/master' into trustless-remote-builder-simple
Diffstat (limited to 'src')
-rw-r--r--  src/libexpr/common-eval-args.cc | 2
-rw-r--r--  src/libexpr/eval-cache.cc | 3
-rw-r--r--  src/libexpr/eval.cc | 98
-rw-r--r--  src/libexpr/eval.hh | 76
-rw-r--r--  src/libexpr/flake/flakeref.cc | 1
-rw-r--r--  src/libexpr/flake/lockfile.cc | 1
-rw-r--r--  src/libexpr/flake/lockfile.hh | 2
-rw-r--r--  src/libexpr/get-drvs.cc | 14
-rw-r--r--  src/libexpr/get-drvs.hh | 2
-rw-r--r--  src/libexpr/local.mk | 2
-rw-r--r--  src/libexpr/primops.cc | 1554
-rw-r--r--  src/libexpr/primops.hh | 8
-rw-r--r--  src/libexpr/primops/derivation.nix | 27
-rw-r--r--  src/libexpr/primops/fetchMercurial.cc | 3
-rw-r--r--  src/libexpr/primops/fetchTree.cc | 175
-rw-r--r--  src/libfetchers/fetchers.hh | 2
-rw-r--r--  src/libfetchers/git.cc | 1
-rw-r--r--  src/libfetchers/github.cc | 35
-rw-r--r--  src/libfetchers/indirect.cc | 1
-rw-r--r--  src/libfetchers/mercurial.cc | 1
-rw-r--r--  src/libfetchers/registry.cc | 2
-rw-r--r--  src/libmain/common-args.cc | 6
-rw-r--r--  src/libmain/shared.cc | 2
-rw-r--r--  src/libstore/binary-cache-store.cc | 4
-rw-r--r--  src/libstore/binary-cache-store.hh | 24
-rw-r--r--  src/libstore/build.cc | 1033
-rw-r--r--  src/libstore/daemon.cc | 6
-rw-r--r--  src/libstore/derivations.cc | 178
-rw-r--r--  src/libstore/derivations.hh | 46
-rw-r--r--  src/libstore/dummy-store.cc | 68
-rw-r--r--  src/libstore/filetransfer.cc | 1
-rw-r--r--  src/libstore/filetransfer.hh | 29
-rw-r--r--  src/libstore/gc.cc | 59
-rw-r--r--  src/libstore/globals.cc | 8
-rw-r--r--  src/libstore/globals.hh | 789
-rw-r--r--  src/libstore/http-binary-cache-store.cc | 51
-rw-r--r--  src/libstore/legacy-ssh-store.cc | 42
-rw-r--r--  src/libstore/local-binary-cache-store.cc | 36
-rw-r--r--  src/libstore/local-store.cc | 48
-rw-r--r--  src/libstore/local-store.hh | 27
-rw-r--r--  src/libstore/misc.cc | 25
-rw-r--r--  src/libstore/names.cc | 21
-rw-r--r--  src/libstore/names.hh | 7
-rw-r--r--  src/libstore/nar-accessor.cc | 3
-rw-r--r--  src/libstore/profiles.cc | 6
-rw-r--r--  src/libstore/profiles.hh | 4
-rw-r--r--  src/libstore/references.cc | 16
-rw-r--r--  src/libstore/references.hh | 2
-rw-r--r--  src/libstore/remote-store.cc | 142
-rw-r--r--  src/libstore/remote-store.hh | 60
-rw-r--r--  src/libstore/s3-binary-cache-store.cc | 45
-rw-r--r--  src/libstore/ssh-store.cc | 54
-rw-r--r--  src/libstore/store-api.cc | 147
-rw-r--r--  src/libstore/store-api.hh | 158
-rw-r--r--  src/libstore/worker-protocol.hh | 78
-rw-r--r--  src/libutil/abstract-setting-to-json.hh | 15
-rw-r--r--  src/libutil/args.cc | 75
-rw-r--r--  src/libutil/args.hh | 12
-rw-r--r--  src/libutil/callback.hh | 46
-rw-r--r--  src/libutil/config.cc | 49
-rw-r--r--  src/libutil/config.hh | 18
-rw-r--r--  src/libutil/fmt.hh | 1
-rw-r--r--  src/libutil/logging.cc | 27
-rw-r--r--  src/libutil/logging.hh | 10
-rw-r--r--  src/libutil/serialise.hh | 12
-rw-r--r--  src/libutil/tests/config.cc | 33
-rw-r--r--  src/libutil/tests/logging.cc | 18
-rw-r--r--  src/libutil/url-parts.hh | 44
-rw-r--r--  src/libutil/url.cc | 1
-rw-r--r--  src/libutil/url.hh | 38
-rw-r--r--  src/libutil/util.cc | 41
-rw-r--r--  src/libutil/util.hh | 47
-rwxr-xr-x  src/nix-build/nix-build.cc | 62
-rwxr-xr-x  src/nix-channel/nix-channel.cc | 11
-rw-r--r--  src/nix-channel/unpack-channel.nix | 12
-rw-r--r--  src/nix-daemon/nix-daemon.cc | 37
-rw-r--r--  src/nix-env/nix-env.cc | 6
-rw-r--r--  src/nix-env/user-env.cc | 3
-rw-r--r--  src/nix-instantiate/nix-instantiate.cc | 7
-rw-r--r--  src/nix-prefetch-url/nix-prefetch-url.cc | 11
-rw-r--r--  src/nix-store/nix-store.cc | 30
-rw-r--r--  src/nix/add-to-store.cc | 8
-rw-r--r--  src/nix/build.cc | 7
-rw-r--r--  src/nix/bundle.cc | 6
-rw-r--r--  src/nix/command.cc | 20
-rw-r--r--  src/nix/command.hh | 7
-rw-r--r--  src/nix/describe-stores.cc | 44
-rw-r--r--  src/nix/develop.cc | 143
-rw-r--r--  src/nix/diff-closures.cc | 2
-rw-r--r--  src/nix/doctor.cc | 4
-rw-r--r--  src/nix/flake.cc | 7
-rw-r--r--  src/nix/get-env.sh | 17
-rw-r--r--  src/nix/installables.cc | 21
-rw-r--r--  src/nix/installables.hh | 6
-rw-r--r--  src/nix/local.mk | 4
-rw-r--r--  src/nix/main.cc | 31
-rw-r--r--  src/nix/markdown.cc | 50
-rw-r--r--  src/nix/markdown.hh | 7
-rw-r--r--  src/nix/profile.cc | 17
-rw-r--r--  src/nix/registry.cc | 7
-rw-r--r--  src/nix/repl.cc | 61
-rw-r--r--  src/nix/show-config.cc | 6
-rw-r--r--  src/nix/show-derivation.cc | 11
103 files changed, 4819 insertions, 1568 deletions
diff --git a/src/libexpr/common-eval-args.cc b/src/libexpr/common-eval-args.cc
index 6b48ead1f..10c1a6975 100644
--- a/src/libexpr/common-eval-args.cc
+++ b/src/libexpr/common-eval-args.cc
@@ -29,7 +29,7 @@ MixEvalArgs::MixEvalArgs()
addFlag({
.longName = "include",
.shortName = 'I',
- .description = "add a path to the list of locations used to look up <...> file names",
+ .description = "add a path to the list of locations used to look up `<...>` file names",
.labels = {"path"},
.handler = {[&](std::string s) { searchPath.push_back(s); }}
});
diff --git a/src/libexpr/eval-cache.cc b/src/libexpr/eval-cache.cc
index 46177a0a4..381344b40 100644
--- a/src/libexpr/eval-cache.cc
+++ b/src/libexpr/eval-cache.cc
@@ -391,7 +391,8 @@ Value & AttrCursor::forceValue()
if (root->db && (!cachedValue || std::get_if<placeholder_t>(&cachedValue->second))) {
if (v.type == tString)
- cachedValue = {root->db->setString(getKey(), v.string.s, v.string.context), v.string.s};
+ cachedValue = {root->db->setString(getKey(), v.string.s, v.string.context),
+ string_t{v.string.s, {}}};
else if (v.type == tPath)
cachedValue = {root->db->setString(getKey(), v.path), v.path};
else if (v.type == tBool)
diff --git a/src/libexpr/eval.cc b/src/libexpr/eval.cc
index 0123070d1..883fc27a7 100644
--- a/src/libexpr/eval.cc
+++ b/src/libexpr/eval.cc
@@ -87,6 +87,7 @@ static void printValue(std::ostream & str, std::set<const Value *> & active, con
else if (*i == '\n') str << "\\n";
else if (*i == '\r') str << "\\r";
else if (*i == '\t') str << "\\t";
+ else if (*i == '$' && *(i+1) == '{') str << "\\" << *i;
else str << *i;
str << "\"";
break;
@@ -355,6 +356,7 @@ EvalState::EvalState(const Strings & _searchPath, ref<Store> store)
, sEpsilon(symbols.create(""))
, repair(NoRepair)
, store(store)
+ , regexCache(makeRegexCache())
, baseEnv(allocEnv(128))
, staticBaseEnv(false, 0)
{
@@ -369,7 +371,11 @@ EvalState::EvalState(const Strings & _searchPath, ref<Store> store)
for (auto & i : _searchPath) addToSearchPath(i);
for (auto & i : evalSettings.nixPath.get()) addToSearchPath(i);
}
- addToSearchPath("nix=" + canonPath(settings.nixDataDir + "/nix/corepkgs", true));
+
+ try {
+ addToSearchPath("nix=" + canonPath(settings.nixDataDir + "/nix/corepkgs", true));
+ } catch (Error &) {
+ }
if (evalSettings.restrictEval || evalSettings.pureEval) {
allowedPaths = PathSet();
@@ -381,10 +387,14 @@ EvalState::EvalState(const Strings & _searchPath, ref<Store> store)
auto path = r.second;
if (store->isInStore(r.second)) {
- StorePathSet closure;
- store->computeFSClosure(store->toStorePath(r.second).first, closure);
- for (auto & path : closure)
- allowedPaths->insert(store->printStorePath(path));
+ try {
+ StorePathSet closure;
+ store->computeFSClosure(store->toStorePath(r.second).first, closure);
+ for (auto & path : closure)
+ allowedPaths->insert(store->printStorePath(path));
+ } catch (InvalidPath &) {
+ allowedPaths->insert(r.second);
+ }
} else
allowedPaths->insert(r.second);
}
@@ -509,7 +519,7 @@ Value * EvalState::addPrimOp(const string & name,
if (arity == 0) {
auto vPrimOp = allocValue();
vPrimOp->type = tPrimOp;
- vPrimOp->primOp = new PrimOp(primOp, 1, sym);
+ vPrimOp->primOp = new PrimOp { .fun = primOp, .arity = 1, .name = sym };
Value v;
mkApp(v, *vPrimOp, *vPrimOp);
return addConstant(name, v);
@@ -517,7 +527,7 @@ Value * EvalState::addPrimOp(const string & name,
Value * v = allocValue();
v->type = tPrimOp;
- v->primOp = new PrimOp(primOp, arity, sym);
+ v->primOp = new PrimOp { .fun = primOp, .arity = arity, .name = sym };
staticBaseEnv.vars[symbols.create(name)] = baseEnvDispl;
baseEnv.values[baseEnvDispl++] = v;
baseEnv.values[0]->attrs->push_back(Attr(sym, v));
@@ -525,12 +535,59 @@ Value * EvalState::addPrimOp(const string & name,
}
+Value * EvalState::addPrimOp(PrimOp && primOp)
+{
+    /* Hack to make constants lazy: turn them into an application of
+ the primop to a dummy value. */
+ if (primOp.arity == 0) {
+ primOp.arity = 1;
+ auto vPrimOp = allocValue();
+ vPrimOp->type = tPrimOp;
+ vPrimOp->primOp = new PrimOp(std::move(primOp));
+ Value v;
+ mkApp(v, *vPrimOp, *vPrimOp);
+ return addConstant(primOp.name, v);
+ }
+
+ Symbol envName = primOp.name;
+ if (hasPrefix(primOp.name, "__"))
+ primOp.name = symbols.create(std::string(primOp.name, 2));
+
+ Value * v = allocValue();
+ v->type = tPrimOp;
+ v->primOp = new PrimOp(std::move(primOp));
+ staticBaseEnv.vars[envName] = baseEnvDispl;
+ baseEnv.values[baseEnvDispl++] = v;
+ baseEnv.values[0]->attrs->push_back(Attr(primOp.name, v));
+ return v;
+}
+
+
Value & EvalState::getBuiltin(const string & name)
{
return *baseEnv.values[0]->attrs->find(symbols.create(name))->value;
}
+std::optional<EvalState::Doc> EvalState::getDoc(Value & v)
+{
+ if (v.type == tPrimOp || v.type == tPrimOpApp) {
+ auto v2 = &v;
+ while (v2->type == tPrimOpApp)
+ v2 = v2->primOpApp.left;
+ if (v2->primOp->doc)
+ return Doc {
+ .pos = noPos,
+ .name = v2->primOp->name,
+ .arity = v2->primOp->arity,
+ .args = v2->primOp->args,
+ .doc = v2->primOp->doc,
+ };
+ }
+ return {};
+}
+
+
/* Every "format" object (even temporary) takes up a few hundred bytes
of stack space, which is a real killer in the recursive
evaluator. So here are some helper functions for throwing
@@ -1297,14 +1354,25 @@ void EvalState::autoCallFunction(Bindings & args, Value & fun, Value & res)
}
Value * actualArgs = allocValue();
- mkAttrs(*actualArgs, fun.lambda.fun->formals->formals.size());
-
- for (auto & i : fun.lambda.fun->formals->formals) {
- Bindings::iterator j = args.find(i.name);
- if (j != args.end())
- actualArgs->attrs->push_back(*j);
- else if (!i.def)
- throwTypeError("cannot auto-call a function that has an argument without a default value ('%1%')", i.name);
+ mkAttrs(*actualArgs, std::max(static_cast<uint32_t>(fun.lambda.fun->formals->formals.size()), args.size()));
+
+ if (fun.lambda.fun->formals->ellipsis) {
+        // If the formals have an ellipsis (e.g. the function accepts extra args), pass
+        // all available automatic arguments (which include arguments specified on
+        // the command line via --arg/--argstr)
+ for (auto& v : args) {
+ actualArgs->attrs->push_back(v);
+ }
+ } else {
+ // Otherwise, only pass the arguments that the function accepts
+ for (auto & i : fun.lambda.fun->formals->formals) {
+ Bindings::iterator j = args.find(i.name);
+ if (j != args.end()) {
+ actualArgs->attrs->push_back(*j);
+ } else if (!i.def) {
+ throwTypeError("cannot auto-call a function that has an argument without a default value ('%1%')", i.name);
+ }
+ }
}
actualArgs->attrs->sort();
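
A minimal Nix-level sketch of the auto-call behaviour introduced by this hunk (a hypothetical file, not part of the diff): with an ellipsis in the formals, unmatched automatic arguments such as values given via `--arg`/`--argstr` are now passed through instead of being dropped.

```nix
# sketch: auto-called e.g. as `nix-instantiate --eval auto.nix --argstr extra hello`
{ x ? 1, ... } @ args:
{
  inherit x;
  # with the ellipsis, the automatic argument 'extra' shows up here;
  # without it, only the declared formal 'x' would be considered
  received = builtins.attrNames args;
}
```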
diff --git a/src/libexpr/eval.hh b/src/libexpr/eval.hh
index 5855b4ef2..0e1f61baa 100644
--- a/src/libexpr/eval.hh
+++ b/src/libexpr/eval.hh
@@ -6,7 +6,6 @@
#include "symbol-table.hh"
#include "config.hh"
-#include <regex>
#include <map>
#include <optional>
#include <unordered_map>
@@ -30,8 +29,8 @@ struct PrimOp
PrimOpFun fun;
size_t arity;
Symbol name;
- PrimOp(PrimOpFun fun, size_t arity, Symbol name)
- : fun(fun), arity(arity), name(name) { }
+ std::vector<std::string> args;
+ const char * doc = nullptr;
};
@@ -65,6 +64,11 @@ typedef std::list<SearchPathElem> SearchPath;
void initGC();
+struct RegexCache;
+
+std::shared_ptr<RegexCache> makeRegexCache();
+
+
class EvalState
{
public:
@@ -120,7 +124,7 @@ private:
std::unordered_map<Path, Path> resolvedPaths;
/* Cache used by prim_match(). */
- std::unordered_map<std::string, std::regex> regexCache;
+ std::shared_ptr<RegexCache> regexCache;
public:
@@ -242,10 +246,23 @@ private:
Value * addPrimOp(const string & name,
size_t arity, PrimOpFun primOp);
+ Value * addPrimOp(PrimOp && primOp);
+
public:
Value & getBuiltin(const string & name);
+ struct Doc
+ {
+ Pos pos;
+ std::optional<Symbol> name;
+ size_t arity;
+ std::vector<std::string> args;
+ const char * doc;
+ };
+
+ std::optional<Doc> getDoc(Value & v);
+
private:
inline Value * lookupVar(Env * env, const ExprVar & var, bool noEval);
@@ -357,24 +374,57 @@ struct EvalSettings : Config
Setting<bool> enableNativeCode{this, false, "allow-unsafe-native-code-during-evaluation",
"Whether builtin functions that allow executing native code should be enabled."};
- Setting<Strings> nixPath{this, getDefaultNixPath(), "nix-path",
- "List of directories to be searched for <...> file references."};
+ Setting<Strings> nixPath{
+ this, getDefaultNixPath(), "nix-path",
+ "List of directories to be searched for `<...>` file references."};
- Setting<bool> restrictEval{this, false, "restrict-eval",
- "Whether to restrict file system access to paths in $NIX_PATH, "
- "and network access to the URI prefixes listed in 'allowed-uris'."};
+ Setting<bool> restrictEval{
+ this, false, "restrict-eval",
+ R"(
+ If set to `true`, the Nix evaluator will not allow access to any
+ files outside of the Nix search path (as set via the `NIX_PATH`
+ environment variable or the `-I` option), or to URIs outside of
+      `allowed-uris`. The default is `false`.
+ )"};
Setting<bool> pureEval{this, false, "pure-eval",
"Whether to restrict file system and network access to files specified by cryptographic hash."};
- Setting<bool> enableImportFromDerivation{this, true, "allow-import-from-derivation",
- "Whether the evaluator allows importing the result of a derivation."};
+ Setting<bool> enableImportFromDerivation{
+ this, true, "allow-import-from-derivation",
+ R"(
+ By default, Nix allows you to `import` from a derivation, allowing
+ building at evaluation time. With this option set to false, Nix will
+ throw an error when evaluating an expression that uses this feature,
+ allowing users to ensure their evaluation will not require any
+ builds to take place.
+ )"};
Setting<Strings> allowedUris{this, {}, "allowed-uris",
- "Prefixes of URIs that builtin functions such as fetchurl and fetchGit are allowed to fetch."};
+ R"(
+ A list of URI prefixes to which access is allowed in restricted
+ evaluation mode. For example, when set to
+ `https://github.com/NixOS`, builtin functions such as `fetchGit` are
+ allowed to access `https://github.com/NixOS/patchelf.git`.
+ )"};
Setting<bool> traceFunctionCalls{this, false, "trace-function-calls",
- "Emit log messages for each function entry and exit at the 'vomit' log level (-vvvv)."};
+ R"(
+ If set to `true`, the Nix evaluator will trace every function call.
+ Nix will print a log message at the "vomit" level for every function
+ entrance and function exit.
+
+ function-trace entered undefined position at 1565795816999559622
+ function-trace exited undefined position at 1565795816999581277
+ function-trace entered /nix/store/.../example.nix:226:41 at 1565795253249935150
+ function-trace exited /nix/store/.../example.nix:226:41 at 1565795253249941684
+
+ The `undefined position` means the function call is a builtin.
+
+ Use the `contrib/stack-collapse.py` script distributed with the Nix
+      source code to convert the trace logs into a format suitable for
+ `flamegraph.pl`.
+ )"};
Setting<bool> useEvalCache{this, true, "eval-cache",
"Whether to use the flake evaluation cache."};
diff --git a/src/libexpr/flake/flakeref.cc b/src/libexpr/flake/flakeref.cc
index 6363446f6..d5c2ffe66 100644
--- a/src/libexpr/flake/flakeref.cc
+++ b/src/libexpr/flake/flakeref.cc
@@ -1,6 +1,7 @@
#include "flakeref.hh"
#include "store-api.hh"
#include "url.hh"
+#include "url-parts.hh"
#include "fetchers.hh"
#include "registry.hh"
diff --git a/src/libexpr/flake/lockfile.cc b/src/libexpr/flake/lockfile.cc
index a74846944..78431f000 100644
--- a/src/libexpr/flake/lockfile.cc
+++ b/src/libexpr/flake/lockfile.cc
@@ -1,5 +1,6 @@
#include "lockfile.hh"
#include "store-api.hh"
+#include "url-parts.hh"
#include <nlohmann/json.hpp>
diff --git a/src/libexpr/flake/lockfile.hh b/src/libexpr/flake/lockfile.hh
index 5e7cfda3e..9ec8b39c3 100644
--- a/src/libexpr/flake/lockfile.hh
+++ b/src/libexpr/flake/lockfile.hh
@@ -6,7 +6,7 @@
namespace nix {
class Store;
-struct StorePath;
+class StorePath;
}
namespace nix::flake {
diff --git a/src/libexpr/get-drvs.cc b/src/libexpr/get-drvs.cc
index 5d6e39aa0..91916e8bf 100644
--- a/src/libexpr/get-drvs.cc
+++ b/src/libexpr/get-drvs.cc
@@ -38,8 +38,11 @@ DrvInfo::DrvInfo(EvalState & state, ref<Store> store, const std::string & drvPat
auto i = drv.outputs.find(outputName);
if (i == drv.outputs.end())
throw Error("derivation '%s' does not have output '%s'", store->printStorePath(drvPath), outputName);
+ auto & [outputName, output] = *i;
- outPath = store->printStorePath(i->second.path(*store, drv.name));
+ auto optStorePath = output.path(*store, drv.name, outputName);
+ if (optStorePath)
+ outPath = store->printStorePath(*optStorePath);
}
@@ -77,12 +80,15 @@ string DrvInfo::queryDrvPath() const
string DrvInfo::queryOutPath() const
{
- if (outPath == "" && attrs) {
+ if (!outPath && attrs) {
Bindings::iterator i = attrs->find(state->sOutPath);
PathSet context;
- outPath = i != attrs->end() ? state->coerceToPath(*i->pos, *i->value, context) : "";
+ if (i != attrs->end())
+ outPath = state->coerceToPath(*i->pos, *i->value, context);
}
- return outPath;
+ if (!outPath)
+ throw UnimplementedError("CA derivations are not yet supported");
+ return *outPath;
}
diff --git a/src/libexpr/get-drvs.hh b/src/libexpr/get-drvs.hh
index d7860fc6a..29bb6a660 100644
--- a/src/libexpr/get-drvs.hh
+++ b/src/libexpr/get-drvs.hh
@@ -20,7 +20,7 @@ private:
mutable string name;
mutable string system;
mutable string drvPath;
- mutable string outPath;
+ mutable std::optional<string> outPath;
mutable string outputName;
Outputs outputs;
diff --git a/src/libexpr/local.mk b/src/libexpr/local.mk
index d84b150e0..687a8ccda 100644
--- a/src/libexpr/local.mk
+++ b/src/libexpr/local.mk
@@ -42,6 +42,6 @@ $(eval $(call install-file-in, $(d)/nix-expr.pc, $(prefix)/lib/pkgconfig, 0644))
$(foreach i, $(wildcard src/libexpr/flake/*.hh), \
$(eval $(call install-file-in, $(i), $(includedir)/nix/flake, 0644)))
-$(d)/primops.cc: $(d)/imported-drv-to-derivation.nix.gen.hh
+$(d)/primops.cc: $(d)/imported-drv-to-derivation.nix.gen.hh $(d)/primops/derivation.nix.gen.hh
$(d)/flake/flake.cc: $(d)/flake/call-flake.nix.gen.hh
diff --git a/src/libexpr/primops.cc b/src/libexpr/primops.cc
index 30f4c3529..9cfe3f402 100644
--- a/src/libexpr/primops.cc
+++ b/src/libexpr/primops.cc
@@ -44,16 +44,6 @@ void EvalState::realiseContext(const PathSet & context)
throw InvalidPathError(store->printStorePath(ctx));
if (!outputName.empty() && ctx.isDerivation()) {
drvs.push_back(StorePathWithOutputs{ctx, {outputName}});
-
- /* Add the output of this derivation to the allowed
- paths. */
- if (allowedPaths) {
- auto drv = store->derivationFromPath(ctx);
- DerivationOutputs::iterator i = drv.outputs.find(outputName);
- if (i == drv.outputs.end())
- throw Error("derivation '%s' does not have an output named '%s'", ctxS, outputName);
- allowedPaths->insert(store->printStorePath(i->second.path(*store, drv.name)));
- }
}
}
@@ -69,15 +59,57 @@ void EvalState::realiseContext(const PathSet & context)
store->queryMissing(drvs, willBuild, willSubstitute, unknown, downloadSize, narSize);
store->buildPaths(drvs);
+
+    /* Add the outputs of these derivations to the allowed
+       paths. */
+ if (allowedPaths) {
+ for (auto & [drvPath, outputs] : drvs) {
+ auto outputPaths = store->queryDerivationOutputMap(drvPath);
+ for (auto & outputName : outputs) {
+ if (outputPaths.count(outputName) == 0)
+ throw Error("derivation '%s' does not have an output named '%s'",
+ store->printStorePath(drvPath), outputName);
+ allowedPaths->insert(store->printStorePath(outputPaths.at(outputName)));
+ }
+ }
+ }
}
+/* Add an attribute to the given attribute map from the output name to
+   the output path, or a placeholder.
+
+   Where possible the path is used, but for floating CA derivations we
+   may not know it. For the sake of determinism we always assume we don't
+   and instead put in a placeholder. In either case, however, the
+ string context will contain the drv path and output name, so
+ downstream derivations will have the proper dependency, and in
+ addition, before building, the placeholder will be rewritten to be
+ the actual path.
+
+ The 'drv' and 'drvPath' outputs must correspond. */
+static void mkOutputString(EvalState & state, Value & v,
+ const StorePath & drvPath, const BasicDerivation & drv,
+ std::pair<string, DerivationOutput> o)
+{
+ auto optOutputPath = o.second.path(*state.store, drv.name, o.first);
+ mkString(
+ *state.allocAttr(v, state.symbols.create(o.first)),
+ optOutputPath
+ ? state.store->printStorePath(*optOutputPath)
+ /* Downstream we would substitute this for an actual path once
+ we build the floating CA derivation */
+ /* FIXME: we need to depend on the basic derivation, not
+ derivation */
+ : downstreamPlaceholder(*state.store, drvPath, o.first),
+ {"!" + o.first + "!" + state.store->printStorePath(drvPath)});
+}
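
As a rough Nix-level illustration of the string context described above (a sketch using `builtins.getContext`, which exists in current Nix and is not introduced by this patch):

```nix
# sketch: a derivation's output attribute is an ordinary string whose context
# records the .drv path and output name, which is what keeps the dependency intact
let
  drv = derivation {
    name = "demo";
    system = builtins.currentSystem;
    builder = "/bin/sh";
    args = [ "-c" ": > $out" ];
  };
in builtins.getContext drv.outPath
```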
/* Load and evaluate an expression from path specified by the
argument. */
-static void prim_scopedImport(EvalState & state, const Pos & pos, Value * * args, Value & v)
+static void import(EvalState & state, const Pos & pos, Value & vPath, Value * vScope, Value & v)
{
PathSet context;
- Path path = state.coerceToPath(pos, *args[1], context);
+ Path path = state.coerceToPath(pos, vPath, context);
try {
state.realiseContext(context);
@@ -99,9 +131,10 @@ static void prim_scopedImport(EvalState & state, const Pos & pos, Value * * args
return std::nullopt;
return storePath;
};
+
if (auto optStorePath = isValidDerivationInStore()) {
auto storePath = *optStorePath;
- Derivation drv = readDerivation(*state.store, realPath, Derivation::nameFromPath(storePath));
+ Derivation drv = state.store->readDerivation(storePath);
Value & w = *state.allocValue();
state.mkAttrs(w, 3 + drv.outputs.size());
Value * v2 = state.allocAttr(w, state.sDrvPath);
@@ -113,9 +146,8 @@ static void prim_scopedImport(EvalState & state, const Pos & pos, Value * * args
state.mkList(*outputsVal, drv.outputs.size());
unsigned int outputs_index = 0;
- for (const auto & o : drv.outputsAndPaths(*state.store)) {
- v2 = state.allocAttr(w, state.symbols.create(o.first));
- mkString(*v2, state.store->printStorePath(o.second.second), {"!" + o.first + "!" + path});
+ for (const auto & o : drv.outputs) {
+ mkOutputString(state, w, storePath, drv, o);
outputsVal->listElems()[outputs_index] = state.allocValue();
mkString(*(outputsVal->listElems()[outputs_index++]), o.first);
}
@@ -133,17 +165,18 @@ static void prim_scopedImport(EvalState & state, const Pos & pos, Value * * args
mkApp(v, **fun, w);
state.forceAttrs(v, pos);
} else {
- state.forceAttrs(*args[0]);
- if (args[0]->attrs->empty())
+ if (!vScope)
state.evalFile(realPath, v);
else {
- Env * env = &state.allocEnv(args[0]->attrs->size());
+ state.forceAttrs(*vScope);
+
+ Env * env = &state.allocEnv(vScope->attrs->size());
env->up = &state.baseEnv;
StaticEnv staticEnv(false, &state.staticBaseEnv);
unsigned int displ = 0;
- for (auto & attr : *args[0]->attrs) {
+ for (auto & attr : *vScope->attrs) {
staticEnv.vars[attr.name] = displ;
env->values[displ++] = attr.value;
}
@@ -156,6 +189,76 @@ static void prim_scopedImport(EvalState & state, const Pos & pos, Value * * args
}
}
+static RegisterPrimOp primop_scopedImport(RegisterPrimOp::Info {
+ .name = "scopedImport",
+ .arity = 2,
+ .fun = [](EvalState & state, const Pos & pos, Value * * args, Value & v)
+ {
+ import(state, pos, *args[1], args[0], v);
+ }
+});
+
+static RegisterPrimOp primop_import({
+ .name = "import",
+ .args = {"path"},
+ .doc = R"(
+ Load, parse and return the Nix expression in the file *path*. If
+ *path* is a directory, the file ` default.nix ` in that directory
+ is loaded. Evaluation aborts if the file doesn’t exist or contains
+ an incorrect Nix expression. `import` implements Nix’s module
+ system: you can put any Nix expression (such as a set or a
+ function) in a separate file, and use it from Nix expressions in
+ other files.
+
+ > **Note**
+ >
+ > Unlike some languages, `import` is a regular function in Nix.
+ > Paths using the angle bracket syntax (e.g., `import` *\<foo\>*)
+ > are [normal path values](language-values.md).
+
+ A Nix expression loaded by `import` must not contain any *free
+ variables* (identifiers that are not defined in the Nix expression
+ itself and are not built-in). Therefore, it cannot refer to
+ variables that are in scope at the call site. For instance, if you
+ have a calling expression
+
+ ```nix
+ rec {
+ x = 123;
+ y = import ./foo.nix;
+ }
+ ```
+
+ then the following `foo.nix` will give an error:
+
+ ```nix
+ x + 456
+ ```
+
+ since `x` is not in scope in `foo.nix`. If you want `x` to be
+ available in `foo.nix`, you should pass it as a function argument:
+
+ ```nix
+ rec {
+ x = 123;
+ y = import ./foo.nix x;
+ }
+ ```
+
+ and
+
+ ```nix
+ x: x + 456
+ ```
+
+ (The function argument doesn’t have to be called `x` in `foo.nix`;
+ any name would work.)
+ )",
+ .fun = [](EvalState & state, const Pos & pos, Value * * args, Value & v)
+ {
+ import(state, pos, *args[0], nullptr, v);
+ }
+});
/* Want reasonable symbol names, so extern C */
/* !!! Should we pass the Pos or the file name too? */
@@ -275,6 +378,16 @@ static void prim_typeOf(EvalState & state, const Pos & pos, Value * * args, Valu
mkString(v, state.symbols.create(t));
}
+static RegisterPrimOp primop_typeOf({
+ .name = "__typeOf",
+ .args = {"e"},
+ .doc = R"(
+ Return a string representing the type of the value *e*, namely
+ `"int"`, `"bool"`, `"string"`, `"path"`, `"null"`, `"set"`,
+ `"list"`, `"lambda"` or `"float"`.
+ )",
+ .fun = prim_typeOf,
+});
/* Determine whether the argument is the null value. */
static void prim_isNull(EvalState & state, const Pos & pos, Value * * args, Value & v)
@@ -283,6 +396,18 @@ static void prim_isNull(EvalState & state, const Pos & pos, Value * * args, Valu
mkBool(v, args[0]->type == tNull);
}
+static RegisterPrimOp primop_isNull({
+ .name = "isNull",
+ .args = {"e"},
+ .doc = R"(
+ Return `true` if *e* evaluates to `null`, and `false` otherwise.
+
+ > **Warning**
+ >
+ > This function is *deprecated*; just write `e == null` instead.
+ )",
+ .fun = prim_isNull,
+});
/* Determine whether the argument is a function. */
static void prim_isFunction(EvalState & state, const Pos & pos, Value * * args, Value & v)
@@ -302,6 +427,14 @@ static void prim_isFunction(EvalState & state, const Pos & pos, Value * * args,
mkBool(v, res);
}
+static RegisterPrimOp primop_isFunction({
+ .name = "__isFunction",
+ .args = {"e"},
+ .doc = R"(
+ Return `true` if *e* evaluates to a function, and `false` otherwise.
+ )",
+ .fun = prim_isFunction,
+});
/* Determine whether the argument is an integer. */
static void prim_isInt(EvalState & state, const Pos & pos, Value * * args, Value & v)
@@ -310,6 +443,15 @@ static void prim_isInt(EvalState & state, const Pos & pos, Value * * args, Value
mkBool(v, args[0]->type == tInt);
}
+static RegisterPrimOp primop_isInt({
+ .name = "__isInt",
+ .args = {"e"},
+ .doc = R"(
+ Return `true` if *e* evaluates to an integer, and `false` otherwise.
+ )",
+ .fun = prim_isInt,
+});
+
/* Determine whether the argument is a float. */
static void prim_isFloat(EvalState & state, const Pos & pos, Value * * args, Value & v)
{
@@ -317,6 +459,15 @@ static void prim_isFloat(EvalState & state, const Pos & pos, Value * * args, Val
mkBool(v, args[0]->type == tFloat);
}
+static RegisterPrimOp primop_isFloat({
+ .name = "__isFloat",
+ .args = {"e"},
+ .doc = R"(
+ Return `true` if *e* evaluates to a float, and `false` otherwise.
+ )",
+ .fun = prim_isFloat,
+});
+
/* Determine whether the argument is a string. */
static void prim_isString(EvalState & state, const Pos & pos, Value * * args, Value & v)
{
@@ -324,6 +475,14 @@ static void prim_isString(EvalState & state, const Pos & pos, Value * * args, Va
mkBool(v, args[0]->type == tString);
}
+static RegisterPrimOp primop_isString({
+ .name = "__isString",
+ .args = {"e"},
+ .doc = R"(
+ Return `true` if *e* evaluates to a string, and `false` otherwise.
+ )",
+ .fun = prim_isString,
+});
/* Determine whether the argument is a Boolean. */
static void prim_isBool(EvalState & state, const Pos & pos, Value * * args, Value & v)
@@ -332,6 +491,15 @@ static void prim_isBool(EvalState & state, const Pos & pos, Value * * args, Valu
mkBool(v, args[0]->type == tBool);
}
+static RegisterPrimOp primop_isBool({
+ .name = "__isBool",
+ .args = {"e"},
+ .doc = R"(
+ Return `true` if *e* evaluates to a bool, and `false` otherwise.
+ )",
+ .fun = prim_isBool,
+});
+
/* Determine whether the argument is a path. */
static void prim_isPath(EvalState & state, const Pos & pos, Value * * args, Value & v)
{
@@ -339,6 +507,15 @@ static void prim_isPath(EvalState & state, const Pos & pos, Value * * args, Valu
mkBool(v, args[0]->type == tPath);
}
+static RegisterPrimOp primop_isPath({
+ .name = "__isPath",
+ .args = {"e"},
+ .doc = R"(
+ Return `true` if *e* evaluates to a path, and `false` otherwise.
+ )",
+ .fun = prim_isPath,
+});
+
struct CompareValues
{
bool operator () (const Value * v1, const Value * v2) const
@@ -444,22 +621,43 @@ static void prim_genericClosure(EvalState & state, const Pos & pos, Value * * ar
v.listElems()[n++] = i;
}
+static RegisterPrimOp primop_genericClosure(RegisterPrimOp::Info {
+ .name = "__genericClosure",
+ .arity = 1,
+ .fun = prim_genericClosure,
+});
-static void prim_abort(EvalState & state, const Pos & pos, Value * * args, Value & v)
-{
- PathSet context;
- string s = state.coerceToString(pos, *args[0], context);
- throw Abort("evaluation aborted with the following error message: '%1%'", s);
-}
-
-
-static void prim_throw(EvalState & state, const Pos & pos, Value * * args, Value & v)
-{
- PathSet context;
- string s = state.coerceToString(pos, *args[0], context);
- throw ThrownError(s);
-}
-
+static RegisterPrimOp primop_abort({
+ .name = "abort",
+ .args = {"s"},
+ .doc = R"(
+ Abort Nix expression evaluation and print the error message *s*.
+ )",
+ .fun = [](EvalState & state, const Pos & pos, Value * * args, Value & v)
+ {
+ PathSet context;
+ string s = state.coerceToString(pos, *args[0], context);
+ throw Abort("evaluation aborted with the following error message: '%1%'", s);
+ }
+});
+
+static RegisterPrimOp primop_throw({
+ .name = "throw",
+ .args = {"s"},
+ .doc = R"(
+ Throw an error message *s*. This usually aborts Nix expression
+ evaluation, but in `nix-env -qa` and other commands that try to
+ evaluate a set of derivations to get information about those
+ derivations, a derivation that throws an error is silently skipped
+ (which is not the case for `abort`).
+ )",
+ .fun = [](EvalState & state, const Pos & pos, Value * * args, Value & v)
+ {
+ PathSet context;
+ string s = state.coerceToString(pos, *args[0], context);
+ throw ThrownError(s);
+ }
+});
static void prim_addErrorContext(EvalState & state, const Pos & pos, Value * * args, Value & v)
{
@@ -473,6 +671,11 @@ static void prim_addErrorContext(EvalState & state, const Pos & pos, Value * * a
}
}
+static RegisterPrimOp primop_addErrorContext(RegisterPrimOp::Info {
+ .name = "__addErrorContext",
+ .arity = 2,
+ .fun = prim_addErrorContext,
+});
/* Try evaluating the argument. Success => {success=true; value=something;},
* else => {success=false; value=false;} */
@@ -490,6 +693,22 @@ static void prim_tryEval(EvalState & state, const Pos & pos, Value * * args, Val
v.attrs->sort();
}
+static RegisterPrimOp primop_tryEval({
+ .name = "__tryEval",
+ .args = {"e"},
+ .doc = R"(
+ Try to shallowly evaluate *e*. Return a set containing the
+ attributes `success` (`true` if *e* evaluated successfully,
+ `false` if an error was thrown) and `value`, equalling *e* if
+ successful and `false` otherwise. Note that this doesn't evaluate
+ *e* deeply, so ` let e = { x = throw ""; }; in (builtins.tryEval
+ e).success ` will be `true`. Using ` builtins.deepSeq ` one can
+ get the expected result: `let e = { x = throw ""; }; in
+ (builtins.tryEval (builtins.deepSeq e e)).success` will be
+ `false`.
+ )",
+ .fun = prim_tryEval,
+});
/* Return an environment variable. Use with care. */
static void prim_getEnv(EvalState & state, const Pos & pos, Value * * args, Value & v)
@@ -498,6 +717,22 @@ static void prim_getEnv(EvalState & state, const Pos & pos, Value * * args, Valu
mkString(v, evalSettings.restrictEval || evalSettings.pureEval ? "" : getEnv(name).value_or(""));
}
+static RegisterPrimOp primop_getEnv({
+ .name = "__getEnv",
+ .args = {"s"},
+ .doc = R"(
+ `getEnv` returns the value of the environment variable *s*, or an
+ empty string if the variable doesn’t exist. This function should be
+ used with care, as it can introduce all sorts of nasty environment
+ dependencies in your Nix expression.
+
+ `getEnv` is used in Nix Packages to locate the file
+ `~/.nixpkgs/config.nix`, which contains user-local settings for Nix
+ Packages. (That is, it does a `getEnv "HOME"` to locate the user’s
+ home directory.)
+ )",
+ .fun = prim_getEnv,
+});
/* Evaluate the first argument, then return the second argument. */
static void prim_seq(EvalState & state, const Pos & pos, Value * * args, Value & v)
@@ -507,6 +742,15 @@ static void prim_seq(EvalState & state, const Pos & pos, Value * * args, Value &
v = *args[1];
}
+static RegisterPrimOp primop_seq({
+ .name = "__seq",
+ .args = {"e1", "e2"},
+ .doc = R"(
+ Evaluate *e1*, then evaluate and return *e2*. This ensures that a
+ computation is strict in the value of *e1*.
+ )",
+ .fun = prim_seq,
+});
/* Evaluate the first argument deeply (i.e. recursing into lists and
attrsets), then return the second argument. */
@@ -517,6 +761,16 @@ static void prim_deepSeq(EvalState & state, const Pos & pos, Value * * args, Val
v = *args[1];
}
+static RegisterPrimOp primop_deepSeq({
+ .name = "__deepSeq",
+ .args = {"e1", "e2"},
+ .doc = R"(
+ This is like `seq e1 e2`, except that *e1* is evaluated *deeply*:
+ if it’s a list or set, its elements or attributes are also
+ evaluated recursively.
+ )",
+ .fun = prim_deepSeq,
+});
/* Evaluate the first expression and print it on standard error. Then
return the second expression. Useful for debugging. */
@@ -531,6 +785,17 @@ static void prim_trace(EvalState & state, const Pos & pos, Value * * args, Value
v = *args[1];
}
+static RegisterPrimOp primop_trace({
+ .name = "__trace",
+ .args = {"e1", "e2"},
+ .doc = R"(
+ Evaluate *e1* and print its abstract syntax representation on
+ standard error. Then return *e2*. This function is useful for
+ debugging.
+ )",
+ .fun = prim_trace,
+});
+
/*************************************************************
* Derivations
@@ -839,26 +1104,33 @@ static void prim_derivationStrict(EvalState & state, const Pos & pos, Value * *
}
/* Write the resulting term into the Nix store directory. */
- auto drvPath = writeDerivation(state.store, drv, state.repair);
+ auto drvPath = writeDerivation(*state.store, drv, state.repair);
auto drvPathS = state.store->printStorePath(drvPath);
printMsg(lvlChatty, "instantiated '%1%' -> '%2%'", drvName, drvPathS);
/* Optimisation, but required in read-only mode! because in that
case we don't actually write store derivations, so we can't
- read them later. */
- drvHashes.insert_or_assign(drvPath,
- hashDerivationModulo(*state.store, Derivation(drv), false));
+ read them later.
+
+ However, we don't bother doing this for floating CA derivations because
+ their "hash modulo" is indeterminate until built. */
+ if (drv.type() != DerivationType::CAFloating)
+ drvHashes.insert_or_assign(drvPath,
+ hashDerivationModulo(*state.store, Derivation(drv), false));
state.mkAttrs(v, 1 + drv.outputs.size());
mkString(*state.allocAttr(v, state.sDrvPath), drvPathS, {"=" + drvPathS});
- for (auto & i : drv.outputsAndPaths(*state.store)) {
- mkString(*state.allocAttr(v, state.symbols.create(i.first)),
- state.store->printStorePath(i.second.second), {"!" + i.first + "!" + drvPathS});
- }
+ for (auto & i : drv.outputs)
+ mkOutputString(state, v, drvPath, drv, i);
v.attrs->sort();
}
+static RegisterPrimOp primop_derivationStrict(RegisterPrimOp::Info {
+ .name = "derivationStrict",
+ .arity = 1,
+ .fun = prim_derivationStrict,
+});
/* Return a placeholder string for the specified output that will be
substituted by the corresponding output path at build time. For
@@ -872,6 +1144,17 @@ static void prim_placeholder(EvalState & state, const Pos & pos, Value * * args,
mkString(v, hashPlaceholder(state.forceStringNoCtx(*args[0], pos)));
}
+static RegisterPrimOp primop_placeholder({
+ .name = "placeholder",
+ .args = {"output"},
+ .doc = R"(
+ Return a placeholder string for the specified *output* that will be
+ substituted by the corresponding output path at build time. Typical
+ outputs would be `"out"`, `"bin"` or `"dev"`.
+ )",
+ .fun = prim_placeholder,
+});
+
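A small usage sketch for `builtins.placeholder` (a hypothetical derivation, not taken from this diff); the placeholder string is rewritten to the real output path at build time:

```nix
derivation {
  name = "placeholder-demo";
  system = builtins.currentSystem;
  builder = "/bin/sh";
  # the 'out' path is not known at evaluation time, so a placeholder stands in
  args = [ "-c" "echo ${builtins.placeholder "out"} > $out" ];
}
```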
/*************************************************************
* Paths
@@ -886,6 +1169,15 @@ static void prim_toPath(EvalState & state, const Pos & pos, Value * * args, Valu
mkString(v, canonPath(path), context);
}
+static RegisterPrimOp primop_toPath({
+ .name = "__toPath",
+ .args = {"s"},
+ .doc = R"(
+ **DEPRECATED.** Use `/. + "/path"` to convert a string into an absolute
+ path. For relative paths, use `./. + "/path"`.
+ )",
+ .fun = prim_toPath,
+});
/* Allow a valid store path to be used in an expression. This is
useful in some generated expressions such as in nix-push, which
@@ -897,6 +1189,9 @@ static void prim_toPath(EvalState & state, const Pos & pos, Value * * args, Valu
corner cases. */
static void prim_storePath(EvalState & state, const Pos & pos, Value * * args, Value & v)
{
+ if (evalSettings.pureEval)
+ throw EvalError("builtins.storePath' is not allowed in pure evaluation mode");
+
PathSet context;
Path path = state.checkSourcePath(state.coerceToPath(pos, *args[0], context));
/* Resolve symlinks in ‘path’, unless ‘path’ itself is a symlink
@@ -915,6 +1210,23 @@ static void prim_storePath(EvalState & state, const Pos & pos, Value * * args, V
mkString(v, path, context);
}
+static RegisterPrimOp primop_storePath({
+ .name = "__storePath",
+ .args = {"path"},
+ .doc = R"(
+ This function allows you to define a dependency on an already
+ existing store path. For example, the derivation attribute `src
+ = builtins.storePath /nix/store/f1d18v1y…-source` causes the
+ derivation to depend on the specified path, which must exist or
+ be substitutable. Note that this differs from a plain path
+ (e.g. `src = /nix/store/f1d18v1y…-source`) in that the latter
+ causes the path to be *copied* again to the Nix store, resulting
+ in a new path (e.g. `/nix/store/ld01dnzc…-source-source`).
+
+ This function is not available in pure evaluation mode.
+ )",
+ .fun = prim_storePath,
+});
static void prim_pathExists(EvalState & state, const Pos & pos, Value * * args, Value & v)
{
@@ -942,6 +1254,15 @@ static void prim_pathExists(EvalState & state, const Pos & pos, Value * * args,
}
}
+static RegisterPrimOp primop_pathExists({
+ .name = "__pathExists",
+ .args = {"path"},
+ .doc = R"(
+ Return `true` if the path *path* exists at evaluation time, and
+ `false` otherwise.
+ )",
+ .fun = prim_pathExists,
+});
/* Return the base name of the given string, i.e., everything
following the last slash. */
@@ -951,6 +1272,16 @@ static void prim_baseNameOf(EvalState & state, const Pos & pos, Value * * args,
mkString(v, baseNameOf(state.coerceToString(pos, *args[0], context, false, false)), context);
}
+static RegisterPrimOp primop_baseNameOf({
+ .name = "baseNameOf",
+ .args = {"s"},
+ .doc = R"(
+ Return the *base name* of the string *s*, that is, everything
+ following the final slash in the string. This is similar to the GNU
+ `basename` command.
+ )",
+ .fun = prim_baseNameOf,
+});
/* Return the directory of the given path, i.e., everything before the
last slash. Return either a path or a string depending on the type
@@ -962,6 +1293,16 @@ static void prim_dirOf(EvalState & state, const Pos & pos, Value * * args, Value
if (args[0]->type == tPath) mkPath(v, dir.c_str()); else mkString(v, dir, context);
}
+static RegisterPrimOp primop_dirOf({
+ .name = "dirOf",
+ .args = {"s"},
+ .doc = R"(
+ Return the directory part of the string *s*, that is, everything
+ before the final slash in the string. This is similar to the GNU
+ `dirname` command.
+ )",
+ .fun = prim_dirOf,
+});
/* Return the contents of a file as a string. */
static void prim_readFile(EvalState & state, const Pos & pos, Value * * args, Value & v)
@@ -982,6 +1323,14 @@ static void prim_readFile(EvalState & state, const Pos & pos, Value * * args, Va
mkString(v, s.c_str());
}
+static RegisterPrimOp primop_readFile({
+ .name = "__readFile",
+ .args = {"path"},
+ .doc = R"(
+ Return the contents of the file *path* as a string.
+ )",
+ .fun = prim_readFile,
+});
/* Find a file in the Nix search path. Used to implement <x> paths,
which are desugared to 'findFile __nixPath "x"'. */
@@ -1027,6 +1376,12 @@ static void prim_findFile(EvalState & state, const Pos & pos, Value * * args, Va
mkPath(v, state.checkSourcePath(state.findFile(searchPath, path, pos)).c_str());
}
+static RegisterPrimOp primop_findFile(RegisterPrimOp::Info {
+ .name = "__findFile",
+ .arity = 2,
+ .fun = prim_findFile,
+});
+
/* Return the cryptographic hash of a file in base-16. */
static void prim_hashFile(EvalState & state, const Pos & pos, Value * * args, Value & v)
{
@@ -1044,6 +1399,17 @@ static void prim_hashFile(EvalState & state, const Pos & pos, Value * * args, Va
mkString(v, hashFile(*ht, state.checkSourcePath(p)).to_string(Base16, false), context);
}
+static RegisterPrimOp primop_hashFile({
+ .name = "__hashFile",
+ .args = {"type", "p"},
+ .doc = R"(
+ Return a base-16 representation of the cryptographic hash of the
+ file at path *p*. The hash algorithm specified by *type* must be one
+ of `"md5"`, `"sha1"`, `"sha256"` or `"sha512"`.
+ )",
+ .fun = prim_hashFile,
+});
+
/* Read a directory (without . or ..) */
static void prim_readDir(EvalState & state, const Pos & pos, Value * * args, Value & v)
{
@@ -1075,6 +1441,25 @@ static void prim_readDir(EvalState & state, const Pos & pos, Value * * args, Val
v.attrs->sort();
}
+static RegisterPrimOp primop_readDir({
+ .name = "__readDir",
+ .args = {"path"},
+ .doc = R"(
+ Return the contents of the directory *path* as a set mapping
+ directory entries to the corresponding file type. For instance, if
+ directory `A` contains a regular file `B` and another directory
+ `C`, then `builtins.readDir ./A` will return the set
+
+ ```nix
+ { B = "regular"; C = "directory"; }
+ ```
+
+ The possible values for the file type are `"regular"`,
+ `"directory"`, `"symlink"` and `"unknown"`.
+ )",
+ .fun = prim_readDir,
+});
+
/*************************************************************
* Creating files
@@ -1092,6 +1477,102 @@ static void prim_toXML(EvalState & state, const Pos & pos, Value * * args, Value
mkString(v, out.str(), context);
}
+static RegisterPrimOp primop_toXML({
+ .name = "__toXML",
+ .args = {"e"},
+ .doc = R"(
+ Return a string containing an XML representation of *e*. The main
+ application for `toXML` is to communicate information with the
+ builder in a more structured format than plain environment
+ variables.
+
+ Here is an example where this is the case:
+
+ ```nix
+ { stdenv, fetchurl, libxslt, jira, uberwiki }:
+
+ stdenv.mkDerivation (rec {
+ name = "web-server";
+
+ buildInputs = [ libxslt ];
+
+ builder = builtins.toFile "builder.sh" "
+ source $stdenv/setup
+ mkdir $out
+ echo "$servlets" | xsltproc ${stylesheet} - > $out/server-conf.xml ①
+ ";
+
+ stylesheet = builtins.toFile "stylesheet.xsl" ②
+ "<?xml version='1.0' encoding='UTF-8'?>
+ <xsl:stylesheet xmlns:xsl='http://www.w3.org/1999/XSL/Transform' version='1.0'>
+ <xsl:template match='/'>
+ <Configure>
+ <xsl:for-each select='/expr/list/attrs'>
+ <Call name='addWebApplication'>
+ <Arg><xsl:value-of select=\"attr[@name = 'path']/string/@value\" /></Arg>
+ <Arg><xsl:value-of select=\"attr[@name = 'war']/path/@value\" /></Arg>
+ </Call>
+ </xsl:for-each>
+ </Configure>
+ </xsl:template>
+ </xsl:stylesheet>
+ ";
+
+ servlets = builtins.toXML [ ③
+ { path = "/bugtracker"; war = jira + "/lib/atlassian-jira.war"; }
+ { path = "/wiki"; war = uberwiki + "/uberwiki.war"; }
+ ];
+ })
+ ```
+
+ The builder is supposed to generate the configuration file for a
+ [Jetty servlet container](http://jetty.mortbay.org/). A servlet
+ container contains a number of servlets (`*.war` files) each
+ exported under a specific URI prefix. So the servlet configuration
+ is a list of sets containing the `path` and `war` of the servlet
+ (①). This kind of information is difficult to communicate with the
+ normal method of passing information through an environment
+ variable, which just concatenates everything together into a
+ string (which might just work in this case, but wouldn’t work if
+ fields are optional or contain lists themselves). Instead the Nix
+ expression is converted to an XML representation with `toXML`,
+ which is unambiguous and can easily be processed with the
+ appropriate tools. For instance, in the example an XSLT stylesheet
+ (at point ②) is applied to it (at point ①) to generate the XML
+ configuration file for the Jetty server. The XML representation
+ produced at point ③ by `toXML` is as follows:
+
+ ```xml
+ <?xml version='1.0' encoding='utf-8'?>
+ <expr>
+ <list>
+ <attrs>
+ <attr name="path">
+ <string value="/bugtracker" />
+ </attr>
+ <attr name="war">
+ <path value="/nix/store/d1jh9pasa7k2...-jira/lib/atlassian-jira.war" />
+ </attr>
+ </attrs>
+ <attrs>
+ <attr name="path">
+ <string value="/wiki" />
+ </attr>
+ <attr name="war">
+ <path value="/nix/store/y6423b1yi4sx...-uberwiki/uberwiki.war" />
+ </attr>
+ </attrs>
+ </list>
+ </expr>
+ ```
+
+ Note that we used the `toFile` built-in to write the builder and
+ the stylesheet “inline” in the Nix expression. The path of the
+ stylesheet is spliced into the builder using the syntax `xsltproc
+ ${stylesheet}`.
+ )",
+ .fun = prim_toXML,
+});
/* Convert the argument (which can be any Nix expression) to a JSON
string. Not all Nix expressions can be sensibly or completely
@@ -1104,6 +1585,19 @@ static void prim_toJSON(EvalState & state, const Pos & pos, Value * * args, Valu
mkString(v, out.str(), context);
}
+static RegisterPrimOp primop_toJSON({
+ .name = "__toJSON",
+ .args = {"e"},
+ .doc = R"(
+ Return a string containing a JSON representation of *e*. Strings,
+ integers, floats, booleans, nulls and lists are mapped to their JSON
+ equivalents. Sets (except derivations) are represented as objects.
+ Derivations are translated to a JSON string containing the
+ derivation’s output path. Paths are copied to the store and
+ represented as a JSON string of the resulting store path.
+ )",
+ .fun = prim_toJSON,
+});
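
For example (hand-evaluated, so treat the exact formatting of the output as approximate):

```nix
builtins.toJSON { x = [ 1 2 3 ]; y = null; }
# evaluates to the string "{\"x\":[1,2,3],\"y\":null}"
```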
/* Parse a JSON string to a value. */
static void prim_fromJSON(EvalState & state, const Pos & pos, Value * * args, Value & v)
@@ -1112,6 +1606,20 @@ static void prim_fromJSON(EvalState & state, const Pos & pos, Value * * args, Va
parseJSON(state, s, v);
}
+static RegisterPrimOp primop_fromJSON({
+ .name = "__fromJSON",
+ .args = {"e"},
+ .doc = R"(
+ Convert a JSON string to a Nix value. For example,
+
+ ```nix
+ builtins.fromJSON ''{"x": [1, 2, 3], "y": null}''
+ ```
+
+ returns the value `{ x = [ 1 2 3 ]; y = null; }`.
+ )",
+ .fun = prim_fromJSON,
+});
/* Store a string in the Nix store as a source file that can be used
as an input by derivations. */
@@ -1146,6 +1654,83 @@ static void prim_toFile(EvalState & state, const Pos & pos, Value * * args, Valu
mkString(v, storePath, {storePath});
}
+static RegisterPrimOp primop_toFile({
+ .name = "__toFile",
+ .args = {"name", "s"},
+ .doc = R"(
+ Store the string *s* in a file in the Nix store and return its
+ path. The file has suffix *name*. This file can be used as an
+ input to derivations. One application is to write builders
+ “inline”. For instance, the following Nix expression combines the
+ [Nix expression for GNU Hello](expression-syntax.md) and its
+ [build script](build-script.md) into one file:
+
+ ```nix
+ { stdenv, fetchurl, perl }:
+
+ stdenv.mkDerivation {
+ name = "hello-2.1.1";
+
+ builder = builtins.toFile "builder.sh" "
+ source $stdenv/setup
+
+ PATH=$perl/bin:$PATH
+
+ tar xvfz $src
+ cd hello-*
+ ./configure --prefix=$out
+ make
+ make install
+ ";
+
+ src = fetchurl {
+ url = "http://ftp.nluug.nl/pub/gnu/hello/hello-2.1.1.tar.gz";
+ sha256 = "1md7jsfd8pa45z73bz1kszpp01yw6x5ljkjk2hx7wl800any6465";
+ };
+ inherit perl;
+ }
+ ```
+
+ It is even possible for one file to refer to another, e.g.,
+
+ ```nix
+ builder = let
+ configFile = builtins.toFile "foo.conf" "
+ # This is some dummy configuration file.
+ ...
+ ";
+ in builtins.toFile "builder.sh" "
+ source $stdenv/setup
+ ...
+ cp ${configFile} $out/etc/foo.conf
+ ";
+ ```
+
+ Note that `${configFile}` is an
+ [antiquotation](language-values.md), so the result of the
+ expression `configFile`
+ (i.e., a path like `/nix/store/m7p7jfny445k...-foo.conf`) will be
+ spliced into the resulting string.
+
+ It is however *not* allowed to have files mutually referring to each
+ other, like so:
+
+ ```nix
+ let
+ foo = builtins.toFile "foo" "...${bar}...";
+ bar = builtins.toFile "bar" "...${foo}...";
+ in foo
+ ```
+
+ This is not allowed because it would cause a cyclic dependency in
+ the computation of the cryptographic hashes for `foo` and `bar`.
+
+ It is also not possible to reference the result of a derivation. If
+ you are using Nixpkgs, the `writeTextFile` function is able to do
+ that.
+ )",
+ .fun = prim_toFile,
+});
static void addPath(EvalState & state, const Pos & pos, const string & name, const Path & path_,
Value * filterFun, FileIngestionMethod method, const std::optional<Hash> expectedHash, Value & v)
@@ -1216,6 +1801,48 @@ static void prim_filterSource(EvalState & state, const Pos & pos, Value * * args
addPath(state, pos, std::string(baseNameOf(path)), path, args[0], FileIngestionMethod::Recursive, std::nullopt, v);
}
+static RegisterPrimOp primop_filterSource({
+ .name = "__filterSource",
+ .args = {"e1", "e2"},
+ .doc = R"(
+ This function allows you to copy sources into the Nix store while
+ filtering certain files. For instance, suppose that you want to use
+ the directory `source-dir` as an input to a Nix expression, e.g.
+
+ ```nix
+ stdenv.mkDerivation {
+ ...
+ src = ./source-dir;
+ }
+ ```
+
+ However, if `source-dir` is a Subversion working copy, then all
+ those annoying `.svn` subdirectories will also be copied to the
+ store. Worse, the contents of those directories may change a lot,
+ causing lots of spurious rebuilds. With `filterSource` you can
+ filter out the `.svn` directories:
+
+ ```nix
+ src = builtins.filterSource
+ (path: type: type != "directory" || baseNameOf path != ".svn")
+ ./source-dir;
+ ```
+
+ Thus, the first argument *e1* must be a predicate function that is
+ called for each regular file, directory or symlink in the source
+ tree *e2*. If the function returns `true`, the file is copied to the
+ Nix store, otherwise it is omitted. The function is called with two
+ arguments. The first is the full path of the file. The second is a
+ string that identifies the type of the file, which is either
+ `"regular"`, `"directory"`, `"symlink"` or `"unknown"` (for other
+ kinds of files such as device nodes or fifos — but note that those
+ cannot be copied to the Nix store, so if the predicate returns
+ `true` for them, the copy will fail). If you exclude a directory,
+ the entire corresponding subtree of *e2* will be excluded.
+ )",
+ .fun = prim_filterSource,
+});
+
static void prim_path(EvalState & state, const Pos & pos, Value * * args, Value & v)
{
state.forceAttrs(*args[0], pos);
@@ -1261,6 +1888,41 @@ static void prim_path(EvalState & state, const Pos & pos, Value * * args, Value
addPath(state, pos, name, path, filterFun, method, expectedHash, v);
}
+static RegisterPrimOp primop_path({
+ .name = "__path",
+ .args = {"args"},
+ .doc = R"(
+ An enrichment of the built-in path type, based on the attributes
+ present in *args*. All are optional except `path`:
+
+ - path
+ The underlying path.
+
+ - name
+      The name of the path when added to the store. This can be used to
+ reference paths that have nix-illegal characters in their names,
+ like `@`.
+
+ - filter
+ A function of the type expected by `builtins.filterSource`,
+ with the same semantics.
+
+ - recursive
+ When `false`, when `path` is added to the store it is with a
+ flat hash, rather than a hash of the NAR serialization of the
+ file. Thus, `path` must refer to a regular file, not a
+ directory. This allows similar behavior to `fetchurl`. Defaults
+ to `true`.
+
+ - sha256
+ When provided, this is the expected hash of the file at the
+ path. Evaluation will fail if the hash is incorrect, and
+ providing a hash allows `builtins.path` to be used even when the
+ `pure-eval` nix config option is on.
+ )",
+ .fun = prim_path,
+});
+
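A usage sketch combining the attributes listed above (the filter is illustrative, not taken from this patch):

```nix
builtins.path {
  path = ./source-dir;
  name = "source";
  # same filter semantics as builtins.filterSource
  filter = path: type: type != "directory" || baseNameOf path != ".git";
}
```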
/*************************************************************
* Sets
@@ -1283,6 +1945,16 @@ static void prim_attrNames(EvalState & state, const Pos & pos, Value * * args, V
[](Value * v1, Value * v2) { return strcmp(v1->string.s, v2->string.s) < 0; });
}
+static RegisterPrimOp primop_attrNames({
+ .name = "__attrNames",
+ .args = {"set"},
+ .doc = R"(
+ Return the names of the attributes in the set *set* in an
+ alphabetically sorted list. For instance, `builtins.attrNames { y
+ = 1; x = "foo"; }` evaluates to `[ "x" "y" ]`.
+ )",
+ .fun = prim_attrNames,
+});
/* Return the values of the attributes in a set as a list, in the same
order as attrNames. */
@@ -1303,6 +1975,15 @@ static void prim_attrValues(EvalState & state, const Pos & pos, Value * * args,
v.listElems()[i] = ((Attr *) v.listElems()[i])->value;
}
+static RegisterPrimOp primop_attrValues({
+ .name = "__attrValues",
+ .args = {"set"},
+ .doc = R"(
+ Return the values of the attributes in the set *set* in the order
+ corresponding to the sorted attribute names.
+ )",
+ .fun = prim_attrValues,
+});
/* Dynamic version of the `.' operator. */
void prim_getAttr(EvalState & state, const Pos & pos, Value * * args, Value & v)
@@ -1322,9 +2003,20 @@ void prim_getAttr(EvalState & state, const Pos & pos, Value * * args, Value & v)
v = *i->value;
}
+static RegisterPrimOp primop_getAttr({
+ .name = "__getAttr",
+ .args = {"s", "set"},
+ .doc = R"(
+ `getAttr` returns the attribute named *s* from *set*. Evaluation
+ aborts if the attribute doesn’t exist. This is a dynamic version of
+ the `.` operator, since *s* is an expression rather than an
+ identifier.
+ )",
+ .fun = prim_getAttr,
+});
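
For example, the following two expressions are equivalent, the first using the dynamic form:

```nix
builtins.getAttr "x" { x = 1; y = 2; }   # => 1
# ({ x = 1; y = 2; }).x                  # => 1
```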
/* Return position information of the specified attribute. */
-void prim_unsafeGetAttrPos(EvalState & state, const Pos & pos, Value * * args, Value & v)
+static void prim_unsafeGetAttrPos(EvalState & state, const Pos & pos, Value * * args, Value & v)
{
string attr = state.forceStringNoCtx(*args[0], pos);
state.forceAttrs(*args[1], pos);
@@ -1335,6 +2027,11 @@ void prim_unsafeGetAttrPos(EvalState & state, const Pos & pos, Value * * args, V
state.mkPos(v, i->pos);
}
+static RegisterPrimOp primop_unsafeGetAttrPos(RegisterPrimOp::Info {
+ .name = "__unsafeGetAttrPos",
+ .arity = 2,
+ .fun = prim_unsafeGetAttrPos,
+});
/* Dynamic version of the `?' operator. */
static void prim_hasAttr(EvalState & state, const Pos & pos, Value * * args, Value & v)
@@ -1344,6 +2041,16 @@ static void prim_hasAttr(EvalState & state, const Pos & pos, Value * * args, Val
mkBool(v, args[1]->attrs->find(state.symbols.create(attr)) != args[1]->attrs->end());
}
+static RegisterPrimOp primop_hasAttr({
+ .name = "__hasAttr",
+ .args = {"s", "set"},
+ .doc = R"(
+ `hasAttr` returns `true` if *set* has an attribute named *s*, and
+ `false` otherwise. This is a dynamic version of the `?` operator,
+ since *s* is an expression rather than an identifier.
+ )",
+ .fun = prim_hasAttr,
+});
/* Determine whether the argument is a set. */
static void prim_isAttrs(EvalState & state, const Pos & pos, Value * * args, Value & v)
@@ -1352,6 +2059,14 @@ static void prim_isAttrs(EvalState & state, const Pos & pos, Value * * args, Val
mkBool(v, args[0]->type == tAttrs);
}
+static RegisterPrimOp primop_isAttrs({
+ .name = "__isAttrs",
+ .args = {"e"},
+ .doc = R"(
+ Return `true` if *e* evaluates to a set, and `false` otherwise.
+ )",
+ .fun = prim_isAttrs,
+});
static void prim_removeAttrs(EvalState & state, const Pos & pos, Value * * args, Value & v)
{
@@ -1375,6 +2090,21 @@ static void prim_removeAttrs(EvalState & state, const Pos & pos, Value * * args,
}
}
+static RegisterPrimOp primop_removeAttrs({
+ .name = "removeAttrs",
+ .args = {"set", "list"},
+ .doc = R"(
+ Remove the attributes listed in *list* from *set*. The attributes
+ don’t have to exist in *set*. For instance,
+
+ ```nix
+ removeAttrs { x = 1; y = 2; z = 3; } [ "a" "x" "z" ]
+ ```
+
+ evaluates to `{ y = 2; }`.
+ )",
+ .fun = prim_removeAttrs,
+});
/* Builds a set from a list specifying (name, value) pairs. To be
precise, a list [{name = "name1"; value = value1;} ... {name =
@@ -1416,10 +2146,31 @@ static void prim_listToAttrs(EvalState & state, const Pos & pos, Value * * args,
v.attrs->sort();
}
+static RegisterPrimOp primop_listToAttrs({
+ .name = "__listToAttrs",
+ .args = {"e"},
+ .doc = R"(
+ Construct a set from a list specifying the names and values of each
+ attribute. Each element of the list should be a set consisting of a
+ string-valued attribute `name` specifying the name of the attribute,
+ and an attribute `value` specifying its value. Example:
+
+ ```nix
+ builtins.listToAttrs
+ [ { name = "foo"; value = 123; }
+ { name = "bar"; value = 456; }
+ ]
+ ```
+
+ evaluates to
+
+ ```nix
+ { foo = 123; bar = 456; }
+ ```
+ )",
+ .fun = prim_listToAttrs,
+});
-/* Return the right-biased intersection of two sets as1 and as2,
- i.e. a set that contains every attribute from as2 that is also a
- member of as1. */
static void prim_intersectAttrs(EvalState & state, const Pos & pos, Value * * args, Value & v)
{
state.forceAttrs(*args[0], pos);
@@ -1434,14 +2185,16 @@ static void prim_intersectAttrs(EvalState & state, const Pos & pos, Value * * ar
}
}
+static RegisterPrimOp primop_intersectAttrs({
+ .name = "__intersectAttrs",
+ .args = {"e1", "e2"},
+ .doc = R"(
+ Return a set consisting of the attributes in the set *e2* that also
+ exist in the set *e1*.
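+
+    For example,
+
+    ```nix
+    builtins.intersectAttrs { a = 1; b = 2; } { b = 3; c = 4; }
+    ```
+
+    evaluates to `{ b = 3; }`; note that the value is taken from the
+    second set.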
+ )",
+ .fun = prim_intersectAttrs,
+});
-/* Collect each attribute named `attr' from a list of attribute sets.
- Sets that don't contain the named attribute are ignored.
-
- Example:
- catAttrs "a" [{a = 1;} {b = 0;} {a = 2;}]
- => [1 2]
-*/
static void prim_catAttrs(EvalState & state, const Pos & pos, Value * * args, Value & v)
{
Symbol attrName = state.symbols.create(state.forceStringNoCtx(*args[0], pos));
@@ -1463,20 +2216,23 @@ static void prim_catAttrs(EvalState & state, const Pos & pos, Value * * args, Va
v.listElems()[n] = res[n];
}
+static RegisterPrimOp primop_catAttrs({
+ .name = "__catAttrs",
+ .args = {"attr", "list"},
+ .doc = R"(
+ Collect each attribute named *attr* from a list of attribute
+ sets. Attrsets that don't contain the named attribute are
+ ignored. For example,
-/* Return a set containing the names of the formal arguments expected
- by the function `f'. The value of each attribute is a Boolean
- denoting whether the corresponding argument has a default value. For instance,
+ ```nix
+ builtins.catAttrs "a" [{a = 1;} {b = 0;} {a = 2;}]
+ ```
- functionArgs ({ x, y ? 123}: ...)
- => { x = false; y = true; }
+ evaluates to `[1 2]`.
+ )",
+ .fun = prim_catAttrs,
+});
- "Formal argument" here refers to the attributes pattern-matched by
- the function. Plain lambdas are not included, e.g.
-
- functionArgs (x: ...)
- => { }
-*/
static void prim_functionArgs(EvalState & state, const Pos & pos, Value * * args, Value & v)
{
state.forceValue(*args[0], pos);
@@ -1501,8 +2257,24 @@ static void prim_functionArgs(EvalState & state, const Pos & pos, Value * * args
v.attrs->sort();
}
-
-/* Apply a function to every element of an attribute set. */
+static RegisterPrimOp primop_functionArgs({
+ .name = "__functionArgs",
+ .args = {"f"},
+ .doc = R"(
+ Return a set containing the names of the formal arguments expected
+ by the function *f*. The value of each attribute is a Boolean
+ denoting whether the corresponding argument has a default value. For
+ instance, `functionArgs ({ x, y ? 123}: ...) = { x = false; y =
+ true; }`.
+
+ "Formal argument" here refers to the attributes pattern-matched by
+ the function. Plain lambdas are not included, e.g. `functionArgs (x:
+ ...) = { }`.
+ )",
+ .fun = prim_functionArgs,
+});
+
+/* Apply a function to every element of an attribute set. */
static void prim_mapAttrs(EvalState & state, const Pos & pos, Value * * args, Value & v)
{
state.forceAttrs(*args[1], pos);
@@ -1518,6 +2290,20 @@ static void prim_mapAttrs(EvalState & state, const Pos & pos, Value * * args, Va
}
}
+static RegisterPrimOp primop_mapAttrs({
+ .name = "__mapAttrs",
+ .args = {"f", "attrset"},
+ .doc = R"(
+ Apply function *f* to every element of *attrset*. For example,
+
+ ```nix
+ builtins.mapAttrs (name: value: value * 10) { a = 1; b = 2; }
+ ```
+
+ evaluates to `{ a = 10; b = 20; }`.
+ )",
+ .fun = prim_mapAttrs,
+});
/*************************************************************
@@ -1532,6 +2318,14 @@ static void prim_isList(EvalState & state, const Pos & pos, Value * * args, Valu
mkBool(v, args[0]->isList());
}
+static RegisterPrimOp primop_isList({
+ .name = "__isList",
+ .args = {"e"},
+ .doc = R"(
+ Return `true` if *e* evaluates to a list, and `false` otherwise.
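+
+    For example, `builtins.isList [ 1 2 ]` evaluates to `true`, and
+    `builtins.isList "foo"` evaluates to `false`.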
+ )",
+ .fun = prim_isList,
+});
static void elemAt(EvalState & state, const Pos & pos, Value & list, int n, Value & v)
{
@@ -1545,13 +2339,21 @@ static void elemAt(EvalState & state, const Pos & pos, Value & list, int n, Valu
v = *list.listElems()[n];
}
-
/* Return the n-1'th element of a list. */
static void prim_elemAt(EvalState & state, const Pos & pos, Value * * args, Value & v)
{
elemAt(state, pos, *args[0], state.forceInt(*args[1], pos), v);
}
+static RegisterPrimOp primop_elemAt({
+ .name = "__elemAt",
+ .args = {"xs", "n"},
+ .doc = R"(
+ Return element *n* from the list *xs*. Elements are counted starting
+ from 0. A fatal error occurs if the index is out of bounds.
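+
+    For example,
+
+    ```nix
+    builtins.elemAt [ "a" "b" "c" ] 1
+    ```
+
+    evaluates to `"b"`.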
+ )",
+ .fun = prim_elemAt,
+});
/* Return the first element of a list. */
static void prim_head(EvalState & state, const Pos & pos, Value * * args, Value & v)
@@ -1559,6 +2361,16 @@ static void prim_head(EvalState & state, const Pos & pos, Value * * args, Value
elemAt(state, pos, *args[0], 0, v);
}
+static RegisterPrimOp primop_head({
+ .name = "__head",
+ .args = {"list"},
+ .doc = R"(
+ Return the first element of a list; abort evaluation if the argument
+ isn’t a list or is an empty list. You can test whether a list is
+ empty by comparing it with `[]`.
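+
+    For example, `builtins.head [ 1 2 3 ]` evaluates to `1`.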
+ )",
+ .fun = prim_head,
+});
/* Return a list consisting of everything but the first element of
a list. Warning: this function takes O(n) time, so you probably
@@ -1577,6 +2389,21 @@ static void prim_tail(EvalState & state, const Pos & pos, Value * * args, Value
v.listElems()[n] = args[0]->listElems()[n + 1];
}
+static RegisterPrimOp primop_tail({
+ .name = "__tail",
+ .args = {"list"},
+ .doc = R"(
+    Return the list without its first element; abort evaluation if
+    the argument isn’t a list or is an empty list.
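+
+    For example, `builtins.tail [ 1 2 3 ]` evaluates to `[ 2 3 ]`.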
+
+ > **Warning**
+ >
+ > This function should generally be avoided since it's inefficient:
+ > unlike Haskell's `tail`, it takes O(n) time, so recursing over a
+ > list by repeatedly calling `tail` takes O(n^2) time.
+ )",
+ .fun = prim_tail,
+});
/* Apply a function to every element of a list. */
static void prim_map(EvalState & state, const Pos & pos, Value * * args, Value & v)
@@ -1590,6 +2417,21 @@ static void prim_map(EvalState & state, const Pos & pos, Value * * args, Value &
*args[0], *args[1]->listElems()[n]);
}
+static RegisterPrimOp primop_map({
+ .name = "map",
+ .args = {"f", "list"},
+ .doc = R"(
+ Apply the function *f* to each element in the list *list*. For
+ example,
+
+ ```nix
+ map (x: "foo" + x) [ "bar" "bla" "abc" ]
+ ```
+
+ evaluates to `[ "foobar" "foobla" "fooabc" ]`.
+ )",
+ .fun = prim_map,
+});
/* Filter a list using a predicate; that is, return a list containing
every element from the list for which the predicate function
@@ -1621,6 +2463,15 @@ static void prim_filter(EvalState & state, const Pos & pos, Value * * args, Valu
}
}
+static RegisterPrimOp primop_filter({
+ .name = "__filter",
+ .args = {"f", "list"},
+ .doc = R"(
+ Return a list consisting of the elements of *list* for which the
+ function *f* returns `true`.
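+
+    For example,
+
+    ```nix
+    builtins.filter (x: x > 1) [ 1 2 3 ]
+    ```
+
+    evaluates to `[ 2 3 ]`.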
+ )",
+ .fun = prim_filter,
+});
/* Return true if a list contains a given element. */
static void prim_elem(EvalState & state, const Pos & pos, Value * * args, Value & v)
@@ -1635,6 +2486,15 @@ static void prim_elem(EvalState & state, const Pos & pos, Value * * args, Value
mkBool(v, res);
}
+static RegisterPrimOp primop_elem({
+ .name = "__elem",
+ .args = {"x", "xs"},
+ .doc = R"(
+ Return `true` if a value equal to *x* occurs in the list *xs*, and
+ `false` otherwise.
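+
+    For example, `builtins.elem 2 [ 1 2 3 ]` evaluates to `true`.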
+ )",
+ .fun = prim_elem,
+});
/* Concatenate a list of lists. */
static void prim_concatLists(EvalState & state, const Pos & pos, Value * * args, Value & v)
@@ -1643,6 +2503,14 @@ static void prim_concatLists(EvalState & state, const Pos & pos, Value * * args,
state.concatLists(v, args[0]->listSize(), args[0]->listElems(), pos);
}
+static RegisterPrimOp primop_concatLists({
+ .name = "__concatLists",
+ .args = {"lists"},
+ .doc = R"(
+ Concatenate a list of lists into a single list.
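+
+    For example,
+
+    ```nix
+    builtins.concatLists [ [ 1 2 ] [ 3 ] [ ] ]
+    ```
+
+    evaluates to `[ 1 2 3 ]`.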
+ )",
+ .fun = prim_concatLists,
+});
/* Return the length of a list. This is an O(1) time operation. */
static void prim_length(EvalState & state, const Pos & pos, Value * * args, Value & v)
@@ -1651,6 +2519,14 @@ static void prim_length(EvalState & state, const Pos & pos, Value * * args, Valu
mkInt(v, args[0]->listSize());
}
+static RegisterPrimOp primop_length({
+ .name = "__length",
+ .args = {"e"},
+ .doc = R"(
+ Return the length of the list *e*.
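+
+    For example, `builtins.length [ 1 2 3 ]` evaluates to `3`.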
+ )",
+ .fun = prim_length,
+});
/* Reduce a list by applying a binary operator, from left to
right. The operator is applied strictly. */
@@ -1675,6 +2551,18 @@ static void prim_foldlStrict(EvalState & state, const Pos & pos, Value * * args,
}
}
+static RegisterPrimOp primop_foldlStrict({
+ .name = "__foldl'",
+ .args = {"op", "nul", "list"},
+ .doc = R"(
+ Reduce a list by applying a binary operator, from left to right,
+    e.g. `foldl' op nul [x0 x1 x2 ...] = op (op (op nul x0) x1) x2
+    ...`. The operator is applied strictly, i.e., its arguments are
+    evaluated first. For example, `foldl' (x: y: x + y) 0 [1 2 3]`
+    evaluates to `6`.
+ )",
+ .fun = prim_foldlStrict,
+});
static void anyOrAll(bool any, EvalState & state, const Pos & pos, Value * * args, Value & v)
{
@@ -1700,12 +2588,30 @@ static void prim_any(EvalState & state, const Pos & pos, Value * * args, Value &
anyOrAll(true, state, pos, args, v);
}
+static RegisterPrimOp primop_any({
+ .name = "__any",
+ .args = {"pred", "list"},
+ .doc = R"(
+ Return `true` if the function *pred* returns `true` for at least one
+ element of *list*, and `false` otherwise.
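+
+    For example, `builtins.any (x: x > 2) [ 1 2 3 ]` evaluates to
+    `true`.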
+ )",
+ .fun = prim_any,
+});
static void prim_all(EvalState & state, const Pos & pos, Value * * args, Value & v)
{
anyOrAll(false, state, pos, args, v);
}
+static RegisterPrimOp primop_all({
+ .name = "__all",
+ .args = {"pred", "list"},
+ .doc = R"(
+ Return `true` if the function *pred* returns `true` for all elements
+ of *list*, and `false` otherwise.
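+
+    For example, `builtins.all (x: x > 2) [ 1 2 3 ]` evaluates to
+    `false`.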
+ )",
+ .fun = prim_all,
+});
static void prim_genList(EvalState & state, const Pos & pos, Value * * args, Value & v)
{
@@ -1726,6 +2632,21 @@ static void prim_genList(EvalState & state, const Pos & pos, Value * * args, Val
}
}
+static RegisterPrimOp primop_genList({
+ .name = "__genList",
+ .args = {"generator", "length"},
+ .doc = R"(
+    Generate a list of size *length*, with each element *i* equal to
+    the value returned by *generator* applied to *i*. For example,
+
+ ```nix
+ builtins.genList (x: x * x) 5
+ ```
+
+ returns the list `[ 0 1 4 9 16 ]`.
+ )",
+ .fun = prim_genList,
+});
static void prim_lessThan(EvalState & state, const Pos & pos, Value * * args, Value & v);
@@ -1761,6 +2682,26 @@ static void prim_sort(EvalState & state, const Pos & pos, Value * * args, Value
std::stable_sort(v.listElems(), v.listElems() + len, comparator);
}
+static RegisterPrimOp primop_sort({
+ .name = "__sort",
+ .args = {"comparator", "list"},
+ .doc = R"(
+ Return *list* in sorted order. It repeatedly calls the function
+ *comparator* with two elements. The comparator should return `true`
+ if the first element is less than the second, and `false` otherwise.
+ For example,
+
+ ```nix
+ builtins.sort builtins.lessThan [ 483 249 526 147 42 77 ]
+ ```
+
+ produces the list `[ 42 77 147 249 483 526 ]`.
+
+ This is a stable sort: it preserves the relative order of elements
+ deemed equal by the comparator.
+ )",
+ .fun = prim_sort,
+});
static void prim_partition(EvalState & state, const Pos & pos, Value * * args, Value & v)
{
@@ -1799,9 +2740,29 @@ static void prim_partition(EvalState & state, const Pos & pos, Value * * args, V
v.attrs->sort();
}
+static RegisterPrimOp primop_partition({
+ .name = "__partition",
+ .args = {"pred", "list"},
+ .doc = R"(
+ Given a predicate function *pred*, this function returns an
+ attrset containing a list named `right`, containing the elements
+ in *list* for which *pred* returned `true`, and a list named
+ `wrong`, containing the elements for which it returned
+ `false`. For example,
+
+ ```nix
+ builtins.partition (x: x > 10) [1 23 9 3 42]
+ ```
+
+ evaluates to
+
+ ```nix
+ { right = [ 23 42 ]; wrong = [ 1 9 3 ]; }
+ ```
+ )",
+ .fun = prim_partition,
+});
-/* concatMap = f: list: concatLists (map f list); */
-/* C++-version is to avoid allocating `mkApp', call `f' eagerly */
static void prim_concatMap(EvalState & state, const Pos & pos, Value * * args, Value & v)
{
state.forceFunction(*args[0], pos);
@@ -1828,6 +2789,16 @@ static void prim_concatMap(EvalState & state, const Pos & pos, Value * * args, V
}
}
+static RegisterPrimOp primop_concatMap({
+ .name = "__concatMap",
+ .args = {"f", "list"},
+ .doc = R"(
+ This function is equivalent to `builtins.concatLists (map f list)`
+ but is more efficient.
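+
+    For example, `builtins.concatMap (x: [ x x ]) [ 1 2 3 ]` evaluates
+    to `[ 1 1 2 2 3 3 ]`.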
+ )",
+ .fun = prim_concatMap,
+});
+
/*************************************************************
* Integer arithmetic
@@ -1844,6 +2815,14 @@ static void prim_add(EvalState & state, const Pos & pos, Value * * args, Value &
mkInt(v, state.forceInt(*args[0], pos) + state.forceInt(*args[1], pos));
}
+static RegisterPrimOp primop_add({
+ .name = "__add",
+ .args = {"e1", "e2"},
+ .doc = R"(
+ Return the sum of the numbers *e1* and *e2*.
+ )",
+ .fun = prim_add,
+});
static void prim_sub(EvalState & state, const Pos & pos, Value * * args, Value & v)
{
@@ -1855,6 +2834,14 @@ static void prim_sub(EvalState & state, const Pos & pos, Value * * args, Value &
mkInt(v, state.forceInt(*args[0], pos) - state.forceInt(*args[1], pos));
}
+static RegisterPrimOp primop_sub({
+ .name = "__sub",
+ .args = {"e1", "e2"},
+ .doc = R"(
+ Return the difference between the numbers *e1* and *e2*.
+ )",
+ .fun = prim_sub,
+});
static void prim_mul(EvalState & state, const Pos & pos, Value * * args, Value & v)
{
@@ -1866,6 +2853,14 @@ static void prim_mul(EvalState & state, const Pos & pos, Value * * args, Value &
mkInt(v, state.forceInt(*args[0], pos) * state.forceInt(*args[1], pos));
}
+static RegisterPrimOp primop_mul({
+ .name = "__mul",
+ .args = {"e1", "e2"},
+ .doc = R"(
+ Return the product of the numbers *e1* and *e2*.
+ )",
+ .fun = prim_mul,
+});
static void prim_div(EvalState & state, const Pos & pos, Value * * args, Value & v)
{
@@ -1895,21 +2890,57 @@ static void prim_div(EvalState & state, const Pos & pos, Value * * args, Value &
}
}
+static RegisterPrimOp primop_div({
+ .name = "__div",
+ .args = {"e1", "e2"},
+ .doc = R"(
+ Return the quotient of the numbers *e1* and *e2*.
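+
+    For example,
+
+    ```nix
+    builtins.div 7 2
+    ```
+
+    evaluates to `3`, since dividing two integers discards the
+    remainder.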
+ )",
+ .fun = prim_div,
+});
+
static void prim_bitAnd(EvalState & state, const Pos & pos, Value * * args, Value & v)
{
mkInt(v, state.forceInt(*args[0], pos) & state.forceInt(*args[1], pos));
}
+static RegisterPrimOp primop_bitAnd({
+ .name = "__bitAnd",
+ .args = {"e1", "e2"},
+ .doc = R"(
+ Return the bitwise AND of the integers *e1* and *e2*.
+ )",
+ .fun = prim_bitAnd,
+});
+
static void prim_bitOr(EvalState & state, const Pos & pos, Value * * args, Value & v)
{
mkInt(v, state.forceInt(*args[0], pos) | state.forceInt(*args[1], pos));
}
+static RegisterPrimOp primop_bitOr({
+ .name = "__bitOr",
+ .args = {"e1", "e2"},
+ .doc = R"(
+ Return the bitwise OR of the integers *e1* and *e2*.
+ )",
+ .fun = prim_bitOr,
+});
+
static void prim_bitXor(EvalState & state, const Pos & pos, Value * * args, Value & v)
{
mkInt(v, state.forceInt(*args[0], pos) ^ state.forceInt(*args[1], pos));
}
+static RegisterPrimOp primop_bitXor({
+ .name = "__bitXor",
+ .args = {"e1", "e2"},
+ .doc = R"(
+ Return the bitwise XOR of the integers *e1* and *e2*.
+ )",
+ .fun = prim_bitXor,
+});
+
static void prim_lessThan(EvalState & state, const Pos & pos, Value * * args, Value & v)
{
state.forceValue(*args[0], pos);
@@ -1918,6 +2949,17 @@ static void prim_lessThan(EvalState & state, const Pos & pos, Value * * args, Va
mkBool(v, comp(args[0], args[1]));
}
+static RegisterPrimOp primop_lessThan({
+ .name = "__lessThan",
+ .args = {"e1", "e2"},
+ .doc = R"(
+ Return `true` if the number *e1* is less than the number *e2*, and
+ `false` otherwise. Evaluation aborts if either *e1* or *e2* does not
+ evaluate to a number.
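+
+    For example, `builtins.lessThan 1 2` evaluates to `true`.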
+ )",
+ .fun = prim_lessThan,
+});
+
/*************************************************************
* String manipulation
@@ -1934,6 +2976,29 @@ static void prim_toString(EvalState & state, const Pos & pos, Value * * args, Va
mkString(v, s, context);
}
+static RegisterPrimOp primop_toString({
+ .name = "toString",
+ .args = {"e"},
+ .doc = R"(
+ Convert the expression *e* to a string. *e* can be:
+
+ - A string (in which case the string is returned unmodified).
+
+    - A path (e.g., `toString /foo/bar` yields `"/foo/bar"`).
+
+ - A set containing `{ __toString = self: ...; }`.
+
+ - An integer.
+
+ - A list, in which case the string representations of its elements
+ are joined with spaces.
+
+ - A Boolean (`false` yields `""`, `true` yields `"1"`).
+
+ - `null`, which yields the empty string.
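+
+    For example,
+
+    ```nix
+    builtins.toString [ 1 2 3 ]
+    ```
+
+    evaluates to `"1 2 3"`.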
+ )",
+ .fun = prim_toString,
+});
/* `substring start len str' returns the substring of `str' starting
at character position `min(start, stringLength str)' inclusive and
@@ -1955,6 +3020,25 @@ static void prim_substring(EvalState & state, const Pos & pos, Value * * args, V
mkString(v, (unsigned int) start >= s.size() ? "" : string(s, start, len), context);
}
+static RegisterPrimOp primop_substring({
+ .name = "__substring",
+ .args = {"start", "len", "s"},
+ .doc = R"(
+ Return the substring of *s* from character position *start*
+ (zero-based) up to but not including *start + len*. If *start* is
+ greater than the length of the string, an empty string is returned,
+ and if *start + len* lies beyond the end of the string, only the
+ substring up to the end of the string is returned. *start* must be
+ non-negative. For example,
+
+ ```nix
+ builtins.substring 0 3 "nixos"
+ ```
+
+ evaluates to `"nix"`.
+ )",
+ .fun = prim_substring,
+});
static void prim_stringLength(EvalState & state, const Pos & pos, Value * * args, Value & v)
{
@@ -1963,6 +3047,15 @@ static void prim_stringLength(EvalState & state, const Pos & pos, Value * * args
mkInt(v, s.size());
}
+static RegisterPrimOp primop_stringLength({
+ .name = "__stringLength",
+ .args = {"e"},
+ .doc = R"(
+ Return the length of the string *e*. If *e* is not a string,
+ evaluation is aborted.
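+
+    For example, `builtins.stringLength "nixos"` evaluates to `5`.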
+ )",
+ .fun = prim_stringLength,
+});
/* Return the cryptographic hash of a string in base-16. */
static void prim_hashString(EvalState & state, const Pos & pos, Value * * args, Value & v)
@@ -1981,18 +3074,36 @@ static void prim_hashString(EvalState & state, const Pos & pos, Value * * args,
mkString(v, hashString(*ht, s).to_string(Base16, false), context);
}
+static RegisterPrimOp primop_hashString({
+ .name = "__hashString",
+ .args = {"type", "s"},
+ .doc = R"(
+ Return a base-16 representation of the cryptographic hash of string
+ *s*. The hash algorithm specified by *type* must be one of `"md5"`,
+ `"sha1"`, `"sha256"` or `"sha512"`.
+ )",
+ .fun = prim_hashString,
+});
+
+struct RegexCache
+{
+ std::unordered_map<std::string, std::regex> cache;
+};
+
+std::shared_ptr<RegexCache> makeRegexCache()
+{
+ return std::make_shared<RegexCache>();
+}
-/* Match a regular expression against a string and return either
- ‘null’ or a list containing substring matches. */
void prim_match(EvalState & state, const Pos & pos, Value * * args, Value & v)
{
auto re = state.forceStringNoCtx(*args[0], pos);
try {
- auto regex = state.regexCache.find(re);
- if (regex == state.regexCache.end())
- regex = state.regexCache.emplace(re, std::regex(re, std::regex::extended)).first;
+ auto regex = state.regexCache->cache.find(re);
+ if (regex == state.regexCache->cache.end())
+ regex = state.regexCache->cache.emplace(re, std::regex(re, std::regex::extended)).first;
PathSet context;
const std::string str = state.forceString(*args[1], context, pos);
@@ -2029,6 +3140,41 @@ void prim_match(EvalState & state, const Pos & pos, Value * * args, Value & v)
}
}
+static RegisterPrimOp primop_match({
+ .name = "__match",
+ .args = {"regex", "str"},
+ .doc = R"s(
+ Returns a list if the [extended POSIX regular
+ expression](http://pubs.opengroup.org/onlinepubs/9699919799/basedefs/V1_chap09.html#tag_09_04)
+ *regex* matches *str* precisely, otherwise returns `null`. Each item
+ in the list is a regex group.
+
+ ```nix
+ builtins.match "ab" "abc"
+ ```
+
+ Evaluates to `null`.
+
+ ```nix
+ builtins.match "abc" "abc"
+ ```
+
+ Evaluates to `[ ]`.
+
+ ```nix
+ builtins.match "a(b)(c)" "abc"
+ ```
+
+ Evaluates to `[ "b" "c" ]`.
+
+ ```nix
+ builtins.match "[[:space:]]+([[:upper:]]+)[[:space:]]+" " FOO "
+ ```
+
+ Evaluates to `[ "foo" ]`.
+ )s",
+ .fun = prim_match,
+});
/* Split a string with a regular expression, and return a list of the
non-matching parts interleaved by the lists of the matching groups. */
@@ -2102,8 +3248,44 @@ static void prim_split(EvalState & state, const Pos & pos, Value * * args, Value
}
}
+static RegisterPrimOp primop_split({
+ .name = "__split",
+ .args = {"regex", "str"},
+ .doc = R"s(
+    Returns a list composed of non-matched strings interleaved with the
+ lists of the [extended POSIX regular
+ expression](http://pubs.opengroup.org/onlinepubs/9699919799/basedefs/V1_chap09.html#tag_09_04)
+ *regex* matches of *str*. Each item in the lists of matched
+ sequences is a regex group.
+
+ ```nix
+ builtins.split "(a)b" "abc"
+ ```
+
+ Evaluates to `[ "" [ "a" ] "c" ]`.
+
+ ```nix
+ builtins.split "([ac])" "abc"
+ ```
+
+ Evaluates to `[ "" [ "a" ] "b" [ "c" ] "" ]`.
-static void prim_concatStringSep(EvalState & state, const Pos & pos, Value * * args, Value & v)
+ ```nix
+ builtins.split "(a)|(c)" "abc"
+ ```
+
+ Evaluates to `[ "" [ "a" null ] "b" [ null "c" ] "" ]`.
+
+ ```nix
+ builtins.split "([[:upper:]]+)" " FOO "
+ ```
+
+ Evaluates to `[ " " [ "FOO" ] " " ]`.
+ )s",
+ .fun = prim_split,
+});
+
+static void prim_concatStringsSep(EvalState & state, const Pos & pos, Value * * args, Value & v)
{
PathSet context;
@@ -2122,6 +3304,16 @@ static void prim_concatStringSep(EvalState & state, const Pos & pos, Value * * a
mkString(v, res, context);
}
+static RegisterPrimOp primop_concatStringsSep({
+ .name = "__concatStringsSep",
+ .args = {"separator", "list"},
+ .doc = R"(
+ Concatenate a list of strings with a separator between each
+ element, e.g. `concatStringsSep "/" ["usr" "local" "bin"] ==
+ "usr/local/bin"`.
+ )",
+ .fun = prim_concatStringsSep,
+});
static void prim_replaceStrings(EvalState & state, const Pos & pos, Value * * args, Value & v)
{
@@ -2181,6 +3373,22 @@ static void prim_replaceStrings(EvalState & state, const Pos & pos, Value * * ar
mkString(v, res, context);
}
+static RegisterPrimOp primop_replaceStrings({
+ .name = "__replaceStrings",
+ .args = {"from", "to", "s"},
+ .doc = R"(
+ Given string *s*, replace every occurrence of the strings in *from*
+ with the corresponding string in *to*. For example,
+
+ ```nix
+ builtins.replaceStrings ["oo" "a"] ["a" "i"] "foobar"
+ ```
+
+ evaluates to `"fabir"`.
+ )",
+ .fun = prim_replaceStrings,
+});
+
/*************************************************************
* Versions
@@ -2197,6 +3405,19 @@ static void prim_parseDrvName(EvalState & state, const Pos & pos, Value * * args
v.attrs->sort();
}
+static RegisterPrimOp primop_parseDrvName({
+ .name = "__parseDrvName",
+ .args = {"s"},
+ .doc = R"(
+ Split the string *s* into a package name and version. The package
+ name is everything up to but not including the first dash followed
+ by a digit, and the version is everything following that dash. The
+ result is returned in a set `{ name, version }`. Thus,
+ `builtins.parseDrvName "nix-0.12pre12876"` returns `{ name =
+ "nix"; version = "0.12pre12876"; }`.
+ )",
+ .fun = prim_parseDrvName,
+});
static void prim_compareVersions(EvalState & state, const Pos & pos, Value * * args, Value & v)
{
@@ -2205,6 +3426,18 @@ static void prim_compareVersions(EvalState & state, const Pos & pos, Value * * a
mkInt(v, compareVersions(version1, version2));
}
+static RegisterPrimOp primop_compareVersions({
+ .name = "__compareVersions",
+ .args = {"s1", "s2"},
+ .doc = R"(
+ Compare two strings representing versions and return `-1` if
+ version *s1* is older than version *s2*, `0` if they are the same,
+ and `1` if *s1* is newer than *s2*. The version comparison
+ algorithm is the same as the one used by [`nix-env
+ -u`](../command-ref/nix-env.md#operation---upgrade).
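+
+    For example,
+
+    ```nix
+    builtins.compareVersions "1.0" "2.3"
+    ```
+
+    evaluates to `-1`.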
+ )",
+ .fun = prim_compareVersions,
+});
static void prim_splitVersion(EvalState & state, const Pos & pos, Value * * args, Value & v)
{
@@ -2225,6 +3458,17 @@ static void prim_splitVersion(EvalState & state, const Pos & pos, Value * * args
}
}
+static RegisterPrimOp primop_splitVersion({
+ .name = "__splitVersion",
+ .args = {"s"},
+ .doc = R"(
+ Split a string representing a version into its components, by the
+ same version splitting logic underlying the version comparison in
+ [`nix-env -u`](../command-ref/nix-env.md#operation---upgrade).
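+
+    For example,
+
+    ```nix
+    builtins.splitVersion "2.4.1"
+    ```
+
+    evaluates to `[ "2" "4" "1" ]`.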
+ )",
+ .fun = prim_splitVersion,
+});
+
/*************************************************************
* Primop registration
@@ -2238,7 +3482,20 @@ RegisterPrimOp::RegisterPrimOp(std::string name, size_t arity, PrimOpFun fun,
std::optional<std::string> requiredFeature)
{
if (!primOps) primOps = new PrimOps;
- primOps->push_back({name, arity, fun, requiredFeature});
+ primOps->push_back({
+ .name = name,
+ .args = {},
+ .arity = arity,
+ .requiredFeature = std::move(requiredFeature),
+ .fun = fun
+ });
+}
+
+
+RegisterPrimOp::RegisterPrimOp(Info && info)
+{
+ if (!primOps) primOps = new PrimOps;
+ primOps->push_back(std::move(info));
}
@@ -2262,15 +3519,6 @@ void EvalState::createBaseEnv()
mkNull(v);
addConstant("null", v);
- auto vThrow = addPrimOp("throw", 1, prim_throw);
-
- auto addPurityError = [&](const std::string & name) {
- Value * v2 = allocValue();
- mkString(*v2, fmt("'%s' is not allowed in pure evaluation mode", name));
- mkApp(v, *vThrow, *v2);
- addConstant(name, v);
- };
-
if (!evalSettings.pureEval) {
mkInt(v, time(0));
addConstant("__currentTime", v);
@@ -2295,132 +3543,16 @@ void EvalState::createBaseEnv()
addConstant("__langVersion", v);
// Miscellaneous
- auto vScopedImport = addPrimOp("scopedImport", 2, prim_scopedImport);
- Value * v2 = allocValue();
- mkAttrs(*v2, 0);
- mkApp(v, *vScopedImport, *v2);
- forceValue(v);
- addConstant("import", v);
if (evalSettings.enableNativeCode) {
addPrimOp("__importNative", 2, prim_importNative);
addPrimOp("__exec", 1, prim_exec);
}
- addPrimOp("__typeOf", 1, prim_typeOf);
- addPrimOp("isNull", 1, prim_isNull);
- addPrimOp("__isFunction", 1, prim_isFunction);
- addPrimOp("__isString", 1, prim_isString);
- addPrimOp("__isInt", 1, prim_isInt);
- addPrimOp("__isFloat", 1, prim_isFloat);
- addPrimOp("__isBool", 1, prim_isBool);
- addPrimOp("__isPath", 1, prim_isPath);
- addPrimOp("__genericClosure", 1, prim_genericClosure);
- addPrimOp("abort", 1, prim_abort);
- addPrimOp("__addErrorContext", 2, prim_addErrorContext);
- addPrimOp("__tryEval", 1, prim_tryEval);
- addPrimOp("__getEnv", 1, prim_getEnv);
-
- // Strictness
- addPrimOp("__seq", 2, prim_seq);
- addPrimOp("__deepSeq", 2, prim_deepSeq);
-
- // Debugging
- addPrimOp("__trace", 2, prim_trace);
-
- // Paths
- addPrimOp("__toPath", 1, prim_toPath);
- if (evalSettings.pureEval)
- addPurityError("__storePath");
- else
- addPrimOp("__storePath", 1, prim_storePath);
- addPrimOp("__pathExists", 1, prim_pathExists);
- addPrimOp("baseNameOf", 1, prim_baseNameOf);
- addPrimOp("dirOf", 1, prim_dirOf);
- addPrimOp("__readFile", 1, prim_readFile);
- addPrimOp("__readDir", 1, prim_readDir);
- addPrimOp("__findFile", 2, prim_findFile);
- addPrimOp("__hashFile", 2, prim_hashFile);
-
- // Creating files
- addPrimOp("__toXML", 1, prim_toXML);
- addPrimOp("__toJSON", 1, prim_toJSON);
- addPrimOp("__fromJSON", 1, prim_fromJSON);
- addPrimOp("__toFile", 2, prim_toFile);
- addPrimOp("__filterSource", 2, prim_filterSource);
- addPrimOp("__path", 1, prim_path);
-
- // Sets
- addPrimOp("__attrNames", 1, prim_attrNames);
- addPrimOp("__attrValues", 1, prim_attrValues);
- addPrimOp("__getAttr", 2, prim_getAttr);
- addPrimOp("__unsafeGetAttrPos", 2, prim_unsafeGetAttrPos);
- addPrimOp("__hasAttr", 2, prim_hasAttr);
- addPrimOp("__isAttrs", 1, prim_isAttrs);
- addPrimOp("removeAttrs", 2, prim_removeAttrs);
- addPrimOp("__listToAttrs", 1, prim_listToAttrs);
- addPrimOp("__intersectAttrs", 2, prim_intersectAttrs);
- addPrimOp("__catAttrs", 2, prim_catAttrs);
- addPrimOp("__functionArgs", 1, prim_functionArgs);
- addPrimOp("__mapAttrs", 2, prim_mapAttrs);
-
- // Lists
- addPrimOp("__isList", 1, prim_isList);
- addPrimOp("__elemAt", 2, prim_elemAt);
- addPrimOp("__head", 1, prim_head);
- addPrimOp("__tail", 1, prim_tail);
- addPrimOp("map", 2, prim_map);
- addPrimOp("__filter", 2, prim_filter);
- addPrimOp("__elem", 2, prim_elem);
- addPrimOp("__concatLists", 1, prim_concatLists);
- addPrimOp("__length", 1, prim_length);
- addPrimOp("__foldl'", 3, prim_foldlStrict);
- addPrimOp("__any", 2, prim_any);
- addPrimOp("__all", 2, prim_all);
- addPrimOp("__genList", 2, prim_genList);
- addPrimOp("__sort", 2, prim_sort);
- addPrimOp("__partition", 2, prim_partition);
- addPrimOp("__concatMap", 2, prim_concatMap);
-
- // Integer arithmetic
- addPrimOp("__add", 2, prim_add);
- addPrimOp("__sub", 2, prim_sub);
- addPrimOp("__mul", 2, prim_mul);
- addPrimOp("__div", 2, prim_div);
- addPrimOp("__bitAnd", 2, prim_bitAnd);
- addPrimOp("__bitOr", 2, prim_bitOr);
- addPrimOp("__bitXor", 2, prim_bitXor);
- addPrimOp("__lessThan", 2, prim_lessThan);
-
- // String manipulation
- addPrimOp("toString", 1, prim_toString);
- addPrimOp("__substring", 3, prim_substring);
- addPrimOp("__stringLength", 1, prim_stringLength);
- addPrimOp("__hashString", 2, prim_hashString);
- addPrimOp("__match", 2, prim_match);
- addPrimOp("__split", 2, prim_split);
- addPrimOp("__concatStringsSep", 2, prim_concatStringSep);
- addPrimOp("__replaceStrings", 3, prim_replaceStrings);
-
- // Versions
- addPrimOp("__parseDrvName", 1, prim_parseDrvName);
- addPrimOp("__compareVersions", 2, prim_compareVersions);
- addPrimOp("__splitVersion", 1, prim_splitVersion);
-
- // Derivations
- addPrimOp("derivationStrict", 1, prim_derivationStrict);
- addPrimOp("placeholder", 1, prim_placeholder);
-
- /* Add a wrapper around the derivation primop that computes the
- `drvPath' and `outPath' attributes lazily. */
- string path = canonPath(settings.nixDataDir + "/nix/corepkgs/derivation.nix", true);
- sDerivationNix = symbols.create(path);
- evalFile(path, v);
- addConstant("derivation", v);
/* Add a value containing the current Nix expression search path. */
mkList(v, searchPath.size());
int n = 0;
for (auto & i : searchPath) {
- v2 = v.listElems()[n++] = allocValue();
+ auto v2 = v.listElems()[n++] = allocValue();
mkAttrs(*v2, 2);
mkString(*allocAttr(*v2, symbols.create("path")), i.second);
mkString(*allocAttr(*v2, symbols.create("prefix")), i.first);
@@ -2431,7 +3563,21 @@ void EvalState::createBaseEnv()
if (RegisterPrimOp::primOps)
for (auto & primOp : *RegisterPrimOp::primOps)
if (!primOp.requiredFeature || settings.isExperimentalFeatureEnabled(*primOp.requiredFeature))
- addPrimOp(primOp.name, primOp.arity, primOp.primOp);
+ addPrimOp({
+ .fun = primOp.fun,
+ .arity = std::max(primOp.args.size(), primOp.arity),
+ .name = symbols.create(primOp.name),
+ .args = std::move(primOp.args),
+ .doc = primOp.doc,
+ });
+
+ /* Add a wrapper around the derivation primop that computes the
+ `drvPath' and `outPath' attributes lazily. */
+ sDerivationNix = symbols.create("//builtin/derivation.nix");
+ eval(parse(
+ #include "primops/derivation.nix.gen.hh"
+ , foFile, sDerivationNix, "/", staticBaseEnv), v);
+ addConstant("derivation", v);
/* Now that we've added all primops, sort the `builtins' set,
because attribute lookups expect it to be sorted. */
diff --git a/src/libexpr/primops.hh b/src/libexpr/primops.hh
index 75c460ecf..ed5e2ea58 100644
--- a/src/libexpr/primops.hh
+++ b/src/libexpr/primops.hh
@@ -10,9 +10,11 @@ struct RegisterPrimOp
struct Info
{
std::string name;
- size_t arity;
- PrimOpFun primOp;
+ std::vector<std::string> args;
+ size_t arity = 0;
+ const char * doc;
std::optional<std::string> requiredFeature;
+ PrimOpFun fun;
};
typedef std::vector<Info> PrimOps;
@@ -26,6 +28,8 @@ struct RegisterPrimOp
size_t arity,
PrimOpFun fun,
std::optional<std::string> requiredFeature = {});
+
+ RegisterPrimOp(Info && info);
};
/* These primops are disabled without enableNativeCode, but plugins
diff --git a/src/libexpr/primops/derivation.nix b/src/libexpr/primops/derivation.nix
new file mode 100644
index 000000000..c0fbe8082
--- /dev/null
+++ b/src/libexpr/primops/derivation.nix
@@ -0,0 +1,27 @@
+/* This is the implementation of the ‘derivation’ builtin function.
+ It's actually a wrapper around the ‘derivationStrict’ primop. */
+
+drvAttrs @ { outputs ? [ "out" ], ... }:
+
+let
+
+ strict = derivationStrict drvAttrs;
+
+ commonAttrs = drvAttrs // (builtins.listToAttrs outputsList) //
+ { all = map (x: x.value) outputsList;
+ inherit drvAttrs;
+ };
+
+ outputToAttrListElement = outputName:
+ { name = outputName;
+ value = commonAttrs // {
+ outPath = builtins.getAttr outputName strict;
+ drvPath = strict.drvPath;
+ type = "derivation";
+ inherit outputName;
+ };
+ };
+
+ outputsList = map outputToAttrListElement outputs;
+
+in (builtins.head outputsList).value
diff --git a/src/libexpr/primops/fetchMercurial.cc b/src/libexpr/primops/fetchMercurial.cc
index cef85cfef..1a064ed5c 100644
--- a/src/libexpr/primops/fetchMercurial.cc
+++ b/src/libexpr/primops/fetchMercurial.cc
@@ -3,8 +3,7 @@
#include "store-api.hh"
#include "fetchers.hh"
#include "url.hh"
-
-#include <regex>
+#include "url-parts.hh"
namespace nix {
diff --git a/src/libexpr/primops/fetchTree.cc b/src/libexpr/primops/fetchTree.cc
index 0dbf4ae1d..06e8304b8 100644
--- a/src/libexpr/primops/fetchTree.cc
+++ b/src/libexpr/primops/fetchTree.cc
@@ -226,18 +226,187 @@ static void prim_fetchurl(EvalState & state, const Pos & pos, Value * * args, Va
fetch(state, pos, args, v, "fetchurl", false, "");
}
+static RegisterPrimOp primop_fetchurl({
+ .name = "__fetchurl",
+ .args = {"url"},
+ .doc = R"(
+ Download the specified URL and return the path of the downloaded
+ file. This function is not available if [restricted evaluation
+ mode](../command-ref/conf-file.md) is enabled.
+ )",
+ .fun = prim_fetchurl,
+});
+
static void prim_fetchTarball(EvalState & state, const Pos & pos, Value * * args, Value & v)
{
fetch(state, pos, args, v, "fetchTarball", true, "source");
}
+static RegisterPrimOp primop_fetchTarball({
+ .name = "fetchTarball",
+ .args = {"args"},
+ .doc = R"(
+ Download the specified URL, unpack it and return the path of the
+ unpacked tree. The file must be a tape archive (`.tar`) compressed
+ with `gzip`, `bzip2` or `xz`. The top-level path component of the
+ files in the tarball is removed, so it is best if the tarball
+ contains a single directory at top level. The typical use of the
+ function is to obtain external Nix expression dependencies, such as
+ a particular version of Nixpkgs, e.g.
+
+ ```nix
+ with import (fetchTarball https://github.com/NixOS/nixpkgs/archive/nixos-14.12.tar.gz) {};
+
+ stdenv.mkDerivation { … }
+ ```
+
+ The fetched tarball is cached for a certain amount of time (1 hour
+ by default) in `~/.cache/nix/tarballs/`. You can change the cache
+    timeout either on the command line with `--option tarball-ttl
+    <number of seconds>` or by setting the `tarball-ttl` option in the
+    Nix configuration file to the number of seconds to cache.
+
+    Note that when obtaining the hash with `nix-prefetch-url` the
+ option `--unpack` is required.
+
+ This function can also verify the contents against a hash. In that
+ case, the function takes a set instead of a URL. The set requires
+ the attribute `url` and the attribute `sha256`, e.g.
+
+ ```nix
+ with import (fetchTarball {
+ url = "https://github.com/NixOS/nixpkgs/archive/nixos-14.12.tar.gz";
+ sha256 = "1jppksrfvbk5ypiqdz4cddxdl8z6zyzdb2srq8fcffr327ld5jj2";
+ }) {};
+
+ stdenv.mkDerivation { … }
+ ```
+
+ This function is not available if [restricted evaluation
+ mode](../command-ref/conf-file.md) is enabled.
+ )",
+ .fun = prim_fetchTarball,
+});
+
static void prim_fetchGit(EvalState &state, const Pos &pos, Value **args, Value &v)
{
fetchTree(state, pos, args, v, "git", true);
}
-static RegisterPrimOp r2("__fetchurl", 1, prim_fetchurl);
-static RegisterPrimOp r3("fetchTarball", 1, prim_fetchTarball);
-static RegisterPrimOp r4("fetchGit", 1, prim_fetchGit);
+static RegisterPrimOp primop_fetchGit({
+ .name = "fetchGit",
+ .args = {"args"},
+ .doc = R"(
+ Fetch a path from git. *args* can be a URL, in which case the HEAD
+    of the repo at that URL is fetched. Otherwise, it can be an
+    attribute set with the following attributes (all except `url`
+    optional):
+
+    - `url`
+      The URL of the repo.
+
+    - `name`
+      The name of the directory the repo should be exported to in the
+      store. Defaults to the basename of the URL.
+
+    - `rev`
+      The git revision to fetch. Defaults to the tip of `ref`.
+
+    - `ref`
+      The git ref to look for the requested revision under. This is
+      often a branch or tag name. Defaults to `HEAD`.
+
+      By default, the `ref` value is prefixed with `refs/heads/`. As of
+      Nix 2.3.0, Nix will not prefix `refs/heads/` if `ref` starts
+      with `refs/`.
+
+    - `submodules`
+      A Boolean parameter that specifies whether submodules should be
+      checked out. Defaults to `false`.
+
+ Here are some examples of how to use `fetchGit`.
+
+ - To fetch a private repository over SSH:
+
+ ```nix
+ builtins.fetchGit {
+ url = "git@github.com:my-secret/repository.git";
+ ref = "master";
+ rev = "adab8b916a45068c044658c4158d81878f9ed1c3";
+ }
+ ```
+
+ - To fetch an arbitrary reference:
+
+ ```nix
+ builtins.fetchGit {
+ url = "https://github.com/NixOS/nix.git";
+ ref = "refs/heads/0.5-release";
+ }
+ ```
+
+ - If the revision you're looking for is in the default branch of
+ the git repository you don't strictly need to specify the branch
+ name in the `ref` attribute.
+
+      However, if the revision you're looking for is on a non-default
+      branch, you will need to specify the `ref` attribute as well.
+
+ ```nix
+ builtins.fetchGit {
+ url = "https://github.com/nixos/nix.git";
+ rev = "841fcbd04755c7a2865c51c1e2d3b045976b7452";
+ ref = "1.11-maintenance";
+ }
+ ```
+
+ > **Note**
+ >
+    > It is good practice to always specify the branch to which a
+    > revision belongs. Without the branch being specified, the fetcher
+    > might fail if the default branch changes. Additionally, it can
+    > be confusing to try a commit from a non-default branch and see
+    > the fetch fail. If the branch is specified, the cause of the
+    > failure is much more obvious.
+
+ - If the revision you're looking for is in the default branch of
+ the git repository you may omit the `ref` attribute.
+
+ ```nix
+ builtins.fetchGit {
+ url = "https://github.com/nixos/nix.git";
+ rev = "841fcbd04755c7a2865c51c1e2d3b045976b7452";
+ }
+ ```
+
+ - To fetch a specific tag:
+
+ ```nix
+ builtins.fetchGit {
+ url = "https://github.com/nixos/nix.git";
+ ref = "refs/tags/1.9";
+ }
+ ```
+
+ - To fetch the latest version of a remote branch:
+
+ ```nix
+ builtins.fetchGit {
+ url = "ssh://git@github.com/nixos/nix.git";
+ ref = "master";
+ }
+ ```
+
+ > **Note**
+ >
+ > Nix will refetch the branch in accordance with
+ > the option `tarball-ttl`.
+
+ > **Note**
+ >
+ > This behavior is disabled in *Pure evaluation mode*.
+ )",
+ .fun = prim_fetchGit,
+});
}
diff --git a/src/libfetchers/fetchers.hh b/src/libfetchers/fetchers.hh
index be71b786b..89b1e6e7d 100644
--- a/src/libfetchers/fetchers.hh
+++ b/src/libfetchers/fetchers.hh
@@ -23,7 +23,7 @@ struct InputScheme;
struct Input
{
- friend class InputScheme;
+ friend struct InputScheme;
std::shared_ptr<InputScheme> scheme; // note: can be null
Attrs attrs;
diff --git a/src/libfetchers/git.cc b/src/libfetchers/git.cc
index 5ca0f8521..ad7638d73 100644
--- a/src/libfetchers/git.cc
+++ b/src/libfetchers/git.cc
@@ -3,6 +3,7 @@
#include "globals.hh"
#include "tarfile.hh"
#include "store-api.hh"
+#include "url-parts.hh"
#include <sys/time.h>
diff --git a/src/libfetchers/github.cc b/src/libfetchers/github.cc
index 9f84ffb68..1737658a7 100644
--- a/src/libfetchers/github.cc
+++ b/src/libfetchers/github.cc
@@ -3,14 +3,15 @@
#include "fetchers.hh"
#include "globals.hh"
#include "store-api.hh"
+#include "url-parts.hh"
#include <nlohmann/json.hpp>
namespace nix::fetchers {
-// A github or gitlab url
-const static std::string urlRegexS = "[a-zA-Z0-9.]*"; // FIXME: check
-std::regex urlRegex(urlRegexS, std::regex::ECMAScript);
+// A github or gitlab host
+const static std::string hostRegexS = "[a-zA-Z0-9.]*"; // FIXME: check
+std::regex hostRegex(hostRegexS, std::regex::ECMAScript);
struct GitArchiveInputScheme : InputScheme
{
@@ -50,9 +51,9 @@ struct GitArchiveInputScheme : InputScheme
throw BadURL("URL '%s' contains multiple branch/tag names", url.url);
ref = value;
}
- else if (name == "url") {
- if (!std::regex_match(value, urlRegex))
- throw BadURL("URL '%s' contains an invalid instance url", url.url);
+ else if (name == "host") {
+ if (!std::regex_match(value, hostRegex))
+ throw BadURL("URL '%s' contains an invalid instance host", url.url);
host_url = value;
}
// FIXME: barf on unsupported attributes
@@ -67,7 +68,7 @@ struct GitArchiveInputScheme : InputScheme
input.attrs.insert_or_assign("repo", path[1]);
if (rev) input.attrs.insert_or_assign("rev", rev->gitRev());
if (ref) input.attrs.insert_or_assign("ref", *ref);
- if (host_url) input.attrs.insert_or_assign("url", *host_url);
+ if (host_url) input.attrs.insert_or_assign("host", *host_url);
return input;
}
@@ -77,7 +78,7 @@ struct GitArchiveInputScheme : InputScheme
if (maybeGetStrAttr(attrs, "type") != type()) return {};
for (auto & [name, value] : attrs)
- if (name != "type" && name != "owner" && name != "repo" && name != "ref" && name != "rev" && name != "narHash" && name != "lastModified")
+ if (name != "type" && name != "owner" && name != "repo" && name != "ref" && name != "rev" && name != "narHash" && name != "lastModified" && name != "host")
throw Error("unsupported input attribute '%s'", name);
getStrAttr(attrs, "owner");
@@ -182,11 +183,21 @@ struct GitHubInputScheme : GitArchiveInputScheme
{
std::string type() override { return "github"; }
+ void addAccessToken(std::string & url) const
+ {
+ std::string accessToken = settings.githubAccessToken.get();
+ if (accessToken != "")
+ url += "?access_token=" + accessToken;
+ }
+
Hash getRevFromRef(nix::ref<Store> store, const Input & input) const override
{
auto host_url = maybeGetStrAttr(input.attrs, "url").value_or("github.com");
auto url = fmt("https://api.%s/repos/%s/%s/commits/%s", // FIXME: check
host_url, getStrAttr(input.attrs, "owner"), getStrAttr(input.attrs, "repo"), *input.getRef());
+
+ addAccessToken(url);
+
auto json = nlohmann::json::parse(
readFile(
store->toRealPath(
@@ -200,14 +211,12 @@ struct GitHubInputScheme : GitArchiveInputScheme
{
// FIXME: use regular /archive URLs instead? api.github.com
// might have stricter rate limits.
- auto host_url = maybeGetStrAttr(input.attrs, "url").value_or("github.com");
+ auto host_url = maybeGetStrAttr(input.attrs, "host").value_or("github.com");
auto url = fmt("https://api.%s/repos/%s/%s/tarball/%s", // FIXME: check if this is correct for self hosted instances
host_url, getStrAttr(input.attrs, "owner"), getStrAttr(input.attrs, "repo"),
input.getRev()->to_string(Base16, false));
- std::string accessToken = settings.githubAccessToken.get();
- if (accessToken != "")
- url += "?access_token=" + accessToken;
+ addAccessToken(url);
return url;
}
@@ -228,7 +237,7 @@ struct GitLabInputScheme : GitArchiveInputScheme
Hash getRevFromRef(nix::ref<Store> store, const Input & input) const override
{
- auto host_url = maybeGetStrAttr(input.attrs, "url").value_or("gitlab.com");
+ auto host_url = maybeGetStrAttr(input.attrs, "host").value_or("gitlab.com");
auto url = fmt("https://%s/api/v4/projects/%s%%2F%s/repository/commits?ref_name=%s",
host_url, getStrAttr(input.attrs, "owner"), getStrAttr(input.attrs, "repo"), *input.getRef());
auto json = nlohmann::json::parse(
diff --git a/src/libfetchers/indirect.cc b/src/libfetchers/indirect.cc
index b981d4d8e..74332ae3d 100644
--- a/src/libfetchers/indirect.cc
+++ b/src/libfetchers/indirect.cc
@@ -1,4 +1,5 @@
#include "fetchers.hh"
+#include "url-parts.hh"
namespace nix::fetchers {
diff --git a/src/libfetchers/mercurial.cc b/src/libfetchers/mercurial.cc
index 3e76ffc4d..d80c2ea7a 100644
--- a/src/libfetchers/mercurial.cc
+++ b/src/libfetchers/mercurial.cc
@@ -3,6 +3,7 @@
#include "globals.hh"
#include "tarfile.hh"
#include "store-api.hh"
+#include "url-parts.hh"
#include <sys/time.h>
diff --git a/src/libfetchers/registry.cc b/src/libfetchers/registry.cc
index d4134ce29..4367ee810 100644
--- a/src/libfetchers/registry.cc
+++ b/src/libfetchers/registry.cc
@@ -147,7 +147,7 @@ static std::shared_ptr<Registry> getGlobalRegistry(ref<Store> store)
if (!hasPrefix(path, "/")) {
auto storePath = downloadFile(store, path, "flake-registry.json", false).storePath;
if (auto store2 = store.dynamic_pointer_cast<LocalFSStore>())
- store2->addPermRoot(storePath, getCacheDir() + "/nix/flake-registry.json", true);
+ store2->addPermRoot(storePath, getCacheDir() + "/nix/flake-registry.json");
path = store->toRealPath(storePath);
}
diff --git a/src/libmain/common-args.cc b/src/libmain/common-args.cc
index 09f4cd133..3411e2d7a 100644
--- a/src/libmain/common-args.cc
+++ b/src/libmain/common-args.cc
@@ -28,7 +28,7 @@ MixCommonArgs::MixCommonArgs(const string & programName)
addFlag({
.longName = "option",
- .description = "set a Nix configuration option (overriding nix.conf)",
+ .description = "set a Nix configuration option (overriding `nix.conf`)",
.labels = {"name", "value"},
.handler = {[](std::string name, std::string value) {
try {
@@ -51,8 +51,8 @@ MixCommonArgs::MixCommonArgs(const string & programName)
addFlag({
.longName = "log-format",
- .description = "format of log output; \"raw\", \"internal-json\", \"bar\" "
- "or \"bar-with-logs\"",
+ .description = "format of log output; `raw`, `internal-json`, `bar` "
+ "or `bar-with-logs`",
.labels = {"format"},
.handler = {[](std::string format) { setLogFormat(format); }},
});
diff --git a/src/libmain/shared.cc b/src/libmain/shared.cc
index 2b1f25ca3..22ae51e47 100644
--- a/src/libmain/shared.cc
+++ b/src/libmain/shared.cc
@@ -277,6 +277,8 @@ void printVersion(const string & programName)
#if HAVE_SODIUM
cfg.push_back("signed-caches");
#endif
+ std::cout << "System type: " << settings.thisSystem << "\n";
+ std::cout << "Additional system types: " << concatStringsSep(", ", settings.extraPlatforms.get()) << "\n";
std::cout << "Features: " << concatStringsSep(", ", cfg) << "\n";
std::cout << "System configuration file: " << settings.nixConfDir + "/nix.conf" << "\n";
std::cout << "User configuration files: " <<
diff --git a/src/libstore/binary-cache-store.cc b/src/libstore/binary-cache-store.cc
index 5433fe50d..ebc0bd6a4 100644
--- a/src/libstore/binary-cache-store.cc
+++ b/src/libstore/binary-cache-store.cc
@@ -11,6 +11,7 @@
#include "nar-accessor.hh"
#include "json.hh"
#include "thread-pool.hh"
+#include "callback.hh"
#include <chrono>
#include <future>
@@ -22,7 +23,8 @@
namespace nix {
BinaryCacheStore::BinaryCacheStore(const Params & params)
- : Store(params)
+ : BinaryCacheStoreConfig(params)
+ , Store(params)
{
if (secretKeyFile != "")
secretKey = std::unique_ptr<SecretKey>(new SecretKey(readFile(secretKeyFile)));
diff --git a/src/libstore/binary-cache-store.hh b/src/libstore/binary-cache-store.hh
index 9bcdf5901..4b779cdd4 100644
--- a/src/libstore/binary-cache-store.hh
+++ b/src/libstore/binary-cache-store.hh
@@ -11,17 +11,21 @@ namespace nix {
struct NarInfo;
-class BinaryCacheStore : public Store
+struct BinaryCacheStoreConfig : virtual StoreConfig
{
-public:
-
- const Setting<std::string> compression{this, "xz", "compression", "NAR compression method ('xz', 'bzip2', or 'none')"};
- const Setting<bool> writeNARListing{this, false, "write-nar-listing", "whether to write a JSON file listing the files in each NAR"};
- const Setting<bool> writeDebugInfo{this, false, "index-debug-info", "whether to index DWARF debug info files by build ID"};
- const Setting<Path> secretKeyFile{this, "", "secret-key", "path to secret key used to sign the binary cache"};
- const Setting<Path> localNarCache{this, "", "local-nar-cache", "path to a local cache of NARs"};
- const Setting<bool> parallelCompression{this, false, "parallel-compression",
+ using StoreConfig::StoreConfig;
+
+ const Setting<std::string> compression{(StoreConfig*) this, "xz", "compression", "NAR compression method ('xz', 'bzip2', or 'none')"};
+ const Setting<bool> writeNARListing{(StoreConfig*) this, false, "write-nar-listing", "whether to write a JSON file listing the files in each NAR"};
+ const Setting<bool> writeDebugInfo{(StoreConfig*) this, false, "index-debug-info", "whether to index DWARF debug info files by build ID"};
+ const Setting<Path> secretKeyFile{(StoreConfig*) this, "", "secret-key", "path to secret key used to sign the binary cache"};
+ const Setting<Path> localNarCache{(StoreConfig*) this, "", "local-nar-cache", "path to a local cache of NARs"};
+ const Setting<bool> parallelCompression{(StoreConfig*) this, false, "parallel-compression",
"enable multi-threading compression, available for xz only currently"};
+};
+
+class BinaryCacheStore : public Store, public virtual BinaryCacheStoreConfig
+{
private:
@@ -58,7 +62,7 @@ public:
public:
- virtual void init();
+ virtual void init() override;
private:
diff --git a/src/libstore/build.cc b/src/libstore/build.cc
index afb2bb096..0b51d90ea 100644
--- a/src/libstore/build.cc
+++ b/src/libstore/build.cc
@@ -16,6 +16,8 @@
#include "machines.hh"
#include "daemon.hh"
#include "worker-protocol.hh"
+#include "topo-sort.hh"
+#include "callback.hh"
#include <algorithm>
#include <iostream>
@@ -717,6 +719,33 @@ typedef enum {rpAccept, rpDecline, rpPostpone} HookReply;
class SubstitutionGoal;
+/* Unless we are repairing, we don't bother to test validity and just assume it,
+ so the choices are `Absent` or `Valid`. */
+enum struct PathStatus {
+ Corrupt,
+ Absent,
+ Valid,
+};
+
+struct InitialOutputStatus {
+ StorePath path;
+ PathStatus status;
+ /* Valid in the store, and additionally non-corrupt if we are repairing */
+ bool isValid() const {
+ return status == PathStatus::Valid;
+ }
+ /* Merely present, allowed to be corrupt */
+ bool isPresent() const {
+ return status == PathStatus::Corrupt
+ || status == PathStatus::Valid;
+ }
+};
+
+struct InitialOutput {
+ bool wanted;
+ std::optional<InitialOutputStatus> known;
+};
+
class DerivationGoal : public Goal
{
private:
@@ -744,19 +773,14 @@ private:
/* The remainder is state held during the build. */
- /* Locks on the output paths. */
+ /* Locks on (fixed) output paths. */
PathLocks outputLocks;
/* All input paths (that is, the union of FS closures of the
immediate input paths). */
StorePathSet inputPaths;
- /* Outputs that are already valid. If we're repairing, these are
- the outputs that are valid *and* not corrupt. */
- StorePathSet validPaths;
-
- /* Outputs that are corrupt or not valid. */
- StorePathSet missingPaths;
+ std::map<std::string, InitialOutput> initialOutputs;
/* User selected for running the builder. */
std::unique_ptr<UserLock> buildUser;
@@ -839,6 +863,31 @@ private:
typedef map<StorePath, StorePath> RedirectedOutputs;
RedirectedOutputs redirectedOutputs;
+    /* The output paths used during the build.
+
+ - Input-addressed derivations or fixed content-addressed outputs are
+ sometimes built when some of their outputs already exist, and can not
+ be hidden via sandboxing. We use temporary locations instead and
+ rewrite after the build. Otherwise the regular predetermined paths are
+ put here.
+
+ - Floating content-addressed derivations do not know their final build
+ output paths until the outputs are hashed, so random locations are
+ used, and then renamed. The randomness helps guard against hidden
+ self-references.
+ */
+ OutputPathMap scratchOutputs;
+
+ /* The final output paths of the build.
+
+ - For input-addressed derivations, always the precomputed paths
+
+     - For content-addressed derivations, calculated from whatever the hash
+       ends up being. (Note that fixed-output derivations that produce the
+       "wrong" output still install that data under its true content-address.)
+ */
+ OutputPathMap finalOutputs;
+
BuildMode buildMode;
/* If we're repairing without a chroot, there may be outputs that
@@ -937,7 +986,8 @@ private:
void getDerivation();
void loadDerivation();
void haveDerivation();
- void outputsSubstituted();
+ void outputsSubstitutionTried();
+ void gaveUpOnSubstitution();
void closureRepaired();
void inputsRealised();
void tryToBuild();
@@ -998,13 +1048,27 @@ private:
void handleEOF(int fd) override;
void flushLine();
+ /* Wrappers around the corresponding Store methods that first consult the
+ derivation. This is currently needed because when there is no drv file
+ there also is no DB entry. */
+ std::map<std::string, std::optional<StorePath>> queryPartialDerivationOutputMap();
+ OutputPathMap queryDerivationOutputMap();
+
/* Return the set of (in)valid paths. */
- StorePathSet checkPathValidity(bool returnValid, bool checkHash);
+ void checkPathValidity();
/* Forcibly kill the child process, if any. */
void killChild();
- void addHashRewrite(const StorePath & path);
+    /* Create an alternative path calculated from, but distinct from, the
+       input, so we can avoid overwriting outputs (or other store paths)
+       that already exist. */
+    StorePath makeFallbackPath(const StorePath & path);
+    /* Make an alternative path based on the output name along with the
+       derivation hash. */
+ /* FIXME add option to randomize, so we can audit whether our
+ rewrites caught everything */
+ StorePath makeFallbackPath(std::string_view outputName);
void repairClosure();
@@ -1047,7 +1111,7 @@ DerivationGoal::DerivationGoal(const StorePath & drvPath, const BasicDerivation
{
this->drv = std::make_unique<BasicDerivation>(BasicDerivation(drv));
state = &DerivationGoal::haveDerivation;
- name = fmt("building of %s", worker.store.showPaths(drv.outputPaths(worker.store)));
+ name = fmt("building of %s", StorePathWithOutputs { drvPath, drv.outputNames() }.to_string(worker.store));
trace("created");
mcExpectedBuilds = std::make_unique<MaintainCount<uint64_t>>(worker.expectedBuilds);
@@ -1179,43 +1243,60 @@ void DerivationGoal::haveDerivation()
{
trace("have derivation");
+ if (drv->type() == DerivationType::CAFloating)
+ settings.requireExperimentalFeature("ca-derivations");
+
retrySubstitution = false;
- for (auto & i : drv->outputsAndPaths(worker.store))
- worker.store.addTempRoot(i.second.second);
+ for (auto & i : drv->outputsAndOptPaths(worker.store))
+ if (i.second.second)
+ worker.store.addTempRoot(*i.second.second);
/* Check what outputs paths are not already valid. */
- auto invalidOutputs = checkPathValidity(false, buildMode == bmRepair);
+ checkPathValidity();
+ bool allValid = true;
+ for (auto & [_, status] : initialOutputs) {
+ if (!status.wanted) continue;
+ if (!status.known || !status.known->isValid()) {
+ allValid = false;
+ break;
+ }
+ }
/* If they are all valid, then we're done. */
- if (invalidOutputs.size() == 0 && buildMode == bmNormal) {
+ if (allValid && buildMode == bmNormal) {
done(BuildResult::AlreadyValid);
return;
}
parsedDrv = std::make_unique<ParsedDerivation>(drvPath, *drv);
- if (drv->type() == DerivationType::CAFloating) {
- settings.requireExperimentalFeature("ca-derivations");
- throw UnimplementedError("ca-derivations isn't implemented yet");
- }
-
/* We are first going to try to create the invalid output paths
through substitutes. If that doesn't work, we'll build
them. */
if (settings.useSubstitutes && parsedDrv->substitutesAllowed())
- for (auto & i : invalidOutputs)
- addWaitee(worker.makeSubstitutionGoal(i, buildMode == bmRepair ? Repair : NoRepair, getDerivationCA(*drv)));
+ for (auto & [_, status] : initialOutputs) {
+ if (!status.wanted) continue;
+ if (!status.known) {
+ warn("do not know how to query for unknown floating content-addressed derivation output yet");
+ /* Nothing to wait for; tail call */
+ return DerivationGoal::gaveUpOnSubstitution();
+ }
+ addWaitee(worker.makeSubstitutionGoal(
+ status.known->path,
+ buildMode == bmRepair ? Repair : NoRepair,
+ getDerivationCA(*drv)));
+ }
if (waitees.empty()) /* to prevent hang (no wake-up event) */
- outputsSubstituted();
+ outputsSubstitutionTried();
else
- state = &DerivationGoal::outputsSubstituted;
+ state = &DerivationGoal::outputsSubstitutionTried;
}
-void DerivationGoal::outputsSubstituted()
+void DerivationGoal::outputsSubstitutionTried()
{
trace("all outputs substituted (maybe)");
@@ -1239,7 +1320,14 @@ void DerivationGoal::outputsSubstituted()
return;
}
- auto nrInvalid = checkPathValidity(false, buildMode == bmRepair).size();
+ checkPathValidity();
+ size_t nrInvalid = 0;
+ for (auto & [_, status] : initialOutputs) {
+ if (!status.wanted) continue;
+ if (!status.known || !status.known->isValid())
+ nrInvalid++;
+ }
+
if (buildMode == bmNormal && nrInvalid == 0) {
done(BuildResult::Substituted);
return;
@@ -1252,9 +1340,14 @@ void DerivationGoal::outputsSubstituted()
throw Error("some outputs of '%s' are not valid, so checking is not possible",
worker.store.printStorePath(drvPath));
- /* Otherwise, at least one of the output paths could not be
- produced using a substitute. So we have to build instead. */
+ /* Nothing to wait for; tail call */
+ gaveUpOnSubstitution();
+}
+/* At least one of the output paths could not be
+ produced using a substitute. So we have to build instead. */
+void DerivationGoal::gaveUpOnSubstitution()
+{
/* Make sure checkPathValidity() from now on checks all
outputs. */
wantedOutputs.clear();
@@ -1287,15 +1380,16 @@ void DerivationGoal::repairClosure()
that produced those outputs. */
/* Get the output closure. */
+ auto outputs = queryDerivationOutputMap();
StorePathSet outputClosure;
- for (auto & i : drv->outputsAndPaths(worker.store)) {
+ for (auto & i : outputs) {
if (!wantOutput(i.first, wantedOutputs)) continue;
- worker.store.computeFSClosure(i.second.second, outputClosure);
+ worker.store.computeFSClosure(i.second, outputClosure);
}
/* Filter out our own outputs (which we have already checked). */
- for (auto & i : drv->outputsAndPaths(worker.store))
- outputClosure.erase(i.second.second);
+ for (auto & i : outputs)
+ outputClosure.erase(i.second);
/* Get all dependencies of this derivation so that we know which
derivation is responsible for which path in the output
@@ -1305,9 +1399,10 @@ void DerivationGoal::repairClosure()
std::map<StorePath, StorePath> outputsToDrv;
for (auto & i : inputClosure)
if (i.isDerivation()) {
- Derivation drv = worker.store.derivationFromPath(i);
- for (auto & j : drv.outputsAndPaths(worker.store))
- outputsToDrv.insert_or_assign(j.second.second, i);
+ auto depOutputs = worker.store.queryPartialDerivationOutputMap(i);
+ for (auto & j : depOutputs)
+ if (j.second)
+ outputsToDrv.insert_or_assign(*j.second, i);
}
/* Check each path (slow!). */
@@ -1370,20 +1465,24 @@ void DerivationGoal::inputsRealised()
/* First, the input derivations. */
if (useDerivation)
- for (auto & i : dynamic_cast<Derivation *>(drv.get())->inputDrvs) {
+ for (auto & [depDrvPath, wantedDepOutputs] : dynamic_cast<Derivation *>(drv.get())->inputDrvs) {
/* Add the relevant output closures of the input derivation
`i' as input paths. Only add the closures of output paths
that are specified as inputs. */
- assert(worker.store.isValidPath(i.first));
- Derivation inDrv = worker.store.derivationFromPath(i.first);
- for (auto & j : i.second) {
- auto k = inDrv.outputs.find(j);
- if (k != inDrv.outputs.end())
- worker.store.computeFSClosure(k->second.path(worker.store, inDrv.name), inputPaths);
- else
+ assert(worker.store.isValidPath(drvPath));
+ auto outputs = worker.store.queryPartialDerivationOutputMap(depDrvPath);
+ for (auto & j : wantedDepOutputs) {
+ if (outputs.count(j) > 0) {
+ auto optRealizedInput = outputs.at(j);
+ if (!optRealizedInput)
+ throw Error(
+ "derivation '%s' requires output '%s' from input derivation '%s', which is supposedly realized already, yet we still don't know what path corresponds to that output",
+ worker.store.printStorePath(drvPath), j, worker.store.printStorePath(depDrvPath));
+ worker.store.computeFSClosure(*optRealizedInput, inputPaths);
+ } else
throw Error(
"derivation '%s' requires non-existent output '%s' from input derivation '%s'",
- worker.store.printStorePath(drvPath), j, worker.store.printStorePath(i.first));
+ worker.store.printStorePath(drvPath), j, worker.store.printStorePath(depDrvPath));
}
}
@@ -1426,14 +1525,18 @@ void DerivationGoal::tryToBuild()
{
trace("trying to build");
- /* Obtain locks on all output paths. The locks are automatically
- released when we exit this function or Nix crashes. If we
- can't acquire the lock, then continue; hopefully some other
- goal can start a build, and if not, the main loop will sleep a
- few seconds and then retry this goal. */
+ /* Obtain locks on all output paths, if the paths are known a priori.
+
+ The locks are automatically released when we exit this function or Nix
+ crashes. If we can't acquire the lock, then continue; hopefully some
+ other goal can start a build, and if not, the main loop will sleep a few
+ seconds and then retry this goal. */
PathSet lockFiles;
- for (auto & outPath : drv->outputPaths(worker.store))
- lockFiles.insert(worker.store.Store::toRealPath(outPath));
+ /* FIXME: Should lock something like the drv itself so we don't build the
+ same CA drv concurrently */
+ for (auto & i : drv->outputsAndOptPaths(worker.store))
+ if (i.second.second)
+ lockFiles.insert(worker.store.Store::toRealPath(*i.second.second));
if (!outputLocks.lockPaths(lockFiles, "", false)) {
if (!actLock)
@@ -1452,24 +1555,29 @@ void DerivationGoal::tryToBuild()
omitted, but that would be less efficient.) Note that since we
now hold the locks on the output paths, no other process can
build this derivation, so no further checks are necessary. */
- validPaths = checkPathValidity(true, buildMode == bmRepair);
- if (buildMode != bmCheck && validPaths.size() == drv->outputs.size()) {
+ checkPathValidity();
+ bool allValid = true;
+ for (auto & [_, status] : initialOutputs) {
+ if (!status.wanted) continue;
+ if (!status.known || !status.known->isValid()) {
+ allValid = false;
+ break;
+ }
+ }
+ if (buildMode != bmCheck && allValid) {
debug("skipping build of derivation '%s', someone beat us to it", worker.store.printStorePath(drvPath));
outputLocks.setDeletion(true);
done(BuildResult::AlreadyValid);
return;
}
- missingPaths = drv->outputPaths(worker.store);
- if (buildMode != bmCheck)
- for (auto & i : validPaths) missingPaths.erase(i);
-
/* If any of the outputs already exist but are not valid, delete
them. */
- for (auto & i : drv->outputsAndPaths(worker.store)) {
- if (worker.store.isValidPath(i.second.second)) continue;
- debug("removing invalid path '%s'", worker.store.printStorePath(i.second.second));
- deletePath(worker.store.Store::toRealPath(i.second.second));
+ for (auto & [_, status] : initialOutputs) {
+ if (!status.known || status.known->isValid()) continue;
+ auto storePath = status.known->path;
+ debug("removing invalid path '%s'", worker.store.printStorePath(status.known->path));
+ deletePath(worker.store.Store::toRealPath(storePath));
}
/* Don't do a remote build if the derivation has the attribute
@@ -1477,7 +1585,6 @@ void DerivationGoal::tryToBuild()
supported for local builds. */
bool buildLocally = buildMode != bmNormal || parsedDrv->willBuildLocally(worker.store);
- /* Is the build hook willing to accept this job? */
if (!buildLocally) {
switch (tryBuildHook()) {
case rpAccept:
@@ -1661,8 +1768,10 @@ void DerivationGoal::buildDone()
/* Move paths out of the chroot for easier debugging of
build failures. */
if (useChroot && buildMode == bmNormal)
- for (auto & i : missingPaths) {
- auto p = worker.store.printStorePath(i);
+ for (auto & [_, status] : initialOutputs) {
+ if (!status.known) continue;
+ if (buildMode != bmCheck && status.known->isValid()) continue;
+ auto p = worker.store.printStorePath(status.known->path);
if (pathExists(chrootRootDir + p))
rename((chrootRootDir + p).c_str(), p.c_str());
}
@@ -1692,7 +1801,10 @@ void DerivationGoal::buildDone()
fmt("running post-build-hook '%s'", settings.postBuildHook),
Logger::Fields{worker.store.printStorePath(drvPath)});
PushActivity pact(act.id);
- auto outputPaths = drv->outputPaths(worker.store);
+ StorePathSet outputPaths;
+ for (auto i : drv->outputs) {
+ outputPaths.insert(finalOutputs.at(i.first));
+ }
std::map<std::string, std::string> hookEnvironment = getEnv();
hookEnvironment.emplace("DRV_PATH", worker.store.printStorePath(drvPath));
@@ -1868,7 +1980,15 @@ HookReply DerivationGoal::tryBuildHook()
/* Tell the hooks the missing outputs that have to be copied back
from the remote system. */
- writeStorePaths(worker.store, hook->sink, missingPaths);
+ {
+ StorePathSet missingPaths;
+ for (auto & [_, status] : initialOutputs) {
+ if (!status.known) continue;
+ if (buildMode != bmCheck && status.known->isValid()) continue;
+ missingPaths.insert(status.known->path);
+ }
+ writeStorePaths(worker.store, hook->sink, missingPaths);
+ }
hook->sink = FdSink();
hook->toHook.writeSide = -1;
@@ -1919,8 +2039,15 @@ StorePathSet DerivationGoal::exportReferences(const StorePathSet & storePaths)
for (auto & j : paths2) {
if (j.isDerivation()) {
Derivation drv = worker.store.derivationFromPath(j);
- for (auto & k : drv.outputsAndPaths(worker.store))
- worker.store.computeFSClosure(k.second.second, paths);
+ for (auto & k : drv.outputsAndOptPaths(worker.store)) {
+ if (!k.second.second)
+ /* FIXME: I am confused why we are calling
+ `computeFSClosure` on the output path, rather than the
+ derivation itself. That doesn't seem right to me, so I
+ won't try to implement this for CA derivations. */
+ throw UnimplementedError("exportReferences on CA derivations is not yet implemented");
+ worker.store.computeFSClosure(*k.second.second, paths);
+ }
}
}
@@ -1951,7 +2078,7 @@ void linkOrCopy(const Path & from, const Path & to)
file (e.g. 32000 of ext3), which is quite possible after a
'nix-store --optimise'. FIXME: actually, why don't we just
bind-mount in this case?
-
+
It can also fail with EPERM in BeegFS v7 and earlier versions
which don't allow hard-links to other directories */
if (errno != EMLINK && errno != EPERM)
@@ -2013,9 +2140,64 @@ void DerivationGoal::startBuilder()
chownToBuilder(tmpDir);
- /* Substitute output placeholders with the actual output paths. */
- for (auto & output : drv->outputsAndPaths(worker.store))
- inputRewrites[hashPlaceholder(output.first)] = worker.store.printStorePath(output.second.second);
+ for (auto & [outputName, status] : initialOutputs) {
+ /* Set scratch path we'll actually use during the build.
+
+ If we're not doing a chroot build, but we have some valid
+ output paths. Since we can't just overwrite or delete
+ them, we have to do hash rewriting: i.e. in the
+ environment/arguments passed to the build, we replace the
+ hashes of the valid outputs with unique dummy strings;
+ after the build, we discard the redirected outputs
+ corresponding to the valid outputs, and rewrite the
+ contents of the new outputs to replace the dummy strings
+ with the actual hashes. */
+ auto scratchPath =
+ !status.known
+ ? makeFallbackPath(outputName)
+ : !needsHashRewrite()
+ /* Can always use original path in sandbox */
+ ? status.known->path
+ : !status.known->isPresent()
+ /* If path doesn't yet exist can just use it */
+ ? status.known->path
+ : buildMode != bmRepair && !status.known->isValid()
+ /* If we aren't repairing we'll delete a corrupted path, so we
+ can use original path */
+ ? status.known->path
+ : /* If we are repairing or the path is totally valid, we'll need
+ to use a temporary path */
+ makeFallbackPath(status.known->path);
+ scratchOutputs.insert_or_assign(outputName, scratchPath);
+
+ /* A non-removed corrupted path needs to be stored here, too */
+ if (buildMode == bmRepair && !status.known->isValid())
+ redirectedBadOutputs.insert(status.known->path);
+
+ /* Substitute output placeholders with the scratch output paths
+ we'll use during the build. */
+ inputRewrites[hashPlaceholder(outputName)] = worker.store.printStorePath(scratchPath);
+
+ /* Additional tasks if we know the final path a priori. */
+ if (!status.known) continue;
+ auto fixedFinalPath = status.known->path;
+
+ /* Additional tasks if the final and scratch are both known and
+ differ. */
+ if (fixedFinalPath == scratchPath) continue;
+
+ /* Ensure scratch path is ours to use. */
+ deletePath(worker.store.printStorePath(scratchPath));
+
+ /* Rewrite and unrewrite paths */
+ {
+ std::string h1 { fixedFinalPath.hashPart() };
+ std::string h2 { scratchPath.hashPart() };
+ inputRewrites[h1] = h2;
+ }
+
+ redirectedOutputs.insert_or_assign(std::move(fixedFinalPath), std::move(scratchPath));
+ }
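/* A minimal sketch, not from this diff, of the hash-rewriting idea the loop
   above sets up (hypothetical helper; Nix's own rewriteStrings plays this
   role): every occurrence of the final hash part is mapped to the scratch
   hash part for the build, and registerOutputs() later applies the inverse
   mapping to the produced contents. */
#include <map>
#include <string>

static std::string applyRewrites(std::string s, const std::map<std::string, std::string> & rewrites)
{
    // Replace each occurrence of a "from" hash part with its "to" hash part.
    for (auto & [from, to] : rewrites)
        for (size_t pos = 0; (pos = s.find(from, pos)) != std::string::npos; pos += to.size())
            s.replace(pos, from.size(), to);
    return s;
}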
/* Construct the environment passed to the builder. */
initEnv();
@@ -2199,8 +2381,15 @@ void DerivationGoal::startBuilder()
rebuilding a path that is in settings.dirsInChroot
(typically the dependencies of /bin/sh). Throw them
out. */
- for (auto & i : drv->outputsAndPaths(worker.store))
- dirsInChroot.erase(worker.store.printStorePath(i.second.second));
+ for (auto & i : drv->outputsAndOptPaths(worker.store)) {
+ /* If the name isn't known a priori (i.e. floating
+ content-addressed derivation), the temporary location we use
+ should be fresh. Freshness means it is impossible that the path
+ is already in the sandbox, so we don't need to worry about
+ removing it. */
+ if (i.second.second)
+ dirsInChroot.erase(worker.store.printStorePath(*i.second.second));
+ }
#elif __APPLE__
/* We don't really have any parent prep work to do (yet?)
@@ -2210,33 +2399,8 @@ void DerivationGoal::startBuilder()
#endif
}
- if (needsHashRewrite()) {
-
- if (pathExists(homeDir))
- throw Error("home directory '%1%' exists; please remove it to assure purity of builds without sandboxing", homeDir);
-
- /* We're not doing a chroot build, but we have some valid
- output paths. Since we can't just overwrite or delete
- them, we have to do hash rewriting: i.e. in the
- environment/arguments passed to the build, we replace the
- hashes of the valid outputs with unique dummy strings;
- after the build, we discard the redirected outputs
- corresponding to the valid outputs, and rewrite the
- contents of the new outputs to replace the dummy strings
- with the actual hashes. */
- if (validPaths.size() > 0)
- for (auto & i : validPaths)
- addHashRewrite(i);
-
- /* If we're repairing, then we don't want to delete the
- corrupt outputs in advance. So rewrite them as well. */
- if (buildMode == bmRepair)
- for (auto & i : missingPaths)
- if (worker.store.isValidPath(i) && pathExists(worker.store.printStorePath(i))) {
- addHashRewrite(i);
- redirectedBadOutputs.insert(i);
- }
- }
+ if (needsHashRewrite() && pathExists(homeDir))
+ throw Error("home directory '%1%' exists; please remove it to assure purity of builds without sandboxing", homeDir);
if (useChroot && settings.preBuildHook != "" && dynamic_cast<Derivation *>(drv.get())) {
printMsg(lvlChatty, format("executing pre-build hook '%1%'")
@@ -2612,8 +2776,11 @@ void DerivationGoal::writeStructuredAttrs()
/* Add an "outputs" object containing the output paths. */
nlohmann::json outputs;
- for (auto & i : drv->outputsAndPaths(worker.store))
- outputs[i.first] = rewriteStrings(worker.store.printStorePath(i.second.second), inputRewrites);
+ for (auto & i : drv->outputs) {
+ /* The placeholder must have a rewrite, so we use it to cover both the
+ cases where we know or don't know the output path ahead of time. */
+ outputs[i.first] = rewriteStrings(hashPlaceholder(i.first), inputRewrites);
+ }
json["outputs"] = outputs;
/* Handle exportReferencesGraph. */
@@ -2706,18 +2873,23 @@ void DerivationGoal::writeStructuredAttrs()
chownToBuilder(tmpDir + "/.attrs.sh");
}
+struct RestrictedStoreConfig : LocalFSStoreConfig
+{
+ using LocalFSStoreConfig::LocalFSStoreConfig;
+ const std::string name() { return "Restricted Store"; }
+};
/* A wrapper around LocalStore that only allows building/querying of
paths that are in the input closures of the build or were added via
recursive Nix calls. */
-struct RestrictedStore : public LocalFSStore
+struct RestrictedStore : public LocalFSStore, public virtual RestrictedStoreConfig
{
ref<LocalStore> next;
DerivationGoal & goal;
RestrictedStore(const Params & params, ref<LocalStore> next, DerivationGoal & goal)
- : Store(params), LocalFSStore(params), next(next), goal(goal)
+ : StoreConfig(params), Store(params), LocalFSStore(params), next(next), goal(goal)
{ }
Path getRealStoreDir() override
@@ -2756,8 +2928,12 @@ struct RestrictedStore : public LocalFSStore
void queryReferrers(const StorePath & path, StorePathSet & referrers) override
{ }
- OutputPathMap queryDerivationOutputMap(const StorePath & path) override
- { throw Error("queryDerivationOutputMap"); }
+ std::map<std::string, std::optional<StorePath>> queryPartialDerivationOutputMap(const StorePath & path) override
+ {
+ if (!goal.isAllowed(path))
+ throw InvalidPath("cannot query output map for unknown path '%s' in recursive Nix", printStorePath(path));
+ return next->queryPartialDerivationOutputMap(path);
+ }
std::optional<StorePath> queryPathFromHashPart(const std::string & hashPart) override
{ throw Error("queryPathFromHashPart"); }
@@ -2811,19 +2987,20 @@ struct RestrictedStore : public LocalFSStore
StorePathSet newPaths;
for (auto & path : paths) {
- if (path.path.isDerivation()) {
- if (!goal.isAllowed(path.path))
- throw InvalidPath("cannot build unknown path '%s' in recursive Nix", printStorePath(path.path));
- auto drv = derivationFromPath(path.path);
- for (auto & output : drv.outputsAndPaths(*this))
- if (wantOutput(output.first, path.outputs))
- newPaths.insert(output.second.second);
- } else if (!goal.isAllowed(path.path))
+ if (!goal.isAllowed(path.path))
throw InvalidPath("cannot build unknown path '%s' in recursive Nix", printStorePath(path.path));
}
next->buildPaths(paths, buildMode);
+ for (auto & path : paths) {
+ if (!path.path.isDerivation()) continue;
+ auto outputs = next->queryDerivationOutputMap(path.path);
+ for (auto & output : outputs)
+ if (wantOutput(output.first, path.outputs))
+ newPaths.insert(output.second);
+ }
+
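/* Note (not from this diff): the built outputs are collected only after
   next->buildPaths() returns, by querying the output map, because for
   floating content-addressed derivations the output paths are not known
   before the build finishes. */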
StorePathSet closure;
next->computeFSClosure(newPaths, closure);
for (auto & path : closure)
@@ -3440,14 +3617,10 @@ void DerivationGoal::runChild()
if (derivationIsImpure(derivationType))
sandboxProfile += "(import \"sandbox-network.sb\")\n";
- /* Our rwx outputs */
+ /* Add the output paths we'll use at build-time to the chroot */
sandboxProfile += "(allow file-read* file-write* process-exec\n";
- for (auto & i : missingPaths)
- sandboxProfile += fmt("\t(subpath \"%s\")\n", worker.store.printStorePath(i));
-
- /* Also add redirected outputs to the chroot */
- for (auto & i : redirectedOutputs)
- sandboxProfile += fmt("\t(subpath \"%s\")\n", worker.store.printStorePath(i.second));
+ for (auto & [_, path] : scratchOutputs)
+ sandboxProfile += fmt("\t(subpath \"%s\")\n", worker.store.printStorePath(path));
sandboxProfile += ")\n";
@@ -3570,23 +3743,6 @@ void DerivationGoal::runChild()
}
-/* Parse a list of reference specifiers. Each element must either be
- a store path, or the symbolic name of the output of the derivation
- (such as `out'). */
-StorePathSet parseReferenceSpecifiers(Store & store, const BasicDerivation & drv, const Strings & paths)
-{
- StorePathSet result;
- for (auto & i : paths) {
- if (store.isStorePath(i))
- result.insert(store.parseStorePath(i));
- else if (drv.outputs.count(i))
- result.insert(drv.outputs.find(i)->second.path(store, drv.name));
- else throw BuildError("derivation contains an illegal reference specifier '%s'", i);
- }
- return result;
-}
-
-
static void moveCheckToStore(const Path & src, const Path & dst)
{
/* For the rename of directory to succeed, we must be running as root or
@@ -3614,11 +3770,17 @@ void DerivationGoal::registerOutputs()
{
/* When using a build hook, the build hook can register the output
as valid (by doing `nix-store --import'). If so we don't have
- to do anything here. */
+ to do anything here.
+
+ We can only early return when the outputs are known a priori. For
+ floating content-addressed derivations this isn't the case.
+ */
if (hook) {
bool allValid = true;
- for (auto & i : drv->outputsAndPaths(worker.store))
- if (!worker.store.isValidPath(i.second.second)) allValid = false;
+ for (auto & i : drv->outputsAndOptPaths(worker.store)) {
+ if (!i.second.second || !worker.store.isValidPath(*i.second.second))
+ allValid = false;
+ }
if (allValid) return;
}
@@ -3639,47 +3801,51 @@ void DerivationGoal::registerOutputs()
Nix calls. */
StorePathSet referenceablePaths;
for (auto & p : inputPaths) referenceablePaths.insert(p);
- for (auto & i : drv->outputsAndPaths(worker.store)) referenceablePaths.insert(i.second.second);
+ for (auto & i : scratchOutputs) referenceablePaths.insert(i.second);
for (auto & p : addedPaths) referenceablePaths.insert(p);
- /* Check whether the output paths were created, and grep each
- output path to determine what other paths it references. Also make all
- output paths read-only. */
- for (auto & i : drv->outputsAndPaths(worker.store)) {
- auto path = worker.store.printStorePath(i.second.second);
- if (!missingPaths.count(i.second.second)) continue;
-
- Path actualPath = path;
- if (needsHashRewrite()) {
- auto r = redirectedOutputs.find(i.second.second);
- if (r != redirectedOutputs.end()) {
- auto redirected = worker.store.Store::toRealPath(r->second);
- if (buildMode == bmRepair
- && redirectedBadOutputs.count(i.second.second)
- && pathExists(redirected))
- replaceValidPath(path, redirected);
- if (buildMode == bmCheck)
- actualPath = redirected;
- }
- } else if (useChroot) {
- actualPath = chrootRootDir + path;
- if (pathExists(actualPath)) {
- /* Move output paths from the chroot to the Nix store. */
- if (buildMode == bmRepair)
- replaceValidPath(path, actualPath);
- else
- if (buildMode != bmCheck && rename(actualPath.c_str(), worker.store.toRealPath(path).c_str()) == -1)
- throw SysError("moving build output '%1%' from the sandbox to the Nix store", path);
- }
- if (buildMode != bmCheck) actualPath = worker.store.toRealPath(path);
+ /* FIXME `needsHashRewrite` should probably be removed, and we should get at
+ the real reason why we aren't using the chroot dir */
+ auto toRealPathChroot = [&](const Path & p) -> Path {
+ return useChroot && !needsHashRewrite()
+ ? chrootRootDir + p
+ : worker.store.toRealPath(p);
+ };
+
+ /* Check whether the output paths were created, and make all
+ output paths read-only. Then get the references of each output (that we
+ might need to register), so we can topologically sort them. For the ones
+ that are most definitely already installed, we just store their final
+ name so we can also use it in rewrites. */
+ StringSet outputsToSort;
+ struct AlreadyRegistered { StorePath path; };
+ struct PerhapsNeedToRegister { StorePathSet refs; };
+ std::map<std::string, std::variant<AlreadyRegistered, PerhapsNeedToRegister>> outputReferencesIfUnregistered;
+ std::map<std::string, struct stat> outputStats;
+ for (auto & [outputName, _] : drv->outputs) {
+ auto actualPath = toRealPathChroot(worker.store.printStorePath(scratchOutputs.at(outputName)));
+
+ outputsToSort.insert(outputName);
+
+ /* Update wanted info to remove the outputs we definitely don't need to register */
+ auto & initialInfo = initialOutputs.at(outputName);
+
+ /* Don't register if already valid, and not checking */
+ initialInfo.wanted = buildMode == bmCheck
+ || !(initialInfo.known && initialInfo.known->isValid());
+ if (!initialInfo.wanted) {
+ outputReferencesIfUnregistered.insert_or_assign(
+ outputName,
+ AlreadyRegistered { .path = initialInfo.known->path });
+ continue;
}
struct stat st;
if (lstat(actualPath.c_str(), &st) == -1) {
if (errno == ENOENT)
throw BuildError(
- "builder for '%s' failed to produce output path '%s'",
- worker.store.printStorePath(drvPath), path);
+ "builder for '%s' failed to produce output path for output '%s' at '%s'",
+ worker.store.printStorePath(drvPath), outputName, actualPath);
throw SysError("getting attributes of path '%s'", actualPath);
}
@@ -3690,116 +3856,281 @@ void DerivationGoal::registerOutputs()
user. */
if ((!S_ISLNK(st.st_mode) && (st.st_mode & (S_IWGRP | S_IWOTH))) ||
(buildUser && st.st_uid != buildUser->getUID()))
- throw BuildError("suspicious ownership or permission on '%1%'; rejecting this build output", path);
+ throw BuildError(
+ "suspicious ownership or permission on '%s' for output '%s'; rejecting this build output",
+ actualPath, outputName);
#endif
- /* Apply hash rewriting if necessary. */
- bool rewritten = false;
- if (!outputRewrites.empty()) {
- logWarning({
- .name = "Rewriting hashes",
- .hint = hintfmt("rewriting hashes in '%1%'; cross fingers", path)
- });
-
- /* Canonicalise first. This ensures that the path we're
- rewriting doesn't contain a hard link to /etc/shadow or
- something like that. */
- canonicalisePathMetaData(actualPath, buildUser ? buildUser->getUID() : -1, inodesSeen);
+ /* Canonicalise first. This ensures that the path we're
+ rewriting doesn't contain a hard link to /etc/shadow or
+ something like that. */
+ canonicalisePathMetaData(actualPath, buildUser ? buildUser->getUID() : -1, inodesSeen);
- /* FIXME: this is in-memory. */
- StringSink sink;
- dumpPath(actualPath, sink);
- deletePath(actualPath);
- sink.s = make_ref<std::string>(rewriteStrings(*sink.s, outputRewrites));
- StringSource source(*sink.s);
- restorePath(actualPath, source);
+ debug("scanning for references for output %1 in temp location '%1%'", outputName, actualPath);
- rewritten = true;
- }
+ /* Pass blank Sink as we are not ready to hash data at this stage. */
+ NullSink blank;
+ auto references = worker.store.parseStorePathSet(
+ scanForReferences(blank, actualPath, worker.store.printStorePathSet(referenceablePaths)));
- /* Check that fixed-output derivations produced the right
- outputs (i.e., the content hash should match the specified
- hash). */
- std::optional<ContentAddress> ca;
+ outputReferencesIfUnregistered.insert_or_assign(
+ outputName,
+ PerhapsNeedToRegister { .refs = references });
+ outputStats.insert_or_assign(outputName, std::move(st));
+ }
- if (! std::holds_alternative<DerivationOutputInputAddressed>(i.second.first.output)) {
- DerivationOutputCAFloating outputHash;
- std::visit(overloaded {
- [&](DerivationOutputInputAddressed doi) {
- assert(false); // Enclosing `if` handles this case in other branch
- },
- [&](DerivationOutputCAFixed dof) {
- outputHash = DerivationOutputCAFloating {
- .method = dof.hash.method,
- .hashType = dof.hash.hash.type,
- };
- },
- [&](DerivationOutputCAFloating dof) {
- outputHash = dof;
+ auto sortedOutputNames = topoSort(outputsToSort,
+ {[&](const std::string & name) {
+ return std::visit(overloaded {
+ /* Since we'll use the already installed versions of these, we
+ can treat them as leaves and ignore any references they
+ have. */
+ [&](AlreadyRegistered _) { return StringSet {}; },
+ [&](PerhapsNeedToRegister refs) {
+ StringSet referencedOutputs;
+ /* FIXME build inverted map up front so no quadratic waste here */
+ for (auto & r : refs.refs)
+ for (auto & [o, p] : scratchOutputs)
+ if (r == p)
+ referencedOutputs.insert(o);
+ return referencedOutputs;
},
- }, i.second.first.output);
+ }, outputReferencesIfUnregistered.at(name));
+ }},
+ {[&](const std::string & path, const std::string & parent) {
+ // TODO with more -vvvv also show the temporary paths for manual inspection.
+ return BuildError(
+ "cycle detected in build of '%s' in the references of output '%s' from output '%s'",
+ worker.store.printStorePath(drvPath), path, parent);
+ }});
+
+ std::reverse(sortedOutputNames.begin(), sortedOutputNames.end());
+
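/* Note (not from this diff): the order is reversed so outputs are processed
   dependencies-first; e.g. if output "bin" references output "lib", then
   "lib" is finalised first and its scratch-to-final rewrite is already in
   outputRewrites when "bin" is rewritten and hashed below. */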
+ for (auto & outputName : sortedOutputNames) {
+ auto output = drv->outputs.at(outputName);
+ auto & scratchPath = scratchOutputs.at(outputName);
+ auto actualPath = toRealPathChroot(worker.store.printStorePath(scratchPath));
+
+ auto finish = [&](StorePath finalStorePath) {
+ /* Store the final path */
+ finalOutputs.insert_or_assign(outputName, finalStorePath);
+ /* The rewrite rule will be used in downstream outputs that refer to
+ us. This is why the topological sort above must be done before
+ this for loop. */
+ if (scratchPath != finalStorePath)
+ outputRewrites[std::string { scratchPath.hashPart() }] = std::string { finalStorePath.hashPart() };
+ };
+
+ bool rewritten = false;
+ std::optional<StorePathSet> referencesOpt = std::visit(overloaded {
+ [&](AlreadyRegistered skippedFinalPath) -> std::optional<StorePathSet> {
+ finish(skippedFinalPath.path);
+ return std::nullopt;
+ },
+ [&](PerhapsNeedToRegister r) -> std::optional<StorePathSet> {
+ return r.refs;
+ },
+ }, outputReferencesIfUnregistered.at(outputName));
+
+ if (!referencesOpt)
+ continue;
+ auto references = *referencesOpt;
+
+ auto rewriteOutput = [&]() {
+ /* Apply hash rewriting if necessary. */
+ if (!outputRewrites.empty()) {
+ logWarning({
+ .name = "Rewriting hashes",
+ .hint = hintfmt("rewriting hashes in '%1%'; cross fingers", actualPath),
+ });
+
+ /* FIXME: this is in-memory. */
+ StringSink sink;
+ dumpPath(actualPath, sink);
+ deletePath(actualPath);
+ sink.s = make_ref<std::string>(rewriteStrings(*sink.s, outputRewrites));
+ StringSource source(*sink.s);
+ restorePath(actualPath, source);
+ rewritten = true;
+ }
+ };
+
+ auto rewriteRefs = [&]() -> std::pair<bool, StorePathSet> {
+ /* In the CA case, we need the rewritten refs to calculate the
+ final path, therefore we look for a *non-rewritten*
+ self-reference, and use a bool rather than try to solve the
+ computationally intractable fixed point. */
+ std::pair<bool, StorePathSet> res {
+ false,
+ {},
+ };
+ for (auto & r : references) {
+ auto name = r.name();
+ auto origHash = std::string { r.hashPart() };
+ if (r == scratchPath)
+ res.first = true;
+ else if (outputRewrites.count(origHash) == 0)
+ res.second.insert(r);
+ else {
+ std::string newRef = outputRewrites.at(origHash);
+ newRef += '-';
+ newRef += name;
+ res.second.insert(StorePath { newRef });
+ }
+ }
+ return res;
+ };
+
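/* A worked example, not from this diff, with hypothetical hash parts: given
   scratchPath = /nix/store/aaaa...-foo and outputRewrites = { "bbbb..." ->
   "cccc..." }, rewriteRefs() maps
     /nix/store/aaaa...-foo   -> res.first = true (self-reference, omitted from the set)
     /nix/store/bbbb...-dep   -> /nix/store/cccc...-dep (rebuilt from the rewritten hash part)
     /nix/store/dddd...-other -> kept as-is in res.second
   The self-reference flag is re-applied to the final path once that path is
   known. */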
+ auto newInfoFromCA = [&](const DerivationOutputCAFloating outputHash) -> ValidPathInfo {
+ auto & st = outputStats.at(outputName);
if (outputHash.method == FileIngestionMethod::Flat) {
/* The output path should be a regular file without execute permission. */
if (!S_ISREG(st.st_mode) || (st.st_mode & S_IXUSR) != 0)
throw BuildError(
"output path '%1%' should be a non-executable regular file "
"since recursive hashing is not enabled (outputHashMode=flat)",
- path);
+ actualPath);
}
-
- /* Check the hash. In hash mode, move the path produced by
- the derivation to its content-addressed location. */
- Hash h2 = outputHash.method == FileIngestionMethod::Recursive
- ? hashPath(outputHash.hashType, actualPath).first
- : hashFile(outputHash.hashType, actualPath);
-
- auto dest = worker.store.makeFixedOutputPath(outputHash.method, h2, i.second.second.name());
-
- // true if either floating CA, or incorrect fixed hash.
- bool needsMove = true;
-
- if (auto p = std::get_if<DerivationOutputCAFixed>(& i.second.first.output)) {
- Hash & h = p->hash.hash;
- if (h != h2) {
-
- /* Throw an error after registering the path as
- valid. */
- worker.hashMismatch = true;
- delayedException = std::make_exception_ptr(
- BuildError("hash mismatch in fixed-output derivation '%s':\n wanted: %s\n got: %s",
- worker.store.printStorePath(dest),
- h.to_string(SRI, true),
- h2.to_string(SRI, true)));
- } else {
- // matched the fixed hash, so no move needed.
- needsMove = false;
- }
+ rewriteOutput();
+ /* FIXME optimize and deduplicate with addToStore */
+ std::string oldHashPart { scratchPath.hashPart() };
+ HashModuloSink caSink { outputHash.hashType, oldHashPart };
+ switch (outputHash.method) {
+ case FileIngestionMethod::Recursive:
+ dumpPath(actualPath, caSink);
+ break;
+ case FileIngestionMethod::Flat:
+ readFile(actualPath, caSink);
+ break;
}
+ auto got = caSink.finish().first;
+ auto refs = rewriteRefs();
+ HashModuloSink narSink { htSHA256, oldHashPart };
+ dumpPath(actualPath, narSink);
+ auto narHashAndSize = narSink.finish();
+ ValidPathInfo newInfo0 {
+ worker.store.makeFixedOutputPath(
+ outputHash.method,
+ got,
+ outputPathName(drv->name, outputName),
+ refs.second,
+ refs.first),
+ narHashAndSize.first,
+ };
+ newInfo0.narSize = narHashAndSize.second;
+ newInfo0.ca = FixedOutputHash {
+ .method = outputHash.method,
+ .hash = got,
+ };
+ newInfo0.references = refs.second;
+ if (refs.first)
+ newInfo0.references.insert(newInfo0.path);
- if (needsMove) {
- Path actualDest = worker.store.Store::toRealPath(dest);
+ assert(newInfo0.ca);
+ return newInfo0;
+ };
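/* Note (not from this diff): the content-addressing step above is, in
   outline: (1) hash the output with its own scratch hash part masked out
   ("hash modulo"), (2) derive the final store path from that hash, the
   rewritten references and the self-reference flag, and (3) compute the NAR
   hash the same modulo way for the ValidPathInfo. Fixed-output derivations
   reuse this and then compare the computed hash against the hash declared in
   the derivation (DerivationOutputCAFixed below). */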
- if (worker.store.isValidPath(dest))
- std::rethrow_exception(delayedException);
+ ValidPathInfo newInfo = std::visit(overloaded {
+ [&](DerivationOutputInputAddressed output) {
+ /* input-addressed case */
+ auto requiredFinalPath = output.path;
+ /* Preemptively add a rewrite rule for the final hash, as that is
+ what the NAR hash will use rather than normalized self-references */
+ if (scratchPath != requiredFinalPath)
+ outputRewrites.insert_or_assign(
+ std::string { scratchPath.hashPart() },
+ std::string { requiredFinalPath.hashPart() });
+ rewriteOutput();
+ auto narHashAndSize = hashPath(htSHA256, actualPath);
+ ValidPathInfo newInfo0 { requiredFinalPath, narHashAndSize.first };
+ newInfo0.narSize = narHashAndSize.second;
+ auto refs = rewriteRefs();
+ newInfo0.references = refs.second;
+ if (refs.first)
+ newInfo0.references.insert(newInfo0.path);
+ return newInfo0;
+ },
+ [&](DerivationOutputCAFixed dof) {
+ auto newInfo0 = newInfoFromCA(DerivationOutputCAFloating {
+ .method = dof.hash.method,
+ .hashType = dof.hash.hash.type,
+ });
- if (actualPath != actualDest) {
- PathLocks outputLocks({actualDest});
- deletePath(actualDest);
- if (rename(actualPath.c_str(), actualDest.c_str()) == -1)
- throw SysError("moving '%s' to '%s'", actualPath, worker.store.printStorePath(dest));
+ /* Check wanted hash */
+ Hash & wanted = dof.hash.hash;
+ assert(newInfo0.ca);
+ auto got = getContentAddressHash(*newInfo0.ca);
+ if (wanted != got) {
+ /* Throw an error after registering the path as
+ valid. */
+ worker.hashMismatch = true;
+ delayedException = std::make_exception_ptr(
+ BuildError("hash mismatch in fixed-output derivation '%s':\n wanted: %s\n got: %s",
+ worker.store.printStorePath(drvPath),
+ wanted.to_string(SRI, true),
+ got.to_string(SRI, true)));
}
+ return newInfo0;
+ },
+ [&](DerivationOutputCAFloating dof) {
+ return newInfoFromCA(dof);
+ },
+ }, output.output);
+
+ /* Calculate where we'll move the output files. In the checking case we
+ will leave them where they are, for now, rather than move to
+ their usual "final destination". */
+ auto finalDestPath = worker.store.printStorePath(newInfo.path);
+
+ /* Lock final output path, if not already locked. This happens with
+ floating CA derivations and hash-mismatching fixed-output
+ derivations. */
+ PathLocks dynamicOutputLock;
+ auto optFixedPath = output.path(worker.store, drv->name, outputName);
+ if (!optFixedPath ||
+ worker.store.printStorePath(*optFixedPath) != finalDestPath)
+ {
+ assert(newInfo.ca);
+ dynamicOutputLock.lockPaths({worker.store.toRealPath(finalDestPath)});
+ }
- path = worker.store.printStorePath(dest);
- actualPath = actualDest;
+ /* Move files, if needed */
+ if (worker.store.toRealPath(finalDestPath) != actualPath) {
+ if (buildMode == bmRepair) {
+ /* Path already exists, need to replace it */
+ replaceValidPath(worker.store.toRealPath(finalDestPath), actualPath);
+ actualPath = worker.store.toRealPath(finalDestPath);
+ } else if (buildMode == bmCheck) {
+ /* Path already exists, and we want to compare, so we leave our
+ new path in place. */
+ } else if (worker.store.isValidPath(newInfo.path)) {
+ /* Path already exists because CA path produced by something
+ else. No moving needed. */
+ assert(newInfo.ca);
+ } else {
+ /* Temporarily add write perm so we can move it; this will be fixed
+ later. */
+ {
+ struct stat st;
+ auto & mode = st.st_mode;
+ if (lstat(actualPath.c_str(), &st))
+ throw SysError("getting attributes of path '%1%'", actualPath);
+ mode |= 0200;
+ /* Try to change the perms, but only if the file isn't a
+ symlink, as symlink permissions are mostly ignored and
+ calling `chmod` on it will just forward the call to the
+ target of the link. */
+ if (!S_ISLNK(st.st_mode))
+ if (chmod(actualPath.c_str(), mode) == -1)
+ throw SysError("changing mode of '%1%' to %2$o", actualPath, mode);
+ }
+ if (rename(
+ actualPath.c_str(),
+ worker.store.toRealPath(finalDestPath).c_str()) == -1)
+ throw SysError("moving build output '%1%' from it's temporary location to the Nix store", finalDestPath);
+ actualPath = worker.store.toRealPath(finalDestPath);
}
- else
- assert(worker.store.parseStorePath(path) == dest);
-
- ca = FixedOutputHash {
- .method = outputHash.method,
- .hash = h2,
- };
}
/* Get rid of all weird permissions. This also checks that
@@ -3807,45 +4138,33 @@ void DerivationGoal::registerOutputs()
canonicalisePathMetaData(actualPath,
buildUser && !rewritten ? buildUser->getUID() : -1, inodesSeen);
- /* For this output path, find the references to other paths
- contained in it. Compute the SHA-256 NAR hash at the same
- time. The hash is stored in the database so that we can
- verify later on whether nobody has messed with the store. */
- debug("scanning for references inside '%1%'", path);
- // HashResult hash;
- auto pathSetAndHash = scanForReferences(actualPath, worker.store.printStorePathSet(referenceablePaths));
- auto references = worker.store.parseStorePathSet(pathSetAndHash.first);
- HashResult hash = pathSetAndHash.second;
-
if (buildMode == bmCheck) {
- if (!worker.store.isValidPath(worker.store.parseStorePath(path))) continue;
- ValidPathInfo info(*worker.store.queryPathInfo(worker.store.parseStorePath(path)));
- if (hash.first != info.narHash) {
+ if (!worker.store.isValidPath(newInfo.path)) continue;
+ ValidPathInfo oldInfo(*worker.store.queryPathInfo(newInfo.path));
+ if (newInfo.narHash != oldInfo.narHash) {
worker.checkMismatch = true;
if (settings.runDiffHook || settings.keepFailed) {
- Path dst = worker.store.toRealPath(path + checkSuffix);
+ Path dst = worker.store.toRealPath(finalDestPath + checkSuffix);
deletePath(dst);
moveCheckToStore(actualPath, dst);
handleDiffHook(
buildUser ? buildUser->getUID() : getuid(),
buildUser ? buildUser->getGID() : getgid(),
- path, dst, worker.store.printStorePath(drvPath), tmpDir);
+ finalDestPath, dst, worker.store.printStorePath(drvPath), tmpDir);
throw NotDeterministic("derivation '%s' may not be deterministic: output '%s' differs from '%s'",
- worker.store.printStorePath(drvPath), worker.store.toRealPath(path), dst);
+ worker.store.printStorePath(drvPath), worker.store.toRealPath(finalDestPath), dst);
} else
throw NotDeterministic("derivation '%s' may not be deterministic: output '%s' differs",
- worker.store.printStorePath(drvPath), worker.store.toRealPath(path));
+ worker.store.printStorePath(drvPath), worker.store.toRealPath(finalDestPath));
}
/* Since we verified the build, it's now ultimately trusted. */
- if (!info.ultimate) {
- info.ultimate = true;
- worker.store.signPathInfo(info);
- ValidPathInfos infos;
- infos.push_back(std::move(info));
- worker.store.registerValidPaths(infos);
+ if (!oldInfo.ultimate) {
+ oldInfo.ultimate = true;
+ worker.store.signPathInfo(oldInfo);
+ worker.store.registerValidPaths({ std::move(oldInfo) });
}
continue;
@@ -3862,26 +4181,22 @@ void DerivationGoal::registerOutputs()
if (curRound == nrRounds) {
worker.store.optimisePath(actualPath); // FIXME: combine with scanForReferences()
- worker.markContentsGood(worker.store.parseStorePath(path));
+ worker.markContentsGood(newInfo.path);
}
- ValidPathInfo info {
- worker.store.parseStorePath(path),
- hash.first,
- };
- info.narSize = hash.second;
- info.references = std::move(references);
- info.deriver = drvPath;
- info.ultimate = true;
- info.ca = ca;
- worker.store.signPathInfo(info);
-
- if (!info.references.empty()) {
- // FIXME don't we have an experimental feature for fixed output with references?
- info.ca = {};
- }
+ newInfo.deriver = drvPath;
+ newInfo.ultimate = true;
+ worker.store.signPathInfo(newInfo);
+
+ finish(newInfo.path);
- infos.emplace(i.first, std::move(info));
+ /* If it's a CA path, register it right away. This is necessary if it
+ isn't statically known so that we can safely unlock the path before
+ the next iteration */
+ if (newInfo.ca)
+ worker.store.registerValidPaths({newInfo});
+
+ infos.emplace(outputName, std::move(newInfo));
}
if (buildMode == bmCheck) return;
@@ -3924,8 +4239,8 @@ void DerivationGoal::registerOutputs()
/* If this is the first round of several, then move the output out of the way. */
if (nrRounds > 1 && curRound == 1 && curRound < nrRounds && keepPreviousRound) {
- for (auto & i : drv->outputsAndPaths(worker.store)) {
- auto path = worker.store.printStorePath(i.second.second);
+ for (auto & [_, outputStorePath] : finalOutputs) {
+ auto path = worker.store.printStorePath(outputStorePath);
Path prev = path + checkSuffix;
deletePath(prev);
Path dst = path + checkSuffix;
@@ -3942,8 +4257,8 @@ void DerivationGoal::registerOutputs()
/* Remove the .check directories if we're done. FIXME: keep them
if the result was not determistic? */
if (curRound == nrRounds) {
- for (auto & i : drv->outputsAndPaths(worker.store)) {
- Path prev = worker.store.printStorePath(i.second.second) + checkSuffix;
+ for (auto & [_, outputStorePath] : finalOutputs) {
+ Path prev = worker.store.printStorePath(outputStorePath) + checkSuffix;
deletePath(prev);
}
}
@@ -3951,16 +4266,28 @@ void DerivationGoal::registerOutputs()
/* Register each output path as valid, and register the sets of
paths referenced by each of them. If there are cycles in the
outputs, this will fail. */
- {
- ValidPathInfos infos2;
- for (auto & i : infos) infos2.push_back(i.second);
- worker.store.registerValidPaths(infos2);
+ ValidPathInfos infos2;
+ for (auto & [outputName, newInfo] : infos) {
+ infos2.push_back(newInfo);
}
+ worker.store.registerValidPaths(infos2);
/* In case of a fixed-output derivation hash mismatch, throw an
exception now that we have registered the output as valid. */
if (delayedException)
std::rethrow_exception(delayedException);
+
+ /* If we made it this far, we are sure the output matches the derivation
+ (since the delayedException would be a fixed output CA mismatch). That
+ means it's safe to link the derivation to the output hash. We must do
+ that for floating CA derivations, which otherwise couldn't be cached,
+ but it's fine to do in all cases. */
+ for (auto & [outputName, newInfo] : infos) {
+ /* FIXME: we will want to track this mapping in the DB whether or
+ not we have a drv file. */
+ if (useDerivation)
+ worker.store.linkDeriverToPath(drvPath, outputName, newInfo.path);
+ }
}
@@ -4029,7 +4356,17 @@ void DerivationGoal::checkOutputs(const std::map<Path, ValidPathInfo> & outputs)
{
if (!value) return;
- auto spec = parseReferenceSpecifiers(worker.store, *drv, *value);
+ /* Parse a list of reference specifiers. Each element must
+ either be a store path, or the symbolic name of the output
+ of the derivation (such as `out'). */
+ StorePathSet spec;
+ for (auto & i : *value) {
+ if (worker.store.isStorePath(i))
+ spec.insert(worker.store.parseStorePath(i));
+ else if (finalOutputs.count(i))
+ spec.insert(finalOutputs.at(i));
+ else throw BuildError("derivation contains an illegal reference specifier '%s'", i);
+ }
auto used = recursive
? getClosure(info.path).first
@@ -4238,31 +4575,67 @@ void DerivationGoal::flushLine()
}
-StorePathSet DerivationGoal::checkPathValidity(bool returnValid, bool checkHash)
+std::map<std::string, std::optional<StorePath>> DerivationGoal::queryPartialDerivationOutputMap()
{
- StorePathSet result;
- for (auto & i : drv->outputsAndPaths(worker.store)) {
- if (!wantOutput(i.first, wantedOutputs)) continue;
- bool good =
- worker.store.isValidPath(i.second.second) &&
- (!checkHash || worker.pathContentsGood(i.second.second));
- if (good == returnValid) result.insert(i.second.second);
+ if (drv->type() != DerivationType::CAFloating) {
+ std::map<std::string, std::optional<StorePath>> res;
+ for (auto & [name, output] : drv->outputs)
+ res.insert_or_assign(name, output.path(worker.store, drv->name, name));
+ return res;
+ } else {
+ return worker.store.queryPartialDerivationOutputMap(drvPath);
}
- return result;
+}
+
+OutputPathMap DerivationGoal::queryDerivationOutputMap()
+{
+ if (drv->type() != DerivationType::CAFloating) {
+ OutputPathMap res;
+ for (auto & [name, output] : drv->outputsAndOptPaths(worker.store))
+ res.insert_or_assign(name, *output.second);
+ return res;
+ } else {
+ return worker.store.queryDerivationOutputMap(drvPath);
+ }
+}
+
+
+void DerivationGoal::checkPathValidity()
+{
+ bool checkHash = buildMode == bmRepair;
+ for (auto & i : queryPartialDerivationOutputMap()) {
+ InitialOutput info {
+ .wanted = wantOutput(i.first, wantedOutputs),
+ };
+ if (i.second) {
+ auto outputPath = *i.second;
+ info.known = {
+ .path = outputPath,
+ .status = !worker.store.isValidPath(outputPath)
+ ? PathStatus::Absent
+ : !checkHash || worker.pathContentsGood(outputPath)
+ ? PathStatus::Valid
+ : PathStatus::Corrupt,
+ };
+ }
+ initialOutputs.insert_or_assign(i.first, info);
+ }
+}
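/* Note (not from this diff): after this, every output carries a `wanted` bit
   plus either no known path (floating CA, not yet queried) or a known path
   whose status is Absent, Valid or Corrupt; the substitution, build and
   registration logic above branches on this per-output information instead
   of on whole-derivation path sets. */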
+
+
+StorePath DerivationGoal::makeFallbackPath(std::string_view outputName)
+{
+ return worker.store.makeStorePath(
+ "rewrite:" + std::string(drvPath.to_string()) + ":name:" + std::string(outputName),
+ Hash(htSHA256), outputPathName(drv->name, outputName));
}
-void DerivationGoal::addHashRewrite(const StorePath & path)
+StorePath DerivationGoal::makeFallbackPath(const StorePath & path)
{
- auto h1 = std::string(((std::string_view) path.to_string()).substr(0, 32));
- auto p = worker.store.makeStorePath(
+ return worker.store.makeStorePath(
"rewrite:" + std::string(drvPath.to_string()) + ":" + std::string(path.to_string()),
Hash(htSHA256), path.name());
- auto h2 = std::string(((std::string_view) p.to_string()).substr(0, 32));
- deletePath(worker.store.printStorePath(p));
- inputRewrites[h1] = h2;
- outputRewrites[h2] = h1;
- redirectedOutputs.insert_or_assign(path, std::move(p));
}
@@ -4983,7 +5356,7 @@ void Worker::waitForInput()
std::vector<unsigned char> buffer(4096);
for (auto & k : fds2) {
if (pollStatus.at(fdToPollStatus.at(k)).revents) {
- ssize_t rd = read(k, buffer.data(), buffer.size());
+ ssize_t rd = ::read(k, buffer.data(), buffer.size());
// FIXME: is there a cleaner way to handle pt close
// than EIO? Is this even standard?
if (rd == 0 || (rd == -1 && errno == EIO)) {
diff --git a/src/libstore/daemon.cc b/src/libstore/daemon.cc
index 9d60c994d..0573a6813 100644
--- a/src/libstore/daemon.cc
+++ b/src/libstore/daemon.cc
@@ -232,7 +232,7 @@ struct ClientSettings
else if (setSubstituters(settings.extraSubstituters))
;
else
- warn("ignoring the user-specified setting '%s', because it is a restricted setting and you are not a trusted user", name);
+ debug("ignoring the client-specified setting '%s', because it is a restricted setting and you are not a trusted user", name);
} catch (UsageError & e) {
warn(e.what());
}
@@ -325,9 +325,9 @@ static void performOp(TunnelLogger * logger, ref<Store> store,
case wopQueryDerivationOutputMap: {
auto path = store->parseStorePath(readString(from));
logger->startWork();
- OutputPathMap outputs = store->queryDerivationOutputMap(path);
+ auto outputs = store->queryPartialDerivationOutputMap(path);
logger->stopWork();
- writeOutputPathMap(*store, to, outputs);
+ worker_proto::write(*store, to, outputs);
break;
}
diff --git a/src/libstore/derivations.cc b/src/libstore/derivations.cc
index a9fed2564..9d8ce5e36 100644
--- a/src/libstore/derivations.cc
+++ b/src/libstore/derivations.cc
@@ -7,7 +7,7 @@
namespace nix {
-std::optional<StorePath> DerivationOutput::pathOpt(const Store & store, std::string_view drvName) const
+std::optional<StorePath> DerivationOutput::path(const Store & store, std::string_view drvName, std::string_view outputName) const
{
return std::visit(overloaded {
[](DerivationOutputInputAddressed doi) -> std::optional<StorePath> {
@@ -15,7 +15,7 @@ std::optional<StorePath> DerivationOutput::pathOpt(const Store & store, std::str
},
[&](DerivationOutputCAFixed dof) -> std::optional<StorePath> {
return {
- store.makeFixedOutputPath(dof.hash.method, dof.hash.hash, drvName)
+ dof.path(store, drvName, outputName)
};
},
[](DerivationOutputCAFloating dof) -> std::optional<StorePath> {
@@ -25,6 +25,13 @@ std::optional<StorePath> DerivationOutput::pathOpt(const Store & store, std::str
}
+StorePath DerivationOutputCAFixed::path(const Store & store, std::string_view drvName, std::string_view outputName) const {
+ return store.makeFixedOutputPath(
+ hash.method, hash.hash,
+ outputPathName(drvName, outputName));
+}
+
+
bool derivationIsCA(DerivationType dt) {
switch (dt) {
case DerivationType::InputAddressed: return false;
@@ -61,7 +68,7 @@ bool BasicDerivation::isBuiltin() const
}
-StorePath writeDerivation(ref<Store> store,
+StorePath writeDerivation(Store & store,
const Derivation & drv, RepairFlag repair)
{
auto references = drv.inputSrcs;
@@ -71,10 +78,10 @@ StorePath writeDerivation(ref<Store> store,
(that can be missing (of course) and should not necessarily be
held during a garbage collection). */
auto suffix = std::string(drv.name) + drvExtension;
- auto contents = drv.unparse(*store, false);
+ auto contents = drv.unparse(store, false);
return settings.readOnlyMode
- ? store->computeStorePathForText(suffix, contents, references)
- : store->addTextToStore(suffix, contents, references, repair);
+ ? store.computeStorePathForText(suffix, contents, references)
+ : store.addTextToStore(suffix, contents, references, repair);
}
@@ -106,12 +113,15 @@ static string parseString(std::istream & str)
return res;
}
+static void validatePath(std::string_view s) {
+ if (s.size() == 0 || s[0] != '/')
+ throw FormatError("bad path '%1%' in derivation", s);
+}
static Path parsePath(std::istream & str)
{
- string s = parseString(str);
- if (s.size() == 0 || s[0] != '/')
- throw FormatError("bad path '%1%' in derivation", s);
+ auto s = parseString(str);
+ validatePath(s);
return s;
}
@@ -140,7 +150,7 @@ static StringSet parseStrings(std::istream & str, bool arePaths)
static DerivationOutput parseDerivationOutput(const Store & store,
- StorePath path, std::string_view hashAlgo, std::string_view hash)
+ std::string_view pathS, std::string_view hashAlgo, std::string_view hash)
{
if (hashAlgo != "") {
auto method = FileIngestionMethod::Flat;
@@ -148,44 +158,49 @@ static DerivationOutput parseDerivationOutput(const Store & store,
method = FileIngestionMethod::Recursive;
hashAlgo = hashAlgo.substr(2);
}
- const HashType hashType = parseHashType(hashAlgo);
-
- return hash != ""
- ? DerivationOutput {
- .output = DerivationOutputCAFixed {
- .hash = FixedOutputHash {
- .method = std::move(method),
- .hash = Hash::parseNonSRIUnprefixed(hash, hashType),
- },
- }
- }
- : (settings.requireExperimentalFeature("ca-derivations"),
- DerivationOutput {
- .output = DerivationOutputCAFloating {
- .method = std::move(method),
- .hashType = std::move(hashType),
- },
- });
- } else
+ const auto hashType = parseHashType(hashAlgo);
+ if (hash != "") {
+ validatePath(pathS);
+ return DerivationOutput {
+ .output = DerivationOutputCAFixed {
+ .hash = FixedOutputHash {
+ .method = std::move(method),
+ .hash = Hash::parseNonSRIUnprefixed(hash, hashType),
+ },
+ },
+ };
+ } else {
+ settings.requireExperimentalFeature("ca-derivations");
+ assert(pathS == "");
+ return DerivationOutput {
+ .output = DerivationOutputCAFloating {
+ .method = std::move(method),
+ .hashType = std::move(hashType),
+ },
+ };
+ }
+ } else {
+ validatePath(pathS);
return DerivationOutput {
.output = DerivationOutputInputAddressed {
- .path = std::move(path),
+ .path = store.parseStorePath(pathS),
}
};
+ }
}
static DerivationOutput parseDerivationOutput(const Store & store, std::istringstream & str)
{
- expect(str, ","); auto path = store.parseStorePath(parsePath(str));
+ expect(str, ","); const auto pathS = parseString(str);
expect(str, ","); const auto hashAlgo = parseString(str);
expect(str, ","); const auto hash = parseString(str);
expect(str, ")");
- return parseDerivationOutput(store, std::move(path), hashAlgo, hash);
+ return parseDerivationOutput(store, pathS, hashAlgo, hash);
}
-static Derivation parseDerivation(const Store & store, std::string && s, std::string_view name)
+Derivation parseDerivation(const Store & store, std::string && s, std::string_view name)
{
Derivation drv;
drv.name = name;
@@ -233,34 +248,6 @@ static Derivation parseDerivation(const Store & store, std::string && s, std::st
}
-Derivation readDerivation(const Store & store, const Path & drvPath, std::string_view name)
-{
- try {
- return parseDerivation(store, readFile(drvPath), name);
- } catch (FormatError & e) {
- throw Error("error parsing derivation '%1%': %2%", drvPath, e.msg());
- }
-}
-
-
-Derivation Store::derivationFromPath(const StorePath & drvPath)
-{
- ensurePath(drvPath);
- return readDerivation(drvPath);
-}
-
-
-Derivation Store::readDerivation(const StorePath & drvPath)
-{
- auto accessor = getFSAccessor();
- try {
- return parseDerivation(*this, accessor->readFile(printStorePath(drvPath)), Derivation::nameFromPath(drvPath));
- } catch (FormatError & e) {
- throw Error("error parsing derivation '%s': %s", printStorePath(drvPath), e.msg());
- }
-}
-
-
static void printString(string & res, std::string_view s)
{
char buf[s.size() * 2 + 2];
@@ -322,17 +309,19 @@ string Derivation::unparse(const Store & store, bool maskOutputs,
for (auto & i : outputs) {
if (first) first = false; else s += ',';
s += '('; printUnquotedString(s, i.first);
- s += ','; printUnquotedString(s, maskOutputs ? "" : store.printStorePath(i.second.path(store, name)));
std::visit(overloaded {
[&](DerivationOutputInputAddressed doi) {
+ s += ','; printUnquotedString(s, maskOutputs ? "" : store.printStorePath(doi.path));
s += ','; printUnquotedString(s, "");
s += ','; printUnquotedString(s, "");
},
[&](DerivationOutputCAFixed dof) {
+ s += ','; printUnquotedString(s, maskOutputs ? "" : store.printStorePath(dof.path(store, name, i.first)));
s += ','; printUnquotedString(s, dof.hash.printMethodAlgo());
s += ','; printUnquotedString(s, dof.hash.hash.to_string(Base16, false));
},
[&](DerivationOutputCAFloating dof) {
+ s += ','; printUnquotedString(s, "");
s += ','; printUnquotedString(s, makeFileIngestionPrefix(dof.method) + printHashType(dof.hashType));
s += ','; printUnquotedString(s, "");
},
@@ -388,6 +377,16 @@ bool isDerivation(const string & fileName)
}
+std::string outputPathName(std::string_view drvName, std::string_view outputName) {
+ std::string res { drvName };
+ if (outputName != "out") {
+ res += "-";
+ res += outputName;
+ }
+ return res;
+}
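/* A tiny self-check, not from this diff, of the naming rule above
   (hypothetical asserts, not in the tree):
     assert(outputPathName("hello", "out") == "hello");
     assert(outputPathName("hello", "dev") == "hello-dev");
   This name feeds both the CA output paths and the fallback scratch paths. */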
+
+
DerivationType BasicDerivation::type() const
{
std::set<std::string_view> inputAddressedOutputs, fixedCAOutputs, floatingCAOutputs;
@@ -480,12 +479,12 @@ DrvHashModulo hashDerivationModulo(Store & store, const Derivation & drv, bool m
throw Error("Regular input-addressed derivations are not yet allowed to depend on CA derivations");
case DerivationType::CAFixed: {
std::map<std::string, Hash> outputHashes;
- for (const auto & i : drv.outputsAndPaths(store)) {
- auto & dof = std::get<DerivationOutputCAFixed>(i.second.first.output);
+ for (const auto & i : drv.outputs) {
+ auto & dof = std::get<DerivationOutputCAFixed>(i.second.output);
auto hash = hashString(htSHA256, "fixed:out:"
+ dof.hash.printMethodAlgo() + ":"
+ dof.hash.hash.to_string(Base16, false) + ":"
- + store.printStorePath(i.second.second));
+ + store.printStorePath(dof.path(store, drv.name, i.first)));
outputHashes.insert_or_assign(i.first, std::move(hash));
}
return outputHashes;
@@ -536,21 +535,13 @@ bool wantOutput(const string & output, const std::set<string> & wanted)
}
-StorePathSet BasicDerivation::outputPaths(const Store & store) const
-{
- StorePathSet paths;
- for (auto & i : outputsAndPaths(store))
- paths.insert(i.second.second);
- return paths;
-}
-
static DerivationOutput readDerivationOutput(Source & in, const Store & store)
{
- auto path = store.parseStorePath(readString(in));
+ const auto pathS = readString(in);
const auto hashAlgo = readString(in);
const auto hash = readString(in);
- return parseDerivationOutput(store, std::move(path), hashAlgo, hash);
+ return parseDerivationOutput(store, pathS, hashAlgo, hash);
}
StringSet BasicDerivation::outputNames() const
@@ -561,23 +552,12 @@ StringSet BasicDerivation::outputNames() const
return names;
}
-DerivationOutputsAndPaths BasicDerivation::outputsAndPaths(const Store & store) const {
- DerivationOutputsAndPaths outsAndPaths;
- for (auto output : outputs)
- outsAndPaths.insert(std::make_pair(
- output.first,
- std::make_pair(output.second, output.second.path(store, name))
- )
- );
- return outsAndPaths;
-}
-
DerivationOutputsAndOptPaths BasicDerivation::outputsAndOptPaths(const Store & store) const {
DerivationOutputsAndOptPaths outsAndOptPaths;
for (auto output : outputs)
outsAndOptPaths.insert(std::make_pair(
output.first,
- std::make_pair(output.second, output.second.pathOpt(store, output.first))
+ std::make_pair(output.second, output.second.path(store, name, output.first))
)
);
return outsAndOptPaths;
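A hedged sketch of consuming the result, assuming `DerivationOutputsAndOptPaths` maps each output name to a pair of the `DerivationOutput` and an optional statically known store path (empty for floating CA outputs):

```c++
// Sketch only: walk a derivation's outputs, handling outputs whose store
// path is not known until the build has actually run.
for (auto & [outputName, outPair] : drv.outputsAndOptPaths(store)) {
    auto & [output, maybePath] = outPair;
    if (maybePath)
        printInfo("%s -> %s", outputName, store.printStorePath(*maybePath));
    else
        printInfo("%s -> (floating CA output, path not known yet)", outputName);
}
```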
@@ -622,22 +602,25 @@ Source & readDerivation(Source & in, const Store & store, BasicDerivation & drv,
void writeDerivation(Sink & out, const Store & store, const BasicDerivation & drv)
{
out << drv.outputs.size();
- for (auto & i : drv.outputsAndPaths(store)) {
- out << i.first
- << store.printStorePath(i.second.second);
+ for (auto & i : drv.outputs) {
+ out << i.first;
std::visit(overloaded {
[&](DerivationOutputInputAddressed doi) {
- out << "" << "";
+ out << store.printStorePath(doi.path)
+ << ""
+ << "";
},
[&](DerivationOutputCAFixed dof) {
- out << dof.hash.printMethodAlgo()
+ out << store.printStorePath(dof.path(store, drv.name, i.first))
+ << dof.hash.printMethodAlgo()
<< dof.hash.hash.to_string(Base16, false);
},
[&](DerivationOutputCAFloating dof) {
- out << (makeFileIngestionPrefix(dof.method) + printHashType(dof.hashType))
+ out << ""
+ << (makeFileIngestionPrefix(dof.method) + printHashType(dof.hashType))
<< "";
},
- }, i.second.first.output);
+ }, i.second.output);
}
writeStorePaths(store, out, drv.inputSrcs);
out << drv.platform << drv.builder << drv.args;
@@ -653,5 +636,12 @@ std::string hashPlaceholder(const std::string & outputName)
return "/" + hashString(htSHA256, "nix-output:" + outputName).to_string(Base32, false);
}
+std::string downstreamPlaceholder(const Store & store, const StorePath & drvPath, std::string_view outputName)
+{
+ auto drvNameWithExtension = drvPath.name();
+ auto drvName = drvNameWithExtension.substr(0, drvNameWithExtension.size() - 4);
+ auto clearText = "nix-upstream-output:" + std::string { drvPath.hashPart() } + ":" + outputPathName(drvName, outputName);
+ return "/" + hashString(htSHA256, clearText).to_string(Base32, false);
+}
}
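A minimal usage sketch for the new helper (the `.drv` path and output name are hypothetical): the result is a deterministic base-32 digest prefixed with `/`, stable for a given derivation path and output name, intended to stand in for a content-addressed output whose real path is not known yet.

```c++
// Sketch only: drvPath is assumed to be the StorePath of some foo.drv.
std::string placeholder = downstreamPlaceholder(*store, drvPath, "out");
// placeholder has the form "/<base-32 digest>" and is intended to be
// rewritten to the actual output path once that path becomes known.
```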

diff --git a/src/libstore/derivations.hh b/src/libstore/derivations.hh
index 3aae30ab2..0b5652685 100644
--- a/src/libstore/derivations.hh
+++ b/src/libstore/derivations.hh
@@ -27,6 +27,7 @@ struct DerivationOutputInputAddressed
struct DerivationOutputCAFixed
{
FixedOutputHash hash; /* hash used for expected hash computation */
+ StorePath path(const Store & store, std::string_view drvName, std::string_view outputName) const;
};
/* Floating-output derivations, whose output paths are content addressed, but
@@ -49,14 +50,8 @@ struct DerivationOutput
std::optional<HashType> hashAlgoOpt(const Store & store) const;
/* Note, when you use this function you should make sure that you're passing
the right derivation name. When in doubt, you should use the safer
- interface provided by BasicDerivation::outputsAndPaths */
- std::optional<StorePath> pathOpt(const Store & store, std::string_view drvName) const;
- /* DEPRECATED: Remove after CA drvs are fully implemented */
- StorePath path(const Store & store, std::string_view drvName) const {
- auto p = pathOpt(store, drvName);
- if (!p) throw UnimplementedError("floating content-addressed derivations are not yet implemented");
- return *p;
- }
+ interface provided by BasicDerivation::outputsAndOptPaths */
+ std::optional<StorePath> path(const Store & store, std::string_view drvName, std::string_view outputName) const;
};
typedef std::map<string, DerivationOutput> DerivationOutputs;
@@ -113,17 +108,12 @@ struct BasicDerivation
/* Return true iff this is a fixed-output derivation. */
DerivationType type() const;
- /* Return the output paths of a derivation. */
- StorePathSet outputPaths(const Store & store) const;
-
/* Return the output names of a derivation. */
StringSet outputNames() const;
/* Calculates the maps that contains all the DerivationOutputs, but
- augmented with knowledge of the Store paths they would be written into.
- The first one of these functions will be removed when the CA work is
- completed */
- DerivationOutputsAndPaths outputsAndPaths(const Store & store) const;
+ augmented with knowledge of the Store paths they would be written
+ into. */
DerivationOutputsAndOptPaths outputsAndOptPaths(const Store & store) const;
static std::string_view nameFromPath(const StorePath & storePath);
@@ -146,15 +136,22 @@ class Store;
enum RepairFlag : bool { NoRepair = false, Repair = true };
/* Write a derivation to the Nix store, and return its path. */
-StorePath writeDerivation(ref<Store> store,
+StorePath writeDerivation(Store & store,
const Derivation & drv, RepairFlag repair = NoRepair);
/* Read a derivation from a file. */
-Derivation readDerivation(const Store & store, const Path & drvPath, std::string_view name);
+Derivation parseDerivation(const Store & store, std::string && s, std::string_view name);
// FIXME: remove
bool isDerivation(const string & fileName);
+/* Calculate the name that will be used for the store path for this
+ output.
+
+ This is usually <drv-name>-<output-name>, but is just <drv-name> when
+ the output name is "out". */
+std::string outputPathName(std::string_view drvName, std::string_view outputName);
+
// known CA drv's output hashes, currently just for fixed-output derivations
// whose output hashes are always known since they are fixed up-front.
typedef std::map<std::string, Hash> CaOutputHashes;
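To make the `outputPathName` rule above concrete, a couple of illustrative calls (the derivation name is hypothetical):

```c++
outputPathName("hello-2.10", "out"); // -> "hello-2.10"
outputPathName("hello-2.10", "dev"); // -> "hello-2.10-dev"
```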
@@ -202,6 +199,21 @@ struct Sink;
Source & readDerivation(Source & in, const Store & store, BasicDerivation & drv, std::string_view name);
void writeDerivation(Sink & out, const Store & store, const BasicDerivation & drv);
+/* This creates an opaque and almost certainly unique string
+ deterministically from the output name.
+
+ It is used as a placeholder to allow derivations to refer to their
+ own outputs without needing to use the hash of a derivation in
+ itself, making the hash near-impossible to calculate. */
std::string hashPlaceholder(const std::string & outputName);
+/* This creates an opaque and almost certainly unique string
+ deterministically from a derivation path and output name.
+
+ It is used as a placeholder to allow derivations to refer to
+ content-addressed paths whose content --- and thus the paths
+ themselves --- isn't yet known. This occurs when a derivation has a
+ dependency which is a CA derivation. */
+std::string downstreamPlaceholder(const Store & store, const StorePath & drvPath, std::string_view outputName);
+
}
diff --git a/src/libstore/dummy-store.cc b/src/libstore/dummy-store.cc
new file mode 100644
index 000000000..49641c2ac
--- /dev/null
+++ b/src/libstore/dummy-store.cc
@@ -0,0 +1,68 @@
+#include "store-api.hh"
+#include "callback.hh"
+
+namespace nix {
+
+struct DummyStoreConfig : virtual StoreConfig {
+ using StoreConfig::StoreConfig;
+
+ const std::string name() override { return "Dummy Store"; }
+};
+
+struct DummyStore : public Store, public virtual DummyStoreConfig
+{
+ DummyStore(const std::string scheme, const std::string uri, const Params & params)
+ : DummyStore(params)
+ { }
+
+ DummyStore(const Params & params)
+ : StoreConfig(params)
+ , Store(params)
+ {
+ }
+
+ string getUri() override
+ {
+ return *uriSchemes().begin();
+ }
+
+ void queryPathInfoUncached(const StorePath & path,
+ Callback<std::shared_ptr<const ValidPathInfo>> callback) noexcept override
+ {
+ callback(nullptr);
+ }
+
+ static std::set<std::string> uriSchemes() {
+ return {"dummy"};
+ }
+
+ std::optional<StorePath> queryPathFromHashPart(const std::string & hashPart) override
+ { unsupported("queryPathFromHashPart"); }
+
+ void addToStore(const ValidPathInfo & info, Source & source,
+ RepairFlag repair, CheckSigsFlag checkSigs) override
+ { unsupported("addToStore"); }
+
+ StorePath addToStore(const string & name, const Path & srcPath,
+ FileIngestionMethod method, HashType hashAlgo,
+ PathFilter & filter, RepairFlag repair) override
+ { unsupported("addToStore"); }
+
+ StorePath addTextToStore(const string & name, const string & s,
+ const StorePathSet & references, RepairFlag repair) override
+ { unsupported("addTextToStore"); }
+
+ void narFromPath(const StorePath & path, Sink & sink) override
+ { unsupported("narFromPath"); }
+
+ void ensurePath(const StorePath & path) override
+ { unsupported("ensurePath"); }
+
+ BuildResult buildDerivation(const StorePath & drvPath, const BasicDerivation & drv,
+ BuildMode buildMode) override
+ { unsupported("buildDerivation"); }
+};
+
+static RegisterStoreImplementation<DummyStore, DummyStoreConfig> regStore;
+
+}
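A hedged usage sketch, assuming the usual `openStore` entry point dispatches on the registered `uriSchemes()`: opening `dummy://` should produce this store, which reports queried paths as missing and throws on every operation it marks unsupported. That makes it handy for evaluation-only invocations that must not touch a real store.

```c++
#include "store-api.hh"

// Sketch only: obtain the dummy store through its registered URI scheme.
auto store = nix::openStore("dummy://");
// Path-info lookups report paths as missing; mutations such as
// ensurePath() or buildDerivation() raise an "unsupported" error.
```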
diff --git a/src/libstore/filetransfer.cc b/src/libstore/filetransfer.cc
index 4149f8155..6241b5e00 100644
--- a/src/libstore/filetransfer.cc
+++ b/src/libstore/filetransfer.cc
@@ -5,6 +5,7 @@
#include "s3.hh"
#include "compression.hh"
#include "finally.hh"
+#include "callback.hh"
#ifdef ENABLE_S3
#include <aws/core/client/ClientConfiguration.h>
diff --git a/src/libstore/filetransfer.hh b/src/libstore/filetransfer.hh
index 25ade0add..0d608c8d8 100644
--- a/src/libstore/filetransfer.hh
+++ b/src/libstore/filetransfer.hh
@@ -17,15 +17,30 @@ struct FileTransferSettings : Config
Setting<std::string> userAgentSuffix{this, "", "user-agent-suffix",
"String appended to the user agent in HTTP requests."};
- Setting<size_t> httpConnections{this, 25, "http-connections",
- "Number of parallel HTTP connections.",
+ Setting<size_t> httpConnections{
+ this, 25, "http-connections",
+ R"(
+ The maximum number of parallel TCP connections used to fetch
+ files from binary caches and by other downloads. It defaults
+ to 25. 0 means no limit.
+ )",
{"binary-caches-parallel-connections"}};
- Setting<unsigned long> connectTimeout{this, 0, "connect-timeout",
- "Timeout for connecting to servers during downloads. 0 means use curl's builtin default."};
-
- Setting<unsigned long> stalledDownloadTimeout{this, 300, "stalled-download-timeout",
- "Timeout (in seconds) for receiving data from servers during download. Nix cancels idle downloads after this timeout's duration."};
+ Setting<unsigned long> connectTimeout{
+ this, 0, "connect-timeout",
+ R"(
+ The timeout (in seconds) for establishing connections in the
+ binary cache substituter. It corresponds to `curl`’s
+ `--connect-timeout` option.
+ )"};
+
+ Setting<unsigned long> stalledDownloadTimeout{
+ this, 300, "stalled-download-timeout",
+ R"(
+ The timeout (in seconds) for receiving data from servers
+ during download. Nix cancels idle downloads after this
+ timeout's duration.
+ )"};
Setting<unsigned int> tries{this, 5, "download-attempts",
"How often Nix will attempt to download a file before giving up."};
diff --git a/src/libstore/gc.cc b/src/libstore/gc.cc
index e74382ed2..08b53c702 100644
--- a/src/libstore/gc.cc
+++ b/src/libstore/gc.cc
@@ -85,8 +85,7 @@ void LocalStore::addIndirectRoot(const Path & path)
}
-Path LocalFSStore::addPermRoot(const StorePath & storePath,
- const Path & _gcRoot, bool indirect, bool allowOutsideRootsDir)
+Path LocalFSStore::addPermRoot(const StorePath & storePath, const Path & _gcRoot)
{
Path gcRoot(canonPath(_gcRoot));
@@ -95,47 +94,12 @@ Path LocalFSStore::addPermRoot(const StorePath & storePath,
"creating a garbage collector root (%1%) in the Nix store is forbidden "
"(are you running nix-build inside the store?)", gcRoot);
- if (indirect) {
- /* Don't clobber the link if it already exists and doesn't
- point to the Nix store. */
- if (pathExists(gcRoot) && (!isLink(gcRoot) || !isInStore(readLink(gcRoot))))
- throw Error("cannot create symlink '%1%'; already exists", gcRoot);
- makeSymlink(gcRoot, printStorePath(storePath));
- addIndirectRoot(gcRoot);
- }
-
- else {
- if (!allowOutsideRootsDir) {
- Path rootsDir = canonPath((format("%1%/%2%") % stateDir % gcRootsDir).str());
-
- if (string(gcRoot, 0, rootsDir.size() + 1) != rootsDir + "/")
- throw Error(
- "path '%1%' is not a valid garbage collector root; "
- "it's not in the directory '%2%'",
- gcRoot, rootsDir);
- }
-
- if (baseNameOf(gcRoot) == std::string(storePath.to_string()))
- writeFile(gcRoot, "");
- else
- makeSymlink(gcRoot, printStorePath(storePath));
- }
-
- /* Check that the root can be found by the garbage collector.
- !!! This can be very slow on machines that have many roots.
- Instead of reading all the roots, it would be more efficient to
- check if the root is in a directory in or linked from the
- gcroots directory. */
- if (settings.checkRootReachability) {
- auto roots = findRoots(false);
- if (roots[storePath].count(gcRoot) == 0)
- logWarning({
- .name = "GC root",
- .hint = hintfmt("warning: '%1%' is not in a directory where the garbage collector looks for roots; "
- "therefore, '%2%' might be removed by the garbage collector",
- gcRoot, printStorePath(storePath))
- });
- }
+ /* Don't clobber the link if it already exists and doesn't
+ point to the Nix store. */
+ if (pathExists(gcRoot) && (!isLink(gcRoot) || !isInStore(readLink(gcRoot))))
+ throw Error("cannot create symlink '%1%'; already exists", gcRoot);
+ makeSymlink(gcRoot, printStorePath(storePath));
+ addIndirectRoot(gcRoot);
/* Grab the global GC root, causing us to block while a GC is in
progress. This prevents the set of permanent roots from
@@ -610,9 +574,12 @@ bool LocalStore::canReachRoot(GCState & state, StorePathSet & visited, const Sto
/* If keep-derivations is set and this is a derivation, then
don't delete the derivation if any of the outputs are alive. */
if (state.gcKeepDerivations && path.isDerivation()) {
- for (auto & i : queryDerivationOutputs(path))
- if (isValidPath(i) && queryPathInfo(i)->deriver == path)
- incoming.insert(i);
+ for (auto & [name, maybeOutPath] : queryPartialDerivationOutputMap(path))
+ if (maybeOutPath &&
+ isValidPath(*maybeOutPath) &&
+ queryPathInfo(*maybeOutPath)->deriver == path
+ )
+ incoming.insert(*maybeOutPath);
}
/* If keep-outputs is set, then don't delete this path if there
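With the direct/indirect distinction removed, `addPermRoot` now always creates a symlink at the requested location and registers it as an indirect root. A minimal calling sketch (paths are hypothetical, and `store` is assumed to point at a `LocalFSStore`):

```c++
// Sketch only: register ./result as a permanent GC root for outPath,
// the way nix-build does for its result symlinks.
Path link = store->addPermRoot(outPath, absPath("result"));
// `link` is the canonicalised path of the symlink that now protects outPath.
```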
diff --git a/src/libstore/globals.cc b/src/libstore/globals.cc
index 683fa5196..0beb9b2b7 100644
--- a/src/libstore/globals.cc
+++ b/src/libstore/globals.cc
@@ -2,6 +2,7 @@
#include "util.hh"
#include "archive.hh"
#include "args.hh"
+#include "abstract-setting-to-json.hh"
#include <algorithm>
#include <map>
@@ -9,6 +10,8 @@
#include <dlfcn.h>
#include <sys/utsname.h>
+#include <nlohmann/json.hpp>
+
namespace nix {
@@ -160,11 +163,6 @@ template<> std::string BaseSetting<SandboxMode>::to_string() const
else abort();
}
-template<> void BaseSetting<SandboxMode>::toJSON(JSONPlaceholder & out)
-{
- AbstractSetting::toJSON(out);
-}
-
template<> void BaseSetting<SandboxMode>::convertToArg(Args & args, const std::string & category)
{
args.addFlag({
diff --git a/src/libstore/globals.hh b/src/libstore/globals.hh
index e3bb4cf84..8a2d3ff75 100644
--- a/src/libstore/globals.hh
+++ b/src/libstore/globals.hh
@@ -80,89 +80,209 @@ public:
Setting<bool> keepGoing{this, false, "keep-going",
"Whether to keep building derivations when another build fails."};
- Setting<bool> tryFallback{this, false, "fallback",
- "Whether to fall back to building when substitution fails.",
+ Setting<bool> tryFallback{
+ this, false, "fallback",
+ R"(
+ If set to `true`, Nix will fall back to building from source if a
+ binary substitute fails. This is equivalent to the `--fallback`
+ flag. The default is `false`.
+ )",
{"build-fallback"}};
/* Whether to show build log output in real time. */
bool verboseBuild = true;
Setting<size_t> logLines{this, 10, "log-lines",
- "If verbose-build is false, the number of lines of the tail of "
+ "If `verbose-build` is false, the number of lines of the tail of "
"the log to show if a build fails."};
- MaxBuildJobsSetting maxBuildJobs{this, 1, "max-jobs",
- "Maximum number of parallel build jobs. \"auto\" means use number of cores.",
+ MaxBuildJobsSetting maxBuildJobs{
+ this, 1, "max-jobs",
+ R"(
+ This option defines the maximum number of jobs that Nix will try to
+ build in parallel. The default is `1`. The special value `auto`
+ causes Nix to use the number of CPUs in your system. `0` is useful
+ when using remote builders to prevent any local builds (except for
+ `preferLocalBuild` derivation attribute which executes locally
+ regardless). It can be overridden using the `--max-jobs` (`-j`)
+ command line switch.
+ )",
{"build-max-jobs"}};
- Setting<unsigned int> buildCores{this, getDefaultCores(), "cores",
- "Number of CPU cores to utilize in parallel within a build, "
- "i.e. by passing this number to Make via '-j'. 0 means that the "
- "number of actual CPU cores on the local host ought to be "
- "auto-detected.", {"build-cores"}};
+ Setting<unsigned int> buildCores{
+ this, getDefaultCores(), "cores",
+ R"(
+ Sets the value of the `NIX_BUILD_CORES` environment variable in the
+ invocation of builders. Builders can use this variable at their
+ discretion to control the maximum amount of parallelism. For
+ instance, in Nixpkgs, if the derivation attribute
+ `enableParallelBuilding` is set to `true`, the builder passes the
+ `-jN` flag to GNU Make. It can be overridden using the `--cores`
+ command line switch and defaults to `1`. The value `0` means that
+ the builder should use all available CPU cores in the system.
+ )",
+ {"build-cores"}};
/* Read-only mode. Don't copy stuff to the store, don't change
the database. */
bool readOnlyMode = false;
- Setting<std::string> thisSystem{this, SYSTEM, "system",
- "The canonical Nix system name."};
-
- Setting<time_t> maxSilentTime{this, 0, "max-silent-time",
- "The maximum time in seconds that a builer can go without "
- "producing any output on stdout/stderr before it is killed. "
- "0 means infinity.",
+ Setting<std::string> thisSystem{
+ this, SYSTEM, "system",
+ R"(
+ This option specifies the canonical Nix system name of the current
+ installation, such as `i686-linux` or `x86_64-darwin`. Nix can only
+ build derivations whose `system` attribute equals the value
+ specified here. In general, it never makes sense to modify this
+ value from its default, since you can use it to ‘lie’ about the
+ platform you are building on (e.g., perform a Mac OS build on a
+ Linux machine; the result would obviously be wrong). It only makes
+ sense if the Nix binaries can run on multiple platforms, e.g.,
+ ‘universal binaries’ that run on `x86_64-linux` and `i686-linux`.
+
+ It defaults to the canonical Nix system name detected by `configure`
+ at build time.
+ )"};
+
+ Setting<time_t> maxSilentTime{
+ this, 0, "max-silent-time",
+ R"(
+ This option defines the maximum number of seconds that a builder can
+ go without producing any data on standard output or standard error.
+ This is useful (for instance in an automated build system) to catch
+ builds that are stuck in an infinite loop, or to catch remote builds
+ that are hanging due to network problems. It can be overridden using
+ the `--max-silent-time` command line switch.
+
+ The value `0` means that there is no timeout. This is also the
+ default.
+ )",
{"build-max-silent-time"}};
- Setting<time_t> buildTimeout{this, 0, "timeout",
- "The maximum duration in seconds that a builder can run. "
- "0 means infinity.", {"build-timeout"}};
+ Setting<time_t> buildTimeout{
+ this, 0, "timeout",
+ R"(
+ This option defines the maximum number of seconds that a builder can
+ run. This is useful (for instance in an automated build system) to
+ catch builds that are stuck in an infinite loop but keep writing to
+ their standard output or standard error. It can be overridden using
+ the `--timeout` command line switch.
+
+ The value `0` means that there is no timeout. This is also the
+ default.
+ )",
+ {"build-timeout"}};
PathSetting buildHook{this, true, nixLibexecDir + "/nix/build-remote", "build-hook",
"The path of the helper program that executes builds to remote machines."};
- Setting<std::string> builders{this, "@" + nixConfDir + "/machines", "builders",
- "A semicolon-separated list of build machines, in the format of nix.machines."};
-
- Setting<bool> buildersUseSubstitutes{this, false, "builders-use-substitutes",
- "Whether build machines should use their own substitutes for obtaining "
- "build dependencies if possible, rather than waiting for this host to "
- "upload them."};
+ Setting<std::string> builders{
+ this, "@" + nixConfDir + "/machines", "builders",
+ "A semicolon-separated list of build machines, in the format of `nix.machines`."};
+
+ Setting<bool> buildersUseSubstitutes{
+ this, false, "builders-use-substitutes",
+ R"(
+ If set to `true`, Nix will instruct remote build machines to use
+ their own binary substitutes if available. In practical terms, this
+ means that remote hosts will fetch as many build dependencies as
+ possible from their own substitutes (e.g. from `cache.nixos.org`),
+ instead of waiting for this host to upload them all. This can
+ drastically reduce build times if the network connection between
+ this computer and the remote build host is slow.
+ )"};
Setting<off_t> reservedSize{this, 8 * 1024 * 1024, "gc-reserved-space",
"Amount of reserved disk space for the garbage collector."};
- Setting<bool> fsyncMetadata{this, true, "fsync-metadata",
- "Whether SQLite should use fsync()."};
+ Setting<bool> fsyncMetadata{
+ this, true, "fsync-metadata",
+ R"(
+ If set to `true`, changes to the Nix store metadata (in
+ `/nix/var/nix/db`) are synchronously flushed to disk. This improves
+ robustness in case of system crashes, but reduces performance. The
+ default is `true`.
+ )"};
Setting<bool> useSQLiteWAL{this, !isWSL1(), "use-sqlite-wal",
"Whether SQLite should use WAL mode."};
Setting<bool> syncBeforeRegistering{this, false, "sync-before-registering",
- "Whether to call sync() before registering a path as valid."};
-
- Setting<bool> useSubstitutes{this, true, "substitute",
- "Whether to use substitutes.",
+ "Whether to call `sync()` before registering a path as valid."};
+
+ Setting<bool> useSubstitutes{
+ this, true, "substitute",
+ R"(
+ If set to `true` (default), Nix will use binary substitutes if
+ available. This option can be disabled to force building from
+ source.
+ )",
{"build-use-substitutes"}};
- Setting<std::string> buildUsersGroup{this, "", "build-users-group",
- "The Unix group that contains the build users."};
+ Setting<std::string> buildUsersGroup{
+ this, "", "build-users-group",
+ R"(
+ This option specifies the Unix group containing the Nix build user
+ accounts. In multi-user Nix installations, builds should not be
+ performed by the Nix account since that would allow users to
+ arbitrarily modify the Nix store and database by supplying specially
+ crafted builders; and they cannot be performed by the calling user
+ since that would allow him/her to influence the build result.
+
+ Therefore, if this option is non-empty and specifies a valid group,
+ builds will be performed under the user accounts that are a member
+ of the group specified here (as listed in `/etc/group`). Those user
+ accounts should not be used for any other purpose\!
+
+ Nix will never run two builds under the same user account at the
+ same time. This is to prevent an obvious security hole: a malicious
+ user writing a Nix expression that modifies the build result of a
+ legitimate Nix expression being built by another user. Therefore it
+ is good to have as many Nix build user accounts as you can spare.
+ (Remember: uids are cheap.)
+
+ The build users should have permission to create files in the Nix
+ store, but not delete them. Therefore, `/nix/store` should be owned
+ by the Nix account, its group should be the group specified here,
+ and its mode should be `1775`.
+
+ If the build users group is empty, builds will be performed under
+ the uid of the Nix process (that is, the uid of the caller if
+ `NIX_REMOTE` is empty, the uid under which the Nix daemon runs if
+ `NIX_REMOTE` is `daemon`). Obviously, this should not be used in
+ multi-user settings with untrusted users.
+ )"};
Setting<bool> impersonateLinux26{this, false, "impersonate-linux-26",
"Whether to impersonate a Linux 2.6 machine on newer kernels.",
{"build-impersonate-linux-26"}};
- Setting<bool> keepLog{this, true, "keep-build-log",
- "Whether to store build logs.",
+ Setting<bool> keepLog{
+ this, true, "keep-build-log",
+ R"(
+ If set to `true` (the default), Nix will write the build log of a
+ derivation (i.e. the standard output and error of its builder) to
+ the directory `/nix/var/log/nix/drvs`. The build log can be
+ retrieved using the command `nix-store -l path`.
+ )",
{"build-keep-log"}};
- Setting<bool> compressLog{this, true, "compress-build-log",
- "Whether to compress logs.",
+ Setting<bool> compressLog{
+ this, true, "compress-build-log",
+ R"(
+ If set to `true` (the default), build logs written to
+ `/nix/var/log/nix/drvs` will be compressed on the fly using bzip2.
+ Otherwise, they will not be compressed.
+ )",
{"build-compress-log"}};
- Setting<unsigned long> maxLogSize{this, 0, "max-build-log-size",
- "Maximum number of bytes a builder can write to stdout/stderr "
- "before being killed (0 means no limit).",
+ Setting<unsigned long> maxLogSize{
+ this, 0, "max-build-log-size",
+ R"(
+ This option defines the maximum number of bytes that a builder can
+ write to its stdout/stderr. If the builder exceeds this limit, it’s
+ killed. A value of `0` (the default) means that there is no limit.
+ )",
{"build-max-log-size"}};
/* When buildRepeat > 0 and verboseBuild == true, whether to print
@@ -173,57 +293,156 @@ public:
Setting<unsigned int> pollInterval{this, 5, "build-poll-interval",
"How often (in seconds) to poll for locks."};
- Setting<bool> checkRootReachability{this, false, "gc-check-reachability",
- "Whether to check if new GC roots can in fact be found by the "
- "garbage collector."};
-
- Setting<bool> gcKeepOutputs{this, false, "keep-outputs",
- "Whether the garbage collector should keep outputs of live derivations.",
+ Setting<bool> gcKeepOutputs{
+ this, false, "keep-outputs",
+ R"(
+ If `true`, the garbage collector will keep the outputs of
+ non-garbage derivations. If `false` (default), outputs will be
+ deleted unless they are GC roots themselves (or reachable from other
+ roots).
+
+ In general, outputs must be registered as roots separately. However,
+ even if the output of a derivation is registered as a root, the
+ collector will still delete store paths that are used only at build
+ time (e.g., the C compiler, or source tarballs downloaded from the
+ network). To prevent it from doing so, set this option to `true`.
+ )",
{"gc-keep-outputs"}};
- Setting<bool> gcKeepDerivations{this, true, "keep-derivations",
- "Whether the garbage collector should keep derivers of live paths.",
+ Setting<bool> gcKeepDerivations{
+ this, true, "keep-derivations",
+ R"(
+ If `true` (default), the garbage collector will keep the derivations
+ from which non-garbage store paths were built. If `false`, they will
+ be deleted unless explicitly registered as a root (or reachable from
+ other roots).
+
+ Keeping derivations around is useful for querying and traceability
+ (e.g., it allows you to ask with what dependencies or options a
+ store path was built), so by default this option is on. Turn it off
+ to save a bit of disk space (or a lot if `keep-outputs` is also
+ turned on).
+ )",
{"gc-keep-derivations"}};
- Setting<bool> autoOptimiseStore{this, false, "auto-optimise-store",
- "Whether to automatically replace files with identical contents with hard links."};
-
- Setting<bool> envKeepDerivations{this, false, "keep-env-derivations",
- "Whether to add derivations as a dependency of user environments "
- "(to prevent them from being GCed).",
+ Setting<bool> autoOptimiseStore{
+ this, false, "auto-optimise-store",
+ R"(
+ If set to `true`, Nix automatically detects files in the store
+ that have identical contents, and replaces them with hard links to
+ a single copy. This saves disk space. If set to `false` (the
+ default), you can still run `nix-store --optimise` to get rid of
+ duplicate files.
+ )"};
+
+ Setting<bool> envKeepDerivations{
+ this, false, "keep-env-derivations",
+ R"(
+ If `false` (default), derivations are not stored in Nix user
+ environments. That is, the derivations of any build-time-only
+ dependencies may be garbage-collected.
+
+ If `true`, when you add a Nix derivation to a user environment, the
+ path of the derivation is stored in the user environment. Thus, the
+ derivation will not be garbage-collected until the user environment
+ generation is deleted (`nix-env --delete-generations`). To prevent
+ build-time-only dependencies from being collected, you should also
+ turn on `keep-outputs`.
+
+ The difference between this option and `keep-derivations` is that
+ this one is “sticky”: it applies to any user environment created
+ while this option was enabled, while `keep-derivations` only applies
+ at the moment the garbage collector is run.
+ )",
{"env-keep-derivations"}};
/* Whether to lock the Nix client and worker to the same CPU. */
bool lockCPU;
- Setting<SandboxMode> sandboxMode{this,
+ Setting<SandboxMode> sandboxMode{
+ this,
#if __linux__
smEnabled
#else
smDisabled
#endif
, "sandbox",
- "Whether to enable sandboxed builds. Can be \"true\", \"false\" or \"relaxed\".",
+ R"(
+ If set to `true`, builds will be performed in a *sandboxed
+ environment*, i.e., they’re isolated from the normal file system
+ hierarchy and will only see their dependencies in the Nix store,
+ the temporary build directory, private versions of `/proc`,
+ `/dev`, `/dev/shm` and `/dev/pts` (on Linux), and the paths
+ configured with the `sandbox-paths` option. This is useful to
+ prevent undeclared dependencies on files in directories such as
+ `/usr/bin`. In addition, on Linux, builds run in private PID,
+ mount, network, IPC and UTS namespaces to isolate them from other
+ processes in the system (except that fixed-output derivations do
+ not run in private network namespace to ensure they can access the
+ network).
+
+ Currently, sandboxing only works on Linux and macOS. The use of a
+ sandbox requires that Nix is run as root (so you should use the
+ “build users” feature to perform the actual builds under different
+ users than root).
+
+ If this option is set to `relaxed`, then fixed-output derivations
+ and derivations that have the `__noChroot` attribute set to `true`
+ do not run in sandboxes.
+
+ The default is `true` on Linux and `false` on all other platforms.
+ )",
{"build-use-chroot", "build-use-sandbox"}};
- Setting<PathSet> sandboxPaths{this, {}, "sandbox-paths",
- "The paths to make available inside the build sandbox.",
+ Setting<PathSet> sandboxPaths{
+ this, {}, "sandbox-paths",
+ R"(
+ A list of paths bind-mounted into Nix sandbox environments. You can
+ use the syntax `target=source` to mount a path in a different
+ location in the sandbox; for instance, `/bin=/nix-bin` will mount
+ the path `/nix-bin` as `/bin` inside the sandbox. If *source* is
+ followed by `?`, then it is not an error if *source* does not exist;
+ for example, `/dev/nvidiactl?` specifies that `/dev/nvidiactl` will
+ only be mounted in the sandbox if it exists in the host filesystem.
+
+ Depending on how Nix was built, the default value for this option
+ may be empty or provide `/bin/sh` as a bind-mount of `bash`.
+ )",
{"build-chroot-dirs", "build-sandbox-paths"}};
Setting<bool> sandboxFallback{this, true, "sandbox-fallback",
"Whether to disable sandboxing when the kernel doesn't allow it."};
- Setting<PathSet> extraSandboxPaths{this, {}, "extra-sandbox-paths",
- "Additional paths to make available inside the build sandbox.",
+ Setting<PathSet> extraSandboxPaths{
+ this, {}, "extra-sandbox-paths",
+ R"(
+ A list of additional paths appended to `sandbox-paths`. Useful if
+ you want to extend its default value.
+ )",
{"build-extra-chroot-dirs", "build-extra-sandbox-paths"}};
- Setting<size_t> buildRepeat{this, 0, "repeat",
- "The number of times to repeat a build in order to verify determinism.",
+ Setting<size_t> buildRepeat{
+ this, 0, "repeat",
+ R"(
+ How many times to repeat builds to check whether they are
+ deterministic. The default value is 0. If the value is non-zero,
+ every build is repeated the specified number of times. If the
+ contents of any of the runs differs from the previous ones and
+ `enforce-determinism` is true, the build is rejected and the
+ resulting store paths are not registered as “valid” in Nix’s
+ database.
+ )",
{"build-repeat"}};
#if __linux__
- Setting<std::string> sandboxShmSize{this, "50%", "sandbox-dev-shm-size",
- "The size of /dev/shm in the build sandbox."};
+ Setting<std::string> sandboxShmSize{
+ this, "50%", "sandbox-dev-shm-size",
+ R"(
+ This option determines the maximum size of the `tmpfs` filesystem
+ mounted on `/dev/shm` in Linux sandboxes. For the format, see the
+ description of the `size` option of `tmpfs` in `mount(8)`. The default
+ is `50%`.
+ )"};
Setting<Path> sandboxBuildDir{this, "/build", "sandbox-build-dir",
"The build directory inside the sandbox."};
@@ -237,121 +456,411 @@ public:
"Whether to log Darwin sandbox access violations to the system log."};
#endif
- Setting<bool> runDiffHook{this, false, "run-diff-hook",
- "Whether to run the program specified by the diff-hook setting "
- "repeated builds produce a different result. Typically used to "
- "plug in diffoscope."};
+ Setting<bool> runDiffHook{
+ this, false, "run-diff-hook",
+ R"(
+ If true, enable the execution of the `diff-hook` program.
- PathSetting diffHook{this, true, "", "diff-hook",
- "A program that prints out the differences between the two paths "
- "specified on its command line."};
+ When using the Nix daemon, `run-diff-hook` must be set in the
+ `nix.conf` configuration file, and cannot be passed at the command
+ line.
+ )"};
- Setting<bool> enforceDeterminism{this, true, "enforce-determinism",
- "Whether to fail if repeated builds produce different output."};
+ PathSetting diffHook{
+ this, true, "", "diff-hook",
+ R"(
+ Absolute path to an executable capable of diffing build
+ results. The hook is executed if `run-diff-hook` is true, and the
+ output of a build is known to not be the same. This program is not
+ executed to determine if two results are the same.
- Setting<Strings> trustedPublicKeys{this,
- {"cache.nixos.org-1:6NCHdD59X431o0gWypbMrAURkbJ16ZPMQFGspcDShjY="},
- "trusted-public-keys",
- "Trusted public keys for secure substitution.",
- {"binary-cache-public-keys"}};
+ The diff hook is executed by the same user and group who ran the
+ build. However, the diff hook does not have write access to the
+ store path just built.
+
+ The diff hook program receives the following parameters:
+
+ 1. A path to the previous build's results
+
+ 2. A path to the current build's results
- Setting<Strings> secretKeyFiles{this, {}, "secret-key-files",
- "Secret keys with which to sign local builds."};
+ 3. The path to the build's derivation
- Setting<unsigned int> tarballTtl{this, 60 * 60, "tarball-ttl",
- "How long downloaded files are considered up-to-date."};
+ 4. The path to the build's scratch directory. This directory will
+ exist only if the build was run with `--keep-failed`.
- Setting<bool> requireSigs{this, true, "require-sigs",
- "Whether to check that any non-content-addressed path added to the "
- "Nix store has a valid signature (that is, one signed using a key "
- "listed in 'trusted-public-keys'."};
+ The stderr and stdout output from the diff hook will not be
+ displayed to the user. Instead, it will print to the nix-daemon's
+ log.
- Setting<StringSet> extraPlatforms{this,
+ When using the Nix daemon, `diff-hook` must be set in the `nix.conf`
+ configuration file, and cannot be passed at the command line.
+ )"};
+
+ Setting<bool> enforceDeterminism{
+ this, true, "enforce-determinism",
+ "Whether to fail if repeated builds produce different output. See `repeat`."};
+
+ Setting<Strings> trustedPublicKeys{
+ this,
+ {"cache.nixos.org-1:6NCHdD59X431o0gWypbMrAURkbJ16ZPMQFGspcDShjY="},
+ "trusted-public-keys",
+ R"(
+ A whitespace-separated list of public keys. When paths are copied
+ from another Nix store (such as a binary cache), they must be
+ signed with one of these keys. For example:
+ `cache.nixos.org-1:6NCHdD59X431o0gWypbMrAURkbJ16ZPMQFGspcDShjY=
+ hydra.nixos.org-1:CNHJZBh9K4tP3EKF6FkkgeVYsS3ohTl+oS0Qa8bezVs=`.
+ )",
+ {"binary-cache-public-keys"}};
+
+ Setting<Strings> secretKeyFiles{
+ this, {}, "secret-key-files",
+ R"(
+ A whitespace-separated list of files containing secret (private)
+ keys. These are used to sign locally-built paths. They can be
+ generated using `nix-store --generate-binary-cache-key`. The
+ corresponding public key can be distributed to other users, who
+ can add it to `trusted-public-keys` in their `nix.conf`.
+ )"};
+
+ Setting<unsigned int> tarballTtl{
+ this, 60 * 60, "tarball-ttl",
+ R"(
+ The number of seconds a downloaded tarball is considered fresh. If
+ the cached tarball is stale, Nix will check whether it is still up
+ to date using the ETag header. Nix will download a new version if
+ the ETag header is unsupported, or the cached ETag doesn't match.
+
+ Setting the TTL to `0` forces Nix to always check if the tarball is
+ up to date.
+
+ Nix caches tarballs in `$XDG_CACHE_HOME/nix/tarballs`.
+
+ Files fetched via `NIX_PATH`, `fetchGit`, `fetchMercurial`,
+ `fetchTarball`, and `fetchurl` respect this TTL.
+ )"};
+
+ Setting<bool> requireSigs{
+ this, true, "require-sigs",
+ R"(
+ If set to `true` (the default), any non-content-addressed path added
+ or copied to the Nix store (e.g. when substituting from a binary
+ cache) must have a valid signature, that is, be signed using one of
+ the keys listed in `trusted-public-keys` or `secret-key-files`. Set
+ to `false` to disable signature checking.
+ )"};
+
+ Setting<StringSet> extraPlatforms{
+ this,
std::string{SYSTEM} == "x86_64-linux" && !isWSL1() ? StringSet{"i686-linux"} : StringSet{},
"extra-platforms",
- "Additional platforms that can be built on the local system. "
- "These may be supported natively (e.g. armv7 on some aarch64 CPUs "
- "or using hacks like qemu-user."};
-
- Setting<StringSet> systemFeatures{this, getDefaultSystemFeatures(),
+ R"(
+ Platforms other than the native one which this machine is capable of
+ building for. This can be useful for supporting additional
+ architectures on compatible machines: i686-linux can be built on
+ x86\_64-linux machines (and the default for this setting reflects
+ this); armv7 is backwards-compatible with armv6 and armv5tel; some
+ aarch64 machines can also natively run 32-bit ARM code; and
+ qemu-user may be used to support non-native platforms (though this
+ may be slow and buggy). Most values for this are not enabled by
+ default because build systems will often misdetect the target
+ platform and generate incompatible code, so you may wish to
+ cross-check the results of using this option against proper
+ natively-built versions of your derivations.
+ )"};
+
+ Setting<StringSet> systemFeatures{
+ this, getDefaultSystemFeatures(),
"system-features",
- "Optional features that this system implements (like \"kvm\")."};
+ R"(
+ A set of system “features” supported by this machine, e.g. `kvm`.
+ Derivations can express a dependency on such features through the
+ derivation attribute `requiredSystemFeatures`. For example, the
+ attribute
+
+ requiredSystemFeatures = [ "kvm" ];
+
+ ensures that the derivation can only be built on a machine with the
+ `kvm` feature.
- Setting<Strings> substituters{this,
+ This setting by default includes `kvm` if `/dev/kvm` is accessible,
+ and the pseudo-features `nixos-test`, `benchmark` and `big-parallel`
+ that are used in Nixpkgs to route builds to specific machines.
+ )"};
+
+ Setting<Strings> substituters{
+ this,
nixStore == "/nix/store" ? Strings{"https://cache.nixos.org/"} : Strings(),
"substituters",
- "The URIs of substituters (such as https://cache.nixos.org/).",
+ R"(
+ A list of URLs of substituters, separated by whitespace. The default
+ is `https://cache.nixos.org`.
+ )",
{"binary-caches"}};
// FIXME: provide a way to add to option values.
- Setting<Strings> extraSubstituters{this, {}, "extra-substituters",
- "Additional URIs of substituters.",
+ Setting<Strings> extraSubstituters{
+ this, {}, "extra-substituters",
+ R"(
+ Additional binary caches appended to those specified in
+ `substituters`. When used by unprivileged users, untrusted
+ substituters (i.e. those not listed in `trusted-substituters`) are
+ silently ignored.
+ )",
{"extra-binary-caches"}};
- Setting<StringSet> trustedSubstituters{this, {}, "trusted-substituters",
- "Disabled substituters that may be enabled via the substituters option by untrusted users.",
+ Setting<StringSet> trustedSubstituters{
+ this, {}, "trusted-substituters",
+ R"(
+ A list of URLs of substituters, separated by whitespace. These are
+ not used by default, but can be enabled by users of the Nix daemon
+ by specifying `--option substituters urls` on the command
+ line. Unprivileged users are only allowed to pass a subset of the
+ URLs listed in `substituters` and `trusted-substituters`.
+ )",
{"trusted-binary-caches"}};
- Setting<Strings> trustedUsers{this, {"root"}, "trusted-users",
- "Which users or groups are trusted to ask the daemon to do unsafe things."};
-
- Setting<unsigned int> ttlNegativeNarInfoCache{this, 3600, "narinfo-cache-negative-ttl",
- "The TTL in seconds for negative lookups in the disk cache i.e binary cache lookups that "
- "return an invalid path result"};
-
- Setting<unsigned int> ttlPositiveNarInfoCache{this, 30 * 24 * 3600, "narinfo-cache-positive-ttl",
- "The TTL in seconds for positive lookups in the disk cache i.e binary cache lookups that "
- "return a valid path result."};
+ Setting<Strings> trustedUsers{
+ this, {"root"}, "trusted-users",
+ R"(
+ A list of names of users (separated by whitespace) that have
+ additional rights when connecting to the Nix daemon, such as the
+ ability to specify additional binary caches, or to import unsigned
+ NARs. You can also specify groups by prefixing them with `@`; for
+ instance, `@wheel` means all users in the `wheel` group. The default
+ is `root`.
+
+ > **Warning**
+ >
+ > Adding a user to `trusted-users` is essentially equivalent to
+ > giving that user root access to the system. For example, the user
+ > can set `sandbox-paths` and thereby obtain read access to
+ > directories that are otherwise inaccessible to them.
+ )"};
+
+ Setting<unsigned int> ttlNegativeNarInfoCache{
+ this, 3600, "narinfo-cache-negative-ttl",
+ R"(
+ The TTL in seconds for negative lookups. If a store path is queried
+ from a substituter but was not found, there will be a negative
+ lookup cached in the local disk cache database for the specified
+ duration.
+ )"};
+
+ Setting<unsigned int> ttlPositiveNarInfoCache{
+ this, 30 * 24 * 3600, "narinfo-cache-positive-ttl",
+ R"(
+ The TTL in seconds for positive lookups. If a store path is queried
+ from a substituter, the result of the query will be cached in the
+ local disk cache database including some of the NAR metadata. The
+ default TTL is a month; setting a shorter TTL for positive lookups
+ can be useful for binary caches that have frequent garbage
+ collection, in which case having a more frequent cache invalidation
+ would prevent trying to pull the path again and failing with a hash
+ mismatch if the build isn't reproducible.
+ )"};
/* ?Who we trust to use the daemon in safe ways */
- Setting<Strings> allowedUsers{this, {"*"}, "allowed-users",
- "Which users or groups are allowed to connect to the daemon."};
+ Setting<Strings> allowedUsers{
+ this, {"*"}, "allowed-users",
+ R"(
+ A list of names of users (separated by whitespace) that are allowed
+ to connect to the Nix daemon. As with the `trusted-users` option,
+ you can specify groups by prefixing them with `@`. Also, you can
+ allow all users by specifying `*`. The default is `*`.
+
+ Note that trusted users are always allowed to connect.
+ )"};
Setting<bool> printMissing{this, true, "print-missing",
"Whether to print what paths need to be built or downloaded."};
- Setting<std::string> preBuildHook{this, "",
- "pre-build-hook",
- "A program to run just before a build to set derivation-specific build settings."};
+ Setting<std::string> preBuildHook{
+ this, "", "pre-build-hook",
+ R"(
+ If set, the path to a program that can set extra derivation-specific
+ settings for this system. This is used for settings that can't be
+ captured by the derivation model itself and are too variable between
+ different versions of the same system to be hard-coded into nix.
+
+ The hook is passed the derivation path and, if sandboxes are
+ enabled, the sandbox directory. It can then modify the sandbox and,
+ by printing a series of commands to its stdout, adjust various settings.
+ The currently recognized commands are:
+
+ - `extra-sandbox-paths`
+ Pass a list of files and directories to be included in the
+ sandbox for this build. One entry per line, terminated by an
+ empty line. Entries have the same format as `sandbox-paths`.
+ )"};
+
+ Setting<std::string> postBuildHook{
+ this, "", "post-build-hook",
+ R"(
+ Optional. The path to a program to execute after each build.
+
+ This option is only settable in the global `nix.conf`, or on the
+ command line by trusted users.
+
+ When using the nix-daemon, the daemon executes the hook as `root`.
+ If the nix-daemon is not involved, the hook runs as the user
+ executing the nix-build.
+
+ - The hook executes after an evaluation-time build.
+
+ - The hook does not execute on substituted paths.
+
+ - The hook's output always goes to the user's terminal.
+
+ - If the hook fails, the build succeeds but no further builds
+ execute.
- Setting<std::string> postBuildHook{this, "", "post-build-hook",
- "A program to run just after each successful build."};
+ - The hook executes synchronously, and blocks other builds from
+ progressing while it runs.
- Setting<std::string> netrcFile{this, fmt("%s/%s", nixConfDir, "netrc"), "netrc-file",
- "Path to the netrc file used to obtain usernames/passwords for downloads."};
+ The program executes with no arguments. The program's environment
+ contains the following environment variables:
+
+ - `DRV_PATH`
+ The derivation for the built paths.
+
+ Example:
+ `/nix/store/5nihn1a7pa8b25l9zafqaqibznlvvp3f-bash-4.4-p23.drv`
+
+ - `OUT_PATHS`
+ Output paths of the built derivation, separated by a space
+ character.
+
+ Example:
+ `/nix/store/zf5lbh336mnzf1nlswdn11g4n2m8zh3g-bash-4.4-p23-dev
+ /nix/store/rjxwxwv1fpn9wa2x5ssk5phzwlcv4mna-bash-4.4-p23-doc
+ /nix/store/6bqvbzjkcp9695dq0dpl5y43nvy37pq1-bash-4.4-p23-info
+ /nix/store/r7fng3kk3vlpdlh2idnrbn37vh4imlj2-bash-4.4-p23-man
+ /nix/store/xfghy8ixrhz3kyy6p724iv3cxji088dx-bash-4.4-p23`.
+ )"};
+
+ Setting<std::string> netrcFile{
+ this, fmt("%s/%s", nixConfDir, "netrc"), "netrc-file",
+ R"(
+ If set to an absolute path to a `netrc` file, Nix will use the HTTP
+ authentication credentials in this file when trying to download from
+ a remote host through HTTP or HTTPS. Defaults to
+ `$NIX_CONF_DIR/netrc`.
+
+ The `netrc` file consists of a list of accounts in the following
+ format:
+
+ machine my-machine
+ login my-username
+ password my-password
+
+ For the exact syntax, see [the `curl`
+ documentation](https://ec.haxx.se/usingcurl-netrc.html).
+
+ > **Note**
+ >
+ > This must be an absolute path, and `~` is not resolved. For
+ > example, `~/.netrc` won't resolve to your home directory's
+ > `.netrc`.
+ )"};
/* Path to the SSL CA file used */
Path caFile;
#if __linux__
- Setting<bool> filterSyscalls{this, true, "filter-syscalls",
- "Whether to prevent certain dangerous system calls, such as "
- "creation of setuid/setgid files or adding ACLs or extended "
- "attributes. Only disable this if you're aware of the "
- "security implications."};
-
- Setting<bool> allowNewPrivileges{this, false, "allow-new-privileges",
- "Whether builders can acquire new privileges by calling programs with "
- "setuid/setgid bits or with file capabilities."};
+ Setting<bool> filterSyscalls{
+ this, true, "filter-syscalls",
+ R"(
+ Whether to prevent certain dangerous system calls, such as
+ creation of setuid/setgid files or adding ACLs or extended
+ attributes. Only disable this if you're aware of the
+ security implications.
+ )"};
+
+ Setting<bool> allowNewPrivileges{
+ this, false, "allow-new-privileges",
+ R"(
+ (Linux-specific.) By default, builders on Linux cannot acquire new
+ privileges by calling setuid/setgid programs or programs that have
+ file capabilities. For example, programs such as `sudo` or `ping`
+ will fail. (Note that in sandbox builds, no such programs are
+ available unless you bind-mount them into the sandbox via the
+ `sandbox-paths` option.) You can allow the use of such programs by
+ enabling this option. This is impure and usually undesirable, but
+ may be useful in certain scenarios (e.g. to spin up containers or
+ set up userspace network interfaces in tests).
+ )"};
#endif
- Setting<Strings> hashedMirrors{this, {}, "hashed-mirrors",
- "A list of servers used by builtins.fetchurl to fetch files by hash."};
-
- Setting<uint64_t> minFree{this, 0, "min-free",
- "Automatically run the garbage collector when free disk space drops below the specified amount."};
-
- Setting<uint64_t> maxFree{this, std::numeric_limits<uint64_t>::max(), "max-free",
- "Stop deleting garbage when free disk space is above the specified amount."};
+ Setting<Strings> hashedMirrors{
+ this, {}, "hashed-mirrors",
+ R"(
+ A list of web servers used by `builtins.fetchurl` to obtain files by
+ hash. The default is `http://tarballs.nixos.org/`. Given a hash type
+ *ht* and a base-16 hash *h*, Nix will try to download the file from
+ *hashed-mirror*/*ht*/*h*. This allows files to be downloaded even if
+ they have disappeared from their original URI. For example, given
+ the default mirror `http://tarballs.nixos.org/`, when building the
+ derivation
+
+ ```nix
+ builtins.fetchurl {
+ url = "https://example.org/foo-1.2.3.tar.xz";
+ sha256 = "2c26b46b68ffc68ff99b453c1d30413413422d706483bfa0f98a5e886266e7ae";
+ }
+ ```
+
+ Nix will attempt to download this file from
+ `http://tarballs.nixos.org/sha256/2c26b46b68ffc68ff99b453c1d30413413422d706483bfa0f98a5e886266e7ae`
+ first. If it is not available there, it will try the original URI.
+ )"};
+
+ Setting<uint64_t> minFree{
+ this, 0, "min-free",
+ R"(
+ When free disk space in `/nix/store` drops below `min-free` during a
+ build, Nix performs a garbage-collection until `max-free` bytes are
+ available or there is no more garbage. A value of `0` (the default)
+ disables this feature.
+ )"};
+
+ Setting<uint64_t> maxFree{
+ this, std::numeric_limits<uint64_t>::max(), "max-free",
+ R"(
+ When a garbage collection is triggered by the `min-free` option, it
+ stops as soon as `max-free` bytes are available. The default is
+ infinity (i.e. delete all garbage).
+ )"};
Setting<uint64_t> minFreeCheckInterval{this, 5, "min-free-check-interval",
"Number of seconds between checking free disk space."};
- Setting<Paths> pluginFiles{this, {}, "plugin-files",
- "Plugins to dynamically load at nix initialization time."};
+ Setting<Paths> pluginFiles{
+ this, {}, "plugin-files",
+ R"(
+ A list of plugin files to be loaded by Nix. Each of these files will
+ be dlopened by Nix, allowing them to affect execution through static
+ initialization. In particular, these plugins may construct static
+ instances of RegisterPrimOp to add new primops or constants to the
+ expression language, RegisterStoreImplementation to add new store
+ implementations, RegisterCommand to add new subcommands to the `nix`
+ command, and RegisterSetting to add new nix config settings. See the
+ constructors for those types for more details.
+
+ Since these files are loaded into the same address space as Nix
+ itself, they must be DSOs compatible with the instance of Nix
+ running at the time (i.e. compiled against the same headers, not
+ linked to any incompatible libraries). They should not be linked to
+ any Nix libs directly, as those will be available already at load
+ time.
+
+ If an entry in the list is a directory, all files in the directory
+ are loaded as plugins (non-recursively).
+ )"};
Setting<std::string> githubAccessToken{this, "", "github-access-token",
- "GitHub access token to get access to GitHub data through the GitHub API for github:<..> flakes."};
+ "GitHub access token to get access to GitHub data through the GitHub API for `github:<..>` flakes."};
Setting<Strings> experimentalFeatures{this, {}, "experimental-features",
"Experimental Nix features to enable."};
diff --git a/src/libstore/http-binary-cache-store.cc b/src/libstore/http-binary-cache-store.cc
index c1ceb08cf..86be7c006 100644
--- a/src/libstore/http-binary-cache-store.cc
+++ b/src/libstore/http-binary-cache-store.cc
@@ -2,12 +2,20 @@
#include "filetransfer.hh"
#include "globals.hh"
#include "nar-info-disk-cache.hh"
+#include "callback.hh"
namespace nix {
MakeError(UploadToHTTP, Error);
-class HttpBinaryCacheStore : public BinaryCacheStore
+struct HttpBinaryCacheStoreConfig : virtual BinaryCacheStoreConfig
+{
+ using BinaryCacheStoreConfig::BinaryCacheStoreConfig;
+
+ const std::string name() override { return "Http Binary Cache Store"; }
+};
+
+class HttpBinaryCacheStore : public BinaryCacheStore, public HttpBinaryCacheStoreConfig
{
private:
@@ -24,9 +32,12 @@ private:
public:
HttpBinaryCacheStore(
- const Params & params, const Path & _cacheUri)
- : BinaryCacheStore(params)
- , cacheUri(_cacheUri)
+ const std::string & scheme,
+ const Path & _cacheUri,
+ const Params & params)
+ : StoreConfig(params)
+ , BinaryCacheStore(params)
+ , cacheUri(scheme + "://" + _cacheUri)
{
if (cacheUri.back() == '/')
cacheUri.pop_back();
@@ -55,6 +66,13 @@ public:
}
}
+ static std::set<std::string> uriSchemes()
+ {
+ static bool forceHttp = getEnv("_NIX_FORCE_HTTP") == "1";
+ auto ret = std::set<std::string>({"http", "https"});
+ if (forceHttp) ret.insert("file");
+ return ret;
+ }
protected:
void maybeDisable()
@@ -85,7 +103,7 @@ protected:
checkEnabled();
try {
- FileTransferRequest request(cacheUri + "/" + path);
+ FileTransferRequest request(makeRequest(path));
request.head = true;
getFileTransfer()->download(request);
return true;
@@ -103,7 +121,7 @@ protected:
std::shared_ptr<std::basic_iostream<char>> istream,
const std::string & mimeType) override
{
- auto req = FileTransferRequest(cacheUri + "/" + path);
+ auto req = makeRequest(path);
req.data = std::make_shared<string>(StreamToSourceAdapter(istream).drain());
req.mimeType = mimeType;
try {
@@ -115,8 +133,11 @@ protected:
FileTransferRequest makeRequest(const std::string & path)
{
- FileTransferRequest request(cacheUri + "/" + path);
- return request;
+ return FileTransferRequest(
+ hasPrefix(path, "https://") || hasPrefix(path, "http://") || hasPrefix(path, "file://")
+ ? path
+ : cacheUri + "/" + path);
+
}
void getFile(const std::string & path, Sink & sink) override
@@ -159,18 +180,6 @@ protected:
};
-static RegisterStoreImplementation regStore([](
- const std::string & uri, const Store::Params & params)
- -> std::shared_ptr<Store>
-{
- static bool forceHttp = getEnv("_NIX_FORCE_HTTP") == "1";
- if (std::string(uri, 0, 7) != "http://" &&
- std::string(uri, 0, 8) != "https://" &&
- (!forceHttp || std::string(uri, 0, 7) != "file://"))
- return 0;
- auto store = std::make_shared<HttpBinaryCacheStore>(params, uri);
- store->init();
- return store;
-});
+static RegisterStoreImplementation<HttpBinaryCacheStore, HttpBinaryCacheStoreConfig> regStore;
}
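A small sketch of the resolution the new `makeRequest` performs (URLs are hypothetical): relative names are resolved against the cache URI, while absolute `http://`, `https://`, or `file://` URLs pass through unchanged.

```c++
// Sketch only, assuming cacheUri == "https://cache.example.org":
makeRequest("abc123.narinfo");
//   -> request for "https://cache.example.org/abc123.narinfo"
makeRequest("https://mirror.example.org/nar/abc123.nar.xz");
//   -> request for that absolute URL, unchanged
```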
diff --git a/src/libstore/legacy-ssh-store.cc b/src/libstore/legacy-ssh-store.cc
index dc03313f0..5af75669a 100644
--- a/src/libstore/legacy-ssh-store.cc
+++ b/src/libstore/legacy-ssh-store.cc
@@ -6,21 +6,28 @@
#include "worker-protocol.hh"
#include "ssh.hh"
#include "derivations.hh"
+#include "callback.hh"
namespace nix {
-static std::string uriScheme = "ssh://";
-
-struct LegacySSHStore : public Store
+struct LegacySSHStoreConfig : virtual StoreConfig
{
- const Setting<int> maxConnections{this, 1, "max-connections", "maximum number of concurrent SSH connections"};
- const Setting<Path> sshKey{this, "", "ssh-key", "path to an SSH private key"};
- const Setting<bool> compress{this, false, "compress", "whether to compress the connection"};
- const Setting<Path> remoteProgram{this, "nix-store", "remote-program", "path to the nix-store executable on the remote system"};
- const Setting<std::string> remoteStore{this, "", "remote-store", "URI of the store on the remote system"};
+ using StoreConfig::StoreConfig;
+ const Setting<int> maxConnections{(StoreConfig*) this, 1, "max-connections", "maximum number of concurrent SSH connections"};
+ const Setting<Path> sshKey{(StoreConfig*) this, "", "ssh-key", "path to an SSH private key"};
+ const Setting<bool> compress{(StoreConfig*) this, false, "compress", "whether to compress the connection"};
+ const Setting<Path> remoteProgram{(StoreConfig*) this, "nix-store", "remote-program", "path to the nix-store executable on the remote system"};
+ const Setting<std::string> remoteStore{(StoreConfig*) this, "", "remote-store", "URI of the store on the remote system"};
+
+ const std::string name() override { return "Legacy SSH Store"; }
+};
+struct LegacySSHStore : public Store, public virtual LegacySSHStoreConfig
+{
// Hack for getting remote build log output.
- const Setting<int> logFD{this, -1, "log-fd", "file descriptor to which SSH's stderr is connected"};
+ // Intentionally not in `LegacySSHStoreConfig` so that it doesn't appear in
+ // the documentation
+ const Setting<int> logFD{(StoreConfig*) this, -1, "log-fd", "file descriptor to which SSH's stderr is connected"};
struct Connection
{
@@ -37,8 +44,11 @@ struct LegacySSHStore : public Store
SSHMaster master;
- LegacySSHStore(const string & host, const Params & params)
- : Store(params)
+ static std::set<std::string> uriSchemes() { return {"ssh"}; }
+
+ LegacySSHStore(const string & scheme, const string & host, const Params & params)
+ : StoreConfig(params)
+ , Store(params)
, host(host)
, connections(make_ref<Pool<Connection>>(
std::max(1, (int) maxConnections),
@@ -84,7 +94,7 @@ struct LegacySSHStore : public Store
string getUri() override
{
- return uriScheme + host;
+ return *uriSchemes().begin() + "://" + host;
}
void queryPathInfoUncached(const StorePath & path,
@@ -325,12 +335,6 @@ public:
}
};
-static RegisterStoreImplementation regStore([](
- const std::string & uri, const Store::Params & params)
- -> std::shared_ptr<Store>
-{
- if (std::string(uri, 0, uriScheme.size()) != uriScheme) return 0;
- return std::make_shared<LegacySSHStore>(std::string(uri, uriScheme.size()), params);
-});
+static RegisterStoreImplementation<LegacySSHStore, LegacySSHStoreConfig> regStore;
}
diff --git a/src/libstore/local-binary-cache-store.cc b/src/libstore/local-binary-cache-store.cc
index 87d8334d7..b5744448e 100644
--- a/src/libstore/local-binary-cache-store.cc
+++ b/src/libstore/local-binary-cache-store.cc
@@ -4,7 +4,14 @@
namespace nix {
-class LocalBinaryCacheStore : public BinaryCacheStore
+struct LocalBinaryCacheStoreConfig : virtual BinaryCacheStoreConfig
+{
+ using BinaryCacheStoreConfig::BinaryCacheStoreConfig;
+
+ const std::string name() override { return "Local Binary Cache Store"; }
+};
+
+class LocalBinaryCacheStore : public BinaryCacheStore, public virtual LocalBinaryCacheStoreConfig
{
private:
@@ -13,8 +20,11 @@ private:
public:
LocalBinaryCacheStore(
- const Params & params, const Path & binaryCacheDir)
- : BinaryCacheStore(params)
+ const std::string scheme,
+ const Path & binaryCacheDir,
+ const Params & params)
+ : StoreConfig(params)
+ , BinaryCacheStore(params)
, binaryCacheDir(binaryCacheDir)
{
}
@@ -26,6 +36,8 @@ public:
return "file://" + binaryCacheDir;
}
+ static std::set<std::string> uriSchemes();
+
protected:
bool fileExists(const std::string & path) override;
@@ -85,16 +97,14 @@ bool LocalBinaryCacheStore::fileExists(const std::string & path)
return pathExists(binaryCacheDir + "/" + path);
}
-static RegisterStoreImplementation regStore([](
- const std::string & uri, const Store::Params & params)
- -> std::shared_ptr<Store>
+std::set<std::string> LocalBinaryCacheStore::uriSchemes()
{
- if (getEnv("_NIX_FORCE_HTTP_BINARY_CACHE_STORE") == "1" ||
- std::string(uri, 0, 7) != "file://")
- return 0;
- auto store = std::make_shared<LocalBinaryCacheStore>(params, std::string(uri, 7));
- store->init();
- return store;
-});
+ if (getEnv("_NIX_FORCE_HTTP_BINARY_CACHE_STORE") == "1")
+ return {};
+ else
+ return {"file"};
+}
+
+static RegisterStoreImplementation<LocalBinaryCacheStore, LocalBinaryCacheStoreConfig> regStore;
}
diff --git a/src/libstore/local-store.cc b/src/libstore/local-store.cc
index bccd77b68..94ff34cb8 100644
--- a/src/libstore/local-store.cc
+++ b/src/libstore/local-store.cc
@@ -6,6 +6,7 @@
#include "derivations.hh"
#include "nar-info.hh"
#include "references.hh"
+#include "callback.hh"
#include <iostream>
#include <algorithm>
@@ -42,7 +43,8 @@ namespace nix {
LocalStore::LocalStore(const Params & params)
- : Store(params)
+ : StoreConfig(params)
+ , Store(params)
, LocalFSStore(params)
, realStoreDir_{this, false, rootDir != "" ? rootDir + "/nix/store" : storeDir, "real",
"physical path to the Nix store"}
@@ -578,13 +580,32 @@ void LocalStore::checkDerivationOutputs(const StorePath & drvPath, const Derivat
envHasRightPath(path, i.first);
},
[&](DerivationOutputCAFloating _) {
- throw UnimplementedError("floating CA output derivations are not yet implemented");
+ /* Nothing to check */
},
}, i.second.output);
}
}
+void LocalStore::linkDeriverToPath(const StorePath & deriver, const string & outputName, const StorePath & output)
+{
+ auto state(_state.lock());
+ return linkDeriverToPath(*state, queryValidPathId(*state, deriver), outputName, output);
+}
+
+void LocalStore::linkDeriverToPath(State & state, uint64_t deriver, const string & outputName, const StorePath & output)
+{
+ retrySQLite<void>([&]() {
+ state.stmtAddDerivationOutput.use()
+ (deriver)
+ (outputName)
+ (printStorePath(output))
+ .exec();
+ });
+
+}
+
+
uint64_t LocalStore::addValidPath(State & state,
const ValidPathInfo & info, bool checkOutputs)
{
@@ -618,12 +639,11 @@ uint64_t LocalStore::addValidPath(State & state,
registration above is undone. */
if (checkOutputs) checkDerivationOutputs(info.path, drv);
- for (auto & i : drv.outputsAndPaths(*this)) {
- state.stmtAddDerivationOutput.use()
- (id)
- (i.first)
- (printStorePath(i.second.second))
- .exec();
+ for (auto & i : drv.outputsAndOptPaths(*this)) {
+ /* Floating CA derivations have indeterminate output paths until
+ they are built, so don't register anything in that case */
+ if (i.second.second)
+ linkDeriverToPath(state, id, i.first, *i.second.second);
}
}
@@ -785,17 +805,21 @@ StorePathSet LocalStore::queryValidDerivers(const StorePath & path)
}
-OutputPathMap LocalStore::queryDerivationOutputMap(const StorePath & path)
+std::map<std::string, std::optional<StorePath>> LocalStore::queryPartialDerivationOutputMap(const StorePath & path)
{
- return retrySQLite<OutputPathMap>([&]() {
+ std::map<std::string, std::optional<StorePath>> outputs;
+ BasicDerivation drv = readDerivation(path);
+ for (auto & [outName, _] : drv.outputs) {
+ outputs.insert_or_assign(outName, std::nullopt);
+ }
+ return retrySQLite<std::map<std::string, std::optional<StorePath>>>([&]() {
auto state(_state.lock());
auto useQueryDerivationOutputs(state->stmtQueryDerivationOutputs.use()
(queryValidPathId(*state, path)));
- OutputPathMap outputs;
while (useQueryDerivationOutputs.next())
- outputs.emplace(
+ outputs.insert_or_assign(
useQueryDerivationOutputs.getStr(0),
parseStorePath(useQueryDerivationOutputs.getStr(1))
);
diff --git a/src/libstore/local-store.hh b/src/libstore/local-store.hh
index 31e6587ac..e7c9d1605 100644
--- a/src/libstore/local-store.hh
+++ b/src/libstore/local-store.hh
@@ -23,9 +23,6 @@ namespace nix {
const int nixSchemaVersion = 10;
-struct Derivation;
-
-
struct OptimiseStats
{
unsigned long filesLinked = 0;
@@ -33,8 +30,19 @@ struct OptimiseStats
uint64_t blocksFreed = 0;
};
+struct LocalStoreConfig : virtual LocalFSStoreConfig
+{
+ using LocalFSStoreConfig::LocalFSStoreConfig;
-class LocalStore : public LocalFSStore
+ Setting<bool> requireSigs{(StoreConfig*) this,
+ settings.requireSigs,
+ "require-sigs", "whether store paths should have a trusted signature on import"};
+
+ const std::string name() override { return "Local Store"; }
+};
+
+
+class LocalStore : public LocalFSStore, public virtual LocalStoreConfig
{
private:
@@ -98,10 +106,6 @@ public:
private:
- Setting<bool> requireSigs{(Store*) this,
- settings.requireSigs,
- "require-sigs", "whether store paths should have a trusted signature on import"};
-
const PublicKeys & getPublicKeys();
public:
@@ -133,7 +137,7 @@ public:
StorePathSet queryValidDerivers(const StorePath & path) override;
- OutputPathMap queryDerivationOutputMap(const StorePath & path) override;
+ std::map<std::string, std::optional<StorePath>> queryPartialDerivationOutputMap(const StorePath & path) override;
std::optional<StorePath> queryPathFromHashPart(const std::string & hashPart) override;
@@ -282,6 +286,11 @@ private:
specified by the ‘secret-key-files’ option. */
void signPathInfo(ValidPathInfo & info);
+ /* Register the store path 'output' as the output named 'outputName' of
+ derivation 'deriver'. */
+ void linkDeriverToPath(const StorePath & deriver, const string & outputName, const StorePath & output);
+ void linkDeriverToPath(State & state, uint64_t deriver, const string & outputName, const StorePath & output);
+
Path getRealStoreDir() override { return realStoreDir; }
void createUser(const std::string & userName, uid_t userId) override;
diff --git a/src/libstore/misc.cc b/src/libstore/misc.cc
index f6aa570bb..ad4dccef9 100644
--- a/src/libstore/misc.cc
+++ b/src/libstore/misc.cc
@@ -5,7 +5,7 @@
#include "store-api.hh"
#include "thread-pool.hh"
#include "topo-sort.hh"
-
+#include "callback.hh"
namespace nix {
@@ -203,17 +203,24 @@ void Store::queryMissing(const std::vector<StorePathWithOutputs> & targets,
return;
}
+ PathSet invalid;
+ /* true for regular derivations, and for CA derivations for which we
+ have a trust mapping for all wanted outputs. */
+ auto knownOutputPaths = true;
+ for (auto & [outputName, pathOpt] : queryPartialDerivationOutputMap(path.path)) {
+ if (!pathOpt) {
+ knownOutputPaths = false;
+ break;
+ }
+ if (wantOutput(outputName, path.outputs) && !isValidPath(*pathOpt))
+ invalid.insert(printStorePath(*pathOpt));
+ }
+ if (knownOutputPaths && invalid.empty()) return;
+
auto drv = make_ref<Derivation>(derivationFromPath(path.path));
ParsedDerivation parsedDrv(StorePath(path.path), *drv);
- PathSet invalid;
- for (auto & j : drv->outputsAndPaths(*this))
- if (wantOutput(j.first, path.outputs)
- && !isValidPath(j.second.second))
- invalid.insert(printStorePath(j.second.second));
- if (invalid.empty()) return;
-
- if (settings.useSubstitutes && parsedDrv.substitutesAllowed()) {
+ if (knownOutputPaths && settings.useSubstitutes && parsedDrv.substitutesAllowed()) {
auto drvState = make_ref<Sync<DrvState>>(DrvState(invalid.size()));
for (auto & output : invalid)
pool.enqueue(std::bind(checkOutput, printStorePath(path.path), drv, output, drvState));
diff --git a/src/libstore/names.cc b/src/libstore/names.cc
index d1c8a6101..41e28dc99 100644
--- a/src/libstore/names.cc
+++ b/src/libstore/names.cc
@@ -1,10 +1,18 @@
#include "names.hh"
#include "util.hh"
+#include <regex>
+
namespace nix {
+struct Regex
+{
+ std::regex regex;
+};
+
+
DrvName::DrvName()
{
name = "";
@@ -30,11 +38,18 @@ DrvName::DrvName(std::string_view s) : hits(0)
}
+DrvName::~DrvName()
+{ }
+
+
bool DrvName::matches(DrvName & n)
{
if (name != "*") {
- if (!regex) regex = std::unique_ptr<std::regex>(new std::regex(name, std::regex::extended));
- if (!std::regex_match(n.name, *regex)) return false;
+ if (!regex) {
+ regex = std::make_unique<Regex>();
+ regex->regex = std::regex(name, std::regex::extended);
+ }
+ if (!std::regex_match(n.name, regex->regex)) return false;
}
if (version != "" && version != n.version) return false;
return true;
@@ -99,7 +114,7 @@ DrvNames drvNamesFromArgs(const Strings & opArgs)
{
DrvNames result;
for (auto & i : opArgs)
- result.push_back(DrvName(i));
+ result.emplace_back(i);
return result;
}
diff --git a/src/libstore/names.hh b/src/libstore/names.hh
index 00e14b8c7..bc62aac93 100644
--- a/src/libstore/names.hh
+++ b/src/libstore/names.hh
@@ -3,10 +3,11 @@
#include <memory>
#include "types.hh"
-#include <regex>
namespace nix {
+struct Regex;
+
struct DrvName
{
string fullName;
@@ -16,10 +17,12 @@ struct DrvName
DrvName();
DrvName(std::string_view s);
+ ~DrvName();
+
bool matches(DrvName & n);
private:
- std::unique_ptr<std::regex> regex;
+ std::unique_ptr<Regex> regex;
};
typedef list<DrvName> DrvNames;
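The names.{cc,hh} change above is a pimpl refactoring: `std::regex` now lives behind a forward-declared `Regex` struct, so `<regex>` is only included by names.cc, and `DrvName` gains an out-of-line destructor defined where `Regex` is complete. A minimal sketch of the same idiom, using entirely hypothetical `Widget`/`Impl` names rather than anything from this patch:

```cpp
// widget.hh -- the header only forward-declares the implementation type.
#include <memory>
#include <string>

struct Impl; // defined in widget.cc only

struct Widget
{
    Widget();
    ~Widget(); // must be defined out-of-line, where Impl is complete
    bool matches(const std::string & s);
private:
    std::unique_ptr<Impl> impl; // opaque pointer, like DrvName::regex above
};

// widget.cc -- the heavy <regex> header stays in one translation unit.
#include <regex>

struct Impl { std::regex re{"a.*", std::regex::extended}; };

Widget::Widget() : impl(std::make_unique<Impl>()) { }
Widget::~Widget() { } // Impl is complete here, so unique_ptr can delete it
bool Widget::matches(const std::string & s) { return std::regex_match(s, impl->re); }
```

Keeping the heavy header out of names.hh is the usual motivation for this pattern: every file that includes names.hh no longer has to parse `<regex>`.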
diff --git a/src/libstore/nar-accessor.cc b/src/libstore/nar-accessor.cc
index 59ec164b6..a9efdd0b6 100644
--- a/src/libstore/nar-accessor.cc
+++ b/src/libstore/nar-accessor.cc
@@ -49,7 +49,8 @@ struct NarAccessor : public FSAccessor
: acc(acc), source(source)
{ }
- void createMember(const Path & path, NarMember member) {
+ void createMember(const Path & path, NarMember member)
+ {
size_t level = std::count(path.begin(), path.end(), '/');
while (parents.size() > level) parents.pop();
diff --git a/src/libstore/profiles.cc b/src/libstore/profiles.cc
index 6862b42f0..c20386e2b 100644
--- a/src/libstore/profiles.cc
+++ b/src/libstore/profiles.cc
@@ -72,7 +72,7 @@ static void makeName(const Path & profile, GenerationNumber num,
}
-Path createGeneration(ref<LocalFSStore> store, Path profile, Path outPath)
+Path createGeneration(ref<LocalFSStore> store, Path profile, StorePath outPath)
{
    /* The new generation number should be higher than the
       previous ones. */
@@ -82,7 +82,7 @@ Path createGeneration(ref<LocalFSStore> store, Path profile, Path outPath)
if (gens.size() > 0) {
Generation last = gens.back();
- if (readLink(last.path) == outPath) {
+ if (readLink(last.path) == store->printStorePath(outPath)) {
/* We only create a new generation symlink if it differs
from the last one.
@@ -105,7 +105,7 @@ Path createGeneration(ref<LocalFSStore> store, Path profile, Path outPath)
user environment etc. we've just built. */
Path generation;
makeName(profile, num + 1, generation);
- store->addPermRoot(store->parseStorePath(outPath), generation, false, true);
+ store->addPermRoot(outPath, generation);
return generation;
}
diff --git a/src/libstore/profiles.hh b/src/libstore/profiles.hh
index abe507f0e..be55a65d4 100644
--- a/src/libstore/profiles.hh
+++ b/src/libstore/profiles.hh
@@ -8,6 +8,8 @@
namespace nix {
+class StorePath;
+
typedef unsigned int GenerationNumber;
@@ -28,7 +30,7 @@ std::pair<Generations, std::optional<GenerationNumber>> findGenerations(Path pro
class LocalFSStore;
-Path createGeneration(ref<LocalFSStore> store, Path profile, Path outPath);
+Path createGeneration(ref<LocalFSStore> store, Path profile, StorePath outPath);
void deleteGeneration(const Path & profile, GenerationNumber gen);
diff --git a/src/libstore/references.cc b/src/libstore/references.cc
index 62a3cda61..d2096cb49 100644
--- a/src/libstore/references.cc
+++ b/src/libstore/references.cc
@@ -79,9 +79,17 @@ void RefScanSink::operator () (const unsigned char * data, size_t len)
std::pair<PathSet, HashResult> scanForReferences(const string & path,
const PathSet & refs)
{
- RefScanSink refsSink;
HashSink hashSink { htSHA256 };
- TeeSink sink { refsSink, hashSink };
+ auto found = scanForReferences(hashSink, path, refs);
+ auto hash = hashSink.finish();
+ return std::pair<PathSet, HashResult>(found, hash);
+}
+
+PathSet scanForReferences(Sink & toTee,
+ const string & path, const PathSet & refs)
+{
+ RefScanSink refsSink;
+ TeeSink sink { refsSink, toTee };
std::map<string, Path> backMap;
/* For efficiency (and a higher hit rate), just search for the
@@ -111,9 +119,7 @@ std::pair<PathSet, HashResult> scanForReferences(const string & path,
found.insert(j->second);
}
- auto hash = hashSink.finish();
-
- return std::pair<PathSet, HashResult>(found, hash);
+ return found;
}
diff --git a/src/libstore/references.hh b/src/libstore/references.hh
index 598a3203a..c2efd095c 100644
--- a/src/libstore/references.hh
+++ b/src/libstore/references.hh
@@ -7,6 +7,8 @@ namespace nix {
std::pair<PathSet, HashResult> scanForReferences(const Path & path, const PathSet & refs);
+PathSet scanForReferences(Sink & toTee, const Path & path, const PathSet & refs);
+
struct RewritingSink : Sink
{
std::string from, to, prev;
diff --git a/src/libstore/remote-store.cc b/src/libstore/remote-store.cc
index 8dcc1d710..27535f1d0 100644
--- a/src/libstore/remote-store.cc
+++ b/src/libstore/remote-store.cc
@@ -1,5 +1,6 @@
#include "serialise.hh"
#include "util.hh"
+#include "remote-fs-accessor.hh"
#include "remote-store.hh"
#include "worker-protocol.hh"
#include "archive.hh"
@@ -9,6 +10,7 @@
#include "pool.hh"
#include "finally.hh"
#include "logging.hh"
+#include "callback.hh"
#include <sys/types.h>
#include <sys/stat.h>
@@ -31,7 +33,6 @@ template<> StorePathSet readStorePaths(const Store & store, Source & from)
return paths;
}
-
void writeStorePaths(const Store & store, Sink & out, const StorePathSet & paths)
{
out << paths.size();
@@ -39,12 +40,16 @@ void writeStorePaths(const Store & store, Sink & out, const StorePathSet & paths
out << store.printStorePath(i);
}
+
StorePathCAMap readStorePathCAMap(const Store & store, Source & from)
{
StorePathCAMap paths;
auto count = readNum<size_t>(from);
- while (count--)
- paths.insert_or_assign(store.parseStorePath(readString(from)), parseContentAddressOpt(readString(from)));
+ while (count--) {
+ auto path = store.parseStorePath(readString(from));
+ auto ca = parseContentAddressOpt(readString(from));
+ paths.insert_or_assign(path, ca);
+ }
return paths;
}
@@ -57,36 +62,52 @@ void writeStorePathCAMap(const Store & store, Sink & out, const StorePathCAMap &
}
}
-std::map<string, StorePath> readOutputPathMap(const Store & store, Source & from)
+
+namespace worker_proto {
+
+StorePath read(const Store & store, Source & from, Phantom<StorePath> _)
{
- std::map<string, StorePath> pathMap;
- auto rawInput = readStrings<Strings>(from);
- if (rawInput.size() % 2)
- throw Error("got an odd number of elements from the daemon when trying to read a output path map");
- auto curInput = rawInput.begin();
- while (curInput != rawInput.end()) {
- auto thisKey = *curInput++;
- auto thisValue = *curInput++;
- pathMap.emplace(thisKey, store.parseStorePath(thisValue));
- }
- return pathMap;
+ return store.parseStorePath(readString(from));
}
-void writeOutputPathMap(const Store & store, Sink & out, const std::map<string, StorePath> & pathMap)
+void write(const Store & store, Sink & out, const StorePath & storePath)
{
- out << 2*pathMap.size();
- for (auto & i : pathMap) {
- out << i.first;
- out << store.printStorePath(i.second);
- }
+ out << store.printStorePath(storePath);
+}
+
+
+template<>
+std::optional<StorePath> read(const Store & store, Source & from, Phantom<std::optional<StorePath>> _)
+{
+ auto s = readString(from);
+ return s == "" ? std::optional<StorePath> {} : store.parseStorePath(s);
+}
+
+template<>
+void write(const Store & store, Sink & out, const std::optional<StorePath> & storePathOpt)
+{
+ out << (storePathOpt ? store.printStorePath(*storePathOpt) : "");
+}
+
}
+
/* TODO: Separate these store impls into different files, give them better names */
RemoteStore::RemoteStore(const Params & params)
: Store(params)
+ , RemoteStoreConfig(params)
, connections(make_ref<Pool<Connection>>(
std::max(1, (int) maxConnections),
- [this]() { return openConnectionWrapper(); },
+ [this]() {
+ auto conn = openConnectionWrapper();
+ try {
+ initConnection(*conn);
+ } catch (...) {
+ failed = true;
+ throw;
+ }
+ return conn;
+ },
[this](const ref<Connection> & r) {
return
r->to.good()
@@ -113,19 +134,21 @@ ref<RemoteStore::Connection> RemoteStore::openConnectionWrapper()
UDSRemoteStore::UDSRemoteStore(const Params & params)
- : Store(params)
+ : StoreConfig(params)
+ , Store(params)
, LocalFSStore(params)
, RemoteStore(params)
{
}
-UDSRemoteStore::UDSRemoteStore(std::string socket_path, const Params & params)
- : Store(params)
- , LocalFSStore(params)
- , RemoteStore(params)
- , path(socket_path)
+UDSRemoteStore::UDSRemoteStore(
+ const std::string scheme,
+ std::string socket_path,
+ const Params & params)
+ : UDSRemoteStore(params)
{
+ path.emplace(socket_path);
}
@@ -169,8 +192,6 @@ ref<RemoteStore::Connection> UDSRemoteStore::openConnection()
conn->startTime = std::chrono::steady_clock::now();
- initConnection(*conn);
-
return conn;
}
@@ -278,9 +299,9 @@ struct ConnectionHandle
RemoteStore::Connection * operator -> () { return &*handle; }
- void processStderr(Sink * sink = 0, Source * source = 0)
+ void processStderr(Sink * sink = 0, Source * source = 0, bool flush = true)
{
- auto ex = handle->processStderr(sink, source);
+ auto ex = handle->processStderr(sink, source, flush);
if (ex) {
daemonException = true;
std::rethrow_exception(ex);
@@ -468,12 +489,28 @@ StorePathSet RemoteStore::queryDerivationOutputs(const StorePath & path)
}
-OutputPathMap RemoteStore::queryDerivationOutputMap(const StorePath & path)
+std::map<std::string, std::optional<StorePath>> RemoteStore::queryPartialDerivationOutputMap(const StorePath & path)
{
- auto conn(getConnection());
- conn->to << wopQueryDerivationOutputMap << printStorePath(path);
- conn.processStderr();
- return readOutputPathMap(*this, conn->from);
+ if (GET_PROTOCOL_MINOR(getProtocol()) >= 0x16) {
+ auto conn(getConnection());
+ conn->to << wopQueryDerivationOutputMap << printStorePath(path);
+ conn.processStderr();
+ return worker_proto::read(*this, conn->from, Phantom<std::map<std::string, std::optional<StorePath>>> {});
+ } else {
+ // Fallback for old daemon versions.
+ // For floating-CA derivations (and their co-dependencies) this is an
+ // under-approximation as it only returns the paths that can be inferred
+ // from the derivation itself (and not the ones that are known because
+ // they have been built), but as old stores don't handle floating-CA
+ // derivations this shouldn't matter
+ auto derivation = readDerivation(path);
+ auto outputsWithOptPaths = derivation.outputsAndOptPaths(*this);
+ std::map<std::string, std::optional<StorePath>> ret;
+ for (auto & [outputName, outputAndPath] : outputsWithOptPaths) {
+ ret.emplace(outputName, outputAndPath.second);
+ }
+ return ret;
+ }
}
@@ -529,6 +566,8 @@ void RemoteStore::addToStore(const ValidPathInfo & info, Source & source,
if (GET_PROTOCOL_MINOR(conn->daemonVersion) >= 23) {
+ conn->to.flush();
+
std::exception_ptr ex;
struct FramedSink : BufferedSink
@@ -568,7 +607,7 @@ void RemoteStore::addToStore(const ValidPathInfo & info, Source & source,
std::thread stderrThread([&]()
{
try {
- conn.processStderr();
+ conn.processStderr(nullptr, nullptr, false);
} catch (...) {
ex = std::current_exception();
}
@@ -860,6 +899,18 @@ RemoteStore::Connection::~Connection()
}
}
+void RemoteStore::narFromPath(const StorePath & path, Sink & sink)
+{
+ auto conn(connections->get());
+ conn->to << wopNarFromPath << printStorePath(path);
+ conn->processStderr();
+ copyNAR(conn->from, sink);
+}
+
+ref<FSAccessor> RemoteStore::getFSAccessor()
+{
+ return make_ref<RemoteFSAccessor>(ref<Store>(shared_from_this()));
+}
static Logger::Fields readFields(Source & from)
{
@@ -878,9 +929,10 @@ static Logger::Fields readFields(Source & from)
}
-std::exception_ptr RemoteStore::Connection::processStderr(Sink * sink, Source * source)
+std::exception_ptr RemoteStore::Connection::processStderr(Sink * sink, Source * source, bool flush)
{
- to.flush();
+ if (flush)
+ to.flush();
while (true) {
@@ -941,14 +993,6 @@ std::exception_ptr RemoteStore::Connection::processStderr(Sink * sink, Source *
return nullptr;
}
-static std::string uriScheme = "unix://";
-
-static RegisterStoreImplementation regStore([](
- const std::string & uri, const Store::Params & params)
- -> std::shared_ptr<Store>
-{
- if (std::string(uri, 0, uriScheme.size()) != uriScheme) return 0;
- return std::make_shared<UDSRemoteStore>(std::string(uri, uriScheme.size()), params);
-});
+static RegisterStoreImplementation<UDSRemoteStore, UDSRemoteStoreConfig> regStore;
}
diff --git a/src/libstore/remote-store.hh b/src/libstore/remote-store.hh
index 72d2a6689..91c748006 100644
--- a/src/libstore/remote-store.hh
+++ b/src/libstore/remote-store.hh
@@ -16,18 +16,22 @@ struct FdSource;
template<typename T> class Pool;
struct ConnectionHandle;
-
-/* FIXME: RemoteStore is a misnomer - should be something like
- DaemonStore. */
-class RemoteStore : public virtual Store
+struct RemoteStoreConfig : virtual StoreConfig
{
-public:
+ using StoreConfig::StoreConfig;
- const Setting<int> maxConnections{(Store*) this, 1,
+ const Setting<int> maxConnections{(StoreConfig*) this, 1,
"max-connections", "maximum number of concurrent connections to the Nix daemon"};
- const Setting<unsigned int> maxConnectionAge{(Store*) this, std::numeric_limits<unsigned int>::max(),
+ const Setting<unsigned int> maxConnectionAge{(StoreConfig*) this, std::numeric_limits<unsigned int>::max(),
"max-connection-age", "number of seconds to reuse a connection"};
+};
+
+/* FIXME: RemoteStore is a misnomer - should be something like
+ DaemonStore. */
+class RemoteStore : public virtual Store, public virtual RemoteStoreConfig
+{
+public:
virtual bool sameMachine() = 0;
@@ -51,7 +55,7 @@ public:
StorePathSet queryDerivationOutputs(const StorePath & path) override;
- OutputPathMap queryDerivationOutputMap(const StorePath & path) override;
+ std::map<std::string, std::optional<StorePath>> queryPartialDerivationOutputMap(const StorePath & path) override;
std::optional<StorePath> queryPathFromHashPart(const std::string & hashPart) override;
StorePathSet querySubstitutablePaths(const StorePathSet & paths) override;
@@ -102,8 +106,6 @@ public:
void flushBadConnections();
-protected:
-
struct Connection
{
AutoCloseFD fd;
@@ -114,11 +116,13 @@ protected:
virtual ~Connection();
- std::exception_ptr processStderr(Sink * sink = 0, Source * source = 0);
+ std::exception_ptr processStderr(Sink * sink = 0, Source * source = 0, bool flush = true);
};
ref<Connection> openConnectionWrapper();
+protected:
+
virtual ref<Connection> openConnection() = 0;
void initConnection(Connection & conn);
@@ -131,24 +135,54 @@ protected:
friend struct ConnectionHandle;
+ virtual ref<FSAccessor> getFSAccessor() override;
+
+ virtual void narFromPath(const StorePath & path, Sink & sink) override;
+
private:
std::atomic_bool failed{false};
};
-class UDSRemoteStore : public LocalFSStore, public RemoteStore
+struct UDSRemoteStoreConfig : virtual LocalFSStoreConfig, virtual RemoteStoreConfig
+{
+ UDSRemoteStoreConfig(const Store::Params & params)
+ : StoreConfig(params)
+ , LocalFSStoreConfig(params)
+ , RemoteStoreConfig(params)
+ {
+ }
+
+ UDSRemoteStoreConfig()
+ : UDSRemoteStoreConfig(Store::Params({}))
+ {
+ }
+
+ const std::string name() override { return "Local Daemon Store"; }
+};
+
+class UDSRemoteStore : public LocalFSStore, public RemoteStore, public virtual UDSRemoteStoreConfig
{
public:
UDSRemoteStore(const Params & params);
- UDSRemoteStore(std::string path, const Params & params);
+ UDSRemoteStore(const std::string scheme, std::string path, const Params & params);
std::string getUri() override;
+ static std::set<std::string> uriSchemes()
+ { return {"unix"}; }
+
bool sameMachine() override
{ return true; }
+ ref<FSAccessor> getFSAccessor() override
+ { return LocalFSStore::getFSAccessor(); }
+
+ void narFromPath(const StorePath & path, Sink & sink) override
+ { LocalFSStore::narFromPath(path, sink); }
+
private:
ref<RemoteStore::Connection> openConnection() override;
diff --git a/src/libstore/s3-binary-cache-store.cc b/src/libstore/s3-binary-cache-store.cc
index a0a446bd3..d43f267e0 100644
--- a/src/libstore/s3-binary-cache-store.cc
+++ b/src/libstore/s3-binary-cache-store.cc
@@ -172,20 +172,26 @@ S3Helper::FileTransferResult S3Helper::getObject(
return res;
}
-struct S3BinaryCacheStoreImpl : public S3BinaryCacheStore
+struct S3BinaryCacheStoreConfig : virtual BinaryCacheStoreConfig
{
- const Setting<std::string> profile{this, "", "profile", "The name of the AWS configuration profile to use."};
- const Setting<std::string> region{this, Aws::Region::US_EAST_1, "region", {"aws-region"}};
- const Setting<std::string> scheme{this, "", "scheme", "The scheme to use for S3 requests, https by default."};
- const Setting<std::string> endpoint{this, "", "endpoint", "An optional override of the endpoint to use when talking to S3."};
- const Setting<std::string> narinfoCompression{this, "", "narinfo-compression", "compression method for .narinfo files"};
- const Setting<std::string> lsCompression{this, "", "ls-compression", "compression method for .ls files"};
- const Setting<std::string> logCompression{this, "", "log-compression", "compression method for log/* files"};
+ using BinaryCacheStoreConfig::BinaryCacheStoreConfig;
+ const Setting<std::string> profile{(StoreConfig*) this, "", "profile", "The name of the AWS configuration profile to use."};
+ const Setting<std::string> region{(StoreConfig*) this, Aws::Region::US_EAST_1, "region", {"aws-region"}};
+ const Setting<std::string> scheme{(StoreConfig*) this, "", "scheme", "The scheme to use for S3 requests, https by default."};
+ const Setting<std::string> endpoint{(StoreConfig*) this, "", "endpoint", "An optional override of the endpoint to use when talking to S3."};
+ const Setting<std::string> narinfoCompression{(StoreConfig*) this, "", "narinfo-compression", "compression method for .narinfo files"};
+ const Setting<std::string> lsCompression{(StoreConfig*) this, "", "ls-compression", "compression method for .ls files"};
+ const Setting<std::string> logCompression{(StoreConfig*) this, "", "log-compression", "compression method for log/* files"};
const Setting<bool> multipartUpload{
- this, false, "multipart-upload", "whether to use multi-part uploads"};
+ (StoreConfig*) this, false, "multipart-upload", "whether to use multi-part uploads"};
const Setting<uint64_t> bufferSize{
- this, 5 * 1024 * 1024, "buffer-size", "size (in bytes) of each part in multi-part uploads"};
+ (StoreConfig*) this, 5 * 1024 * 1024, "buffer-size", "size (in bytes) of each part in multi-part uploads"};
+ const std::string name() override { return "S3 Binary Cache Store"; }
+};
+
+struct S3BinaryCacheStoreImpl : public S3BinaryCacheStore, virtual S3BinaryCacheStoreConfig
+{
std::string bucketName;
Stats stats;
@@ -193,8 +199,11 @@ struct S3BinaryCacheStoreImpl : public S3BinaryCacheStore
S3Helper s3Helper;
S3BinaryCacheStoreImpl(
- const Params & params, const std::string & bucketName)
- : S3BinaryCacheStore(params)
+ const std::string & scheme,
+ const std::string & bucketName,
+ const Params & params)
+ : StoreConfig(params)
+ , S3BinaryCacheStore(params)
, bucketName(bucketName)
, s3Helper(profile, region, scheme, endpoint)
{
@@ -426,17 +435,11 @@ struct S3BinaryCacheStoreImpl : public S3BinaryCacheStore
return paths;
}
+ static std::set<std::string> uriSchemes() { return {"s3"}; }
+
};
-static RegisterStoreImplementation regStore([](
- const std::string & uri, const Store::Params & params)
- -> std::shared_ptr<Store>
-{
- if (std::string(uri, 0, 5) != "s3://") return 0;
- auto store = std::make_shared<S3BinaryCacheStoreImpl>(params, std::string(uri, 5));
- store->init();
- return store;
-});
+static RegisterStoreImplementation<S3BinaryCacheStoreImpl, S3BinaryCacheStoreConfig> regStore;
}
diff --git a/src/libstore/ssh-store.cc b/src/libstore/ssh-store.cc
index caae6b596..6d6eca98d 100644
--- a/src/libstore/ssh-store.cc
+++ b/src/libstore/ssh-store.cc
@@ -8,19 +8,25 @@
namespace nix {
-static std::string uriScheme = "ssh-ng://";
+struct SSHStoreConfig : virtual RemoteStoreConfig
+{
+ using RemoteStoreConfig::RemoteStoreConfig;
+
+ const Setting<Path> sshKey{(StoreConfig*) this, "", "ssh-key", "path to an SSH private key"};
+ const Setting<bool> compress{(StoreConfig*) this, false, "compress", "whether to compress the connection"};
+ const Setting<Path> remoteProgram{(StoreConfig*) this, "nix-daemon", "remote-program", "path to the nix-daemon executable on the remote system"};
+ const Setting<std::string> remoteStore{(StoreConfig*) this, "", "remote-store", "URI of the store on the remote system"};
+
+ const std::string name() override { return "SSH Store"; }
+};
-class SSHStore : public RemoteStore
+class SSHStore : public virtual RemoteStore, public virtual SSHStoreConfig
{
public:
- const Setting<Path> sshKey{(Store*) this, "", "ssh-key", "path to an SSH private key"};
- const Setting<bool> compress{(Store*) this, false, "compress", "whether to compress the connection"};
- const Setting<Path> remoteProgram{(Store*) this, "nix-daemon", "remote-program", "path to the nix-daemon executable on the remote system"};
- const Setting<std::string> remoteStore{(Store*) this, "", "remote-store", "URI of the store on the remote system"};
-
- SSHStore(const std::string & host, const Params & params)
- : Store(params)
+ SSHStore(const std::string & scheme, const std::string & host, const Params & params)
+ : StoreConfig(params)
+ , Store(params)
, RemoteStore(params)
, host(host)
, master(
@@ -32,18 +38,16 @@ public:
{
}
+ static std::set<std::string> uriSchemes() { return {"ssh-ng"}; }
+
std::string getUri() override
{
- return uriScheme + host;
+ return *uriSchemes().begin() + "://" + host;
}
bool sameMachine() override
{ return false; }
- void narFromPath(const StorePath & path, Sink & sink) override;
-
- ref<FSAccessor> getFSAccessor() override;
-
private:
struct Connection : RemoteStore::Connection
@@ -68,19 +72,6 @@ private:
};
};
-void SSHStore::narFromPath(const StorePath & path, Sink & sink)
-{
- auto conn(connections->get());
- conn->to << wopNarFromPath << printStorePath(path);
- conn->processStderr();
- copyNAR(conn->from, sink);
-}
-
-ref<FSAccessor> SSHStore::getFSAccessor()
-{
- return make_ref<RemoteFSAccessor>(ref<Store>(shared_from_this()));
-}
-
ref<RemoteStore::Connection> SSHStore::openConnection()
{
auto conn = make_ref<Connection>();
@@ -89,16 +80,9 @@ ref<RemoteStore::Connection> SSHStore::openConnection()
+ (remoteStore.get() == "" ? "" : " --store " + shellEscape(remoteStore.get())));
conn->to = FdSink(conn->sshConn->in.get());
conn->from = FdSource(conn->sshConn->out.get());
- initConnection(*conn);
return conn;
}
-static RegisterStoreImplementation regStore([](
- const std::string & uri, const Store::Params & params)
- -> std::shared_ptr<Store>
-{
- if (std::string(uri, 0, uriScheme.size()) != uriScheme) return 0;
- return std::make_shared<SSHStore>(std::string(uri, uriScheme.size()), params);
-});
+static RegisterStoreImplementation<SSHStore, SSHStoreConfig> regStore;
}
diff --git a/src/libstore/store-api.cc b/src/libstore/store-api.cc
index 140bdfde7..aed43543f 100644
--- a/src/libstore/store-api.cc
+++ b/src/libstore/store-api.cc
@@ -1,16 +1,14 @@
#include "crypto.hh"
+#include "fs-accessor.hh"
#include "globals.hh"
#include "store-api.hh"
#include "util.hh"
#include "nar-info-disk-cache.hh"
#include "thread-pool.hh"
#include "json.hh"
-#include "derivations.hh"
#include "url.hh"
#include "archive.hh"
-
-#include <future>
-
+#include "callback.hh"
namespace nix {
@@ -140,21 +138,28 @@ StorePathWithOutputs Store::followLinksToStorePathWithOutputs(std::string_view p
*/
-StorePath Store::makeStorePath(const string & type,
- const Hash & hash, std::string_view name) const
+StorePath Store::makeStorePath(std::string_view type,
+ std::string_view hash, std::string_view name) const
{
/* e.g., "source:sha256:1abc...:/nix/store:foo.tar.gz" */
- string s = type + ":" + hash.to_string(Base16, true) + ":" + storeDir + ":" + std::string(name);
+ string s = std::string { type } + ":" + std::string { hash }
+ + ":" + storeDir + ":" + std::string { name };
auto h = compressHash(hashString(htSHA256, s), 20);
return StorePath(h, name);
}
-StorePath Store::makeOutputPath(const string & id,
+StorePath Store::makeStorePath(std::string_view type,
+ const Hash & hash, std::string_view name) const
+{
+ return makeStorePath(type, hash.to_string(Base16, true), name);
+}
+
+
+StorePath Store::makeOutputPath(std::string_view id,
const Hash & hash, std::string_view name) const
{
- return makeStorePath("output:" + id, hash,
- std::string(name) + (id == "out" ? "" : "-" + id));
+ return makeStorePath("output:" + std::string { id }, hash, outputPathName(name, id));
}
@@ -339,7 +344,7 @@ ValidPathInfo Store::addToStoreSlow(std::string_view name, const Path & srcPath,
Store::Store(const Params & params)
- : Config(params)
+ : StoreConfig(params)
, state({(size_t) pathInfoCacheSize})
{
}
@@ -359,6 +364,17 @@ bool Store::PathInfoCacheValue::isKnownNow()
return std::chrono::steady_clock::now() < time_point + ttl;
}
+OutputPathMap Store::queryDerivationOutputMap(const StorePath & path) {
+ auto resp = queryPartialDerivationOutputMap(path);
+ OutputPathMap result;
+ for (auto & [outName, optOutPath] : resp) {
+ if (!optOutPath)
+ throw Error("output '%s' has no store path mapped to it", outName);
+ result.insert_or_assign(outName, *optOutPath);
+ }
+ return result;
+}
+
StorePathSet Store::queryDerivationOutputs(const StorePath & path)
{
auto outputMap = this->queryDerivationOutputMap(path);
@@ -988,6 +1004,25 @@ Strings ValidPathInfo::shortRefs() const
}
+Derivation Store::derivationFromPath(const StorePath & drvPath)
+{
+ ensurePath(drvPath);
+ return readDerivation(drvPath);
+}
+
+
+Derivation Store::readDerivation(const StorePath & drvPath)
+{
+ auto accessor = getFSAccessor();
+ try {
+ return parseDerivation(*this,
+ accessor->readFile(printStorePath(drvPath)),
+ Derivation::nameFromPath(drvPath));
+ } catch (FormatError & e) {
+ throw Error("error parsing derivation '%s': %s", printStorePath(drvPath), e.msg());
+ }
+}
+
}
@@ -997,9 +1032,6 @@ Strings ValidPathInfo::shortRefs() const
namespace nix {
-
-RegisterStoreImplementation::Implementations * RegisterStoreImplementation::implementations = 0;
-
/* Split URI into protocol+hierarchy part and its parameter set. */
std::pair<std::string, Store::Params> splitUriAndParams(const std::string & uri_)
{
@@ -1013,24 +1045,6 @@ std::pair<std::string, Store::Params> splitUriAndParams(const std::string & uri_
return {uri, params};
}
-ref<Store> openStore(const std::string & uri_,
- const Store::Params & extraParams)
-{
- auto [uri, uriParams] = splitUriAndParams(uri_);
- auto params = extraParams;
- params.insert(uriParams.begin(), uriParams.end());
-
- for (auto fun : *RegisterStoreImplementation::implementations) {
- auto store = fun(uri, params);
- if (store) {
- store->warnUnknownSettings();
- return ref<Store>(store);
- }
- }
-
- throw Error("don't know how to open Nix store '%s'", uri);
-}
-
static bool isNonUriPath(const std::string & spec) {
return
// is not a URL
@@ -1040,44 +1054,62 @@ static bool isNonUriPath(const std::string & spec) {
&& spec.find("/") != std::string::npos;
}
-StoreType getStoreType(const std::string & uri, const std::string & stateDir)
+std::shared_ptr<Store> openFromNonUri(const std::string & uri, const Store::Params & params)
{
- if (uri == "daemon") {
- return tDaemon;
- } else if (uri == "local" || isNonUriPath(uri)) {
- return tLocal;
- } else if (uri == "" || uri == "auto") {
+ if (uri == "" || uri == "auto") {
+ auto stateDir = get(params, "state").value_or(settings.nixStateDir);
if (access(stateDir.c_str(), R_OK | W_OK) == 0)
- return tLocal;
+ return std::make_shared<LocalStore>(params);
else if (pathExists(settings.nixDaemonSocketFile))
- return tDaemon;
+ return std::make_shared<UDSRemoteStore>(params);
else
- return tLocal;
+ return std::make_shared<LocalStore>(params);
+ } else if (uri == "daemon") {
+ return std::make_shared<UDSRemoteStore>(params);
+ } else if (uri == "local") {
+ return std::make_shared<LocalStore>(params);
+ } else if (isNonUriPath(uri)) {
+ Store::Params params2 = params;
+ params2["root"] = absPath(uri);
+ return std::make_shared<LocalStore>(params2);
} else {
- return tOther;
+ return nullptr;
}
}
-
-static RegisterStoreImplementation regStore([](
- const std::string & uri, const Store::Params & params)
- -> std::shared_ptr<Store>
+ref<Store> openStore(const std::string & uri_,
+ const Store::Params & extraParams)
{
- switch (getStoreType(uri, get(params, "state").value_or(settings.nixStateDir))) {
- case tDaemon:
- return std::shared_ptr<Store>(std::make_shared<UDSRemoteStore>(params));
- case tLocal: {
- Store::Params params2 = params;
- if (isNonUriPath(uri)) {
- params2["root"] = absPath(uri);
+ auto params = extraParams;
+ try {
+ auto parsedUri = parseURL(uri_);
+ params.insert(parsedUri.query.begin(), parsedUri.query.end());
+
+ auto baseURI = parsedUri.authority.value_or("") + parsedUri.path;
+
+ for (auto implem : *Implementations::registered) {
+ if (implem.uriSchemes.count(parsedUri.scheme)) {
+ auto store = implem.create(parsedUri.scheme, baseURI, params);
+ if (store) {
+ store->init();
+ store->warnUnknownSettings();
+ return ref<Store>(store);
+ }
}
- return std::shared_ptr<Store>(std::make_shared<LocalStore>(params2));
}
- default:
- return nullptr;
}
-});
+ catch (BadURL &) {
+ auto [uri, uriParams] = splitUriAndParams(uri_);
+ params.insert(uriParams.begin(), uriParams.end());
+ if (auto store = openFromNonUri(uri, params)) {
+ store->warnUnknownSettings();
+ return ref<Store>(store);
+ }
+ }
+
+ throw Error("don't know how to open Nix store '%s'", uri_);
+}
std::list<ref<Store>> getDefaultSubstituters()
{
@@ -1111,5 +1143,6 @@ std::list<ref<Store>> getDefaultSubstituters()
return stores;
}
+std::vector<StoreFactory> * Implementations::registered = 0;
}
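`openStore` now dispatches on the URI scheme: the query string is merged into `params`, each registered `StoreFactory` whose `uriSchemes` contains the scheme gets a chance to create the store, and only a `BadURL` (e.g. `auto`, `daemon`, `local`, or a plain path) falls through to `openFromNonUri`. A hedged usage sketch; the host, key path, and directory below are illustrative and not part of this patch:

```cpp
#include "store-api.hh"

using namespace nix;

void openSomeStores()
{
    // Scheme-based dispatch: "ssh-ng" is claimed by SSHStore::uriSchemes(),
    // and the ?ssh-key=... query parameter ends up in params.
    auto remote = openStore("ssh-ng://builder.example.org?ssh-key=/etc/nix/id_ed25519");

    // Not a URL: parseURL() throws BadURL, openFromNonUri() sees a plain
    // path and opens a LocalStore with params["root"] = absPath(uri).
    auto chrooted = openStore("/tmp/nix-test-root");

    // No registered factory claims this scheme, so openStore() throws
    // "don't know how to open Nix store '...'".
    // auto unknown = openStore("gopher://example.org");
}
```

Since `openStore` now calls `init()` and `warnUnknownSettings()` itself, the per-implementation registration lambdas that used to do this (see the removed code in http-binary-cache-store.cc and s3-binary-cache-store.cc above) are no longer needed.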
diff --git a/src/libstore/store-api.hh b/src/libstore/store-api.hh
index 89d742b60..261908a6e 100644
--- a/src/libstore/store-api.hh
+++ b/src/libstore/store-api.hh
@@ -24,6 +24,31 @@
namespace nix {
+/**
+ * About the class hierarchy of the store implementations:
+ *
+ * Each store type `Foo` consists of two classes:
+ *
+ * 1. A class `FooConfig : virtual StoreConfig` that contains the configuration
+ * for the store
+ *
+ * It should only contain members of type `const Setting<T>` (or subclasses
+ * of it) and inherit the constructors of `StoreConfig`
+ * (`using StoreConfig::StoreConfig`).
+ *
+ * 2. A class `Foo : virtual Store, virtual FooConfig` that contains the
+ * implementation of the store.
+ *
+ * This class is expected to have a constructor `Foo(const Params & params)`
+ * that calls `StoreConfig(params)` (otherwise you will hit an assertion
+ * failure when trying to instantiate it).
+ *
+ * You can then register the new store using:
+ *
+ * ```cpp
+ * static RegisterStoreImplementation<Foo, FooConfig> regStore;
+ * ```
+ */
MakeError(SubstError, Error);
MakeError(BuildError, Error); // denotes a permanent build failure
@@ -33,6 +58,7 @@ MakeError(SubstituteGone, Error);
MakeError(SubstituterDisabled, Error);
MakeError(BadStorePath, Error);
+MakeError(InvalidStoreURI, Error);
class FSAccessor;
class NarInfoDiskCache;
@@ -144,12 +170,31 @@ struct BuildResult
}
};
-
-class Store : public std::enable_shared_from_this<Store>, public Config
+struct StoreConfig : public Config
{
-public:
-
- typedef std::map<std::string, std::string> Params;
+ using Config::Config;
+
+ /**
+ * When constructing a store implementation, we pass in a map `params` of
+ * parameters that's supposed to initialize the associated config.
+ * To do that, we must use the `StoreConfig(StringMap & params)`
+ * constructor, so we'd like to `delete` its default constructor to enforce
+ * it.
+ *
+ * However, actually deleting it means that all the subclasses of
+ * `StoreConfig` will have their default constructor deleted (because it's
+ * supposed to call the deleted default constructor of `StoreConfig`). But
+ * because we're always using virtual inheritance, the constructors of
+ * child classes will never implicitly call this one, so deleting it will
+ * be more painful than anything else.
+ *
+ * So we `assert(false)` here to ensure at runtime that the right
+ * constructor is always called without having to redefine a custom
+ * constructor for each `*Config` class.
+ */
+ StoreConfig() { assert(false); }
+
+ virtual const std::string name() = 0;
const PathSetting storeDir_{this, false, settings.nixStore,
"store", "path to the Nix store"};
@@ -169,6 +214,14 @@ public:
"system-features",
"Optional features that the system this store builds on implements (like \"kvm\")."};
+};
+
+class Store : public std::enable_shared_from_this<Store>, public virtual StoreConfig
+{
+public:
+
+ typedef std::map<std::string, std::string> Params;
+
protected:
struct PathInfoCacheValue {
@@ -202,6 +255,11 @@ protected:
Store(const Params & params);
public:
+ /**
+ * Perform any necessary effectful operation to make the store up and
+ * running
+ */
+ virtual void init() {};
virtual ~Store() { }
@@ -249,10 +307,12 @@ public:
StorePathWithOutputs followLinksToStorePathWithOutputs(std::string_view path) const;
/* Constructs a unique store path name. */
- StorePath makeStorePath(const string & type,
+ StorePath makeStorePath(std::string_view type,
+ std::string_view hash, std::string_view name) const;
+ StorePath makeStorePath(std::string_view type,
const Hash & hash, std::string_view name) const;
- StorePath makeOutputPath(const string & id,
+ StorePath makeOutputPath(std::string_view id,
const Hash & hash, std::string_view name) const;
StorePath makeFixedOutputPath(FileIngestionMethod method,
@@ -345,9 +405,15 @@ public:
/* Query the outputs of the derivation denoted by `path'. */
virtual StorePathSet queryDerivationOutputs(const StorePath & path);
- /* Query the mapping outputName=>outputPath for the given derivation */
- virtual OutputPathMap queryDerivationOutputMap(const StorePath & path)
- { unsupported("queryDerivationOutputMap"); }
+ /* Query the mapping outputName => outputPath for the given derivation. All
+ outputs are mentioned, so those without a known path map to
+ `std::nullopt`. */
+ virtual std::map<std::string, std::optional<StorePath>> queryPartialDerivationOutputMap(const StorePath & path)
+ { unsupported("queryPartialDerivationOutputMap"); }
+
+ /* Query the mapping outputName=>outputPath for the given derivation.
+ Assume every output has a mapping and throw an exception otherwise. */
+ OutputPathMap queryDerivationOutputMap(const StorePath & path);
/* Query the full store path given the hash part of a valid store
path, or empty if the path doesn't exist. */
@@ -620,22 +686,25 @@ protected:
};
-
-class LocalFSStore : public virtual Store
+struct LocalFSStoreConfig : virtual StoreConfig
{
-public:
-
- // FIXME: the (Store*) cast works around a bug in gcc that causes
+ using StoreConfig::StoreConfig;
+ // FIXME: the (StoreConfig*) cast works around a bug in gcc that causes
// it to omit the call to the Setting constructor. Clang works fine
// either way.
- const PathSetting rootDir{(Store*) this, true, "",
+ const PathSetting rootDir{(StoreConfig*) this, true, "",
"root", "directory prefixed to all other paths"};
- const PathSetting stateDir{(Store*) this, false,
+ const PathSetting stateDir{(StoreConfig*) this, false,
rootDir != "" ? rootDir + "/nix/var/nix" : settings.nixStateDir,
"state", "directory where Nix will store state"};
- const PathSetting logDir{(Store*) this, false,
+ const PathSetting logDir{(StoreConfig*) this, false,
rootDir != "" ? rootDir + "/nix/var/log/nix" : settings.nixLogDir,
"log", "directory where Nix will store state"};
+};
+
+class LocalFSStore : public virtual Store, public virtual LocalFSStoreConfig
+{
+public:
const static string drvsLogDir;
@@ -645,8 +714,7 @@ public:
ref<FSAccessor> getFSAccessor() override;
/* Register a permanent GC root. */
- Path addPermRoot(const StorePath & storePath,
- const Path & gcRoot, bool indirect, bool allowOutsideRootsDir = false);
+ Path addPermRoot(const StorePath & storePath, const Path & gcRoot);
virtual Path getRealStoreDir() { return storeDir; }
@@ -725,39 +793,49 @@ ref<Store> openStore(const std::string & uri = settings.storeUri.get(),
const Store::Params & extraParams = Store::Params());
-enum StoreType {
- tDaemon,
- tLocal,
- tOther
-};
-
-
-StoreType getStoreType(const std::string & uri = settings.storeUri.get(),
- const std::string & stateDir = settings.nixStateDir);
-
/* Return the default substituter stores, defined by the
‘substituters’ option and various legacy options. */
std::list<ref<Store>> getDefaultSubstituters();
+struct StoreFactory
+{
+ std::set<std::string> uriSchemes;
+ std::function<std::shared_ptr<Store> (const std::string & scheme, const std::string & uri, const Store::Params & params)> create;
+ std::function<std::shared_ptr<StoreConfig> ()> getConfig;
+};
+struct Implementations
+{
+ static std::vector<StoreFactory> * registered;
-/* Store implementation registration. */
-typedef std::function<std::shared_ptr<Store>(
- const std::string & uri, const Store::Params & params)> OpenStore;
+ template<typename T, typename TConfig>
+ static void add()
+ {
+ if (!registered) registered = new std::vector<StoreFactory>();
+ StoreFactory factory{
+ .uriSchemes = T::uriSchemes(),
+ .create =
+ ([](const std::string & scheme, const std::string & uri, const Store::Params & params)
+ -> std::shared_ptr<Store>
+ { return std::make_shared<T>(scheme, uri, params); }),
+ .getConfig =
+ ([]()
+ -> std::shared_ptr<StoreConfig>
+ { return std::make_shared<TConfig>(StringMap({})); })
+ };
+ registered->push_back(factory);
+ }
+};
+template<typename T, typename TConfig>
struct RegisterStoreImplementation
{
- typedef std::vector<OpenStore> Implementations;
- static Implementations * implementations;
-
- RegisterStoreImplementation(OpenStore fun)
+ RegisterStoreImplementation()
{
- if (!implementations) implementations = new Implementations;
- implementations->push_back(fun);
+ Implementations::add<T, TConfig>();
}
};
-
/* Display a set of paths in human-readable form (i.e., between quotes
and separated by commas). */
string showPaths(const PathSet & paths);
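The comment block added at the top of store-api.hh describes the new two-class pattern: a `FooConfig` that holds the settings and a `Foo` that holds the behaviour, tied together by `RegisterStoreImplementation`. As a concrete illustration, a skeleton following that recipe might look like the following; `ExampleStore`, `ExampleStoreConfig`, and the `greeting` setting are hypothetical, and the remaining pure-virtual `Store` methods are elided, so this will not compile until they are implemented:

```cpp
#include "store-api.hh"

namespace nix {

struct ExampleStoreConfig : virtual StoreConfig
{
    using StoreConfig::StoreConfig; // inherit the map-based constructor

    const Setting<std::string> greeting{(StoreConfig*) this, "hello",
        "greeting", "a purely illustrative setting"};

    const std::string name() override { return "Example Store"; }
};

struct ExampleStore : virtual Store, virtual ExampleStoreConfig
{
    ExampleStore(const std::string & scheme, const std::string & uri, const Params & params)
        : StoreConfig(params) // explicit call avoids the assert(false) above
        , Store(params)
    { }

    static std::set<std::string> uriSchemes() { return {"example"}; }

    std::string getUri() override { return "example://"; }

    // ...the remaining pure-virtual Store methods (queryPathInfoUncached,
    // addToStore, buildPaths, ...) would have to be implemented here...
};

static RegisterStoreImplementation<ExampleStore, ExampleStoreConfig> regStore;

}
```

The constructor signature `(scheme, uri, params)` is what `Implementations::add` passes through its `create` lambda, and `uriSchemes()` is what `openStore` matches against the parsed URI scheme.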
diff --git a/src/libstore/worker-protocol.hh b/src/libstore/worker-protocol.hh
index 5eddaff56..13cf8d4ab 100644
--- a/src/libstore/worker-protocol.hh
+++ b/src/libstore/worker-protocol.hh
@@ -70,10 +70,84 @@ template<class T> T readStorePaths(const Store & store, Source & from);
void writeStorePaths(const Store & store, Sink & out, const StorePathSet & paths);
+/* To guide overloading */
+template<typename T>
+struct Phantom {};
+
+
+namespace worker_proto {
+/* FIXME maybe move more stuff inside here */
+
+StorePath read(const Store & store, Source & from, Phantom<StorePath> _);
+void write(const Store & store, Sink & out, const StorePath & storePath);
+
+template<typename T>
+std::map<std::string, T> read(const Store & store, Source & from, Phantom<std::map<std::string, T>> _);
+template<typename T>
+void write(const Store & store, Sink & out, const std::map<string, T> & resMap);
+template<typename T>
+std::optional<T> read(const Store & store, Source & from, Phantom<std::optional<T>> _);
+template<typename T>
+void write(const Store & store, Sink & out, const std::optional<T> & optVal);
+
+/* Specialization which uses an empty string for the empty case, taking
+ advantage of the fact that StorePaths always serialize to a non-empty string.
+ This is done primarily for backwards compatibility, so that StorePath <=
+ std::optional<StorePath>, where <= is the compatibility partial order.
+ */
+template<>
+void write(const Store & store, Sink & out, const std::optional<StorePath> & optVal);
+
+template<typename T>
+std::map<std::string, T> read(const Store & store, Source & from, Phantom<std::map<std::string, T>> _)
+{
+ std::map<string, T> resMap;
+ auto size = (size_t)readInt(from);
+ while (size--) {
+ auto thisKey = readString(from);
+ resMap.insert_or_assign(std::move(thisKey), nix::worker_proto::read(store, from, Phantom<T> {}));
+ }
+ return resMap;
+}
+
+template<typename T>
+void write(const Store & store, Sink & out, const std::map<string, T> & resMap)
+{
+ out << resMap.size();
+ for (auto & i : resMap) {
+ out << i.first;
+ nix::worker_proto::write(store, out, i.second);
+ }
+}
+
+template<typename T>
+std::optional<T> read(const Store & store, Source & from, Phantom<std::optional<T>> _)
+{
+ auto tag = readNum<uint8_t>(from);
+ switch (tag) {
+ case 0:
+ return std::nullopt;
+ case 1:
+ return nix::worker_proto::read(store, from, Phantom<T> {});
+ default:
+ throw Error("got an invalid tag bit for std::optional: %#04x", tag);
+ }
+}
+
+template<typename T>
+void write(const Store & store, Sink & out, const std::optional<T> & optVal)
+{
+ out << (optVal ? 1 : 0);
+ if (optVal)
+ nix::worker_proto::write(store, out, *optVal);
+}
+
+
+}
+
+
StorePathCAMap readStorePathCAMap(const Store & store, Source & from);
void writeStorePathCAMap(const Store & store, Sink & out, const StorePathCAMap & paths);
-void writeOutputPathMap(const Store & store, Sink & out, const OutputPathMap & paths);
-
}
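The `Phantom<T>` parameter exists only to select the right `read` overload, since overloads cannot differ by return type alone; the map and optional templates then recurse through `worker_proto::read`/`write`, and the `std::optional<StorePath>` specialization keeps the empty-string wire encoding. A sketch of how a caller is expected to use this, mirroring what `queryPartialDerivationOutputMap` now exchanges; the helper names here are made up:

```cpp
#include "store-api.hh"
#include "worker-protocol.hh"

namespace nix {

// Hypothetical helpers for a map from output name to optional store path.
static std::map<std::string, std::optional<StorePath>>
readOutputMap(const Store & store, Source & from)
{
    // Phantom<...> only selects the overload to call.
    return worker_proto::read(store, from,
        Phantom<std::map<std::string, std::optional<StorePath>>> {});
}

static void writeOutputMap(const Store & store, Sink & out,
    const std::map<std::string, std::optional<StorePath>> & outputs)
{
    // std::nullopt is written as the empty string, per the
    // std::optional<StorePath> specialization declared above.
    worker_proto::write(store, out, outputs);
}

}
```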
diff --git a/src/libutil/abstract-setting-to-json.hh b/src/libutil/abstract-setting-to-json.hh
new file mode 100644
index 000000000..b3fbc84f7
--- /dev/null
+++ b/src/libutil/abstract-setting-to-json.hh
@@ -0,0 +1,15 @@
+#pragma once
+
+#include <nlohmann/json.hpp>
+#include "config.hh"
+
+namespace nix {
+template<typename T>
+std::map<std::string, nlohmann::json> BaseSetting<T>::toJSONObject()
+{
+ auto obj = AbstractSetting::toJSONObject();
+ obj.emplace("value", value);
+ obj.emplace("defaultValue", defaultValue);
+ return obj;
+}
+}
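With this new header, `BaseSetting<T>::toJSONObject` extends the `description`/`aliases` object from `AbstractSetting` with `value` and `defaultValue`, and `Config::toJSON` (see config.cc below) collects one such object per setting. A hedged sketch of the resulting shape for a single setting; the setting shown is the `max-connections` option from remote-store.hh above, but the values are made up:

```cpp
#include <iostream>
#include <nlohmann/json.hpp>

int main()
{
    // Approximate shape of one entry of Config::toJSON() after this change.
    nlohmann::json oneSetting = {
        {"description", "maximum number of concurrent connections to the Nix daemon"},
        {"aliases", nlohmann::json::array()},   // from AbstractSetting::toJSONObject()
        {"value", 25},                          // added by BaseSetting<T>::toJSONObject()
        {"defaultValue", 1},
    };
    std::cout << nlohmann::json{{"max-connections", oneSetting}}.dump(2) << std::endl;
}
```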
diff --git a/src/libutil/args.cc b/src/libutil/args.cc
index 986c5d1cd..147602415 100644
--- a/src/libutil/args.cc
+++ b/src/libutil/args.cc
@@ -3,6 +3,8 @@
#include <glob.h>
+#include <nlohmann/json.hpp>
+
namespace nix {
void Args::addFlag(Flag && flag_)
@@ -205,6 +207,43 @@ bool Args::processArgs(const Strings & args, bool finish)
return res;
}
+nlohmann::json Args::toJSON()
+{
+ auto flags = nlohmann::json::object();
+
+ for (auto & [name, flag] : longFlags) {
+ auto j = nlohmann::json::object();
+ if (flag->shortName)
+ j["shortName"] = std::string(1, flag->shortName);
+ if (flag->description != "")
+ j["description"] = flag->description;
+ if (flag->category != "")
+ j["category"] = flag->category;
+ if (flag->handler.arity != ArityAny)
+ j["arity"] = flag->handler.arity;
+ if (!flag->labels.empty())
+ j["labels"] = flag->labels;
+ flags[name] = std::move(j);
+ }
+
+ auto args = nlohmann::json::array();
+
+ for (auto & arg : expectedArgs) {
+ auto j = nlohmann::json::object();
+ j["label"] = arg.label;
+ j["optional"] = arg.optional;
+ if (arg.handler.arity != ArityAny)
+ j["arity"] = arg.handler.arity;
+ args.push_back(std::move(j));
+ }
+
+ auto res = nlohmann::json::object();
+ res["description"] = description();
+ res["flags"] = std::move(flags);
+ res["args"] = std::move(args);
+ return res;
+}
+
static void hashTypeCompleter(size_t index, std::string_view prefix)
{
for (auto & type : hashTypes)
@@ -313,11 +352,29 @@ void Command::printHelp(const string & programName, std::ostream & out)
}
}
+nlohmann::json Command::toJSON()
+{
+ auto exs = nlohmann::json::array();
+
+ for (auto & example : examples()) {
+ auto ex = nlohmann::json::object();
+ ex["description"] = example.description;
+ ex["command"] = chomp(stripIndentation(example.command));
+ exs.push_back(std::move(ex));
+ }
+
+ auto res = Args::toJSON();
+ res["examples"] = std::move(exs);
+ auto s = doc();
+ if (s != "") res.emplace("doc", stripIndentation(s));
+ return res;
+}
+
MultiCommand::MultiCommand(const Commands & commands)
: commands(commands)
{
expectArgs({
- .label = "command",
+ .label = "subcommand",
.optional = true,
.handler = {[=](std::string s) {
assert(!command);
@@ -387,4 +444,20 @@ bool MultiCommand::processArgs(const Strings & args, bool finish)
return Args::processArgs(args, finish);
}
+nlohmann::json MultiCommand::toJSON()
+{
+ auto cmds = nlohmann::json::object();
+
+ for (auto & [name, commandFun] : commands) {
+ auto command = commandFun();
+ auto j = command->toJSON();
+ j["category"] = categories[command->category()];
+ cmds[name] = std::move(j);
+ }
+
+ auto res = Args::toJSON();
+ res["commands"] = std::move(cmds);
+ return res;
+}
+
}
diff --git a/src/libutil/args.hh b/src/libutil/args.hh
index 97a517344..3c1f87f7e 100644
--- a/src/libutil/args.hh
+++ b/src/libutil/args.hh
@@ -4,6 +4,8 @@
#include <map>
#include <memory>
+#include <nlohmann/json_fwd.hpp>
+
#include "util.hh"
namespace nix {
@@ -20,6 +22,7 @@ public:
virtual void printHelp(const string & programName, std::ostream & out);
+ /* Return a short one-line description of the command. */
virtual std::string description() { return ""; }
protected:
@@ -203,6 +206,8 @@ public:
});
}
+ virtual nlohmann::json toJSON();
+
friend class MultiCommand;
};
@@ -217,6 +222,9 @@ struct Command : virtual Args
virtual void prepare() { };
virtual void run() = 0;
+ /* Return documentation about this command, in Markdown format. */
+ virtual std::string doc() { return ""; }
+
struct Example
{
std::string description;
@@ -234,6 +242,8 @@ struct Command : virtual Args
virtual Category category() { return catDefault; }
void printHelp(const string & programName, std::ostream & out) override;
+
+ nlohmann::json toJSON() override;
};
typedef std::map<std::string, std::function<ref<Command>()>> Commands;
@@ -259,6 +269,8 @@ public:
bool processFlag(Strings::iterator & pos, Strings::iterator end) override;
bool processArgs(const Strings & args, bool finish) override;
+
+ nlohmann::json toJSON() override;
};
Strings argvToStrings(int argc, char * * argv);
diff --git a/src/libutil/callback.hh b/src/libutil/callback.hh
new file mode 100644
index 000000000..ef31794be
--- /dev/null
+++ b/src/libutil/callback.hh
@@ -0,0 +1,46 @@
+#pragma once
+
+#include <future>
+#include <functional>
+
+namespace nix {
+
+/* A callback is a wrapper around a lambda that accepts a value of
+ type T or an exception. (We abuse std::future<T> to pass the value or
+ exception.) */
+template<typename T>
+class Callback
+{
+ std::function<void(std::future<T>)> fun;
+ std::atomic_flag done = ATOMIC_FLAG_INIT;
+
+public:
+
+ Callback(std::function<void(std::future<T>)> fun) : fun(fun) { }
+
+ Callback(Callback && callback) : fun(std::move(callback.fun))
+ {
+ auto prev = callback.done.test_and_set();
+ if (prev) done.test_and_set();
+ }
+
+ void operator()(T && t) noexcept
+ {
+ auto prev = done.test_and_set();
+ assert(!prev);
+ std::promise<T> promise;
+ promise.set_value(std::move(t));
+ fun(promise.get_future());
+ }
+
+ void rethrow(const std::exception_ptr & exc = std::current_exception()) noexcept
+ {
+ auto prev = done.test_and_set();
+ assert(!prev);
+ std::promise<T> promise;
+ promise.set_exception(exc);
+ fun(promise.get_future());
+ }
+};
+
+}
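
Not part of the patch: a self-contained sketch of the pattern Callback<T> wraps, i.e. delivering either a value or an exception to a consumer through std::future<T>. The names deliver and consumer are invented for illustration.

#include <functional>
#include <future>
#include <iostream>
#include <stdexcept>

// Hand either a value or an exception to the consumer via a ready future.
static void deliver(std::function<void(std::future<int>)> consumer, bool fail)
{
    std::promise<int> promise;
    if (fail)
        promise.set_exception(std::make_exception_ptr(std::runtime_error("boom")));
    else
        promise.set_value(42);
    consumer(promise.get_future());
}

int main()
{
    auto consumer = [](std::future<int> f) {
        try { std::cout << "got " << f.get() << "\n"; }
        catch (const std::exception & e) { std::cout << "error: " << e.what() << "\n"; }
    };
    deliver(consumer, false);  // prints "got 42"
    deliver(consumer, true);   // prints "error: boom"
}
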
diff --git a/src/libutil/config.cc b/src/libutil/config.cc
index 8fc700a2b..5e6a211df 100644
--- a/src/libutil/config.cc
+++ b/src/libutil/config.cc
@@ -1,6 +1,8 @@
#include "config.hh"
#include "args.hh"
-#include "json.hh"
+#include "abstract-setting-to-json.hh"
+
+#include <nlohmann/json.hpp>
namespace nix {
@@ -131,15 +133,14 @@ void Config::resetOverriden()
s.second.setting->overriden = false;
}
-void Config::toJSON(JSONObject & out)
+nlohmann::json Config::toJSON()
{
+ auto res = nlohmann::json::object();
for (auto & s : _settings)
if (!s.second.isAlias) {
- JSONObject out2(out.object(s.first));
- out2.attr("description", s.second.setting->description);
- JSONPlaceholder out3(out2.placeholder("value"));
- s.second.setting->toJSON(out3);
+ res.emplace(s.first, s.second.setting->toJSON());
}
+ return res;
}
void Config::convertToArgs(Args & args, const std::string & category)
@@ -153,7 +154,7 @@ AbstractSetting::AbstractSetting(
const std::string & name,
const std::string & description,
const std::set<std::string> & aliases)
- : name(name), description(description), aliases(aliases)
+ : name(name), description(stripIndentation(description)), aliases(aliases)
{
}
@@ -162,19 +163,21 @@ void AbstractSetting::setDefault(const std::string & str)
if (!overriden) set(str);
}
-void AbstractSetting::toJSON(JSONPlaceholder & out)
+nlohmann::json AbstractSetting::toJSON()
{
- out.write(to_string());
+ return nlohmann::json(toJSONObject());
}
-void AbstractSetting::convertToArg(Args & args, const std::string & category)
+std::map<std::string, nlohmann::json> AbstractSetting::toJSONObject()
{
+ std::map<std::string, nlohmann::json> obj;
+ obj.emplace("description", description);
+ obj.emplace("aliases", aliases);
+ return obj;
}
-template<typename T>
-void BaseSetting<T>::toJSON(JSONPlaceholder & out)
+void AbstractSetting::convertToArg(Args & args, const std::string & category)
{
- out.write(value);
}
template<typename T>
@@ -255,13 +258,6 @@ template<> std::string BaseSetting<Strings>::to_string() const
return concatStringsSep(" ", value);
}
-template<> void BaseSetting<Strings>::toJSON(JSONPlaceholder & out)
-{
- JSONList list(out.list());
- for (auto & s : value)
- list.elem(s);
-}
-
template<> void BaseSetting<StringSet>::set(const std::string & str)
{
value = tokenizeString<StringSet>(str);
@@ -272,13 +268,6 @@ template<> std::string BaseSetting<StringSet>::to_string() const
return concatStringsSep(" ", value);
}
-template<> void BaseSetting<StringSet>::toJSON(JSONPlaceholder & out)
-{
- JSONList list(out.list());
- for (auto & s : value)
- list.elem(s);
-}
-
template class BaseSetting<int>;
template class BaseSetting<unsigned int>;
template class BaseSetting<long>;
@@ -323,10 +312,12 @@ void GlobalConfig::resetOverriden()
config->resetOverriden();
}
-void GlobalConfig::toJSON(JSONObject & out)
+nlohmann::json GlobalConfig::toJSON()
{
+ auto res = nlohmann::json::object();
for (auto & config : *configRegistrations)
- config->toJSON(out);
+ res.update(config->toJSON());
+ return res;
}
void GlobalConfig::convertToArgs(Args & args, const std::string & category)
diff --git a/src/libutil/config.hh b/src/libutil/config.hh
index 66073546e..1f5f4e7b9 100644
--- a/src/libutil/config.hh
+++ b/src/libutil/config.hh
@@ -4,6 +4,8 @@
#include "types.hh"
+#include <nlohmann/json_fwd.hpp>
+
#pragma once
namespace nix {
@@ -42,8 +44,6 @@ namespace nix {
class Args;
class AbstractSetting;
-class JSONPlaceholder;
-class JSONObject;
class AbstractConfig
{
@@ -97,7 +97,7 @@ public:
* Outputs all settings to JSON
* - out: JSONObject to write the configuration to
*/
- virtual void toJSON(JSONObject & out) = 0;
+ virtual nlohmann::json toJSON() = 0;
/**
* Converts settings to `Args` to be used on the command line interface
@@ -167,7 +167,7 @@ public:
void resetOverriden() override;
- void toJSON(JSONObject & out) override;
+ nlohmann::json toJSON() override;
void convertToArgs(Args & args, const std::string & category) override;
};
@@ -206,7 +206,9 @@ protected:
virtual std::string to_string() const = 0;
- virtual void toJSON(JSONPlaceholder & out);
+ nlohmann::json toJSON();
+
+ virtual std::map<std::string, nlohmann::json> toJSONObject();
virtual void convertToArg(Args & args, const std::string & category);
@@ -220,6 +222,7 @@ class BaseSetting : public AbstractSetting
protected:
T value;
+ const T defaultValue;
public:
@@ -229,6 +232,7 @@ public:
const std::set<std::string> & aliases = {})
: AbstractSetting(name, description, aliases)
, value(def)
+ , defaultValue(def)
{ }
operator const T &() const { return value; }
@@ -251,7 +255,7 @@ public:
void convertToArg(Args & args, const std::string & category) override;
- void toJSON(JSONPlaceholder & out) override;
+ std::map<std::string, nlohmann::json> toJSONObject() override;
};
template<typename T>
@@ -319,7 +323,7 @@ struct GlobalConfig : public AbstractConfig
void resetOverriden() override;
- void toJSON(JSONObject & out) override;
+ nlohmann::json toJSON() override;
void convertToArgs(Args & args, const std::string & category) override;
diff --git a/src/libutil/fmt.hh b/src/libutil/fmt.hh
index a39de041f..6e69bdce2 100644
--- a/src/libutil/fmt.hh
+++ b/src/libutil/fmt.hh
@@ -1,7 +1,6 @@
#pragma once
#include <boost/format.hpp>
-#include <boost/algorithm/string/replace.hpp>
#include <string>
#include "ansicolor.hh"
diff --git a/src/libutil/logging.cc b/src/libutil/logging.cc
index 832aee783..cbbf64395 100644
--- a/src/libutil/logging.cc
+++ b/src/libutil/logging.cc
@@ -184,6 +184,33 @@ struct JSONLogger : Logger {
json["action"] = "msg";
json["level"] = ei.level;
json["msg"] = oss.str();
+ json["raw_msg"] = ei.hint->str();
+
+ if (ei.errPos.has_value() && (*ei.errPos)) {
+ json["line"] = ei.errPos->line;
+ json["column"] = ei.errPos->column;
+ json["file"] = ei.errPos->file;
+ } else {
+ json["line"] = nullptr;
+ json["column"] = nullptr;
+ json["file"] = nullptr;
+ }
+
+ if (loggerSettings.showTrace.get() && !ei.traces.empty()) {
+ nlohmann::json traces = nlohmann::json::array();
+ for (auto iter = ei.traces.rbegin(); iter != ei.traces.rend(); ++iter) {
+ nlohmann::json stackFrame;
+ stackFrame["raw_msg"] = iter->hint.str();
+ if (iter->pos.has_value() && (*iter->pos)) {
+ stackFrame["line"] = iter->pos->line;
+ stackFrame["column"] = iter->pos->column;
+ stackFrame["file"] = iter->pos->file;
+ }
+ traces.push_back(stackFrame);
+ }
+
+ json["trace"] = traces;
+ }
write(json);
}
diff --git a/src/libutil/logging.hh b/src/libutil/logging.hh
index 09619aac6..63cb2b268 100644
--- a/src/libutil/logging.hh
+++ b/src/libutil/logging.hh
@@ -37,10 +37,12 @@ typedef uint64_t ActivityId;
struct LoggerSettings : Config
{
- Setting<bool> showTrace{this,
- false,
- "show-trace",
- "Whether to show a stack trace on evaluation errors."};
+ Setting<bool> showTrace{
+ this, false, "show-trace",
+ R"(
+ Whether Nix should print out a stack trace in case of Nix
+ expression evaluation errors.
+ )"};
};
extern LoggerSettings loggerSettings;
diff --git a/src/libutil/serialise.hh b/src/libutil/serialise.hh
index 69ae0874a..6f4f4c855 100644
--- a/src/libutil/serialise.hh
+++ b/src/libutil/serialise.hh
@@ -22,8 +22,15 @@ struct Sink
}
};
+/* Just throws away data. */
+struct NullSink : Sink
+{
+ void operator () (const unsigned char * data, size_t len) override
+ { }
+};
-/* A buffered abstract sink. */
+/* A buffered abstract sink. Warning: a BufferedSink should not be
+ used from multiple threads concurrently. */
struct BufferedSink : virtual Sink
{
size_t bufSize, bufPos;
@@ -66,7 +73,8 @@ struct Source
};
-/* A buffered abstract source. */
+/* A buffered abstract source. Warning: a BufferedSource should not be
+ used from multiple threads concurrently. */
struct BufferedSource : Source
{
size_t bufSize, bufPosIn, bufPosOut;
diff --git a/src/libutil/tests/config.cc b/src/libutil/tests/config.cc
index 74c59fd31..c7777a21f 100644
--- a/src/libutil/tests/config.cc
+++ b/src/libutil/tests/config.cc
@@ -1,9 +1,9 @@
-#include "json.hh"
#include "config.hh"
#include "args.hh"
#include <sstream>
#include <gtest/gtest.h>
+#include <nlohmann/json.hpp>
namespace nix {
@@ -33,7 +33,7 @@ namespace nix {
const auto iter = settings.find("name-of-the-setting");
ASSERT_NE(iter, settings.end());
ASSERT_EQ(iter->second.value, "");
- ASSERT_EQ(iter->second.description, "description");
+ ASSERT_EQ(iter->second.description, "description\n");
}
TEST(Config, getDefinedOverridenSettingNotSet) {
@@ -59,7 +59,7 @@ namespace nix {
const auto iter = settings.find("name-of-the-setting");
ASSERT_NE(iter, settings.end());
ASSERT_EQ(iter->second.value, "value");
- ASSERT_EQ(iter->second.description, "description");
+ ASSERT_EQ(iter->second.description, "description\n");
}
TEST(Config, getDefinedSettingSet2) {
@@ -73,7 +73,7 @@ namespace nix {
const auto e = settings.find("name-of-the-setting");
ASSERT_NE(e, settings.end());
ASSERT_EQ(e->second.value, "value");
- ASSERT_EQ(e->second.description, "description");
+ ASSERT_EQ(e->second.description, "description\n");
}
TEST(Config, addSetting) {
@@ -152,29 +152,16 @@ namespace nix {
}
TEST(Config, toJSONOnEmptyConfig) {
- std::stringstream out;
- { // Scoped to force the destructor of JSONObject to write the final `}`
- JSONObject obj(out);
- Config config;
- config.toJSON(obj);
- }
-
- ASSERT_EQ(out.str(), "{}");
+ ASSERT_EQ(Config().toJSON().dump(), "{}");
}
TEST(Config, toJSONOnNonEmptyConfig) {
- std::stringstream out;
- { // Scoped to force the destructor of JSONObject to write the final `}`
- JSONObject obj(out);
-
- Config config;
- std::map<std::string, Config::SettingInfo> settings;
- Setting<std::string> setting{&config, "", "name-of-the-setting", "description"};
- setting.assign("value");
+ Config config;
+ std::map<std::string, Config::SettingInfo> settings;
+ Setting<std::string> setting{&config, "", "name-of-the-setting", "description"};
+ setting.assign("value");
- config.toJSON(obj);
- }
- ASSERT_EQ(out.str(), R"#({"name-of-the-setting":{"description":"description","value":"value"}})#");
+ ASSERT_EQ(config.toJSON().dump(), R"#({"name-of-the-setting":{"aliases":[],"defaultValue":"","description":"description\n","value":"value"}})#");
}
TEST(Config, setSettingAlias) {
diff --git a/src/libutil/tests/logging.cc b/src/libutil/tests/logging.cc
index ad588055f..7e53f17c6 100644
--- a/src/libutil/tests/logging.cc
+++ b/src/libutil/tests/logging.cc
@@ -34,6 +34,24 @@ namespace nix {
}
}
+ TEST(logEI, jsonOutput) {
+ SymbolTable testTable;
+ auto problem_file = testTable.create("random.nix");
+ testing::internal::CaptureStderr();
+
+ makeJSONLogger(*logger)->logEI({
+ .name = "error name",
+ .description = "error without any code lines.",
+ .hint = hintfmt("this hint has %1% templated %2%!!",
+ "yellow",
+ "values"),
+ .errPos = Pos(foFile, problem_file, 02, 13)
+ });
+
+ auto str = testing::internal::GetCapturedStderr();
+ ASSERT_STREQ(str.c_str(), "\x1B[31;1merror:\x1B[0m\x1B[34;1m --- SysError --- error-unit-test\x1B[0m\nopening file '\x1B[33;1mrandom.nix\x1B[0m': \x1B[33;1mNo such file or directory\x1B[0m\n@nix {\"action\":\"msg\",\"column\":13,\"file\":\"random.nix\",\"level\":0,\"line\":2,\"msg\":\"\\u001b[31;1merror:\\u001b[0m\\u001b[34;1m --- error name --- error-unit-test\\u001b[0m\\n\\u001b[34;1mat: \\u001b[33;1m(2:13)\\u001b[34;1m in file: \\u001b[0mrandom.nix\\n\\nerror without any code lines.\\n\\nthis hint has \\u001b[33;1myellow\\u001b[0m templated \\u001b[33;1mvalues\\u001b[0m!!\",\"raw_msg\":\"this hint has \\u001b[33;1myellow\\u001b[0m templated \\u001b[33;1mvalues\\u001b[0m!!\"}\n");
+ }
+
TEST(logEI, appendingHintsToPreviousError) {
MakeError(TestError, Error);
diff --git a/src/libutil/url-parts.hh b/src/libutil/url-parts.hh
new file mode 100644
index 000000000..64e06cfbc
--- /dev/null
+++ b/src/libutil/url-parts.hh
@@ -0,0 +1,44 @@
+#pragma once
+
+#include <string>
+#include <regex>
+
+namespace nix {
+
+// URI stuff.
+const static std::string pctEncoded = "(?:%[0-9a-fA-F][0-9a-fA-F])";
+const static std::string schemeRegex = "(?:[a-z+.-]+)";
+const static std::string ipv6AddressRegex = "(?:\\[[0-9a-fA-F:]+\\])";
+const static std::string unreservedRegex = "(?:[a-zA-Z0-9-._~])";
+const static std::string subdelimsRegex = "(?:[!$&'\"()*+,;=])";
+const static std::string hostnameRegex = "(?:(?:" + unreservedRegex + "|" + pctEncoded + "|" + subdelimsRegex + ")*)";
+const static std::string hostRegex = "(?:" + ipv6AddressRegex + "|" + hostnameRegex + ")";
+const static std::string userRegex = "(?:(?:" + unreservedRegex + "|" + pctEncoded + "|" + subdelimsRegex + "|:)*)";
+const static std::string authorityRegex = "(?:" + userRegex + "@)?" + hostRegex + "(?::[0-9]+)?";
+const static std::string pcharRegex = "(?:" + unreservedRegex + "|" + pctEncoded + "|" + subdelimsRegex + "|[:@])";
+const static std::string queryRegex = "(?:" + pcharRegex + "|[/? \"])*";
+const static std::string segmentRegex = "(?:" + pcharRegex + "+)";
+const static std::string absPathRegex = "(?:(?:/" + segmentRegex + ")*/?)";
+const static std::string pathRegex = "(?:" + segmentRegex + "(?:/" + segmentRegex + ")*/?)";
+
+// A Git ref (i.e. branch or tag name).
+const static std::string refRegexS = "[a-zA-Z0-9][a-zA-Z0-9_.-]*"; // FIXME: check
+extern std::regex refRegex;
+
+// Instead of defining what a good Git Ref is, we define what a bad Git Ref is
+// This is because of the definition of a ref in refs.c in https://github.com/git/git
+// See tests/fetchGitRefs.sh for the full definition
+const static std::string badGitRefRegexS = "//|^[./]|/\\.|\\.\\.|[[:cntrl:][:space:]:?^~\[]|\\\\|\\*|\\.lock$|\\.lock/|@\\{|[/.]$|^@$|^$";
+extern std::regex badGitRefRegex;
+
+// A Git revision (a SHA-1 commit hash).
+const static std::string revRegexS = "[0-9a-fA-F]{40}";
+extern std::regex revRegex;
+
+// A ref or revision, or a ref followed by a revision.
+const static std::string refAndOrRevRegex = "(?:(" + revRegexS + ")|(?:(" + refRegexS + ")(?:/(" + revRegexS + "))?))";
+
+const static std::string flakeIdRegexS = "[a-zA-Z][a-zA-Z0-9_-]*";
+extern std::regex flakeIdRegex;
+
+}
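
Not part of the patch: a standalone sketch showing how the Git ref regex above could be applied. It inlines refRegexS rather than linking against libutil, and the test strings are invented.

#include <iostream>
#include <regex>
#include <string>

int main()
{
    // Same pattern as refRegexS above: a Git ref must start with an alphanumeric character.
    const std::string refRegexS = "[a-zA-Z0-9][a-zA-Z0-9_.-]*";
    std::regex refRegex(refRegexS);

    for (std::string ref : {"master", "release-2.4", "-not-a-ref"})
        std::cout << ref << " -> " << (std::regex_match(ref, refRegex) ? "ok" : "rejected") << "\n";
}
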
diff --git a/src/libutil/url.cc b/src/libutil/url.cc
index 88c09eef9..c1bab866c 100644
--- a/src/libutil/url.cc
+++ b/src/libutil/url.cc
@@ -1,4 +1,5 @@
#include "url.hh"
+#include "url-parts.hh"
#include "util.hh"
namespace nix {
diff --git a/src/libutil/url.hh b/src/libutil/url.hh
index 2ef88ef2a..6e77142e3 100644
--- a/src/libutil/url.hh
+++ b/src/libutil/url.hh
@@ -2,8 +2,6 @@
#include "error.hh"
-#include <regex>
-
namespace nix {
struct ParsedURL
@@ -29,40 +27,4 @@ std::map<std::string, std::string> decodeQuery(const std::string & query);
ParsedURL parseURL(const std::string & url);
-// URI stuff.
-const static std::string pctEncoded = "(?:%[0-9a-fA-F][0-9a-fA-F])";
-const static std::string schemeRegex = "(?:[a-z+]+)";
-const static std::string ipv6AddressRegex = "(?:\\[[0-9a-fA-F:]+\\])";
-const static std::string unreservedRegex = "(?:[a-zA-Z0-9-._~])";
-const static std::string subdelimsRegex = "(?:[!$&'\"()*+,;=])";
-const static std::string hostnameRegex = "(?:(?:" + unreservedRegex + "|" + pctEncoded + "|" + subdelimsRegex + ")*)";
-const static std::string hostRegex = "(?:" + ipv6AddressRegex + "|" + hostnameRegex + ")";
-const static std::string userRegex = "(?:(?:" + unreservedRegex + "|" + pctEncoded + "|" + subdelimsRegex + "|:)*)";
-const static std::string authorityRegex = "(?:" + userRegex + "@)?" + hostRegex + "(?::[0-9]+)?";
-const static std::string pcharRegex = "(?:" + unreservedRegex + "|" + pctEncoded + "|" + subdelimsRegex + "|[:@])";
-const static std::string queryRegex = "(?:" + pcharRegex + "|[/? \"])*";
-const static std::string segmentRegex = "(?:" + pcharRegex + "+)";
-const static std::string absPathRegex = "(?:(?:/" + segmentRegex + ")*/?)";
-const static std::string pathRegex = "(?:" + segmentRegex + "(?:/" + segmentRegex + ")*/?)";
-
-// A Git ref (i.e. branch or tag name).
-const static std::string refRegexS = "[a-zA-Z0-9][a-zA-Z0-9_.-]*"; // FIXME: check
-extern std::regex refRegex;
-
-// Instead of defining what a good Git Ref is, we define what a bad Git Ref is
-// This is because of the definition of a ref in refs.c in https://github.com/git/git
-// See tests/fetchGitRefs.sh for the full definition
-const static std::string badGitRefRegexS = "//|^[./]|/\\.|\\.\\.|[[:cntrl:][:space:]:?^~\[]|\\\\|\\*|\\.lock$|\\.lock/|@\\{|[/.]$|^@$|^$";
-extern std::regex badGitRefRegex;
-
-// A Git revision (a SHA-1 commit hash).
-const static std::string revRegexS = "[0-9a-fA-F]{40}";
-extern std::regex revRegex;
-
-// A ref or revision, or a ref followed by a revision.
-const static std::string refAndOrRevRegex = "(?:(" + revRegexS + ")|(?:(" + refRegexS + ")(?:/(" + revRegexS + "))?))";
-
-const static std::string flakeIdRegexS = "[a-zA-Z][a-zA-Z0-9_-]*";
-extern std::regex flakeIdRegex;
-
}
diff --git a/src/libutil/util.cc b/src/libutil/util.cc
index c0b9698ee..9e7142e01 100644
--- a/src/libutil/util.cc
+++ b/src/libutil/util.cc
@@ -1464,6 +1464,47 @@ string base64Decode(std::string_view s)
}
+std::string stripIndentation(std::string_view s)
+{
+ size_t minIndent = 10000;
+ size_t curIndent = 0;
+ bool atStartOfLine = true;
+
+ for (auto & c : s) {
+ if (atStartOfLine && c == ' ')
+ curIndent++;
+ else if (c == '\n') {
+ if (atStartOfLine)
+ minIndent = std::max(minIndent, curIndent);
+ curIndent = 0;
+ atStartOfLine = true;
+ } else {
+ if (atStartOfLine) {
+ minIndent = std::min(minIndent, curIndent);
+ atStartOfLine = false;
+ }
+ }
+ }
+
+ std::string res;
+
+ size_t pos = 0;
+ while (pos < s.size()) {
+ auto eol = s.find('\n', pos);
+ if (eol == s.npos) eol = s.size();
+ if (eol - pos > minIndent)
+ res.append(s.substr(pos + minIndent, eol - pos - minIndent));
+ res.push_back('\n');
+ pos = eol + 1;
+ }
+
+ return res;
+}
+
+
+//////////////////////////////////////////////////////////////////////
+
+
static Sync<std::pair<unsigned short, unsigned short>> windowSize{{0, 0}};
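
Not part of the patch: a small check of what stripIndentation() above is meant to do, assuming the libutil headers are on the include path; the expected string is derived from the implementation rather than from a documented contract.

#include <cassert>
#include <string>

#include "util.hh"  // declares nix::stripIndentation

int main()
{
    // Two spaces are common to every line, so two spaces are removed from each.
    std::string stripped = nix::stripIndentation("  foo\n    bar\n  baz\n");
    assert(stripped == "foo\n  bar\nbaz\n");
}
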
diff --git a/src/libutil/util.hh b/src/libutil/util.hh
index 3a20679a8..b8e201203 100644
--- a/src/libutil/util.hh
+++ b/src/libutil/util.hh
@@ -12,13 +12,9 @@
#include <signal.h>
#include <functional>
-#include <limits>
-#include <cstdio>
#include <map>
#include <sstream>
#include <optional>
-#include <future>
-#include <iterator>
#ifndef HAVE_STRUCT_DIRENT_D_TYPE
#define DT_UNKNOWN 0
@@ -464,6 +460,12 @@ string base64Encode(std::string_view s);
string base64Decode(std::string_view s);
+/* Remove common leading whitespace from the lines in the string
+ 's'. For example, if every line is indented by at least 3 spaces,
+ then we remove 3 spaces from the start of every line. */
+std::string stripIndentation(std::string_view s);
+
+
/* Get a value for the specified key from an associative container. */
template <class T>
std::optional<typename T::mapped_type> get(const T & map, const typename T::key_type & key)
@@ -474,43 +476,8 @@ std::optional<typename T::mapped_type> get(const T & map, const typename T::key_
}
-/* A callback is a wrapper around a lambda that accepts a valid of
- type T or an exception. (We abuse std::future<T> to pass the value or
- exception.) */
template<typename T>
-class Callback
-{
- std::function<void(std::future<T>)> fun;
- std::atomic_flag done = ATOMIC_FLAG_INIT;
-
-public:
-
- Callback(std::function<void(std::future<T>)> fun) : fun(fun) { }
-
- Callback(Callback && callback) : fun(std::move(callback.fun))
- {
- auto prev = callback.done.test_and_set();
- if (prev) done.test_and_set();
- }
-
- void operator()(T && t) noexcept
- {
- auto prev = done.test_and_set();
- assert(!prev);
- std::promise<T> promise;
- promise.set_value(std::move(t));
- fun(promise.get_future());
- }
-
- void rethrow(const std::exception_ptr & exc = std::current_exception()) noexcept
- {
- auto prev = done.test_and_set();
- assert(!prev);
- std::promise<T> promise;
- promise.set_exception(exc);
- fun(promise.get_future());
- }
-};
+class Callback;
/* Start a thread that handles various signals. Also block those signals
diff --git a/src/nix-build/nix-build.cc b/src/nix-build/nix-build.cc
index 94412042f..3a8d67f21 100755
--- a/src/nix-build/nix-build.cc
+++ b/src/nix-build/nix-build.cc
@@ -98,8 +98,8 @@ static void _main(int argc, char * * argv)
// List of environment variables kept for --pure
std::set<string> keepVars{
- "HOME", "USER", "LOGNAME", "DISPLAY", "PATH", "TERM",
- "IN_NIX_SHELL", "TZ", "PAGER", "NIX_BUILD_SHELL", "SHLVL",
+ "HOME", "USER", "LOGNAME", "DISPLAY", "PATH", "TERM", "IN_NIX_SHELL",
+ "NIX_SHELL_PRESERVE_PROMPT", "TZ", "PAGER", "NIX_BUILD_SHELL", "SHLVL",
"http_proxy", "https_proxy", "ftp_proxy", "all_proxy", "no_proxy"
};
@@ -446,7 +446,7 @@ static void _main(int argc, char * * argv)
"PATH=%4%:\"$PATH\"; "
"SHELL=%5%; "
"set +e; "
- R"s([ -n "$PS1" ] && PS1='\n\[\033[1;32m\][nix-shell:\w]\$\[\033[0m\] '; )s"
+ R"s([ -n "$PS1" -a -z "$NIX_SHELL_PRESERVE_PROMPT" ] && PS1='\n\[\033[1;32m\][nix-shell:\w]\$\[\033[0m\] '; )s"
"if [ \"$(type -t runHook)\" = function ]; then runHook shellHook; fi; "
"unset NIX_ENFORCE_PURITY; "
"shopt -u nullglob; "
@@ -487,50 +487,56 @@ static void _main(int argc, char * * argv)
std::vector<StorePathWithOutputs> pathsToBuild;
- std::map<Path, Path> drvPrefixes;
- std::map<Path, Path> resultSymlinks;
- std::vector<Path> outPaths;
+ std::map<StorePath, std::pair<size_t, StringSet>> drvMap;
for (auto & drvInfo : drvs) {
- auto drvPath = drvInfo.queryDrvPath();
- auto outPath = drvInfo.queryOutPath();
+ auto drvPath = store->parseStorePath(drvInfo.queryDrvPath());
auto outputName = drvInfo.queryOutputName();
if (outputName == "")
- throw Error("derivation '%s' lacks an 'outputName' attribute", drvPath);
+ throw Error("derivation '%s' lacks an 'outputName' attribute", store->printStorePath(drvPath));
- pathsToBuild.push_back({store->parseStorePath(drvPath), {outputName}});
+ pathsToBuild.push_back({drvPath, {outputName}});
- std::string drvPrefix;
- auto i = drvPrefixes.find(drvPath);
- if (i != drvPrefixes.end())
- drvPrefix = i->second;
+ auto i = drvMap.find(drvPath);
+ if (i != drvMap.end())
+ i->second.second.insert(outputName);
else {
- drvPrefix = outLink;
- if (drvPrefixes.size())
- drvPrefix += fmt("-%d", drvPrefixes.size() + 1);
- drvPrefixes[drvPath] = drvPrefix;
+ drvMap[drvPath] = {drvMap.size(), {outputName}};
}
-
- std::string symlink = drvPrefix;
- if (outputName != "out") symlink += "-" + outputName;
-
- resultSymlinks[symlink] = outPath;
- outPaths.push_back(outPath);
}
buildPaths(pathsToBuild);
if (dryRun) return;
- for (auto & symlink : resultSymlinks)
- if (auto store2 = store.dynamic_pointer_cast<LocalFSStore>())
- store2->addPermRoot(store->parseStorePath(symlink.second), absPath(symlink.first), true);
+ std::vector<StorePath> outPaths;
+
+ for (auto & [drvPath, info] : drvMap) {
+ auto & [counter, wantedOutputs] = info;
+ std::string drvPrefix = outLink;
+ if (counter)
+ drvPrefix += fmt("-%d", counter + 1);
+
+ auto builtOutputs = store->queryDerivationOutputMap(drvPath);
+
+ for (auto & outputName : wantedOutputs) {
+ auto outputPath = builtOutputs.at(outputName);
+
+ if (auto store2 = store.dynamic_pointer_cast<LocalFSStore>()) {
+ std::string symlink = drvPrefix;
+ if (outputName != "out") symlink += "-" + outputName;
+ store2->addPermRoot(outputPath, absPath(symlink));
+ }
+
+ outPaths.push_back(outputPath);
+ }
+ }
logger->stop();
for (auto & path : outPaths)
- std::cout << path << '\n';
+ std::cout << store->printStorePath(path) << '\n';
}
}
diff --git a/src/nix-channel/nix-channel.cc b/src/nix-channel/nix-channel.cc
index 3ccf620c9..e48f7af9a 100755
--- a/src/nix-channel/nix-channel.cc
+++ b/src/nix-channel/nix-channel.cc
@@ -76,6 +76,13 @@ static void update(const StringSet & channelNames)
auto store = openStore();
+ auto [fd, unpackChannelPath] = createTempFile();
+ writeFull(fd.get(),
+ #include "unpack-channel.nix.gen.hh"
+ );
+ fd = -1;
+ AutoDelete del(unpackChannelPath, false);
+
// Download each channel.
Strings exprs;
for (const auto & channel : channels) {
@@ -104,7 +111,7 @@ static void update(const StringSet & channelNames)
bool unpacked = false;
if (std::regex_search(filename, std::regex("\\.tar\\.(gz|bz2|xz)$"))) {
- runProgram(settings.nixBinDir + "/nix-build", false, { "--no-out-link", "--expr", "import <nix/unpack-channel.nix> "
+ runProgram(settings.nixBinDir + "/nix-build", false, { "--no-out-link", "--expr", "import " + unpackChannelPath +
"{ name = \"" + cname + "\"; channelName = \"" + name + "\"; src = builtins.storePath \"" + filename + "\"; }" });
unpacked = true;
}
@@ -125,7 +132,7 @@ static void update(const StringSet & channelNames)
// Unpack the channel tarballs into the Nix store and install them
// into the channels profile.
std::cerr << "unpacking channels...\n";
- Strings envArgs{ "--profile", profile, "--file", "<nix/unpack-channel.nix>", "--install", "--from-expression" };
+ Strings envArgs{ "--profile", profile, "--file", unpackChannelPath, "--install", "--from-expression" };
for (auto & expr : exprs)
envArgs.push_back(std::move(expr));
envArgs.push_back("--quiet");
diff --git a/src/nix-channel/unpack-channel.nix b/src/nix-channel/unpack-channel.nix
new file mode 100644
index 000000000..10515bc8b
--- /dev/null
+++ b/src/nix-channel/unpack-channel.nix
@@ -0,0 +1,12 @@
+{ name, channelName, src }:
+
+derivation {
+ builder = "builtin:unpack-channel";
+
+ system = "builtin";
+
+ inherit name channelName src;
+
+ # No point in doing this remotely.
+ preferLocalBuild = true;
+}
diff --git a/src/nix-daemon/nix-daemon.cc b/src/nix-daemon/nix-daemon.cc
index ce963acdc..04ccb2a8b 100644
--- a/src/nix-daemon/nix-daemon.cc
+++ b/src/nix-daemon/nix-daemon.cc
@@ -1,5 +1,6 @@
#include "shared.hh"
#include "local-store.hh"
+#include "remote-store.hh"
#include "util.hh"
#include "serialise.hh"
#include "archive.hh"
@@ -297,45 +298,29 @@ static int _main(int argc, char * * argv)
};
if (stdio) {
- if (getStoreType() == tDaemon) {
+ if (auto store = openUncachedStore().dynamic_pointer_cast<RemoteStore>()) {
ensureNoTrustedFlag();
- // Forward on this connection to the real daemon
- auto socketPath = settings.nixDaemonSocketFile;
- auto s = socket(PF_UNIX, SOCK_STREAM, 0);
- if (s == -1)
- throw SysError("creating Unix domain socket");
-
- auto socketDir = dirOf(socketPath);
- if (chdir(socketDir.c_str()) == -1)
- throw SysError("changing to socket directory '%1%'", socketDir);
-
- auto socketName = std::string(baseNameOf(socketPath));
- auto addr = sockaddr_un{};
- addr.sun_family = AF_UNIX;
- if (socketName.size() + 1 >= sizeof(addr.sun_path))
- throw Error("socket name %1% is too long", socketName);
- strcpy(addr.sun_path, socketName.c_str());
-
- if (connect(s, (struct sockaddr *) &addr, sizeof(addr)) == -1)
- throw SysError("cannot connect to daemon at %1%", socketPath);
-
- auto nfds = (s > STDIN_FILENO ? s : STDIN_FILENO) + 1;
+ auto conn = store->openConnectionWrapper();
+ int from = conn->from.fd;
+ int to = conn->to.fd;
+
+ auto nfds = std::max(from, STDIN_FILENO) + 1;
while (true) {
fd_set fds;
FD_ZERO(&fds);
- FD_SET(s, &fds);
+ FD_SET(from, &fds);
FD_SET(STDIN_FILENO, &fds);
if (select(nfds, &fds, nullptr, nullptr, nullptr) == -1)
throw SysError("waiting for data from client or server");
- if (FD_ISSET(s, &fds)) {
- auto res = splice(s, nullptr, STDOUT_FILENO, nullptr, SSIZE_MAX, SPLICE_F_MOVE);
+ if (FD_ISSET(from, &fds)) {
+ auto res = splice(from, nullptr, STDOUT_FILENO, nullptr, SSIZE_MAX, SPLICE_F_MOVE);
if (res == -1)
throw SysError("splicing data from daemon socket to stdout");
else if (res == 0)
throw EndOfFile("unexpected EOF from daemon socket");
}
if (FD_ISSET(STDIN_FILENO, &fds)) {
- auto res = splice(STDIN_FILENO, nullptr, s, nullptr, SSIZE_MAX, SPLICE_F_MOVE);
+ auto res = splice(STDIN_FILENO, nullptr, to, nullptr, SSIZE_MAX, SPLICE_F_MOVE);
if (res == -1)
throw SysError("splicing data from stdin to daemon socket");
else if (res == 0)
diff --git a/src/nix-env/nix-env.cc b/src/nix-env/nix-env.cc
index ddd036070..3e7c453fb 100644
--- a/src/nix-env/nix-env.cc
+++ b/src/nix-env/nix-env.cc
@@ -230,7 +230,7 @@ static DrvInfos filterBySelector(EvalState & state, const DrvInfos & allElems,
{
DrvNames selectors = drvNamesFromArgs(args);
if (selectors.empty())
- selectors.push_back(DrvName("*"));
+ selectors.emplace_back("*");
DrvInfos elems;
set<unsigned int> done;
@@ -708,7 +708,9 @@ static void opSet(Globals & globals, Strings opFlags, Strings opArgs)
}
debug(format("switching to new user environment"));
- Path generation = createGeneration(ref<LocalFSStore>(store2), globals.profile, drv.queryOutPath());
+ Path generation = createGeneration(
+ ref<LocalFSStore>(store2), globals.profile,
+ store2->parseStorePath(drv.queryOutPath()));
switchLink(globals.profile, generation);
}
diff --git a/src/nix-env/user-env.cc b/src/nix-env/user-env.cc
index 8e7f09e12..8c6c8af05 100644
--- a/src/nix-env/user-env.cc
+++ b/src/nix-env/user-env.cc
@@ -151,7 +151,8 @@ bool createUserEnv(EvalState & state, DrvInfos & elems,
}
debug(format("switching to new user environment"));
- Path generation = createGeneration(ref<LocalFSStore>(store2), profile, topLevelOut);
+ Path generation = createGeneration(ref<LocalFSStore>(store2), profile,
+ store2->parseStorePath(topLevelOut));
switchLink(profile, generation);
}
diff --git a/src/nix-instantiate/nix-instantiate.cc b/src/nix-instantiate/nix-instantiate.cc
index bf353677a..539092cbe 100644
--- a/src/nix-instantiate/nix-instantiate.cc
+++ b/src/nix-instantiate/nix-instantiate.cc
@@ -20,7 +20,6 @@ using namespace nix;
static Path gcRoot;
static int rootNr = 0;
-static bool indirectRoot = false;
enum OutputKind { okPlain, okXML, okJSON };
@@ -71,11 +70,11 @@ void processExpr(EvalState & state, const Strings & attrPaths,
if (gcRoot == "")
printGCWarning();
else {
- Path rootName = indirectRoot ? absPath(gcRoot) : gcRoot;
+ Path rootName = absPath(gcRoot);
if (++rootNr > 1) rootName += "-" + std::to_string(rootNr);
auto store2 = state.store.dynamic_pointer_cast<LocalFSStore>();
if (store2)
- drvPath = store2->addPermRoot(store2->parseStorePath(drvPath), rootName, indirectRoot);
+ drvPath = store2->addPermRoot(store2->parseStorePath(drvPath), rootName);
}
std::cout << fmt("%s%s\n", drvPath, (outputName != "out" ? "!" + outputName : ""));
}
@@ -127,7 +126,7 @@ static int _main(int argc, char * * argv)
else if (*arg == "--add-root")
gcRoot = getArg(*arg, arg, end);
else if (*arg == "--indirect")
- indirectRoot = true;
+ ;
else if (*arg == "--xml")
outputKind = okXML;
else if (*arg == "--json")
diff --git a/src/nix-prefetch-url/nix-prefetch-url.cc b/src/nix-prefetch-url/nix-prefetch-url.cc
index 1001f27af..377ae03a8 100644
--- a/src/nix-prefetch-url/nix-prefetch-url.cc
+++ b/src/nix-prefetch-url/nix-prefetch-url.cc
@@ -57,6 +57,7 @@ static int _main(int argc, char * * argv)
bool fromExpr = false;
string attrPath;
bool unpack = false;
+ bool executable = false;
string name;
struct MyArgs : LegacyArgs, MixEvalArgs
@@ -81,6 +82,8 @@ static int _main(int argc, char * * argv)
}
else if (*arg == "--unpack")
unpack = true;
+ else if (*arg == "--executable")
+ executable = true;
else if (*arg == "--name")
name = getArg(*arg, arg, end);
else if (*arg != "" && arg->at(0) == '-')
@@ -175,7 +178,11 @@ static int _main(int argc, char * * argv)
/* Download the file. */
{
- AutoCloseFD fd = open(tmpFile.c_str(), O_WRONLY | O_CREAT | O_EXCL, 0600);
+ auto mode = 0600;
+ if (executable)
+ mode = 0700;
+
+ AutoCloseFD fd = open(tmpFile.c_str(), O_WRONLY | O_CREAT | O_EXCL, mode);
if (!fd) throw SysError("creating temporary file '%s'", tmpFile);
FdSink sink(fd.get());
@@ -201,7 +208,7 @@ static int _main(int argc, char * * argv)
tmpFile = unpacked;
}
- const auto method = unpack ? FileIngestionMethod::Recursive : FileIngestionMethod::Flat;
+ const auto method = unpack || executable ? FileIngestionMethod::Recursive : FileIngestionMethod::Flat;
auto info = store->addToStoreSlow(name, tmpFile, method, ht, expectedHash);
storePath = info.path;
diff --git a/src/nix-store/nix-store.cc b/src/nix-store/nix-store.cc
index a58edff57..b027e84b7 100644
--- a/src/nix-store/nix-store.cc
+++ b/src/nix-store/nix-store.cc
@@ -34,7 +34,6 @@ typedef void (* Operation) (Strings opFlags, Strings opArgs);
static Path gcRoot;
static int rootNr = 0;
-static bool indirectRoot = false;
static bool noOutput = false;
static std::shared_ptr<Store> store;
@@ -65,6 +64,7 @@ static PathSet realisePath(StorePathWithOutputs path, bool build = true)
if (path.path.isDerivation()) {
if (build) store->buildPaths({path});
+ auto outputPaths = store->queryDerivationOutputMap(path.path);
Derivation drv = store->derivationFromPath(path.path);
rootNr++;
@@ -77,7 +77,8 @@ static PathSet realisePath(StorePathWithOutputs path, bool build = true)
if (i == drv.outputs.end())
throw Error("derivation '%s' does not have an output named '%s'",
store2->printStorePath(path.path), j);
- auto outPath = store2->printStorePath(i->second.path(*store, drv.name));
+ auto outPath = outputPaths.at(i->first);
+ auto retPath = store->printStorePath(outPath);
if (store2) {
if (gcRoot == "")
printGCWarning();
@@ -85,10 +86,10 @@ static PathSet realisePath(StorePathWithOutputs path, bool build = true)
Path rootName = gcRoot;
if (rootNr > 1) rootName += "-" + std::to_string(rootNr);
if (i->first != "out") rootName += "-" + i->first;
- outPath = store2->addPermRoot(store->parseStorePath(outPath), rootName, indirectRoot);
+ retPath = store2->addPermRoot(outPath, rootName);
}
}
- outputs.insert(outPath);
+ outputs.insert(retPath);
}
return outputs;
}
@@ -104,7 +105,7 @@ static PathSet realisePath(StorePathWithOutputs path, bool build = true)
Path rootName = gcRoot;
rootNr++;
if (rootNr > 1) rootName += "-" + std::to_string(rootNr);
- return {store2->addPermRoot(path.path, rootName, indirectRoot)};
+ return {store2->addPermRoot(path.path, rootName)};
}
}
return {store->printStorePath(path.path)};
@@ -218,8 +219,13 @@ static StorePathSet maybeUseOutputs(const StorePath & storePath, bool useOutput,
if (useOutput && storePath.isDerivation()) {
auto drv = store->derivationFromPath(storePath);
StorePathSet outputs;
- for (auto & i : drv.outputsAndPaths(*store))
- outputs.insert(i.second.second);
+ if (forceRealise)
+ return store->queryDerivationOutputs(storePath);
+ for (auto & i : drv.outputsAndOptPaths(*store)) {
+ if (!i.second.second)
+ throw UsageError("Cannot use output path of floating content-addressed derivation until we know what it is (e.g. by building it)");
+ outputs.insert(*i.second.second);
+ }
return outputs;
}
else return {storePath};
@@ -309,11 +315,9 @@ static void opQuery(Strings opFlags, Strings opArgs)
case qOutputs: {
for (auto & i : opArgs) {
- auto i2 = store->followLinksToStorePath(i);
- if (forceRealise) realisePath({i2});
- Derivation drv = store->derivationFromPath(i2);
- for (auto & j : drv.outputsAndPaths(*store))
- cout << fmt("%1%\n", store->printStorePath(j.second.second));
+ auto outputs = maybeUseOutputs(store->followLinksToStorePath(i), true, forceRealise);
+ for (auto & outputPath : outputs)
+ cout << fmt("%1%\n", store->printStorePath(outputPath));
}
break;
}
@@ -1085,7 +1089,7 @@ static int _main(int argc, char * * argv)
else if (*arg == "--add-root")
gcRoot = absPath(getArg(*arg, arg, end));
else if (*arg == "--indirect")
- indirectRoot = true;
+ ;
else if (*arg == "--no-output")
noOutput = true;
else if (*arg != "" && arg->at(0) == '-') {
diff --git a/src/nix/add-to-store.cc b/src/nix/add-to-store.cc
index 713155840..023ffa4ed 100644
--- a/src/nix/add-to-store.cc
+++ b/src/nix/add-to-store.cc
@@ -36,6 +36,14 @@ struct CmdAddToStore : MixDryRun, StoreCommand
return "add a path to the Nix store";
}
+ std::string doc() override
+ {
+ return R"(
+ Copy the file or directory *path* to the Nix store, and
+ print the resulting store path on standard output.
+ )";
+ }
+
Examples examples() override
{
return {
diff --git a/src/nix/build.cc b/src/nix/build.cc
index 13d14a7fb..4605eb13e 100644
--- a/src/nix/build.cc
+++ b/src/nix/build.cc
@@ -71,14 +71,15 @@ struct CmdBuild : InstallablesCommand, MixDryRun, MixProfile
[&](BuildableOpaque bo) {
std::string symlink = outLink;
if (i) symlink += fmt("-%d", i);
- store2->addPermRoot(bo.path, absPath(symlink), true);
+ store2->addPermRoot(bo.path, absPath(symlink));
},
[&](BuildableFromDrv bfd) {
- for (auto & output : bfd.outputs) {
+ auto builtOutputs = store->queryDerivationOutputMap(bfd.drvPath);
+ for (auto & output : builtOutputs) {
std::string symlink = outLink;
if (i) symlink += fmt("-%d", i);
if (output.first != "out") symlink += fmt("-%s", output.first);
- store2->addPermRoot(output.second, absPath(symlink), true);
+ store2->addPermRoot(output.second, absPath(symlink));
}
},
}, buildables[i]);
diff --git a/src/nix/bundle.cc b/src/nix/bundle.cc
index eb3339f5d..fc41da9e4 100644
--- a/src/nix/bundle.cc
+++ b/src/nix/bundle.cc
@@ -98,14 +98,14 @@ struct CmdBundle : InstallableCommand
if (!evalState->isDerivation(*vRes))
throw Error("the bundler '%s' does not produce a derivation", bundler.what());
- auto attr1 = vRes->attrs->find(evalState->sDrvPath);
+ auto attr1 = vRes->attrs->get(evalState->sDrvPath);
if (!attr1)
throw Error("the bundler '%s' does not produce a derivation", bundler.what());
PathSet context2;
StorePath drvPath = store->parseStorePath(evalState->coerceToPath(*attr1->pos, *attr1->value, context2));
- auto attr2 = vRes->attrs->find(evalState->sOutPath);
+ auto attr2 = vRes->attrs->get(evalState->sOutPath);
if (!attr2)
throw Error("the bundler '%s' does not produce a derivation", bundler.what());
@@ -122,7 +122,7 @@ struct CmdBundle : InstallableCommand
if (!outLink)
outLink = baseNameOf(app.program);
- store.dynamic_pointer_cast<LocalFSStore>()->addPermRoot(outPath, absPath(*outLink), true);
+ store.dynamic_pointer_cast<LocalFSStore>()->addPermRoot(outPath, absPath(*outLink));
}
};
diff --git a/src/nix/command.cc b/src/nix/command.cc
index da32819da..37a4bc785 100644
--- a/src/nix/command.cc
+++ b/src/nix/command.cc
@@ -4,12 +4,25 @@
#include "nixexpr.hh"
#include "profiles.hh"
+#include <nlohmann/json.hpp>
+
extern char * * environ __attribute__((weak));
namespace nix {
Commands * RegisterCommand::commands = nullptr;
+void NixMultiCommand::printHelp(const string & programName, std::ostream & out)
+{
+ MultiCommand::printHelp(programName, out);
+}
+
+nlohmann::json NixMultiCommand::toJSON()
+{
+ // FIXME: use Command::toJSON() as well.
+ return MultiCommand::toJSON();
+}
+
StoreCommand::StoreCommand()
{
}
@@ -121,7 +134,7 @@ void MixProfile::updateProfile(const StorePath & storePath)
switchLink(profile2,
createGeneration(
ref<LocalFSStore>(store),
- profile2, store->printStorePath(storePath)));
+ profile2, storePath));
}
void MixProfile::updateProfile(const Buildables & buildables)
@@ -137,7 +150,10 @@ void MixProfile::updateProfile(const Buildables & buildables)
},
[&](BuildableFromDrv bfd) {
for (auto & output : bfd.outputs) {
- result.push_back(output.second);
+ /* Output path should be known because we just tried to
+ build it. */
+ assert(output.second);
+ result.push_back(*output.second);
}
},
}, buildable);
diff --git a/src/nix/command.hh b/src/nix/command.hh
index bc46a2028..d60c8aeb6 100644
--- a/src/nix/command.hh
+++ b/src/nix/command.hh
@@ -21,6 +21,13 @@ static constexpr Command::Category catSecondary = 100;
static constexpr Command::Category catUtility = 101;
static constexpr Command::Category catNixInstallation = 102;
+struct NixMultiCommand : virtual MultiCommand, virtual Command
+{
+ void printHelp(const string & programName, std::ostream & out) override;
+
+ nlohmann::json toJSON() override;
+};
+
/* A command that requires a Nix store. */
struct StoreCommand : virtual Command
{
diff --git a/src/nix/describe-stores.cc b/src/nix/describe-stores.cc
new file mode 100644
index 000000000..0cc2d9337
--- /dev/null
+++ b/src/nix/describe-stores.cc
@@ -0,0 +1,44 @@
+#include "command.hh"
+#include "common-args.hh"
+#include "shared.hh"
+#include "store-api.hh"
+
+#include <nlohmann/json.hpp>
+
+using namespace nix;
+
+struct CmdDescribeStores : Command, MixJSON
+{
+ std::string description() override
+ {
+ return "show registered store types and their available options";
+ }
+
+ Category category() override { return catUtility; }
+
+ void run() override
+ {
+ auto res = nlohmann::json::object();
+ for (auto & implem : *Implementations::registered) {
+ auto storeConfig = implem.getConfig();
+ auto storeName = storeConfig->name();
+ res[storeName] = storeConfig->toJSON();
+ }
+ if (json) {
+ std::cout << res;
+ } else {
+ for (auto & [storeName, storeConfig] : res.items()) {
+ std::cout << "## " << storeName << std::endl << std::endl;
+ for (auto & [optionName, optionDesc] : storeConfig.items()) {
+ std::cout << "### " << optionName << std::endl << std::endl;
+ std::cout << optionDesc["description"].get<std::string>() << std::endl;
+ std::cout << "default: " << optionDesc["defaultValue"] << std::endl <<std::endl;
+ if (!optionDesc["aliases"].empty())
+ std::cout << "aliases: " << optionDesc["aliases"] << std::endl << std::endl;
+ }
+ }
+ }
+ }
+};
+
+static auto r1 = registerCommand<CmdDescribeStores>("describe-stores");
diff --git a/src/nix/develop.cc b/src/nix/develop.cc
index 434088da7..f29fa71d2 100644
--- a/src/nix/develop.cc
+++ b/src/nix/develop.cc
@@ -15,7 +15,7 @@ struct Var
{
bool exported = true;
bool associative = false;
- std::string value; // quoted string or array
+ std::string quoted; // quoted string or array
};
struct BuildEnvironment
@@ -75,12 +75,12 @@ BuildEnvironment readEnvironment(const Path & path)
else if (std::regex_search(pos, file.cend(), match, varRegex, std::regex_constants::match_continuous)) {
pos = match[0].second;
- res.env.insert({match[1], Var { .exported = exported.count(match[1]) > 0, .value = match[2] }});
+ res.env.insert({match[1], Var { .exported = exported.count(match[1]) > 0, .quoted = match[2] }});
}
else if (std::regex_search(pos, file.cend(), match, assocArrayRegex, std::regex_constants::match_continuous)) {
pos = match[0].second;
- res.env.insert({match[1], Var { .associative = true, .value = match[2] }});
+ res.env.insert({match[1], Var { .associative = true, .quoted = match[2] }});
}
else if (std::regex_search(pos, file.cend(), match, functionRegex, std::regex_constants::match_continuous)) {
@@ -92,6 +92,8 @@ BuildEnvironment readEnvironment(const Path & path)
path, file.substr(pos - file.cbegin(), 60));
}
+ res.env.erase("__output");
+
return res;
}
@@ -125,27 +127,35 @@ StorePath getDerivationEnvironment(ref<Store> store, const StorePath & drvPath)
/* Rehash and write the derivation. FIXME: would be nice to use
'buildDerivation', but that's privileged. */
drv.name += "-env";
- for (auto & output : drv.outputs)
- drv.env.erase(output.first);
- drv.outputs = {{"out", DerivationOutput { .output = DerivationOutputInputAddressed { .path = StorePath::dummy }}}};
- drv.env["out"] = "";
- drv.env["_outputs_saved"] = drv.env["outputs"];
- drv.env["outputs"] = "out";
+ for (auto & output : drv.outputs) {
+ output.second = { .output = DerivationOutputInputAddressed { .path = StorePath::dummy } };
+ drv.env[output.first] = "";
+ }
drv.inputSrcs.insert(std::move(getEnvShPath));
Hash h = std::get<0>(hashDerivationModulo(*store, drv, true));
- auto shellOutPath = store->makeOutputPath("out", h, drv.name);
- drv.outputs.insert_or_assign("out", DerivationOutput { .output = DerivationOutputInputAddressed {
- .path = shellOutPath
- } });
- drv.env["out"] = store->printStorePath(shellOutPath);
- auto shellDrvPath2 = writeDerivation(store, drv);
- /* Build the derivation. */
- store->buildPaths({{shellDrvPath2}});
+ for (auto & output : drv.outputs) {
+ auto outPath = store->makeOutputPath(output.first, h, drv.name);
+ output.second = { .output = DerivationOutputInputAddressed { .path = outPath } };
+ drv.env[output.first] = store->printStorePath(outPath);
+ }
- assert(store->isValidPath(shellOutPath));
+ auto shellDrvPath = writeDerivation(*store, drv);
- return shellOutPath;
+ /* Build the derivation. */
+ store->buildPaths({{shellDrvPath}});
+
+ for (auto & [_0, outputAndOptPath] : drv.outputsAndOptPaths(*store)) {
+ auto & [_1, optPath] = outputAndOptPath;
+ assert(optPath);
+ auto & outPath = *optPath;
+ assert(store->isValidPath(outPath));
+ auto outPathS = store->toRealPath(outPath);
+ if (lstat(outPathS).st_size)
+ return outPath;
+ }
+
+ throw Error("get-env.sh failed to produce an environment");
}
struct Common : InstallableCommand, MixProfile
@@ -171,8 +181,12 @@ struct Common : InstallableCommand, MixProfile
"UID",
};
- void makeRcScript(const BuildEnvironment & buildEnvironment, std::ostream & out)
+ std::string makeRcScript(
+ const BuildEnvironment & buildEnvironment,
+ const Path & outputsDir = absPath(".") + "/outputs")
{
+ std::ostringstream out;
+
out << "unset shellHook\n";
out << "nix_saved_PATH=\"$PATH\"\n";
@@ -180,9 +194,9 @@ struct Common : InstallableCommand, MixProfile
for (auto & i : buildEnvironment.env) {
if (!ignoreVars.count(i.first) && !hasPrefix(i.first, "BASH_")) {
if (i.second.associative)
- out << fmt("declare -A %s=(%s)\n", i.first, i.second.value);
+ out << fmt("declare -A %s=(%s)\n", i.first, i.second.quoted);
else {
- out << fmt("%s=%s\n", i.first, i.second.value);
+ out << fmt("%s=%s\n", i.first, i.second.quoted);
if (i.second.exported)
out << fmt("export %s\n", i.first);
}
@@ -193,13 +207,26 @@ struct Common : InstallableCommand, MixProfile
out << buildEnvironment.bashFunctions << "\n";
- // FIXME: set outputs
-
out << "export NIX_BUILD_TOP=\"$(mktemp -d --tmpdir nix-shell.XXXXXX)\"\n";
for (auto & i : {"TMP", "TMPDIR", "TEMP", "TEMPDIR"})
out << fmt("export %s=\"$NIX_BUILD_TOP\"\n", i);
out << "eval \"$shellHook\"\n";
+
+ /* Substitute occurrences of output paths. */
+ auto outputs = buildEnvironment.env.find("outputs");
+ assert(outputs != buildEnvironment.env.end());
+
+ // FIXME: properly unquote 'outputs'.
+ StringMap rewrites;
+ for (auto & outputName : tokenizeString<std::vector<std::string>>(replaceStrings(outputs->second.quoted, "'", ""))) {
+ auto from = buildEnvironment.env.find(outputName);
+ assert(from != buildEnvironment.env.end());
+ // FIXME: unquote
+ rewrites.insert({from->second.quoted, outputsDir + "/" + outputName});
+ }
+
+ return rewriteStrings(out.str(), rewrites);
}
Strings getDefaultFlakeAttrPaths() override
@@ -240,19 +267,57 @@ struct Common : InstallableCommand, MixProfile
struct CmdDevelop : Common, MixEnvironment
{
std::vector<std::string> command;
+ std::optional<std::string> phase;
CmdDevelop()
{
addFlag({
.longName = "command",
.shortName = 'c',
- .description = "command and arguments to be executed insted of an interactive shell",
+ .description = "command and arguments to be executed instead of an interactive shell",
.labels = {"command", "args"},
.handler = {[&](std::vector<std::string> ss) {
if (ss.empty()) throw UsageError("--command requires at least one argument");
command = ss;
}}
});
+
+ addFlag({
+ .longName = "phase",
+ .description = "phase to run (e.g. `build` or `configure`)",
+ .labels = {"phase-name"},
+ .handler = {&phase},
+ });
+
+ addFlag({
+ .longName = "configure",
+ .description = "run the configure phase",
+ .handler = {&phase, {"configure"}},
+ });
+
+ addFlag({
+ .longName = "build",
+ .description = "run the build phase",
+ .handler = {&phase, {"build"}},
+ });
+
+ addFlag({
+ .longName = "check",
+ .description = "run the check phase",
+ .handler = {&phase, {"check"}},
+ });
+
+ addFlag({
+ .longName = "install",
+ .description = "run the install phase",
+ .handler = {&phase, {"install"}},
+ });
+
+ addFlag({
+ .longName = "installcheck",
+ .description = "run the installcheck phase",
+ .handler = {&phase, {"installCheck"}},
+ });
}
std::string description() override
@@ -288,19 +353,31 @@ struct CmdDevelop : Common, MixEnvironment
auto [rcFileFd, rcFilePath] = createTempFile("nix-shell");
- std::ostringstream ss;
- makeRcScript(buildEnvironment, ss);
+ auto script = makeRcScript(buildEnvironment);
+
+ if (verbosity >= lvlDebug)
+ script += "set -x\n";
- ss << fmt("rm -f '%s'\n", rcFilePath);
+ script += fmt("rm -f '%s'\n", rcFilePath);
+
+ if (phase) {
+ if (!command.empty())
+ throw UsageError("you cannot use both '--command' and '--phase'");
+ // FIXME: foundMakefile is set by buildPhase, need to get
+ // rid of that.
+ script += fmt("foundMakefile=1\n");
+ script += fmt("runHook %1%Phase\n", *phase);
+ script += fmt("exit 0\n", *phase);
+ }
- if (!command.empty()) {
+ else if (!command.empty()) {
std::vector<std::string> args;
for (auto s : command)
args.push_back(shellEscape(s));
- ss << fmt("exec %s\n", concatStringsSep(" ", args));
+ script += fmt("exec %s\n", concatStringsSep(" ", args));
}
- writeFull(rcFileFd.get(), ss.str());
+ writeFull(rcFileFd.get(), script);
stopProgressBar();
@@ -315,7 +392,7 @@ struct CmdDevelop : Common, MixEnvironment
auto bashInstallable = std::make_shared<InstallableFlake>(
state,
- std::move(installable->nixpkgsFlakeRef()),
+ installable->nixpkgsFlakeRef(),
Strings{"bashInteractive"},
Strings{"legacyPackages." + settings.thisSystem.get() + "."},
lockFlags);
@@ -362,7 +439,7 @@ struct CmdPrintDevEnv : Common
stopProgressBar();
- makeRcScript(buildEnvironment, std::cout);
+ std::cout << makeRcScript(buildEnvironment);
}
};
diff --git a/src/nix/diff-closures.cc b/src/nix/diff-closures.cc
index 4199dae0f..0dc99d05e 100644
--- a/src/nix/diff-closures.cc
+++ b/src/nix/diff-closures.cc
@@ -81,7 +81,7 @@ void printClosureDiff(
auto beforeSize = totalSize(beforeVersions);
auto afterSize = totalSize(afterVersions);
auto sizeDelta = (int64_t) afterSize - (int64_t) beforeSize;
- auto showDelta = abs(sizeDelta) >= 8 * 1024;
+ auto showDelta = std::abs(sizeDelta) >= 8 * 1024;
std::set<std::string> removed, unchanged;
for (auto & [version, _] : beforeVersions)
diff --git a/src/nix/doctor.cc b/src/nix/doctor.cc
index 82e92cdd0..683e91446 100644
--- a/src/nix/doctor.cc
+++ b/src/nix/doctor.cc
@@ -49,9 +49,7 @@ struct CmdDoctor : StoreCommand
{
logger->log("Running checks against store uri: " + store->getUri());
- auto type = getStoreType();
-
- if (type < tOther) {
+ if (store.dynamic_pointer_cast<LocalFSStore>()) {
success &= checkNixInPath();
success &= checkProfileRoots(store);
}
diff --git a/src/nix/flake.cc b/src/nix/flake.cc
index 653f8db1b..ae6f4c5f9 100644
--- a/src/nix/flake.cc
+++ b/src/nix/flake.cc
@@ -933,7 +933,7 @@ struct CmdFlakeShow : FlakeCommand
}
};
-struct CmdFlake : virtual MultiCommand, virtual Command
+struct CmdFlake : NixMultiCommand
{
CmdFlake()
: MultiCommand({
@@ -963,11 +963,6 @@ struct CmdFlake : virtual MultiCommand, virtual Command
command->second->prepare();
command->second->run();
}
-
- void printHelp(const string & programName, std::ostream & out) override
- {
- MultiCommand::printHelp(programName, out);
- }
};
static auto r1 = registerCommand<CmdFlake>("flake");
diff --git a/src/nix/get-env.sh b/src/nix/get-env.sh
index 2e0e83561..091c0f573 100644
--- a/src/nix/get-env.sh
+++ b/src/nix/get-env.sh
@@ -1,12 +1,6 @@
set -e
if [ -e .attrs.sh ]; then source .attrs.sh; fi
-outputs=$_outputs_saved
-for __output in $_outputs_saved; do
- declare "$__output"="$out"
-done
-unset _outputs_saved __output
-
export IN_NIX_SHELL=impure
export dontAddDisableDepTrack=1
@@ -14,5 +8,12 @@ if [[ -n $stdenv ]]; then
source $stdenv/setup
fi
-export > $out
-set >> $out
+for __output in $outputs; do
+ if [[ -z $__done ]]; then
+ export > ${!__output}
+ set >> ${!__output}
+ __done=1
+ else
+ echo -n >> ${!__output}
+ fi
+done
diff --git a/src/nix/installables.cc b/src/nix/installables.cc
index d34f87982..9bf6b7caa 100644
--- a/src/nix/installables.cc
+++ b/src/nix/installables.cc
@@ -76,7 +76,7 @@ MixFlakeOptions::MixFlakeOptions()
addFlag({
.longName = "override-input",
- .description = "override a specific flake input (e.g. 'dwarffs/nixpkgs')",
+ .description = "override a specific flake input (e.g. `dwarffs/nixpkgs`)",
.labels = {"input-path", "flake-url"},
.handler = {[&](std::string inputPath, std::string flakeRef) {
lockFlags.inputOverrides.insert_or_assign(
@@ -116,7 +116,7 @@ SourceExprCommand::SourceExprCommand()
addFlag({
.longName = "file",
.shortName = 'f',
- .description = "evaluate FILE rather than the default",
+ .description = "evaluate *file* rather than the default",
.labels = {"file"},
.handler = {&file},
.completer = completePath
@@ -124,7 +124,7 @@ SourceExprCommand::SourceExprCommand()
addFlag({
.longName ="expr",
- .description = "evaluate attributes from EXPR",
+ .description = "evaluate attributes from *expr*",
.labels = {"expr"},
.handler = {&expr}
});
@@ -302,10 +302,10 @@ struct InstallableStorePath : Installable
Buildables toBuildables() override
{
if (storePath.isDerivation()) {
- std::map<std::string, StorePath> outputs;
+ std::map<std::string, std::optional<StorePath>> outputs;
auto drv = store->readDerivation(storePath);
- for (auto & i : drv.outputsAndPaths(*store))
- outputs.emplace(i.first, i.second.second);
+ for (auto & [name, output] : drv.outputsAndOptPaths(*store))
+ outputs.emplace(name, output.second);
return {
BuildableFromDrv {
.drvPath = storePath,
@@ -331,7 +331,7 @@ Buildables InstallableValue::toBuildables()
{
Buildables res;
- std::map<StorePath, OutputPathMap> drvsToOutputs;
+ std::map<StorePath, std::map<std::string, std::optional<StorePath>>> drvsToOutputs;
// Group by derivation, helps with .all in particular
for (auto & drv : toDerivations()) {
@@ -674,8 +674,11 @@ StorePathSet toStorePaths(ref<Store> store,
outPaths.insert(bo.path);
},
[&](BuildableFromDrv bfd) {
- for (auto & output : bfd.outputs)
- outPaths.insert(output.second);
+ for (auto & output : bfd.outputs) {
+ if (!output.second)
+ throw Error("Cannot operate on output of unbuilt CA drv");
+ outPaths.insert(*output.second);
+ }
},
}, b);
} else {
diff --git a/src/nix/installables.hh b/src/nix/installables.hh
index 26e87ee3a..c7c2f8981 100644
--- a/src/nix/installables.hh
+++ b/src/nix/installables.hh
@@ -20,7 +20,7 @@ struct BuildableOpaque {
struct BuildableFromDrv {
StorePath drvPath;
- std::map<std::string, StorePath> outputs;
+ std::map<std::string, std::optional<StorePath>> outputs;
};
typedef std::variant<
@@ -69,7 +69,7 @@ struct Installable
virtual FlakeRef nixpkgsFlakeRef() const
{
- return std::move(FlakeRef::fromAttrs({{"type","indirect"}, {"id", "nixpkgs"}}));
+ return FlakeRef::fromAttrs({{"type","indirect"}, {"id", "nixpkgs"}});
}
};
@@ -82,7 +82,7 @@ struct InstallableValue : Installable
struct DerivationInfo
{
StorePath drvPath;
- StorePath outPath;
+ std::optional<StorePath> outPath;
std::string outputName;
};
diff --git a/src/nix/local.mk b/src/nix/local.mk
index b057b7cc6..ab4e9121b 100644
--- a/src/nix/local.mk
+++ b/src/nix/local.mk
@@ -19,7 +19,7 @@ nix_CXXFLAGS += -I src/libutil -I src/libstore -I src/libfetchers -I src/libexpr
nix_LIBS = libexpr libmain libfetchers libstore libutil
-nix_LDFLAGS = -pthread $(SODIUM_LIBS) $(EDITLINE_LIBS) $(BOOST_LDFLAGS) -lboost_context -lboost_thread -lboost_system
+nix_LDFLAGS = -pthread $(SODIUM_LIBS) $(EDITLINE_LIBS) $(BOOST_LDFLAGS) -lboost_context -lboost_thread -lboost_system -llowdown
$(foreach name, \
nix-build nix-channel nix-collect-garbage nix-copy-closure nix-daemon nix-env nix-hash nix-instantiate nix-prefetch-url nix-shell nix-store, \
@@ -29,3 +29,5 @@ $(eval $(call install-symlink, $(bindir)/nix, $(libexecdir)/nix/build-remote))
src/nix-env/user-env.cc: src/nix-env/buildenv.nix.gen.hh
src/nix/develop.cc: src/nix/get-env.sh.gen.hh
+
+src/nix-channel/nix-channel.cc: src/nix-channel/unpack-channel.nix.gen.hh
diff --git a/src/nix/main.cc b/src/nix/main.cc
index e62657e95..1e9e07bc0 100644
--- a/src/nix/main.cc
+++ b/src/nix/main.cc
@@ -17,6 +17,8 @@
#include <netdb.h>
#include <netinet/in.h>
+#include <nlohmann/json.hpp>
+
extern std::string chrootHelperName;
void chrootHelper(int argc, char * * argv);
@@ -140,6 +142,11 @@ struct NixArgs : virtual MultiCommand, virtual MixCommonArgs
printHelp(programName, std::cout);
throw Exit();
}
+
+ std::string description() override
+ {
+ return "a tool for reproducible and declarative configuration management";
+ }
};
void mainWrapped(int argc, char * * argv)
@@ -172,6 +179,30 @@ void mainWrapped(int argc, char * * argv)
NixArgs args;
+ if (argc == 2 && std::string(argv[1]) == "__dump-args") {
+ std::cout << args.toJSON().dump() << "\n";
+ return;
+ }
+
+ if (argc == 2 && std::string(argv[1]) == "__dump-builtins") {
+ evalSettings.pureEval = false;
+ EvalState state({}, openStore("dummy://"));
+ auto res = nlohmann::json::object();
+ auto builtins = state.baseEnv.values[0]->attrs;
+ for (auto & builtin : *builtins) {
+ auto b = nlohmann::json::object();
+ if (builtin.value->type != tPrimOp) continue;
+ auto primOp = builtin.value->primOp;
+ if (!primOp->doc) continue;
+ b["arity"] = primOp->arity;
+ b["args"] = primOp->args;
+ b["doc"] = trim(stripIndentation(primOp->doc));
+ res[(std::string) builtin.name] = std::move(b);
+ }
+ std::cout << res.dump() << "\n";
+ return;
+ }
+
Finally printCompletions([&]()
{
if (completions) {
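
As an aside (not part of the diff), the JSON written by the new `__dump-builtins` branch keys each builtin by name and carries `arity`, `args` and `doc` fields. A hedged sketch of consuming that output with nlohmann::json, using a made-up entry:

    #include <nlohmann/json.hpp>
    #include <iostream>

    int main()
    {
        // Hypothetical single entry, mirroring the fields set above.
        auto dump = nlohmann::json::parse(R"({
            "abort": { "arity": 1, "args": ["s"], "doc": "Abort evaluation with message *s*." }
        })");
        for (auto & [name, info] : dump.items())
            std::cout << name << " (" << info["arity"] << " args): "
                      << info["doc"].get<std::string>() << "\n";
    }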
diff --git a/src/nix/markdown.cc b/src/nix/markdown.cc
new file mode 100644
index 000000000..40788a42f
--- /dev/null
+++ b/src/nix/markdown.cc
@@ -0,0 +1,50 @@
+#include "markdown.hh"
+#include "util.hh"
+#include "finally.hh"
+
+#include <sys/queue.h>
+extern "C" {
+#include <lowdown.h>
+}
+
+namespace nix {
+
+std::string renderMarkdownToTerminal(std::string_view markdown)
+{
+ struct lowdown_opts opts {
+ .type = LOWDOWN_TERM,
+ .maxdepth = 20,
+ .cols = std::min(getWindowSize().second, (unsigned short) 80),
+ .hmargin = 0,
+ .vmargin = 0,
+ .feat = LOWDOWN_COMMONMARK | LOWDOWN_FENCED | LOWDOWN_DEFLIST | LOWDOWN_TABLES,
+ .oflags = 0,
+ };
+
+ auto doc = lowdown_doc_new(&opts);
+ if (!doc)
+ throw Error("cannot allocate Markdown document");
+ Finally freeDoc([&]() { lowdown_doc_free(doc); });
+
+ size_t maxn = 0;
+ auto node = lowdown_doc_parse(doc, &maxn, markdown.data(), markdown.size());
+ if (!node)
+ throw Error("cannot parse Markdown document");
+ Finally freeNode([&]() { lowdown_node_free(node); });
+
+ auto renderer = lowdown_term_new(&opts);
+ if (!renderer)
+ throw Error("cannot allocate Markdown renderer");
+ Finally freeRenderer([&]() { lowdown_term_free(renderer); });
+
+ auto buf = lowdown_buf_new(16384);
+ if (!buf)
+ throw Error("cannot allocate Markdown output buffer");
+ Finally freeBuffer([&]() { lowdown_buf_free(buf); });
+
+ lowdown_term_rndr(buf, nullptr, renderer, node);
+
+ return std::string(buf->data, buf->size);
+}
+
+}
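
A note on the cleanup pattern above (illustrative, not part of the commit): each lowdown object is released through a `Finally` guard, so it is freed on every exit path, including the `throw Error(...)` branches. A minimal stand-alone sketch of that RAII idiom:

    #include <cstdlib>
    #include <functional>
    #include <iostream>

    // Simplified stand-in for the Finally guard from finally.hh.
    class ScopeGuard {
        std::function<void()> fn;
    public:
        explicit ScopeGuard(std::function<void()> f) : fn(std::move(f)) {}
        ~ScopeGuard() { fn(); }   // runs on normal return and on exceptions
    };

    int main()
    {
        void * buf = std::malloc(16);
        ScopeGuard freeBuf([&]() { std::free(buf); std::cout << "buffer freed\n"; });
        // ... use buf; it is released however this scope is left ...
    }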
diff --git a/src/nix/markdown.hh b/src/nix/markdown.hh
new file mode 100644
index 000000000..78320fcf5
--- /dev/null
+++ b/src/nix/markdown.hh
@@ -0,0 +1,7 @@
+#include "types.hh"
+
+namespace nix {
+
+std::string renderMarkdownToTerminal(std::string_view markdown);
+
+}
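
Usage of the new helper appears later in this diff in the repl's `:doc` command; a minimal sketch (the Markdown content here is made up) looks like:

    #include "markdown.hh"
    #include <iostream>
    #include <string>

    int main()
    {
        std::string md = "**Synopsis:** `builtins.map` *f* *list*\n\nApply *f* to every element of *list*.";
        std::cout << nix::renderMarkdownToTerminal(md);
    }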
diff --git a/src/nix/profile.cc b/src/nix/profile.cc
index cffc9ee44..7ce4dfe4c 100644
--- a/src/nix/profile.cc
+++ b/src/nix/profile.cc
@@ -180,7 +180,9 @@ struct CmdProfileInstall : InstallablesCommand, MixDefaultProfile
auto [attrPath, resolvedRef, drv] = installable2->toDerivation();
ProfileElement element;
- element.storePaths = {drv.outPath}; // FIXME
+ if (!drv.outPath)
+ throw UnimplementedError("CA derivations are not yet supported by 'nix profile'");
+ element.storePaths = {*drv.outPath}; // FIXME
element.source = ProfileElementSource{
installable2->flakeRef,
resolvedRef,
@@ -191,7 +193,7 @@ struct CmdProfileInstall : InstallablesCommand, MixDefaultProfile
manifest.elements.emplace_back(std::move(element));
} else
- throw Error("'nix profile install' does not support argument '%s'", installable->what());
+ throw UnimplementedError("'nix profile install' does not support argument '%s'", installable->what());
}
store->buildPaths(pathsToBuild);
@@ -349,7 +351,9 @@ struct CmdProfileUpgrade : virtual SourceExprCommand, MixDefaultProfile, MixProf
printInfo("upgrading '%s' from flake '%s' to '%s'",
element.source->attrPath, element.source->resolvedRef, resolvedRef);
- element.storePaths = {drv.outPath}; // FIXME
+ if (!drv.outPath)
+ throw UnimplementedError("CA derivations are not yet supported by 'nix profile'");
+ element.storePaths = {*drv.outPath}; // FIXME
element.source = ProfileElementSource{
installable.flakeRef,
resolvedRef,
@@ -437,7 +441,7 @@ struct CmdProfileDiffClosures : virtual StoreCommand, MixDefaultProfile
}
};
-struct CmdProfile : virtual MultiCommand, virtual Command
+struct CmdProfile : NixMultiCommand
{
CmdProfile()
: MultiCommand({
@@ -461,11 +465,6 @@ struct CmdProfile : virtual MultiCommand, virtual Command
command->second->prepare();
command->second->run();
}
-
- void printHelp(const string & programName, std::ostream & out) override
- {
- MultiCommand::printHelp(programName, out);
- }
};
static auto r1 = registerCommand<CmdProfile>("profile");
diff --git a/src/nix/registry.cc b/src/nix/registry.cc
index ebee4545c..367268683 100644
--- a/src/nix/registry.cc
+++ b/src/nix/registry.cc
@@ -115,7 +115,7 @@ struct CmdRegistryPin : virtual Args, EvalCommand
}
};
-struct CmdRegistry : virtual MultiCommand, virtual Command
+struct CmdRegistry : virtual NixMultiCommand
{
CmdRegistry()
: MultiCommand({
@@ -141,11 +141,6 @@ struct CmdRegistry : virtual MultiCommand, virtual Command
command->second->prepare();
command->second->run();
}
-
- void printHelp(const string & programName, std::ostream & out) override
- {
- MultiCommand::printHelp(programName, out);
- }
};
static auto r1 = registerCommand<CmdRegistry>("registry");
diff --git a/src/nix/repl.cc b/src/nix/repl.cc
index a74655200..329999475 100644
--- a/src/nix/repl.cc
+++ b/src/nix/repl.cc
@@ -32,6 +32,7 @@ extern "C" {
#include "globals.hh"
#include "command.hh"
#include "finally.hh"
+#include "markdown.hh"
#if HAVE_BOEHMGC
#define GC_INCLUDE_NEW
@@ -64,7 +65,7 @@ struct NixRepl
void mainLoop(const std::vector<std::string> & files);
StringSet completePrefix(string prefix);
bool getLine(string & input, const std::string &prompt);
- Path getDerivationPath(Value & v);
+ StorePath getDerivationPath(Value & v);
bool processLine(string line);
void loadFile(const Path & path);
void initEnv();
@@ -375,13 +376,16 @@ bool isVarName(const string & s)
}
-Path NixRepl::getDerivationPath(Value & v) {
+StorePath NixRepl::getDerivationPath(Value & v) {
auto drvInfo = getDerivation(*state, v, false);
if (!drvInfo)
throw Error("expression does not evaluate to a derivation, so I can't build it");
- Path drvPath = drvInfo->queryDrvPath();
- if (drvPath == "" || !state->store->isValidPath(state->store->parseStorePath(drvPath)))
- throw Error("expression did not evaluate to a valid derivation");
+ Path drvPathRaw = drvInfo->queryDrvPath();
+ if (drvPathRaw == "")
+ throw Error("expression did not evaluate to a valid derivation (no drv path)");
+ StorePath drvPath = state->store->parseStorePath(drvPathRaw);
+ if (!state->store->isValidPath(drvPath))
+ throw Error("expression did not evaluate to a valid derivation (invalid drv path)");
return drvPath;
}
@@ -416,7 +420,8 @@ bool NixRepl::processLine(string line)
<< " :r Reload all files\n"
<< " :s <expr> Build dependencies of derivation, then start nix-shell\n"
<< " :t <expr> Describe result of evaluation\n"
- << " :u <expr> Build derivation, then start nix-shell\n";
+ << " :u <expr> Build derivation, then start nix-shell\n"
+ << " :doc <expr> Show documentation of a builtin function\n";
}
else if (command == ":a" || command == ":add") {
@@ -474,29 +479,30 @@ bool NixRepl::processLine(string line)
evalString("drv: (import <nixpkgs> {}).runCommand \"shell\" { buildInputs = [ drv ]; } \"\"", f);
state->callFunction(f, v, result, Pos());
- Path drvPath = getDerivationPath(result);
- runProgram(settings.nixBinDir + "/nix-shell", Strings{drvPath});
+ StorePath drvPath = getDerivationPath(result);
+ runProgram(settings.nixBinDir + "/nix-shell", Strings{state->store->printStorePath(drvPath)});
}
else if (command == ":b" || command == ":i" || command == ":s") {
Value v;
evalString(arg, v);
- Path drvPath = getDerivationPath(v);
+ StorePath drvPath = getDerivationPath(v);
+ Path drvPathRaw = state->store->printStorePath(drvPath);
if (command == ":b") {
/* We could do the build in this process using buildPaths(),
but doing it in a child makes it easier to recover from
problems / SIGINT. */
- if (runProgram(settings.nixBinDir + "/nix", Strings{"build", "--no-link", drvPath}) == 0) {
- auto drv = readDerivation(*state->store, drvPath, Derivation::nameFromPath(state->store->parseStorePath(drvPath)));
+ if (runProgram(settings.nixBinDir + "/nix", Strings{"build", "--no-link", drvPathRaw}) == 0) {
+ auto drv = state->store->readDerivation(drvPath);
std::cout << std::endl << "this derivation produced the following outputs:" << std::endl;
- for (auto & i : drv.outputsAndPaths(*state->store))
- std::cout << fmt(" %s -> %s\n", i.first, state->store->printStorePath(i.second.second));
+ for (auto & i : drv.outputsAndOptPaths(*state->store))
+ std::cout << fmt(" %s -> %s\n", i.first, state->store->printStorePath(*i.second.second));
}
} else if (command == ":i") {
- runProgram(settings.nixBinDir + "/nix-env", Strings{"-i", drvPath});
+ runProgram(settings.nixBinDir + "/nix-env", Strings{"-i", drvPathRaw});
} else {
- runProgram(settings.nixBinDir + "/nix-shell", Strings{drvPath});
+ runProgram(settings.nixBinDir + "/nix-shell", Strings{drvPathRaw});
}
}
@@ -509,6 +515,29 @@ bool NixRepl::processLine(string line)
else if (command == ":q" || command == ":quit")
return false;
+ else if (command == ":doc") {
+ Value v;
+ evalString(arg, v);
+ if (auto doc = state->getDoc(v)) {
+ std::string markdown;
+
+ if (!doc->args.empty() && doc->name) {
+ auto args = doc->args;
+ for (auto & arg : args)
+ arg = "*" + arg + "*";
+
+ markdown +=
+ "**Synopsis:** `builtins." + (std::string) (*doc->name) + "` "
+ + concatStringsSep(" ", args) + "\n\n";
+ }
+
+ markdown += trim(stripIndentation(doc->doc));
+
+ std::cout << renderMarkdownToTerminal(markdown);
+ } else
+ throw Error("value does not have documentation");
+ }
+
else if (command != "")
throw Error("unknown command '%1%'", command);
@@ -782,7 +811,7 @@ struct CmdRepl : StoreCommand, MixEvalArgs
return {
Example{
"Display all special commands within the REPL:",
- "nix repl\n nix-repl> :?"
+ "nix repl\nnix-repl> :?"
}
};
}
diff --git a/src/nix/show-config.cc b/src/nix/show-config.cc
index 4fd8886de..3ed1ad2aa 100644
--- a/src/nix/show-config.cc
+++ b/src/nix/show-config.cc
@@ -2,7 +2,8 @@
#include "common-args.hh"
#include "shared.hh"
#include "store-api.hh"
-#include "json.hh"
+
+#include <nlohmann/json.hpp>
using namespace nix;
@@ -19,8 +20,7 @@ struct CmdShowConfig : Command, MixJSON
{
if (json) {
// FIXME: use appropriate JSON types (bool, ints, etc).
- JSONObject jsonObj(std::cout);
- globalConfig.toJSON(jsonObj);
+ logger->stdout("%s", globalConfig.toJSON().dump());
} else {
std::map<std::string, Config::SettingInfo> settings;
globalConfig.getSettings(settings);
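
For reference (not part of the diff), `globalConfig.toJSON()` now returns an `nlohmann::json` object that is serialised with `dump()`. A tiny sketch of that pattern with made-up setting values:

    #include <nlohmann/json.hpp>
    #include <iostream>

    int main()
    {
        auto j = nlohmann::json::object();
        j["cores"] = "0";          // illustrative values,
        j["max-jobs"] = "16";      // not actual `nix show-config` output
        std::cout << j.dump() << "\n";
    }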
diff --git a/src/nix/show-derivation.cc b/src/nix/show-derivation.cc
index 8c4bfb03e..b9f33499b 100644
--- a/src/nix/show-derivation.cc
+++ b/src/nix/show-derivation.cc
@@ -67,21 +67,22 @@ struct CmdShowDerivation : InstallablesCommand
{
auto outputsObj(drvObj.object("outputs"));
- for (auto & output : drv.outputsAndPaths(*store)) {
- auto outputObj(outputsObj.object(output.first));
- outputObj.attr("path", store->printStorePath(output.second.second));
-
+ for (auto & [_outputName, output] : drv.outputs) {
+ auto & outputName = _outputName; // work around clang bug
+ auto outputObj { outputsObj.object(outputName) };
std::visit(overloaded {
[&](DerivationOutputInputAddressed doi) {
+ outputObj.attr("path", store->printStorePath(doi.path));
},
[&](DerivationOutputCAFixed dof) {
+ outputObj.attr("path", store->printStorePath(dof.path(*store, drv.name, outputName)));
outputObj.attr("hashAlgo", dof.hash.printMethodAlgo());
outputObj.attr("hash", dof.hash.hash.to_string(Base16, false));
},
[&](DerivationOutputCAFloating dof) {
outputObj.attr("hashAlgo", makeFileIngestionPrefix(dof.method) + printHashType(dof.hashType));
},
- }, output.second.first.output);
+ }, output.output);
}
}
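
The dispatch above uses the standard `std::visit`/`overloaded` idiom on the per-output variant; a self-contained sketch with simplified stand-in types (not the real `DerivationOutput` alternatives):

    #include <iostream>
    #include <string>
    #include <variant>

    template<class... Ts> struct overloaded : Ts... { using Ts::operator()...; };
    template<class... Ts> overloaded(Ts...) -> overloaded<Ts...>;

    struct InputAddressed { std::string path; };
    struct CAFixed        { std::string hash; };
    struct CAFloating     { std::string hashType; };

    using Output = std::variant<InputAddressed, CAFixed, CAFloating>;

    void describe(const Output & out)
    {
        std::visit(overloaded {
            [](const InputAddressed & o) { std::cout << "path: " << o.path << "\n"; },
            [](const CAFixed & o)        { std::cout << "fixed-output hash: " << o.hash << "\n"; },
            [](const CAFloating & o)     { std::cout << "floating, hash type: " << o.hashType << "\n"; },
        }, out);
    }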