Diffstat (limited to 'src')
-rw-r--r--  src/build-remote/build-remote.cc  2
-rw-r--r--  src/libcmd/command.cc  4
-rw-r--r--  src/libcmd/common-eval-args.cc  6
-rw-r--r--  src/libcmd/common-eval-args.hh  3
-rw-r--r--  src/libcmd/installable-flake.cc  2
-rw-r--r--  src/libcmd/installable-flake.hh  2
-rw-r--r--  src/libcmd/installables.cc  6
-rw-r--r--  src/libcmd/repl.cc  8
-rw-r--r--  src/libcmd/repl.hh  2
-rw-r--r--  src/libexpr/eval.cc  111
-rw-r--r--  src/libexpr/eval.hh  127
-rw-r--r--  src/libexpr/flake/flake.cc  3
-rw-r--r--  src/libexpr/lexer.l  2
-rw-r--r--  src/libexpr/parser.y  92
-rw-r--r--  src/libexpr/primops.cc  358
-rw-r--r--  src/libexpr/primops.hh  19
-rw-r--r--  src/libexpr/primops/context.cc  54
-rw-r--r--  src/libexpr/primops/fetchClosure.cc  251
-rw-r--r--  src/libexpr/primops/fetchMercurial.cc  6
-rw-r--r--  src/libexpr/primops/fetchTree.cc  33
-rw-r--r--  src/libexpr/primops/fromTOML.cc  36
-rw-r--r--  src/libexpr/search-path.cc  56
-rw-r--r--  src/libexpr/search-path.hh  108
-rw-r--r--  src/libexpr/tests/error_traces.cc  2
-rw-r--r--  src/libexpr/tests/search-path.cc  90
-rw-r--r--  src/libexpr/tests/value/print.cc  236
-rw-r--r--  src/libexpr/value.hh  15
-rw-r--r--  src/libfetchers/attrs.hh  1
-rw-r--r--  src/libfetchers/fetchers.cc  6
-rw-r--r--  src/libfetchers/fetchers.hh  10
-rw-r--r--  src/libfetchers/git.cc  18
-rw-r--r--  src/libfetchers/github.cc  10
-rw-r--r--  src/libfetchers/input-accessor.cc  30
-rw-r--r--  src/libfetchers/tarball.cc  68
-rw-r--r--  src/libstore/build/derivation-goal.cc  7
-rw-r--r--  src/libstore/build/entry-points.cc  12
-rw-r--r--  src/libstore/build/hook-instance.cc  6
-rw-r--r--  src/libstore/build/local-derivation-goal.cc  83
-rw-r--r--  src/libstore/build/personality.cc  3
-rw-r--r--  src/libstore/build/worker.cc  11
-rw-r--r--  src/libstore/build/worker.hh  23
-rw-r--r--  src/libstore/daemon.cc  165
-rw-r--r--  src/libstore/derivations.cc  19
-rw-r--r--  src/libstore/derivations.hh  12
-rw-r--r--  src/libstore/downstream-placeholder.cc  39
-rw-r--r--  src/libstore/downstream-placeholder.hh  75
-rw-r--r--  src/libstore/export-import.cc  8
-rw-r--r--  src/libstore/filetransfer.cc  22
-rw-r--r--  src/libstore/filetransfer.hh  4
-rw-r--r--  src/libstore/gc.cc  7
-rw-r--r--  src/libstore/globals.cc  30
-rw-r--r--  src/libstore/globals.hh  60
-rw-r--r--  src/libstore/legacy-ssh-store.cc  80
-rw-r--r--  src/libstore/local-fs-store.hh  14
-rw-r--r--  src/libstore/local-store.cc  45
-rw-r--r--  src/libstore/local-store.hh  23
-rw-r--r--  src/libstore/make-content-addressed.cc  11
-rw-r--r--  src/libstore/make-content-addressed.hh  13
-rw-r--r--  src/libstore/path-info.cc  9
-rw-r--r--  src/libstore/path-references.cc  73
-rw-r--r--  src/libstore/path-references.hh  25
-rw-r--r--  src/libstore/path.cc  4
-rw-r--r--  src/libstore/profiles.cc  123
-rw-r--r--  src/libstore/profiles.hh  143
-rw-r--r--  src/libstore/remote-store-connection.hh  97
-rw-r--r--  src/libstore/remote-store.cc  336
-rw-r--r--  src/libstore/remote-store.hh  27
-rw-r--r--  src/libstore/serve-protocol.hh  58
-rw-r--r--  src/libstore/sqlite.cc  11
-rw-r--r--  src/libstore/sqlite.hh  23
-rw-r--r--  src/libstore/ssh-store.cc  1
-rw-r--r--  src/libstore/ssh.cc  23
-rw-r--r--  src/libstore/ssh.hh  1
-rw-r--r--  src/libstore/store-api.hh  5
-rw-r--r--  src/libstore/tests/downstream-placeholder.cc  33
-rw-r--r--  src/libstore/uds-remote-store.cc  8
-rw-r--r--  src/libstore/uds-remote-store.hh  8
-rw-r--r--  src/libstore/worker-protocol-impl.hh  78
-rw-r--r--  src/libstore/worker-protocol.cc  193
-rw-r--r--  src/libstore/worker-protocol.hh  338
-rw-r--r--  src/libutil/abstract-setting-to-json.hh  1
-rw-r--r--  src/libutil/args.cc  15
-rw-r--r--  src/libutil/config-impl.hh  64
-rw-r--r--  src/libutil/config.cc  63
-rw-r--r--  src/libutil/config.hh  31
-rw-r--r--  src/libutil/experimental-features.cc  23
-rw-r--r--  src/libutil/experimental-features.hh  10
-rw-r--r--  src/libutil/filesystem.cc  17
-rw-r--r--  src/libutil/json-utils.cc  19
-rw-r--r--  src/libutil/json-utils.hh  78
-rw-r--r--  src/libutil/references.cc (renamed from src/libstore/references.cc)  80
-rw-r--r--  src/libutil/references.hh (renamed from src/libstore/references.hh)  23
-rw-r--r--  src/libutil/tests/references.cc  46
-rw-r--r--  src/libutil/tests/tests.cc  2
-rw-r--r--  src/libutil/util.cc  44
-rw-r--r--  src/libutil/util.hh  17
-rwxr-xr-x  src/nix-channel/nix-channel.cc  8
-rw-r--r--  src/nix-collect-garbage/nix-collect-garbage.cc  14
-rw-r--r--  src/nix-env/nix-env.cc  7
-rw-r--r--  src/nix-env/user-env.cc  2
-rw-r--r--  src/nix-store/nix-store.cc  44
-rw-r--r--  src/nix/app.cc  3
-rw-r--r--  src/nix/build.md  2
-rw-r--r--  src/nix/copy.md  2
-rw-r--r--  src/nix/daemon.cc  33
-rw-r--r--  src/nix/develop.md  2
-rw-r--r--  src/nix/eval.md  2
-rw-r--r--  src/nix/flake-check.md  2
-rw-r--r--  src/nix/flake.cc  133
-rw-r--r--  src/nix/flake.md  19
-rw-r--r--  src/nix/main.cc  41
-rw-r--r--  src/nix/nar-ls.md  4
-rw-r--r--  src/nix/nix.md  7
-rw-r--r--  src/nix/path-info.md  8
-rw-r--r--  src/nix/profile-list.md  46
-rw-r--r--  src/nix/profile.cc  98
-rw-r--r--  src/nix/search.md  4
-rw-r--r--  src/nix/shell.md  8
-rw-r--r--  src/nix/store-ls.md  4
-rw-r--r--  src/nix/upgrade-nix.cc  2
-rw-r--r--  src/nix/upgrade-nix.md  2
-rw-r--r--  src/nix/verify.md  2
122 files changed, 3749 insertions, 1449 deletions
diff --git a/src/build-remote/build-remote.cc b/src/build-remote/build-remote.cc
index 323e04fdb..2fb17d06f 100644
--- a/src/build-remote/build-remote.cc
+++ b/src/build-remote/build-remote.cc
@@ -299,7 +299,7 @@ connected:
!trusted || *trusted;
});
- // See the very large comment in `case wopBuildDerivation:` in
+ // See the very large comment in `case WorkerProto::Op::BuildDerivation:` in
// `src/libstore/daemon.cc` that explains the trust model here.
//
// This condition mirrors that: that code enforces the "rules" outlined there;
diff --git a/src/libcmd/command.cc b/src/libcmd/command.cc
index 6c4648b34..4fc197956 100644
--- a/src/libcmd/command.cc
+++ b/src/libcmd/command.cc
@@ -239,9 +239,7 @@ void MixProfile::updateProfile(const StorePath & storePath)
if (!store) throw Error("'--profile' is not supported for this Nix store");
auto profile2 = absPath(*profile);
switchLink(profile2,
- createGeneration(
- ref<LocalFSStore>(store),
- profile2, storePath));
+ createGeneration(*store, profile2, storePath));
}
void MixProfile::updateProfile(const BuiltPaths & buildables)
diff --git a/src/libcmd/common-eval-args.cc b/src/libcmd/common-eval-args.cc
index ff3abd534..3df2c71a5 100644
--- a/src/libcmd/common-eval-args.cc
+++ b/src/libcmd/common-eval-args.cc
@@ -105,7 +105,9 @@ MixEvalArgs::MixEvalArgs()
)",
.category = category,
.labels = {"path"},
- .handler = {[&](std::string s) { searchPath.push_back(s); }}
+ .handler = {[&](std::string s) {
+ searchPath.elements.emplace_back(SearchPath::Elem::parse(s));
+ }}
});
addFlag({
@@ -165,7 +167,7 @@ SourcePath lookupFileArg(EvalState & state, std::string_view s)
{
if (EvalSettings::isPseudoUrl(s)) {
auto storePath = fetchers::downloadTarball(
- state.store, EvalSettings::resolvePseudoUrl(s), "source", false).first.storePath;
+ state.store, EvalSettings::resolvePseudoUrl(s), "source", false).tree.storePath;
return state.rootPath(CanonPath(state.store->toRealPath(storePath)));
}
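The `-I` handler above now defers to `SearchPath::Elem::parse`. Assuming it preserves the split on the first `=` that the old `addToSearchPath` code in `parser.y` (removed further down in this diff) performed, the two forms of a search path entry correspond to the following `builtins.nixPath` elements. A sketch, not part of the patch; the directories are illustrative only:

```nix
[
  # -I /some/dir        (no prefix)
  { prefix = ""; path = "/some/dir"; }
  # -I foo=/some/dir    (prefixed entry)
  { prefix = "foo"; path = "/some/dir"; }
]
```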
diff --git a/src/libcmd/common-eval-args.hh b/src/libcmd/common-eval-args.hh
index b65cb5b20..6359b2579 100644
--- a/src/libcmd/common-eval-args.hh
+++ b/src/libcmd/common-eval-args.hh
@@ -3,6 +3,7 @@
#include "args.hh"
#include "common-args.hh"
+#include "search-path.hh"
namespace nix {
@@ -19,7 +20,7 @@ struct MixEvalArgs : virtual Args, virtual MixRepair
Bindings * getAutoArgs(EvalState & state);
- Strings searchPath;
+ SearchPath searchPath;
std::optional<std::string> evalStoreUrl;
diff --git a/src/libcmd/installable-flake.cc b/src/libcmd/installable-flake.cc
index eb944240b..4da9b131b 100644
--- a/src/libcmd/installable-flake.cc
+++ b/src/libcmd/installable-flake.cc
@@ -151,7 +151,7 @@ DerivedPathsWithInfo InstallableFlake::toDerivedPaths()
},
ExtraPathInfoFlake::Flake {
.originalRef = flakeRef,
- .resolvedRef = getLockedFlake()->flake.lockedRef,
+ .lockedRef = getLockedFlake()->flake.lockedRef,
}),
}};
}
diff --git a/src/libcmd/installable-flake.hh b/src/libcmd/installable-flake.hh
index 7ac4358d2..314918c14 100644
--- a/src/libcmd/installable-flake.hh
+++ b/src/libcmd/installable-flake.hh
@@ -19,7 +19,7 @@ struct ExtraPathInfoFlake : ExtraPathInfoValue
*/
struct Flake {
FlakeRef originalRef;
- FlakeRef resolvedRef;
+ FlakeRef lockedRef;
};
Flake flake;
diff --git a/src/libcmd/installables.cc b/src/libcmd/installables.cc
index a2b882355..10b077fb5 100644
--- a/src/libcmd/installables.cc
+++ b/src/libcmd/installables.cc
@@ -701,7 +701,7 @@ RawInstallablesCommand::RawInstallablesCommand()
{
addFlag({
.longName = "stdin",
- .description = "Read installables from the standard input.",
+ .description = "Read installables from the standard input. No default installable applied.",
.handler = {&readFromStdIn, true}
});
@@ -730,9 +730,9 @@ void RawInstallablesCommand::run(ref<Store> store)
while (std::cin >> word) {
rawInstallables.emplace_back(std::move(word));
}
+ } else {
+ applyDefaultInstallables(rawInstallables);
}
-
- applyDefaultInstallables(rawInstallables);
run(store, std::move(rawInstallables));
}
diff --git a/src/libcmd/repl.cc b/src/libcmd/repl.cc
index 4b160a100..f9e9c2bf8 100644
--- a/src/libcmd/repl.cc
+++ b/src/libcmd/repl.cc
@@ -68,7 +68,7 @@ struct NixRepl
const Path historyFile;
- NixRepl(const Strings & searchPath, nix::ref<Store> store,ref<EvalState> state,
+ NixRepl(const SearchPath & searchPath, nix::ref<Store> store,ref<EvalState> state,
std::function<AnnotatedValues()> getValues);
virtual ~NixRepl();
@@ -104,7 +104,7 @@ std::string removeWhitespace(std::string s)
}
-NixRepl::NixRepl(const Strings & searchPath, nix::ref<Store> store, ref<EvalState> state,
+NixRepl::NixRepl(const SearchPath & searchPath, nix::ref<Store> store, ref<EvalState> state,
std::function<NixRepl::AnnotatedValues()> getValues)
: AbstractNixRepl(state)
, debugTraceIndex(0)
@@ -1024,7 +1024,7 @@ std::ostream & NixRepl::printValue(std::ostream & str, Value & v, unsigned int m
std::unique_ptr<AbstractNixRepl> AbstractNixRepl::create(
- const Strings & searchPath, nix::ref<Store> store, ref<EvalState> state,
+ const SearchPath & searchPath, nix::ref<Store> store, ref<EvalState> state,
std::function<AnnotatedValues()> getValues)
{
return std::make_unique<NixRepl>(
@@ -1044,7 +1044,7 @@ void AbstractNixRepl::runSimple(
NixRepl::AnnotatedValues values;
return values;
};
- const Strings & searchPath = {};
+ SearchPath searchPath = {};
auto repl = std::make_unique<NixRepl>(
searchPath,
openStore(),
diff --git a/src/libcmd/repl.hh b/src/libcmd/repl.hh
index 731c8e6db..6d88883fe 100644
--- a/src/libcmd/repl.hh
+++ b/src/libcmd/repl.hh
@@ -25,7 +25,7 @@ struct AbstractNixRepl
typedef std::vector<std::pair<Value*,std::string>> AnnotatedValues;
static std::unique_ptr<AbstractNixRepl> create(
- const Strings & searchPath, nix::ref<Store> store, ref<EvalState> state,
+ const SearchPath & searchPath, nix::ref<Store> store, ref<EvalState> state,
std::function<AnnotatedValues()> getValues);
static void runSimple(
diff --git a/src/libexpr/eval.cc b/src/libexpr/eval.cc
index 740a5e677..be1bdb806 100644
--- a/src/libexpr/eval.cc
+++ b/src/libexpr/eval.cc
@@ -4,6 +4,7 @@
#include "util.hh"
#include "store-api.hh"
#include "derivations.hh"
+#include "downstream-placeholder.hh"
#include "globals.hh"
#include "eval-inline.hh"
#include "filetransfer.hh"
@@ -94,11 +95,16 @@ RootValue allocRootValue(Value * v)
#endif
}
-void Value::print(const SymbolTable & symbols, std::ostream & str,
- std::set<const void *> * seen) const
+void Value::print(const SymbolTable &symbols, std::ostream &str,
+ std::set<const void *> *seen, int depth) const
+
{
checkInterrupt();
+ if (depth <= 0) {
+ str << "«too deep»";
+ return;
+ }
switch (internalType) {
case tInt:
str << integer;
@@ -122,7 +128,7 @@ void Value::print(const SymbolTable & symbols, std::ostream & str,
str << "{ ";
for (auto & i : attrs->lexicographicOrder(symbols)) {
str << symbols[i->name] << " = ";
- i->value->print(symbols, str, seen);
+ i->value->print(symbols, str, seen, depth - 1);
str << "; ";
}
str << "}";
@@ -138,7 +144,7 @@ void Value::print(const SymbolTable & symbols, std::ostream & str,
str << "[ ";
for (auto v2 : listItems()) {
if (v2)
- v2->print(symbols, str, seen);
+ v2->print(symbols, str, seen, depth - 1);
else
str << "(nullptr)";
str << " ";
@@ -180,11 +186,10 @@ void Value::print(const SymbolTable & symbols, std::ostream & str,
}
}
-
-void Value::print(const SymbolTable & symbols, std::ostream & str, bool showRepeated) const
-{
+void Value::print(const SymbolTable &symbols, std::ostream &str,
+ bool showRepeated, int depth) const {
std::set<const void *> seen;
- print(symbols, str, showRepeated ? nullptr : &seen);
+ print(symbols, str, showRepeated ? nullptr : &seen, depth);
}
// Pretty print types for assertion errors
@@ -210,20 +215,21 @@ const Value * getPrimOp(const Value &v) {
return primOp;
}
-std::string_view showType(ValueType type)
+std::string_view showType(ValueType type, bool withArticle)
{
+ #define WA(a, w) withArticle ? a " " w : w
switch (type) {
- case nInt: return "an integer";
- case nBool: return "a Boolean";
- case nString: return "a string";
- case nPath: return "a path";
+ case nInt: return WA("an", "integer");
+ case nBool: return WA("a", "Boolean");
+ case nString: return WA("a", "string");
+ case nPath: return WA("a", "path");
case nNull: return "null";
- case nAttrs: return "a set";
- case nList: return "a list";
- case nFunction: return "a function";
- case nExternal: return "an external value";
- case nFloat: return "a float";
- case nThunk: return "a thunk";
+ case nAttrs: return WA("a", "set");
+ case nList: return WA("a", "list");
+ case nFunction: return WA("a", "function");
+ case nExternal: return WA("an", "external value");
+ case nFloat: return WA("a", "float");
+ case nThunk: return WA("a", "thunk");
}
abort();
}
@@ -492,7 +498,7 @@ ErrorBuilder & ErrorBuilder::withFrame(const Env & env, const Expr & expr)
EvalState::EvalState(
- const Strings & _searchPath,
+ const SearchPath & _searchPath,
ref<Store> store,
std::shared_ptr<Store> buildStore)
: sWith(symbols.create("<with>"))
@@ -557,30 +563,32 @@ EvalState::EvalState(
/* Initialise the Nix expression search path. */
if (!evalSettings.pureEval) {
- for (auto & i : _searchPath) addToSearchPath(i);
- for (auto & i : evalSettings.nixPath.get()) addToSearchPath(i);
+ for (auto & i : _searchPath.elements)
+ addToSearchPath(SearchPath::Elem {i});
+ for (auto & i : evalSettings.nixPath.get())
+ addToSearchPath(SearchPath::Elem::parse(i));
}
if (evalSettings.restrictEval || evalSettings.pureEval) {
allowedPaths = PathSet();
- for (auto & i : searchPath) {
- auto r = resolveSearchPathElem(i);
- if (!r.first) continue;
+ for (auto & i : searchPath.elements) {
+ auto r = resolveSearchPathPath(i.path);
+ if (!r) continue;
- auto path = r.second;
+ auto path = *std::move(r);
- if (store->isInStore(r.second)) {
+ if (store->isInStore(path)) {
try {
StorePathSet closure;
- store->computeFSClosure(store->toStorePath(r.second).first, closure);
+ store->computeFSClosure(store->toStorePath(path).first, closure);
for (auto & path : closure)
allowPath(path);
} catch (InvalidPath &) {
- allowPath(r.second);
+ allowPath(path);
}
} else
- allowPath(r.second);
+ allowPath(path);
}
}
@@ -701,28 +709,34 @@ Path EvalState::toRealPath(const Path & path, const NixStringContext & context)
}
-Value * EvalState::addConstant(const std::string & name, Value & v)
+Value * EvalState::addConstant(const std::string & name, Value & v, Constant info)
{
Value * v2 = allocValue();
*v2 = v;
- addConstant(name, v2);
+ addConstant(name, v2, info);
return v2;
}
-void EvalState::addConstant(const std::string & name, Value * v)
+void EvalState::addConstant(const std::string & name, Value * v, Constant info)
{
- staticBaseEnv->vars.emplace_back(symbols.create(name), baseEnvDispl);
- baseEnv.values[baseEnvDispl++] = v;
auto name2 = name.substr(0, 2) == "__" ? name.substr(2) : name;
- baseEnv.values[0]->attrs->push_back(Attr(symbols.create(name2), v));
-}
+ constantInfos.push_back({name2, info});
-Value * EvalState::addPrimOp(const std::string & name,
- size_t arity, PrimOpFun primOp)
-{
- return addPrimOp(PrimOp { .fun = primOp, .arity = arity, .name = name });
+ if (!(evalSettings.pureEval && info.impureOnly)) {
+ /* Check the type, if possible.
+
+           We might know in advance what type a thunk will evaluate to,
+           so allow that type to be declared even though it cannot be
+           checked until the value is forced. */
+ if (auto gotType = v->type(true); gotType != nThunk)
+ assert(info.type == gotType);
+
+        /* Install the value in the base environment. */
+ staticBaseEnv->vars.emplace_back(symbols.create(name), baseEnvDispl);
+ baseEnv.values[baseEnvDispl++] = v;
+ baseEnv.values[0]->attrs->push_back(Attr(symbols.create(name2), v));
+ }
}
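Because an impure-only constant is simply not installed when pure evaluation is enabled (the whole install block above is skipped), its absence is observable from the language itself. A minimal sketch of that observable behaviour, assuming nothing beyond what the hunk above does:

```nix
# true in normal evaluation; false under pure evaluation, because the
# constant is never added to `builtins` there
builtins ? currentTime
```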
@@ -736,7 +750,10 @@ Value * EvalState::addPrimOp(PrimOp && primOp)
vPrimOp->mkPrimOp(new PrimOp(primOp));
Value v;
v.mkApp(vPrimOp, vPrimOp);
- return addConstant(primOp.name, v);
+ return addConstant(primOp.name, v, {
+ .type = nThunk, // FIXME
+ .doc = primOp.doc,
+ });
}
auto envName = symbols.create(primOp.name);
@@ -762,13 +779,13 @@ std::optional<EvalState::Doc> EvalState::getDoc(Value & v)
{
if (v.isPrimOp()) {
auto v2 = &v;
- if (v2->primOp->doc)
+ if (auto * doc = v2->primOp->doc)
return Doc {
.pos = {},
.name = v2->primOp->name,
.arity = v2->primOp->arity,
.args = v2->primOp->args,
- .doc = v2->primOp->doc,
+ .doc = doc,
};
}
return {};
@@ -1058,7 +1075,7 @@ void EvalState::mkOutputString(
? store->printStorePath(*std::move(optOutputPath))
/* Downstream we would substitute this for an actual path once
we build the floating CA derivation */
- : downstreamPlaceholder(*store, drvPath, outputName),
+ : DownstreamPlaceholder::unknownCaOutput(drvPath, outputName).render(),
NixStringContext {
NixStringContextElem::Built {
.drvPath = drvPath,
@@ -2380,7 +2397,7 @@ DerivedPath EvalState::coerceToDerivedPath(const PosIdx pos, Value & v, std::str
// This is testing for the case of CA derivations
auto sExpected = optOutputPath
? store->printStorePath(*optOutputPath)
- : downstreamPlaceholder(*store, b.drvPath, output);
+ : DownstreamPlaceholder::unknownCaOutput(b.drvPath, output).render();
if (s != sExpected)
error(
"string '%s' has context with the output '%s' from derivation '%s', but the string is not the right placeholder for this derivation output. It should be '%s'",
@@ -2619,7 +2636,7 @@ Strings EvalSettings::getDefaultNixPath()
{
Strings res;
auto add = [&](const Path & p, const std::string & s = std::string()) {
- if (pathExists(p)) {
+ if (pathAccessible(p)) {
if (s.empty()) {
res.push_back(p);
} else {
diff --git a/src/libexpr/eval.hh b/src/libexpr/eval.hh
index a90ff34c0..277e77ad5 100644
--- a/src/libexpr/eval.hh
+++ b/src/libexpr/eval.hh
@@ -9,6 +9,7 @@
#include "config.hh"
#include "experimental-features.hh"
#include "input-accessor.hh"
+#include "search-path.hh"
#include <map>
#include <optional>
@@ -25,15 +26,72 @@ struct DerivedPath;
enum RepairFlag : bool;
+/**
+ * Function that implements a primop.
+ */
typedef void (* PrimOpFun) (EvalState & state, const PosIdx pos, Value * * args, Value & v);
+/**
+ * Info about a primitive operation, and its implementation
+ */
struct PrimOp
{
- PrimOpFun fun;
- size_t arity;
+ /**
+ * Name of the primop. `__` prefix is treated specially.
+ */
std::string name;
+
+ /**
+ * Names of the parameters of a primop, for primops that take a
+ * fixed number of arguments to be substituted for these parameters.
+ */
std::vector<std::string> args;
+
+ /**
+     * Arity of the primop.
+ *
+ * If `args` is not empty, this field will be computed from that
+ * field instead, so it doesn't need to be manually set.
+ */
+ size_t arity = 0;
+
+ /**
+ * Optional free-form documentation about the primop.
+ */
const char * doc = nullptr;
+
+ /**
+ * Implementation of the primop.
+ */
+ PrimOpFun fun;
+
+ /**
+     * Optional experimental feature for this to be gated on.
+ */
+ std::optional<ExperimentalFeature> experimentalFeature;
+};
+
+/**
+ * Info about a constant
+ */
+struct Constant
+{
+ /**
+ * Optional type of the constant (known since it is a fixed value).
+ *
+ * @todo we should use an enum for this.
+ */
+ ValueType type = nThunk;
+
+ /**
+ * Optional free-form documentation about the constant.
+ */
+ const char * doc = nullptr;
+
+ /**
+ * Whether the constant is impure, and not available in pure mode.
+ */
+ bool impureOnly = false;
};
#if HAVE_BOEHMGC
@@ -65,11 +123,6 @@ std::string printValue(const EvalState & state, const Value & v);
std::ostream & operator << (std::ostream & os, const ValueType t);
-// FIXME: maybe change this to an std::variant<SourcePath, URL>.
-typedef std::pair<std::string, std::string> SearchPathElem;
-typedef std::list<SearchPathElem> SearchPath;
-
-
/**
* Initialise the Boehm GC, if applicable.
*/
@@ -256,7 +309,7 @@ private:
SearchPath searchPath;
- std::map<std::string, std::pair<bool, std::string>> searchPathResolved;
+ std::map<std::string, std::optional<std::string>> searchPathResolved;
/**
* Cache used by checkSourcePath().
@@ -283,12 +336,12 @@ private:
public:
EvalState(
- const Strings & _searchPath,
+ const SearchPath & _searchPath,
ref<Store> store,
std::shared_ptr<Store> buildStore = nullptr);
~EvalState();
- void addToSearchPath(const std::string & s);
+ void addToSearchPath(SearchPath::Elem && elem);
SearchPath getSearchPath() { return searchPath; }
@@ -370,12 +423,16 @@ public:
* Look up a file in the search path.
*/
SourcePath findFile(const std::string_view path);
- SourcePath findFile(SearchPath & searchPath, const std::string_view path, const PosIdx pos = noPos);
+ SourcePath findFile(const SearchPath & searchPath, const std::string_view path, const PosIdx pos = noPos);
/**
+     * Try to resolve a search path value (not the optional key part)
+ *
* If the specified search path element is a URI, download it.
+ *
+ * If it is not found, return `std::nullopt`
*/
- std::pair<bool, std::string> resolveSearchPathElem(const SearchPathElem & elem);
+ std::optional<std::string> resolveSearchPathPath(const SearchPath::Path & path);
/**
* Evaluate an expression to normal form
@@ -483,7 +540,7 @@ public:
* Coerce to `DerivedPath`.
*
* Must be a string which is either a literal store path or a
- * "placeholder (see `downstreamPlaceholder()`).
+ * "placeholder (see `DownstreamPlaceholder`).
*
* Even more importantly, the string context must be exactly one
* element, which is either a `NixStringContextElem::Opaque` or
@@ -509,18 +566,23 @@ public:
*/
std::shared_ptr<StaticEnv> staticBaseEnv; // !!! should be private
+ /**
+ * Name and documentation about every constant.
+ *
+ * Constants from primops are hard to crawl, and their docs will go
+ * here too.
+ */
+ std::vector<std::pair<std::string, Constant>> constantInfos;
+
private:
unsigned int baseEnvDispl = 0;
void createBaseEnv();
- Value * addConstant(const std::string & name, Value & v);
+ Value * addConstant(const std::string & name, Value & v, Constant info);
- void addConstant(const std::string & name, Value * v);
-
- Value * addPrimOp(const std::string & name,
- size_t arity, PrimOpFun primOp);
+ void addConstant(const std::string & name, Value * v, Constant info);
Value * addPrimOp(PrimOp && primOp);
@@ -534,6 +596,10 @@ public:
std::optional<std::string> name;
size_t arity;
std::vector<std::string> args;
+ /**
+ * Unlike the other `doc` fields in this file, this one should never be
+ * `null`.
+ */
const char * doc;
};
@@ -622,7 +688,7 @@ public:
* @param optOutputPath Optional output path for that string. Must
* be passed if and only if output store object is input-addressed.
* Will be printed to form string if passed, otherwise a placeholder
- * will be used (see `downstreamPlaceholder()`).
+ * will be used (see `DownstreamPlaceholder`).
*/
void mkOutputString(
Value & value,
@@ -700,8 +766,11 @@ struct DebugTraceStacker {
/**
* @return A string representing the type of the value `v`.
+ *
+ * @param withArticle Whether to begin with an English article, e.g. "an
+ * integer" vs "integer".
*/
-std::string_view showType(ValueType type);
+std::string_view showType(ValueType type, bool withArticle = true);
std::string showType(const Value & v);
/**
@@ -733,7 +802,12 @@ struct EvalSettings : Config
Setting<Strings> nixPath{
this, getDefaultNixPath(), "nix-path",
- "List of directories to be searched for `<...>` file references."};
+ R"(
+ List of directories to be searched for `<...>` file references
+
+ In particular, outside of [pure evaluation mode](#conf-pure-evaluation), this determines the value of
+ [`builtins.nixPath`](@docroot@/language/builtin-constants.md#builtin-constants-nixPath).
+ )"};
Setting<bool> restrictEval{
this, false, "restrict-eval",
@@ -741,11 +815,18 @@ struct EvalSettings : Config
If set to `true`, the Nix evaluator will not allow access to any
files outside of the Nix search path (as set via the `NIX_PATH`
environment variable or the `-I` option), or to URIs outside of
- `allowed-uri`. The default is `false`.
+ [`allowed-uris`](../command-ref/conf-file.md#conf-allowed-uris).
+ The default is `false`.
)"};
Setting<bool> pureEval{this, false, "pure-eval",
- "Whether to restrict file system and network access to files specified by cryptographic hash."};
+ R"(
+ Pure evaluation mode ensures that the result of Nix expressions is fully determined by explicitly declared inputs, and not influenced by external state:
+
+ - Restrict file system and network access to files specified by cryptographic hash
+    - Disable [`builtins.currentSystem`](@docroot@/language/builtin-constants.md#builtins-currentSystem) and [`builtins.currentTime`](@docroot@/language/builtin-constants.md#builtins-currentTime)
+ )"
+ };
Setting<bool> enableImportFromDerivation{
this, true, "allow-import-from-derivation",
diff --git a/src/libexpr/flake/flake.cc b/src/libexpr/flake/flake.cc
index 60bb6a71e..5aa44d6a1 100644
--- a/src/libexpr/flake/flake.cc
+++ b/src/libexpr/flake/flake.cc
@@ -788,9 +788,6 @@ static RegisterPrimOp r2({
```nix
(builtins.getFlake "github:edolstra/dwarffs").rev
```
-
- This function is only available if you enable the experimental feature
- `flakes`.
)",
.fun = prim_getFlake,
.experimentalFeature = Xp::Flakes,
diff --git a/src/libexpr/lexer.l b/src/libexpr/lexer.l
index 462b3b602..a3a8608d9 100644
--- a/src/libexpr/lexer.l
+++ b/src/libexpr/lexer.l
@@ -36,7 +36,7 @@ static inline PosIdx makeCurPos(const YYLTYPE & loc, ParseData * data)
#define CUR_POS makeCurPos(*yylloc, data)
// backup to recover from yyless(0)
-YYLTYPE prev_yylloc;
+thread_local YYLTYPE prev_yylloc;
static void initLoc(YYLTYPE * loc)
{
diff --git a/src/libexpr/parser.y b/src/libexpr/parser.y
index 4d981712a..0a1ad9967 100644
--- a/src/libexpr/parser.y
+++ b/src/libexpr/parser.y
@@ -275,7 +275,12 @@ static Expr * stripIndentation(const PosIdx pos, SymbolTable & symbols,
}
/* If this is a single string, then don't do a concatenation. */
- return es2->size() == 1 && dynamic_cast<ExprString *>((*es2)[0].second) ? (*es2)[0].second : new ExprConcatStrings(pos, true, es2);
+ if (es2->size() == 1 && dynamic_cast<ExprString *>((*es2)[0].second)) {
+ auto *const result = (*es2)[0].second;
+ delete es2;
+ return result;
+ }
+ return new ExprConcatStrings(pos, true, es2);
}
@@ -330,7 +335,7 @@ void yyerror(YYLTYPE * loc, yyscan_t scanner, ParseData * data, const char * err
%type <ind_string_parts> ind_string_parts
%type <e> path_start string_parts string_attr
%type <id> attr
-%token <id> ID ATTRPATH
+%token <id> ID
%token <str> STR IND_STR
%token <n> INT
%token <nf> FLOAT
@@ -658,7 +663,7 @@ Expr * EvalState::parse(
ParseData data {
.state = *this,
.symbols = symbols,
- .basePath = std::move(basePath),
+ .basePath = basePath,
.origin = {origin},
};
@@ -729,19 +734,9 @@ Expr * EvalState::parseStdin()
}
-void EvalState::addToSearchPath(const std::string & s)
+void EvalState::addToSearchPath(SearchPath::Elem && elem)
{
- size_t pos = s.find('=');
- std::string prefix;
- Path path;
- if (pos == std::string::npos) {
- path = s;
- } else {
- prefix = std::string(s, 0, pos);
- path = std::string(s, pos + 1);
- }
-
- searchPath.emplace_back(prefix, path);
+ searchPath.elements.emplace_back(std::move(elem));
}
@@ -751,22 +746,19 @@ SourcePath EvalState::findFile(const std::string_view path)
}
-SourcePath EvalState::findFile(SearchPath & searchPath, const std::string_view path, const PosIdx pos)
+SourcePath EvalState::findFile(const SearchPath & searchPath, const std::string_view path, const PosIdx pos)
{
- for (auto & i : searchPath) {
- std::string suffix;
- if (i.first.empty())
- suffix = concatStrings("/", path);
- else {
- auto s = i.first.size();
- if (path.compare(0, s, i.first) != 0 ||
- (path.size() > s && path[s] != '/'))
- continue;
- suffix = path.size() == s ? "" : concatStrings("/", path.substr(s));
- }
- auto r = resolveSearchPathElem(i);
- if (!r.first) continue;
- Path res = r.second + suffix;
+ for (auto & i : searchPath.elements) {
+ auto suffixOpt = i.prefix.suffixIfPotentialMatch(path);
+
+ if (!suffixOpt) continue;
+ auto suffix = *suffixOpt;
+
+ auto rOpt = resolveSearchPathPath(i.path);
+ if (!rOpt) continue;
+ auto r = *rOpt;
+
+ Path res = suffix == "" ? r : concatStrings(r, "/", suffix);
if (pathExists(res)) return CanonPath(canonPath(res));
}
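The prefix matching previously done inline here is now delegated to `SearchPath::Prefix::suffixIfPotentialMatch`. Assuming it keeps the semantics of the removed code (the lookup path must start with the prefix, and the remainder becomes the suffix searched for under the entry's path), a hypothetical entry behaves as sketched below; this is illustrative, not part of the patch:

```nix
# With the search path entry { prefix = "nixos-config"; path = "/etc/nixos"; },
# the lookup "nixos-config/configuration.nix" matches the prefix and the
# suffix "configuration.nix" is resolved inside /etc/nixos:
builtins.findFile
  [ { prefix = "nixos-config"; path = "/etc/nixos"; } ]
  "nixos-config/configuration.nix"
# => /etc/nixos/configuration.nix   (a path value, if that file exists)
```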
@@ -783,49 +775,53 @@ SourcePath EvalState::findFile(SearchPath & searchPath, const std::string_view p
}
-std::pair<bool, std::string> EvalState::resolveSearchPathElem(const SearchPathElem & elem)
+std::optional<std::string> EvalState::resolveSearchPathPath(const SearchPath::Path & value0)
{
- auto i = searchPathResolved.find(elem.second);
+ auto & value = value0.s;
+ auto i = searchPathResolved.find(value);
if (i != searchPathResolved.end()) return i->second;
- std::pair<bool, std::string> res;
+ std::optional<std::string> res;
- if (EvalSettings::isPseudoUrl(elem.second)) {
+ if (EvalSettings::isPseudoUrl(value)) {
try {
auto storePath = fetchers::downloadTarball(
- store, EvalSettings::resolvePseudoUrl(elem.second), "source", false).first.storePath;
- res = { true, store->toRealPath(storePath) };
+ store, EvalSettings::resolvePseudoUrl(value), "source", false).tree.storePath;
+ res = { store->toRealPath(storePath) };
} catch (FileTransferError & e) {
logWarning({
- .msg = hintfmt("Nix search path entry '%1%' cannot be downloaded, ignoring", elem.second)
+ .msg = hintfmt("Nix search path entry '%1%' cannot be downloaded, ignoring", value)
});
- res = { false, "" };
+ res = std::nullopt;
}
}
- else if (hasPrefix(elem.second, "flake:")) {
+ else if (hasPrefix(value, "flake:")) {
experimentalFeatureSettings.require(Xp::Flakes);
- auto flakeRef = parseFlakeRef(elem.second.substr(6), {}, true, false);
- debug("fetching flake search path element '%s''", elem.second);
+ auto flakeRef = parseFlakeRef(value.substr(6), {}, true, false);
+ debug("fetching flake search path element '%s''", value);
auto storePath = flakeRef.resolve(store).fetchTree(store).first.storePath;
- res = { true, store->toRealPath(storePath) };
+ res = { store->toRealPath(storePath) };
}
else {
- auto path = absPath(elem.second);
+ auto path = absPath(value);
if (pathExists(path))
- res = { true, path };
+ res = { path };
else {
logWarning({
- .msg = hintfmt("Nix search path entry '%1%' does not exist, ignoring", elem.second)
+ .msg = hintfmt("Nix search path entry '%1%' does not exist, ignoring", value)
});
- res = { false, "" };
+ res = std::nullopt;
}
}
- debug("resolved search path element '%s' to '%s'", elem.second, res.second);
+ if (res)
+ debug("resolved search path element '%s' to '%s'", value, *res);
+ else
+ debug("failed to resolve search path element '%s'", value);
- searchPathResolved[elem.second] = res;
+ searchPathResolved[value] = res;
return res;
}
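`resolveSearchPathPath` continues to support `flake:` entries, now gated on the `flakes` experimental feature. A sketch of using such an entry through `builtins.findFile`; the flake reference relies on the default registry and is only an assumption for illustration:

```nix
# The flake reference is fetched and the resulting store path is used as
# the directory to search; requires the `flakes` experimental feature.
builtins.findFile
  [ { prefix = "nixpkgs"; path = "flake:nixpkgs"; } ]
  "nixpkgs"
```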
diff --git a/src/libexpr/primops.cc b/src/libexpr/primops.cc
index 6fbd66389..8a61e57cc 100644
--- a/src/libexpr/primops.cc
+++ b/src/libexpr/primops.cc
@@ -1,11 +1,12 @@
#include "archive.hh"
#include "derivations.hh"
+#include "downstream-placeholder.hh"
#include "eval-inline.hh"
#include "eval.hh"
#include "globals.hh"
#include "json-to-value.hh"
#include "names.hh"
-#include "references.hh"
+#include "path-references.hh"
#include "store-api.hh"
#include "util.hh"
#include "value-to-json.hh"
@@ -87,7 +88,7 @@ StringMap EvalState::realiseContext(const NixStringContext & context)
auto outputs = resolveDerivedPath(*store, drv);
for (auto & [outputName, outputPath] : outputs) {
res.insert_or_assign(
- downstreamPlaceholder(*store, drv.drvPath, outputName),
+ DownstreamPlaceholder::unknownCaOutput(drv.drvPath, outputName).render(),
store->printStorePath(outputPath)
);
}
@@ -237,7 +238,7 @@ static void import(EvalState & state, const PosIdx pos, Value & vPath, Value * v
}
}
-static RegisterPrimOp primop_scopedImport(RegisterPrimOp::Info {
+static RegisterPrimOp primop_scopedImport(PrimOp {
.name = "scopedImport",
.arity = 2,
.fun = [](EvalState & state, const PosIdx pos, Value * * args, Value & v)
@@ -691,7 +692,7 @@ static void prim_genericClosure(EvalState & state, const PosIdx pos, Value * * a
v.listElems()[n++] = i;
}
-static RegisterPrimOp primop_genericClosure(RegisterPrimOp::Info {
+static RegisterPrimOp primop_genericClosure(PrimOp {
.name = "__genericClosure",
.args = {"attrset"},
.arity = 1,
@@ -808,7 +809,7 @@ static void prim_addErrorContext(EvalState & state, const PosIdx pos, Value * *
}
}
-static RegisterPrimOp primop_addErrorContext(RegisterPrimOp::Info {
+static RegisterPrimOp primop_addErrorContext(PrimOp {
.name = "__addErrorContext",
.arity = 2,
.fun = prim_addErrorContext,
@@ -1151,16 +1152,14 @@ drvName, Bindings * attrs, Value & v)
if (i->value->type() == nNull) continue;
}
- if (i->name == state.sContentAddressed) {
- contentAddressed = state.forceBool(*i->value, noPos, context_below);
- if (contentAddressed)
- experimentalFeatureSettings.require(Xp::CaDerivations);
+ if (i->name == state.sContentAddressed && state.forceBool(*i->value, noPos, context_below)) {
+ contentAddressed = true;
+ experimentalFeatureSettings.require(Xp::CaDerivations);
}
- else if (i->name == state.sImpure) {
- isImpure = state.forceBool(*i->value, noPos, context_below);
- if (isImpure)
- experimentalFeatureSettings.require(Xp::ImpureDerivations);
+ else if (i->name == state.sImpure && state.forceBool(*i->value, noPos, context_below)) {
+ isImpure = true;
+ experimentalFeatureSettings.require(Xp::ImpureDerivations);
}
/* The `args' attribute is special: it supplies the
@@ -1401,7 +1400,7 @@ drvName, Bindings * attrs, Value & v)
v.mkAttrs(result);
}
-static RegisterPrimOp primop_derivationStrict(RegisterPrimOp::Info {
+static RegisterPrimOp primop_derivationStrict(PrimOp {
.name = "derivationStrict",
.arity = 1,
.fun = prim_derivationStrict,
@@ -1502,7 +1501,9 @@ static RegisterPrimOp primop_storePath({
causes the path to be *copied* again to the Nix store, resulting
in a new path (e.g. `/nix/store/ld01dnzc…-source-source`).
- This function is not available in pure evaluation mode.
+ Not available in [pure evaluation mode](@docroot@/command-ref/conf-file.md#conf-pure-eval).
+
+ See also [`builtins.fetchClosure`](#builtins-fetchClosure).
)",
.fun = prim_storePath,
});
@@ -1657,7 +1658,10 @@ static void prim_findFile(EvalState & state, const PosIdx pos, Value * * args, V
}));
}
- searchPath.emplace_back(prefix, path);
+ searchPath.elements.emplace_back(SearchPath::Elem {
+ .prefix = SearchPath::Prefix { .s = prefix },
+ .path = SearchPath::Path { .s = path },
+ });
}
auto path = state.forceStringNoCtx(*args[1], pos, "while evaluating the second argument passed to builtins.findFile");
@@ -1665,9 +1669,52 @@ static void prim_findFile(EvalState & state, const PosIdx pos, Value * * args, V
v.mkPath(state.checkSourcePath(state.findFile(searchPath, path, pos)));
}
-static RegisterPrimOp primop_findFile(RegisterPrimOp::Info {
+static RegisterPrimOp primop_findFile(PrimOp {
.name = "__findFile",
- .arity = 2,
+ .args = {"search path", "lookup path"},
+ .doc = R"(
+ Look up the given path with the given search path.
+
+    A search path is represented as a list of [attribute sets](./values.md#attribute-set) with two attributes, `prefix` and `path`.
+ `prefix` is a relative path.
+ `path` denotes a file system location; the exact syntax depends on the command line interface.
+
+ Examples of search path attribute sets:
+
+ - ```
+ {
+ prefix = "nixos-config";
+ path = "/etc/nixos/configuration.nix";
+ }
+ ```
+
+ - ```
+ {
+ prefix = "";
+ path = "/nix/var/nix/profiles/per-user/root/channels";
+ }
+ ```
+
+ The lookup algorithm checks each entry until a match is found, returning a [path value](@docroot@/language/values.html#type-path) of the match.
+
+ This is the process for each entry:
+    If the lookup path matches `prefix`, then the remainder of the lookup path (the "suffix") is searched for within the directory denoted by `path`.
+ Note that the `path` may need to be downloaded at this point to look inside.
+ If the suffix is found inside that directory, then the entry is a match;
+ the combined absolute path of the directory (now downloaded if need be) and the suffix is returned.
+
+ The syntax
+
+ ```nix
+ <nixpkgs>
+ ```
+
+ is equivalent to:
+
+ ```nix
+ builtins.findFile builtins.nixPath "nixpkgs"
+ ```
+ )",
.fun = prim_findFile,
});
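To make the lookup algorithm documented above concrete: an entry with an empty `prefix` matches any lookup path, so the whole lookup path becomes the suffix. A small sketch, with purely illustrative paths:

```nix
builtins.findFile [ { prefix = ""; path = "/etc"; } ] "hosts"
# => /etc/hosts   (a path value, assuming that file exists)
```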
@@ -2386,7 +2433,7 @@ static void prim_unsafeGetAttrPos(EvalState & state, const PosIdx pos, Value * *
state.mkPos(v, i->pos);
}
-static RegisterPrimOp primop_unsafeGetAttrPos(RegisterPrimOp::Info {
+static RegisterPrimOp primop_unsafeGetAttrPos(PrimOp {
.name = "__unsafeGetAttrPos",
.arity = 2,
.fun = prim_unsafeGetAttrPos,
@@ -3909,13 +3956,8 @@ static void prim_replaceStrings(EvalState & state, const PosIdx pos, Value * * a
for (auto elem : args[0]->listItems())
from.emplace_back(state.forceString(*elem, pos, "while evaluating one of the strings to replace passed to builtins.replaceStrings"));
- std::vector<std::pair<std::string, NixStringContext>> to;
- to.reserve(args[1]->listSize());
- for (auto elem : args[1]->listItems()) {
- NixStringContext ctx;
- auto s = state.forceString(*elem, ctx, pos, "while evaluating one of the replacement strings passed to builtins.replaceStrings");
- to.emplace_back(s, std::move(ctx));
- }
+ std::unordered_map<size_t, std::string> cache;
+ auto to = args[1]->listItems();
NixStringContext context;
auto s = state.forceString(*args[2], context, pos, "while evaluating the third argument passed to builtins.replaceStrings");
@@ -3926,10 +3968,19 @@ static void prim_replaceStrings(EvalState & state, const PosIdx pos, Value * * a
bool found = false;
auto i = from.begin();
auto j = to.begin();
- for (; i != from.end(); ++i, ++j)
+ size_t j_index = 0;
+ for (; i != from.end(); ++i, ++j, ++j_index)
if (s.compare(p, i->size(), *i) == 0) {
found = true;
- res += j->first;
+ auto v = cache.find(j_index);
+ if (v == cache.end()) {
+ NixStringContext ctx;
+ auto ts = state.forceString(**j, ctx, pos, "while evaluating one of the replacement strings passed to builtins.replaceStrings");
+ v = (cache.emplace(j_index, ts)).first;
+ for (auto& path : ctx)
+ context.insert(path);
+ }
+ res += v->second;
if (i->empty()) {
if (p < s.size())
res += s[p];
@@ -3937,9 +3988,6 @@ static void prim_replaceStrings(EvalState & state, const PosIdx pos, Value * * a
} else {
p += i->size();
}
- for (auto& path : j->second)
- context.insert(path);
- j->second.clear();
break;
}
if (!found) {
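The per-index cache introduced above is what makes the replacement strings lazy, as the updated documentation in the next hunk states: a *to* element is only forced the first time its pattern actually matches. A sketch of the observable effect, not part of the patch:

```nix
# The second replacement is never forced, because "b" never occurs in
# the input string, so the throw is never reached.
builtins.replaceStrings [ "a" "b" ] [ "1" (throw "never evaluated") ] "a"
# => "1"
```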
@@ -3957,7 +4005,11 @@ static RegisterPrimOp primop_replaceStrings({
.args = {"from", "to", "s"},
.doc = R"(
Given string *s*, replace every occurrence of the strings in *from*
- with the corresponding string in *to*. For example,
+ with the corresponding string in *to*.
+
+    The argument *to* is lazy, that is, it is only evaluated when its corresponding pattern in *from* is matched in the string *s*.
+
+ Example:
```nix
builtins.replaceStrings ["oo" "a"] ["a" "i"] "foobar"
@@ -4054,22 +4106,10 @@ static RegisterPrimOp primop_splitVersion({
RegisterPrimOp::PrimOps * RegisterPrimOp::primOps;
-RegisterPrimOp::RegisterPrimOp(std::string name, size_t arity, PrimOpFun fun)
-{
- if (!primOps) primOps = new PrimOps;
- primOps->push_back({
- .name = name,
- .args = {},
- .arity = arity,
- .fun = fun,
- });
-}
-
-
-RegisterPrimOp::RegisterPrimOp(Info && info)
+RegisterPrimOp::RegisterPrimOp(PrimOp && primOp)
{
if (!primOps) primOps = new PrimOps;
- primOps->push_back(std::move(info));
+ primOps->push_back(std::move(primOp));
}
@@ -4082,85 +4122,253 @@ void EvalState::createBaseEnv()
/* `builtins' must be first! */
v.mkAttrs(buildBindings(128).finish());
- addConstant("builtins", v);
+ addConstant("builtins", v, {
+ .type = nAttrs,
+ .doc = R"(
+ Contains all the [built-in functions](@docroot@/language/builtins.md) and values.
+
+ Since built-in functions were added over time, [testing for attributes](./operators.md#has-attribute) in `builtins` can be used for graceful fallback on older Nix installations:
+
+ ```nix
+ # if hasContext is not available, we assume `s` has a context
+ if builtins ? hasContext then builtins.hasContext s else true
+ ```
+ )",
+ });
v.mkBool(true);
- addConstant("true", v);
+ addConstant("true", v, {
+ .type = nBool,
+ .doc = R"(
+ Primitive value.
+
+ It can be returned by
+ [comparison operators](@docroot@/language/operators.md#Comparison)
+ and used in
+ [conditional expressions](@docroot@/language/constructs.md#Conditionals).
+
+ The name `true` is not special, and can be shadowed:
+
+ ```nix-repl
+ nix-repl> let true = 1; in true
+ 1
+ ```
+ )",
+ });
v.mkBool(false);
- addConstant("false", v);
+ addConstant("false", v, {
+ .type = nBool,
+ .doc = R"(
+ Primitive value.
+
+ It can be returned by
+ [comparison operators](@docroot@/language/operators.md#Comparison)
+ and used in
+ [conditional expressions](@docroot@/language/constructs.md#Conditionals).
+
+ The name `false` is not special, and can be shadowed:
+
+ ```nix-repl
+ nix-repl> let false = 1; in false
+ 1
+ ```
+ )",
+ });
v.mkNull();
- addConstant("null", v);
+ addConstant("null", v, {
+ .type = nNull,
+ .doc = R"(
+ Primitive value.
+
+ The name `null` is not special, and can be shadowed:
+
+ ```nix-repl
+ nix-repl> let null = 1; in null
+ 1
+ ```
+ )",
+ });
if (!evalSettings.pureEval) {
v.mkInt(time(0));
- addConstant("__currentTime", v);
+ }
+ addConstant("__currentTime", v, {
+ .type = nInt,
+ .doc = R"(
+ Return the [Unix time](https://en.wikipedia.org/wiki/Unix_time) at first evaluation.
+ Repeated references to that name will re-use the initially obtained value.
+
+ Example:
+
+ ```console
+ $ nix repl
+      Welcome to Nix 2.15.1. Type :? for help.
+ nix-repl> builtins.currentTime
+ 1683705525
+
+ nix-repl> builtins.currentTime
+ 1683705525
+ ```
+
+      The [store path](@docroot@/glossary.md#gloss-store-path) of a derivation depending on `currentTime` will differ for each evaluation, unless both evaluations call `builtins.currentTime` within the same second.
+ )",
+ .impureOnly = true,
+ });
+
+ if (!evalSettings.pureEval) {
v.mkString(settings.thisSystem.get());
- addConstant("__currentSystem", v);
}
+ addConstant("__currentSystem", v, {
+ .type = nString,
+ .doc = R"(
+      The value of the [`system` configuration option](@docroot@/command-ref/conf-file.md#conf-system).
+
+ It can be used to set the `system` attribute for [`builtins.derivation`](@docroot@/language/derivations.md) such that the resulting derivation can be built on the same system that evaluates the Nix expression:
+
+ ```nix
+ builtins.derivation {
+ # ...
+ system = builtins.currentSystem;
+ }
+ ```
+
+      It can be overridden in order to create derivations for a different system than the current one:
+
+ ```console
+ $ nix-instantiate --system "mips64-linux" --eval --expr 'builtins.currentSystem'
+ "mips64-linux"
+ ```
+ )",
+ .impureOnly = true,
+ });
v.mkString(nixVersion);
- addConstant("__nixVersion", v);
+ addConstant("__nixVersion", v, {
+ .type = nString,
+ .doc = R"(
+ The version of Nix.
+
+ For example, where the command line returns the current Nix version,
+
+ ```shell-session
+ $ nix --version
+ nix (Nix) 2.16.0
+ ```
+
+ the Nix language evaluator returns the same value:
+
+ ```nix-repl
+ nix-repl> builtins.nixVersion
+ "2.16.0"
+ ```
+ )",
+ });
v.mkString(store->storeDir);
- addConstant("__storeDir", v);
+ addConstant("__storeDir", v, {
+ .type = nString,
+ .doc = R"(
+ Logical file system location of the [Nix store](@docroot@/glossary.md#gloss-store) currently in use.
+
+ This value is determined by the `store` parameter in [Store URLs](@docroot@/command-ref/new-cli/nix3-help-stores.md):
+
+ ```shell-session
+ $ nix-instantiate --store 'dummy://?store=/blah' --eval --expr builtins.storeDir
+ "/blah"
+ ```
+ )",
+ });
/* Language version. This should be increased every time a new
language feature gets added. It's not necessary to increase it
when primops get added, because you can just use `builtins ?
primOp' to check. */
v.mkInt(6);
- addConstant("__langVersion", v);
+ addConstant("__langVersion", v, {
+ .type = nInt,
+ .doc = R"(
+ The current version of the Nix language.
+ )",
+ });
// Miscellaneous
if (evalSettings.enableNativeCode) {
- addPrimOp("__importNative", 2, prim_importNative);
- addPrimOp("__exec", 1, prim_exec);
+ addPrimOp({
+ .name = "__importNative",
+ .arity = 2,
+ .fun = prim_importNative,
+ });
+ addPrimOp({
+ .name = "__exec",
+ .arity = 1,
+ .fun = prim_exec,
+ });
}
addPrimOp({
- .fun = evalSettings.traceVerbose ? prim_trace : prim_second,
- .arity = 2,
.name = "__traceVerbose",
.args = { "e1", "e2" },
+ .arity = 2,
.doc = R"(
Evaluate *e1* and print its abstract syntax representation on standard
error if `--trace-verbose` is enabled. Then return *e2*. This function
is useful for debugging.
)",
+ .fun = evalSettings.traceVerbose ? prim_trace : prim_second,
});
/* Add a value containing the current Nix expression search path. */
- mkList(v, searchPath.size());
+ mkList(v, searchPath.elements.size());
int n = 0;
- for (auto & i : searchPath) {
+ for (auto & i : searchPath.elements) {
auto attrs = buildBindings(2);
- attrs.alloc("path").mkString(i.second);
- attrs.alloc("prefix").mkString(i.first);
+ attrs.alloc("path").mkString(i.path.s);
+ attrs.alloc("prefix").mkString(i.prefix.s);
(v.listElems()[n++] = allocValue())->mkAttrs(attrs);
}
- addConstant("__nixPath", v);
+ addConstant("__nixPath", v, {
+ .type = nList,
+ .doc = R"(
+ The search path used to resolve angle bracket path lookups.
+
+ Angle bracket expressions can be
+ [desugared](https://en.wikipedia.org/wiki/Syntactic_sugar)
+ using this and
+ [`builtins.findFile`](./builtins.html#builtins-findFile):
+
+ ```nix
+ <nixpkgs>
+ ```
+
+ is equivalent to:
+
+ ```nix
+ builtins.findFile builtins.nixPath "nixpkgs"
+ ```
+ )",
+ });
if (RegisterPrimOp::primOps)
for (auto & primOp : *RegisterPrimOp::primOps)
- if (!primOp.experimentalFeature
- || experimentalFeatureSettings.isEnabled(*primOp.experimentalFeature))
+ if (experimentalFeatureSettings.isEnabled(primOp.experimentalFeature))
{
- addPrimOp({
- .fun = primOp.fun,
- .arity = std::max(primOp.args.size(), primOp.arity),
- .name = primOp.name,
- .args = primOp.args,
- .doc = primOp.doc,
- });
+ auto primOpAdjusted = primOp;
+ primOpAdjusted.arity = std::max(primOp.args.size(), primOp.arity);
+ addPrimOp(std::move(primOpAdjusted));
}
/* Add a wrapper around the derivation primop that computes the
- `drvPath' and `outPath' attributes lazily. */
+ `drvPath' and `outPath' attributes lazily.
+
+ Null docs because it is documented separately.
+ */
auto vDerivation = allocValue();
- addConstant("derivation", vDerivation);
+ addConstant("derivation", vDerivation, {
+ .type = nFunction,
+ });
/* Now that we've added all primops, sort the `builtins' set,
because attribute lookups expect it to be sorted. */
diff --git a/src/libexpr/primops.hh b/src/libexpr/primops.hh
index 4ae73fe1f..930e7f32a 100644
--- a/src/libexpr/primops.hh
+++ b/src/libexpr/primops.hh
@@ -10,17 +10,7 @@ namespace nix {
struct RegisterPrimOp
{
- struct Info
- {
- std::string name;
- std::vector<std::string> args;
- size_t arity = 0;
- const char * doc;
- PrimOpFun fun;
- std::optional<ExperimentalFeature> experimentalFeature;
- };
-
- typedef std::vector<Info> PrimOps;
+ typedef std::vector<PrimOp> PrimOps;
static PrimOps * primOps;
/**
@@ -28,12 +18,7 @@ struct RegisterPrimOp
* will get called during EvalState initialization, so there
* may be primops not yet added and builtins is not yet sorted.
*/
- RegisterPrimOp(
- std::string name,
- size_t arity,
- PrimOpFun fun);
-
- RegisterPrimOp(Info && info);
+ RegisterPrimOp(PrimOp && primOp);
};
/* These primops are disabled without enableNativeCode, but plugins
diff --git a/src/libexpr/primops/context.cc b/src/libexpr/primops/context.cc
index 07bf400cf..8b3468009 100644
--- a/src/libexpr/primops/context.cc
+++ b/src/libexpr/primops/context.cc
@@ -12,7 +12,11 @@ static void prim_unsafeDiscardStringContext(EvalState & state, const PosIdx pos,
v.mkString(*s);
}
-static RegisterPrimOp primop_unsafeDiscardStringContext("__unsafeDiscardStringContext", 1, prim_unsafeDiscardStringContext);
+static RegisterPrimOp primop_unsafeDiscardStringContext({
+ .name = "__unsafeDiscardStringContext",
+ .arity = 1,
+ .fun = prim_unsafeDiscardStringContext
+});
static void prim_hasContext(EvalState & state, const PosIdx pos, Value * * args, Value & v)
@@ -22,7 +26,16 @@ static void prim_hasContext(EvalState & state, const PosIdx pos, Value * * args,
v.mkBool(!context.empty());
}
-static RegisterPrimOp primop_hasContext("__hasContext", 1, prim_hasContext);
+static RegisterPrimOp primop_hasContext({
+ .name = "__hasContext",
+ .args = {"s"},
+ .doc = R"(
+ Return `true` if string *s* has a non-empty context. The
+ context can be obtained with
+ [`getContext`](#builtins-getContext).
+ )",
+ .fun = prim_hasContext
+});
/* Sometimes we want to pass a derivation path (i.e. pkg.drvPath) to a
@@ -51,7 +64,11 @@ static void prim_unsafeDiscardOutputDependency(EvalState & state, const PosIdx p
v.mkString(*s, context2);
}
-static RegisterPrimOp primop_unsafeDiscardOutputDependency("__unsafeDiscardOutputDependency", 1, prim_unsafeDiscardOutputDependency);
+static RegisterPrimOp primop_unsafeDiscardOutputDependency({
+ .name = "__unsafeDiscardOutputDependency",
+ .arity = 1,
+ .fun = prim_unsafeDiscardOutputDependency
+});
/* Extract the context of a string as a structured Nix value.
@@ -119,7 +136,30 @@ static void prim_getContext(EvalState & state, const PosIdx pos, Value * * args,
v.mkAttrs(attrs);
}
-static RegisterPrimOp primop_getContext("__getContext", 1, prim_getContext);
+static RegisterPrimOp primop_getContext({
+ .name = "__getContext",
+ .args = {"s"},
+ .doc = R"(
+ Return the string context of *s*.
+
+ The string context tracks references to derivations within a string.
+ It is represented as an attribute set of [store derivation](@docroot@/glossary.md#gloss-store-derivation) paths mapping to output names.
+
+ Using [string interpolation](@docroot@/language/string-interpolation.md) on a derivation will add that derivation to the string context.
+ For example,
+
+ ```nix
+ builtins.getContext "${derivation { name = "a"; builder = "b"; system = "c"; }}"
+ ```
+
+ evaluates to
+
+ ```
+ { "/nix/store/arhvjaf6zmlyn8vh8fgn55rpwnxq0n7l-a.drv" = { outputs = [ "out" ]; }; }
+ ```
+ )",
+ .fun = prim_getContext
+});
/* Append the given context to a given string.
@@ -192,6 +232,10 @@ static void prim_appendContext(EvalState & state, const PosIdx pos, Value * * ar
v.mkString(orig, context);
}
-static RegisterPrimOp primop_appendContext("__appendContext", 2, prim_appendContext);
+static RegisterPrimOp primop_appendContext({
+ .name = "__appendContext",
+ .arity = 2,
+ .fun = prim_appendContext
+});
}
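Since `__hasContext` now has reference documentation, a short illustration of what it reports may help; this is a sketch consistent with the `getContext` example above, not part of the patch:

```nix
# Interpolating a derivation adds it to the string context:
builtins.hasContext "${derivation { name = "a"; builder = "b"; system = "c"; }}"
# => true

builtins.hasContext "no store references here"
# => false
```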
diff --git a/src/libexpr/primops/fetchClosure.cc b/src/libexpr/primops/fetchClosure.cc
index 4cf1f1e0b..7fe8203f4 100644
--- a/src/libexpr/primops/fetchClosure.cc
+++ b/src/libexpr/primops/fetchClosure.cc
@@ -5,37 +5,150 @@
namespace nix {
+/**
+ * Handler for the content addressed case.
+ *
+ * @param state Evaluator state and store to write to.
+ * @param fromStore Store containing the path to rewrite.
+ * @param fromPath Source path to be rewritten.
+ * @param toPathMaybe Path to write the rewritten path to. If empty, the error shows the actual path.
+ * @param v Return `Value`
+ */
+static void runFetchClosureWithRewrite(EvalState & state, const PosIdx pos, Store & fromStore, const StorePath & fromPath, const std::optional<StorePath> & toPathMaybe, Value &v) {
+
+ // establish toPath or throw
+
+ if (!toPathMaybe || !state.store->isValidPath(*toPathMaybe)) {
+ auto rewrittenPath = makeContentAddressed(fromStore, *state.store, fromPath);
+ if (toPathMaybe && *toPathMaybe != rewrittenPath)
+ throw Error({
+ .msg = hintfmt("rewriting '%s' to content-addressed form yielded '%s', while '%s' was expected",
+ state.store->printStorePath(fromPath),
+ state.store->printStorePath(rewrittenPath),
+ state.store->printStorePath(*toPathMaybe)),
+ .errPos = state.positions[pos]
+ });
+ if (!toPathMaybe)
+ throw Error({
+ .msg = hintfmt(
+ "rewriting '%s' to content-addressed form yielded '%s'\n"
+ "Use this value for the 'toPath' attribute passed to 'fetchClosure'",
+ state.store->printStorePath(fromPath),
+ state.store->printStorePath(rewrittenPath)),
+ .errPos = state.positions[pos]
+ });
+ }
+
+ auto toPath = *toPathMaybe;
+
+ // check and return
+
+ auto resultInfo = state.store->queryPathInfo(toPath);
+
+ if (!resultInfo->isContentAddressed(*state.store)) {
+ // We don't perform the rewriting when outPath already exists, as an optimisation.
+ // However, we can quickly detect a mistake if the toPath is input addressed.
+ throw Error({
+ .msg = hintfmt(
+ "The 'toPath' value '%s' is input-addressed, so it can't possibly be the result of rewriting to a content-addressed path.\n\n"
+ "Set 'toPath' to an empty string to make Nix report the correct content-addressed path.",
+ state.store->printStorePath(toPath)),
+ .errPos = state.positions[pos]
+ });
+ }
+
+ state.mkStorePathString(toPath, v);
+}
+
+/**
+ * Fetch the closure and make sure it's content addressed.
+ */
+static void runFetchClosureWithContentAddressedPath(EvalState & state, const PosIdx pos, Store & fromStore, const StorePath & fromPath, Value & v) {
+
+ if (!state.store->isValidPath(fromPath))
+ copyClosure(fromStore, *state.store, RealisedPath::Set { fromPath });
+
+ auto info = state.store->queryPathInfo(fromPath);
+
+ if (!info->isContentAddressed(*state.store)) {
+ throw Error({
+ .msg = hintfmt(
+ "The 'fromPath' value '%s' is input-addressed, but 'inputAddressed' is set to 'false' (default).\n\n"
+ "If you do intend to fetch an input-addressed store path, add\n\n"
+ " inputAddressed = true;\n\n"
+ "to the 'fetchClosure' arguments.\n\n"
+ "Note that to ensure authenticity input-addressed store paths, users must configure a trusted binary cache public key on their systems. This is not needed for content-addressed paths.",
+ state.store->printStorePath(fromPath)),
+ .errPos = state.positions[pos]
+ });
+ }
+
+ state.mkStorePathString(fromPath, v);
+}
+
+/**
+ * Fetch the closure and make sure it's input addressed.
+ */
+static void runFetchClosureWithInputAddressedPath(EvalState & state, const PosIdx pos, Store & fromStore, const StorePath & fromPath, Value & v) {
+
+ if (!state.store->isValidPath(fromPath))
+ copyClosure(fromStore, *state.store, RealisedPath::Set { fromPath });
+
+ auto info = state.store->queryPathInfo(fromPath);
+
+ if (info->isContentAddressed(*state.store)) {
+ throw Error({
+ .msg = hintfmt(
+ "The store object referred to by 'fromPath' at '%s' is not input-addressed, but 'inputAddressed' is set to 'true'.\n\n"
+ "Remove the 'inputAddressed' attribute (it defaults to 'false') to expect 'fromPath' to be content-addressed",
+ state.store->printStorePath(fromPath)),
+ .errPos = state.positions[pos]
+ });
+ }
+
+ state.mkStorePathString(fromPath, v);
+}
+
+typedef std::optional<StorePath> StorePathOrGap;
+
static void prim_fetchClosure(EvalState & state, const PosIdx pos, Value * * args, Value & v)
{
state.forceAttrs(*args[0], pos, "while evaluating the argument passed to builtins.fetchClosure");
std::optional<std::string> fromStoreUrl;
std::optional<StorePath> fromPath;
- bool toCA = false;
- std::optional<StorePath> toPath;
+ std::optional<StorePathOrGap> toPath;
+ std::optional<bool> inputAddressedMaybe;
for (auto & attr : *args[0]->attrs) {
const auto & attrName = state.symbols[attr.name];
+ auto attrHint = [&]() -> std::string {
+ return "while evaluating the '" + attrName + "' attribute passed to builtins.fetchClosure";
+ };
if (attrName == "fromPath") {
NixStringContext context;
- fromPath = state.coerceToStorePath(attr.pos, *attr.value, context,
- "while evaluating the 'fromPath' attribute passed to builtins.fetchClosure");
+ fromPath = state.coerceToStorePath(attr.pos, *attr.value, context, attrHint());
}
else if (attrName == "toPath") {
state.forceValue(*attr.value, attr.pos);
- toCA = true;
- if (attr.value->type() != nString || attr.value->string.s != std::string("")) {
+ bool isEmptyString = attr.value->type() == nString && attr.value->string.s == std::string("");
+ if (isEmptyString) {
+ toPath = StorePathOrGap {};
+ }
+ else {
NixStringContext context;
- toPath = state.coerceToStorePath(attr.pos, *attr.value, context,
- "while evaluating the 'toPath' attribute passed to builtins.fetchClosure");
+ toPath = state.coerceToStorePath(attr.pos, *attr.value, context, attrHint());
}
}
else if (attrName == "fromStore")
fromStoreUrl = state.forceStringNoCtx(*attr.value, attr.pos,
- "while evaluating the 'fromStore' attribute passed to builtins.fetchClosure");
+ attrHint());
+
+ else if (attrName == "inputAddressed")
+ inputAddressedMaybe = state.forceBool(*attr.value, attr.pos, attrHint());
else
throw Error({
@@ -50,6 +163,18 @@ static void prim_fetchClosure(EvalState & state, const PosIdx pos, Value * * arg
.errPos = state.positions[pos]
});
+ bool inputAddressed = inputAddressedMaybe.value_or(false);
+
+ if (inputAddressed) {
+ if (toPath)
+ throw Error({
+ .msg = hintfmt("attribute '%s' is set to true, but '%s' is also set. Please remove one of them",
+ "inputAddressed",
+ "toPath"),
+ .errPos = state.positions[pos]
+ });
+ }
+
if (!fromStoreUrl)
throw Error({
.msg = hintfmt("attribute '%s' is missing in call to 'fetchClosure'", "fromStore"),
@@ -74,55 +199,40 @@ static void prim_fetchClosure(EvalState & state, const PosIdx pos, Value * * arg
auto fromStore = openStore(parsedURL.to_string());
- if (toCA) {
- if (!toPath || !state.store->isValidPath(*toPath)) {
- auto remappings = makeContentAddressed(*fromStore, *state.store, { *fromPath });
- auto i = remappings.find(*fromPath);
- assert(i != remappings.end());
- if (toPath && *toPath != i->second)
- throw Error({
- .msg = hintfmt("rewriting '%s' to content-addressed form yielded '%s', while '%s' was expected",
- state.store->printStorePath(*fromPath),
- state.store->printStorePath(i->second),
- state.store->printStorePath(*toPath)),
- .errPos = state.positions[pos]
- });
- if (!toPath)
- throw Error({
- .msg = hintfmt(
- "rewriting '%s' to content-addressed form yielded '%s'; "
- "please set this in the 'toPath' attribute passed to 'fetchClosure'",
- state.store->printStorePath(*fromPath),
- state.store->printStorePath(i->second)),
- .errPos = state.positions[pos]
- });
- }
- } else {
- if (!state.store->isValidPath(*fromPath))
- copyClosure(*fromStore, *state.store, RealisedPath::Set { *fromPath });
- toPath = fromPath;
- }
-
- /* In pure mode, require a CA path. */
- if (evalSettings.pureEval) {
- auto info = state.store->queryPathInfo(*toPath);
- if (!info->isContentAddressed(*state.store))
- throw Error({
- .msg = hintfmt("in pure mode, 'fetchClosure' requires a content-addressed path, which '%s' isn't",
- state.store->printStorePath(*toPath)),
- .errPos = state.positions[pos]
- });
- }
-
- state.mkStorePathString(*toPath, v);
+ if (toPath)
+ runFetchClosureWithRewrite(state, pos, *fromStore, *fromPath, *toPath, v);
+ else if (inputAddressed)
+ runFetchClosureWithInputAddressedPath(state, pos, *fromStore, *fromPath, v);
+ else
+ runFetchClosureWithContentAddressedPath(state, pos, *fromStore, *fromPath, v);
}
static RegisterPrimOp primop_fetchClosure({
.name = "__fetchClosure",
.args = {"args"},
.doc = R"(
- Fetch a Nix store closure from a binary cache, rewriting it into
- content-addressed form. For example,
+ Fetch a store path [closure](@docroot@/glossary.md#gloss-closure) from a binary cache, and return the store path as a string with context.
+
+ This function can be invoked in three ways, which we will discuss in order of preference.
+
+ **Fetch a content-addressed store path**
+
+ Example:
+
+ ```nix
+ builtins.fetchClosure {
+ fromStore = "https://cache.nixos.org";
+ fromPath = /nix/store/ldbhlwhh39wha58rm61bkiiwm6j7211j-git-2.33.1;
+ }
+ ```
+
+ This is the simplest invocation, and it does not require the user of the expression to configure [`trusted-public-keys`](@docroot@/command-ref/conf-file.md#conf-trusted-public-keys) to ensure the authenticity of the fetched store path.
+
+ If your store path is [input addressed](@docroot@/glossary.md#gloss-input-addressed-store-object) instead of content addressed, consider the other two invocations.
+
+ **Fetch any store path and rewrite it to a fully content-addressed store path**
+
+ Example:
```nix
builtins.fetchClosure {
@@ -132,31 +242,42 @@ static RegisterPrimOp primop_fetchClosure({
}
```
- fetches `/nix/store/r2jd...` from the specified binary cache,
+ This example fetches `/nix/store/r2jd...` from the specified binary cache,
and rewrites it into the content-addressed store path
`/nix/store/ldbh...`.
- If `fromPath` is already content-addressed, or if you are
- allowing impure evaluation (`--impure`), then `toPath` may be
- omitted.
+ As with the previous example, no extra configuration or privileges are required.
To find out the correct value for `toPath` given a `fromPath`,
- you can use `nix store make-content-addressed`:
+ use [`nix store make-content-addressed`](@docroot@/command-ref/new-cli/nix3-store-make-content-addressed.md):
```console
# nix store make-content-addressed --from https://cache.nixos.org /nix/store/r2jd6ygnmirm2g803mksqqjm4y39yi6i-git-2.33.1
rewrote '/nix/store/r2jd6ygnmirm2g803mksqqjm4y39yi6i-git-2.33.1' to '/nix/store/ldbhlwhh39wha58rm61bkiiwm6j7211j-git-2.33.1'
```
- This function is similar to `builtins.storePath` in that it
- allows you to use a previously built store path in a Nix
- expression. However, it is more reproducible because it requires
- specifying a binary cache from which the path can be fetched.
- Also, requiring a content-addressed final store path avoids the
- need for users to configure binary cache public keys.
+ Alternatively, set `toPath = ""` and find the correct `toPath` in the error message.
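+
+ For example, a sketch reusing the `fromPath` from above:
+
+ ```nix
+ builtins.fetchClosure {
+ fromStore = "https://cache.nixos.org";
+ fromPath = /nix/store/r2jd6ygnmirm2g803mksqqjm4y39yi6i-git-2.33.1;
+ toPath = "";
+ }
+ ```
+
+ Evaluation then fails with an error message that reports the correct content-addressed path to use for `toPath`.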
+
+ **Fetch an input-addressed store path as is**
+
+ Example:
+
+ ```nix
+ builtins.fetchClosure {
+ fromStore = "https://cache.nixos.org";
+ fromPath = /nix/store/r2jd6ygnmirm2g803mksqqjm4y39yi6i-git-2.33.1;
+ inputAddressed = true;
+ }
+ ```
+
+ It is possible to fetch an [input-addressed store path](@docroot@/glossary.md#gloss-input-addressed-store-object) and return it as is.
+ However, this is the least preferred way of invoking `fetchClosure`, because it requires that input-addressed paths be trusted by the Nix configuration.
+
+ **`builtins.storePath`**
- This function is only available if you enable the experimental
- feature `fetch-closure`.
+ `fetchClosure` is similar to [`builtins.storePath`](#builtins-storePath) in that it allows you to use a previously built store path in a Nix expression.
+ However, `fetchClosure` is more reproducible because it specifies a binary cache from which the path can be fetched.
+ Also, using content-addressed store paths does not require users to configure [`trusted-public-keys`](@docroot@/command-ref/conf-file.md#conf-trusted-public-keys) to ensure their authenticity.
)",
.fun = prim_fetchClosure,
.experimentalFeature = Xp::FetchClosure,
diff --git a/src/libexpr/primops/fetchMercurial.cc b/src/libexpr/primops/fetchMercurial.cc
index 2c0d98e74..322692b52 100644
--- a/src/libexpr/primops/fetchMercurial.cc
+++ b/src/libexpr/primops/fetchMercurial.cc
@@ -88,6 +88,10 @@ static void prim_fetchMercurial(EvalState & state, const PosIdx pos, Value * * a
state.allowPath(tree.storePath);
}
-static RegisterPrimOp r_fetchMercurial("fetchMercurial", 1, prim_fetchMercurial);
+static RegisterPrimOp r_fetchMercurial({
+ .name = "fetchMercurial",
+ .arity = 1,
+ .fun = prim_fetchMercurial
+});
}
diff --git a/src/libexpr/primops/fetchTree.cc b/src/libexpr/primops/fetchTree.cc
index cd7039025..579a45f92 100644
--- a/src/libexpr/primops/fetchTree.cc
+++ b/src/libexpr/primops/fetchTree.cc
@@ -22,7 +22,7 @@ void emitTreeAttrs(
{
assert(input.isLocked());
- auto attrs = state.buildBindings(8);
+ auto attrs = state.buildBindings(10);
state.mkStorePathString(tree.storePath, attrs.alloc(state.sOutPath));
@@ -56,6 +56,11 @@ void emitTreeAttrs(
}
+ if (auto dirtyRev = fetchers::maybeGetStrAttr(input.attrs, "dirtyRev")) {
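+ // These have the form "<full rev>-dirty" and "<short rev>-dirty",
+ // as set by fetchFromWorkdir in git.cc.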
+ attrs.alloc("dirtyRev").mkString(*dirtyRev);
+ attrs.alloc("dirtyShortRev").mkString(*fetchers::maybeGetStrAttr(input.attrs, "dirtyShortRev"));
+ }
+
if (auto lastModified = input.getLastModified()) {
attrs.alloc("lastModified").mkInt(*lastModified);
attrs.alloc("lastModifiedDate").mkString(
@@ -194,7 +199,11 @@ static void prim_fetchTree(EvalState & state, const PosIdx pos, Value * * args,
}
// FIXME: document
-static RegisterPrimOp primop_fetchTree("fetchTree", 1, prim_fetchTree);
+static RegisterPrimOp primop_fetchTree({
+ .name = "fetchTree",
+ .arity = 1,
+ .fun = prim_fetchTree
+});
static void fetch(EvalState & state, const PosIdx pos, Value * * args, Value & v,
const std::string & who, bool unpack, std::string name)
@@ -262,7 +271,7 @@ static void fetch(EvalState & state, const PosIdx pos, Value * * args, Value & v
// https://github.com/NixOS/nix/issues/4313
auto storePath =
unpack
- ? fetchers::downloadTarball(state.store, *url, name, (bool) expectedHash).first.storePath
+ ? fetchers::downloadTarball(state.store, *url, name, (bool) expectedHash).tree.storePath
: fetchers::downloadFile(state.store, *url, name, (bool) expectedHash).storePath;
if (expectedHash) {
@@ -286,9 +295,9 @@ static RegisterPrimOp primop_fetchurl({
.name = "__fetchurl",
.args = {"url"},
.doc = R"(
- Download the specified URL and return the path of the downloaded
- file. This function is not available if [restricted evaluation
- mode](../command-ref/conf-file.md) is enabled.
+ Download the specified URL and return the path of the downloaded file.
+
+ Not available in [restricted evaluation mode](@docroot@/command-ref/conf-file.md#conf-restrict-eval).
)",
.fun = prim_fetchurl,
});
@@ -338,8 +347,7 @@ static RegisterPrimOp primop_fetchTarball({
stdenv.mkDerivation { … }
```
- This function is not available if [restricted evaluation
- mode](../command-ref/conf-file.md) is enabled.
+ Not available in [restricted evaluation mode](@docroot@/command-ref/conf-file.md#conf-restrict-eval).
)",
.fun = prim_fetchTarball,
});
@@ -470,14 +478,9 @@ static RegisterPrimOp primop_fetchGit({
}
```
- > **Note**
- >
- > Nix will refetch the branch in accordance with
- > the option `tarball-ttl`.
+ Nix will refetch the branch according to the [`tarball-ttl`](@docroot@/command-ref/conf-file.md#conf-tarball-ttl) setting.
- > **Note**
- >
- > This behavior is disabled in *Pure evaluation mode*.
+ This behavior is disabled in [pure evaluation mode](@docroot@/command-ref/conf-file.md#conf-pure-eval).
- To fetch the content of a checked-out work directory:
diff --git a/src/libexpr/primops/fromTOML.cc b/src/libexpr/primops/fromTOML.cc
index 8a5231781..2f4d4022e 100644
--- a/src/libexpr/primops/fromTOML.cc
+++ b/src/libexpr/primops/fromTOML.cc
@@ -3,6 +3,8 @@
#include "../../toml11/toml.hpp"
+#include <sstream>
+
namespace nix {
static void prim_fromTOML(EvalState & state, const PosIdx pos, Value * * args, Value & val)
@@ -58,8 +60,18 @@ static void prim_fromTOML(EvalState & state, const PosIdx pos, Value * * args, V
case toml::value_t::offset_datetime:
case toml::value_t::local_date:
case toml::value_t::local_time:
- // We fail since Nix doesn't have date and time types
- throw std::runtime_error("Dates and times are not supported");
+ {
+ if (experimentalFeatureSettings.isEnabled(Xp::ParseTomlTimestamps)) {
+ auto attrs = state.buildBindings(2);
+ attrs.alloc("_type").mkString("timestamp");
+ std::ostringstream s;
+ s << t;
+ attrs.alloc("value").mkString(s.str());
+ v.mkAttrs(attrs);
+ } else {
+ throw std::runtime_error("Dates and times are not supported");
+ }
+ }
break;;
case toml::value_t::empty:
v.mkNull();
@@ -78,6 +90,24 @@ static void prim_fromTOML(EvalState & state, const PosIdx pos, Value * * args, V
}
}
-static RegisterPrimOp primop_fromTOML("fromTOML", 1, prim_fromTOML);
+static RegisterPrimOp primop_fromTOML({
+ .name = "fromTOML",
+ .args = {"e"},
+ .doc = R"(
+ Convert a TOML string to a Nix value. For example,
+
+ ```nix
+ builtins.fromTOML ''
+ x=1
+ s="a"
+ [table]
+ y=2
+ ''
+ ```
+
+ returns the value `{ s = "a"; table = { y = 2; }; x = 1; }`.
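+
+ As a sketch of the experimental timestamp handling introduced above (guarded by the `ParseTomlTimestamps` experimental feature; without it, dates and times raise an error), an input such as
+
+ ```nix
+ builtins.fromTOML ''
+ d = 1979-05-27
+ ''
+ ```
+
+ would return an attribute set of the form `{ d = { _type = "timestamp"; value = "..."; }; }`, where `value` holds the timestamp rendered as a string by the TOML library.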
+ )",
+ .fun = prim_fromTOML
+});
}
diff --git a/src/libexpr/search-path.cc b/src/libexpr/search-path.cc
new file mode 100644
index 000000000..36bb4c3a5
--- /dev/null
+++ b/src/libexpr/search-path.cc
@@ -0,0 +1,56 @@
+#include "search-path.hh"
+#include "util.hh"
+
+namespace nix {
+
+std::optional<std::string_view> SearchPath::Prefix::suffixIfPotentialMatch(
+ std::string_view path) const
+{
+ auto n = s.size();
+
+ /* Non-empty prefix and suffix must be separated by a /, or the
+ prefix is not a valid path prefix. */
+ bool needSeparator = n > 0 && (path.size() - n) > 0;
+
+ if (needSeparator && path[n] != '/') {
+ return std::nullopt;
+ }
+
+ /* Prefix must be prefix of this path. */
+ if (path.compare(0, n, s) != 0) {
+ return std::nullopt;
+ }
+
+ /* Skip next path separator. */
+ return {
+ path.substr(needSeparator ? n + 1 : n)
+ };
+}
+
+
+SearchPath::Elem SearchPath::Elem::parse(std::string_view rawElem)
+{
+ size_t pos = rawElem.find('=');
+
+ return SearchPath::Elem {
+ .prefix = Prefix {
+ .s = pos == std::string::npos
+ ? std::string { "" }
+ : std::string { rawElem.substr(0, pos) },
+ },
+ .path = Path {
+ .s = std::string { rawElem.substr(pos + 1) },
+ },
+ };
+}
+
+
+SearchPath parseSearchPath(const Strings & rawElems)
+{
+ SearchPath res;
+ for (auto & rawElem : rawElems)
+ res.elements.emplace_back(SearchPath::Elem::parse(rawElem));
+ return res;
+}
+
+}
diff --git a/src/libexpr/search-path.hh b/src/libexpr/search-path.hh
new file mode 100644
index 000000000..ce78135b5
--- /dev/null
+++ b/src/libexpr/search-path.hh
@@ -0,0 +1,108 @@
+#pragma once
+///@file
+
+#include <optional>
+
+#include "types.hh"
+#include "comparator.hh"
+
+namespace nix {
+
+/**
+ * A "search path" is a list of ways to look for something, used with
+ * `builtins.findFile` and `< >` lookup expressions.
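+ *
+ * For example (a sketch of how the pieces below fit together): given the
+ * element "nixpkgs=/path/to/nixpkgs", the lookup expression `<nixpkgs/lib>`
+ * matches the prefix "nixpkgs" and resolves to `/path/to/nixpkgs/lib`.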
+ */
+struct SearchPath
+{
+ /**
+ * A single element of a `SearchPath`.
+ *
+ * Each element is tried in succession when looking up a path. The first
+ * element to completely match wins.
+ */
+ struct Elem;
+
+ /**
+ * The first part of a `SearchPath::Elem` pair.
+ *
+ * Called a "prefix" because it takes the form of a prefix of a file
+ * path (first `n` path components). When looking up a path, a
+ * `SearchPath::Elem` applies only if its `Prefix` matches that path.
+ */
+ struct Prefix;
+
+ /**
+ * The second part of a `SearchPath::Elem` pair.
+ *
+ * It is either a path or a URL (with certain restrictions / extra
+ * structure).
+ *
+ * If the prefix of the path we are looking up matches, we then
+ * check if the rest of the path points to something that exists
+ * within the directory denoted by this. If so, the
+ * `SearchPath::Elem` as a whole matches, and that *something* being
+ * pointed to by the rest of the path we are looking up is the
+ * result.
+ */
+ struct Path;
+
+ /**
+ * The list of search path elements. Each one is checked for a path
+ * when looking up. (The actual lookup entry point is in `EvalState`
+ * not in this class.)
+ */
+ std::list<SearchPath::Elem> elements;
+
+ /**
+ * Parse a list of strings into a `SearchPath`
+ */
+ static SearchPath parse(const Strings & rawElems);
+};
+
+struct SearchPath::Prefix
+{
+ /**
+ * Underlying string
+ *
+ * @todo Should we normalize this when constructing a `SearchPath::Prefix`?
+ */
+ std::string s;
+
+ GENERATE_CMP(SearchPath::Prefix, me->s);
+
+ /**
+ * If the path possibly matches this search path element, return the
+ * suffix that we should look for inside the resolved value of the
+ * element.
+ *
+ * Note the double optionality in the name: while we might have a matching prefix, the suffix may not exist.
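+ *
+ * For example (mirroring the unit tests): with prefix "foo", the path
+ * "foo/bar/baz" yields the suffix "bar/baz", while "fooX/bar" yields
+ * std::nullopt.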
+ */
+ std::optional<std::string_view> suffixIfPotentialMatch(std::string_view path) const;
+};
+
+struct SearchPath::Path
+{
+ /**
+ * The location of a search path item, as a path or URL.
+ *
+ * @todo Maybe change this to `std::variant<SourcePath, URL>`.
+ */
+ std::string s;
+
+ GENERATE_CMP(SearchPath::Path, me->s);
+};
+
+struct SearchPath::Elem
+{
+
+ Prefix prefix;
+ Path path;
+
+ GENERATE_CMP(SearchPath::Elem, me->prefix, me->path);
+
+ /**
+ * Parse a string into a `SearchPath::Elem`
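+ *
+ * For example (per the parsing logic in search-path.cc): "foo=bar" yields
+ * prefix "foo" and path "bar", while "foo" (no '=') yields an empty prefix
+ * and path "foo".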
+ */
+ static SearchPath::Elem parse(std::string_view rawElem);
+};
+
+}
diff --git a/src/libexpr/tests/error_traces.cc b/src/libexpr/tests/error_traces.cc
index 24e95ac39..285651256 100644
--- a/src/libexpr/tests/error_traces.cc
+++ b/src/libexpr/tests/error_traces.cc
@@ -171,7 +171,7 @@ namespace nix {
hintfmt("value is %s while a string was expected", "an integer"),
hintfmt("while evaluating one of the strings to replace passed to builtins.replaceStrings"));
- ASSERT_TRACE2("replaceStrings [ \"old\" ] [ true ] {}",
+ ASSERT_TRACE2("replaceStrings [ \"oo\" ] [ true ] \"foo\"",
TypeError,
hintfmt("value is %s while a string was expected", "a Boolean"),
hintfmt("while evaluating one of the replacement strings passed to builtins.replaceStrings"));
diff --git a/src/libexpr/tests/search-path.cc b/src/libexpr/tests/search-path.cc
new file mode 100644
index 000000000..dbe7ab95f
--- /dev/null
+++ b/src/libexpr/tests/search-path.cc
@@ -0,0 +1,90 @@
+#include <gtest/gtest.h>
+#include <gmock/gmock.h>
+
+#include "search-path.hh"
+
+namespace nix {
+
+TEST(SearchPathElem, parse_justPath) {
+ ASSERT_EQ(
+ SearchPath::Elem::parse("foo"),
+ (SearchPath::Elem {
+ .prefix = SearchPath::Prefix { .s = "" },
+ .path = SearchPath::Path { .s = "foo" },
+ }));
+}
+
+TEST(SearchPathElem, parse_emptyPrefix) {
+ ASSERT_EQ(
+ SearchPath::Elem::parse("=foo"),
+ (SearchPath::Elem {
+ .prefix = SearchPath::Prefix { .s = "" },
+ .path = SearchPath::Path { .s = "foo" },
+ }));
+}
+
+TEST(SearchPathElem, parse_oneEq) {
+ ASSERT_EQ(
+ SearchPath::Elem::parse("foo=bar"),
+ (SearchPath::Elem {
+ .prefix = SearchPath::Prefix { .s = "foo" },
+ .path = SearchPath::Path { .s = "bar" },
+ }));
+}
+
+TEST(SearchPathElem, parse_twoEqs) {
+ ASSERT_EQ(
+ SearchPath::Elem::parse("foo=bar=baz"),
+ (SearchPath::Elem {
+ .prefix = SearchPath::Prefix { .s = "foo" },
+ .path = SearchPath::Path { .s = "bar=baz" },
+ }));
+}
+
+
+TEST(SearchPathElem, suffixIfPotentialMatch_justPath) {
+ SearchPath::Prefix prefix { .s = "" };
+ ASSERT_EQ(prefix.suffixIfPotentialMatch("any/thing"), std::optional { "any/thing" });
+}
+
+TEST(SearchPathElem, suffixIfPotentialMatch_misleadingPrefix1) {
+ SearchPath::Prefix prefix { .s = "foo" };
+ ASSERT_EQ(prefix.suffixIfPotentialMatch("fooX"), std::nullopt);
+}
+
+TEST(SearchPathElem, suffixIfPotentialMatch_misleadingPrefix2) {
+ SearchPath::Prefix prefix { .s = "foo" };
+ ASSERT_EQ(prefix.suffixIfPotentialMatch("fooX/bar"), std::nullopt);
+}
+
+TEST(SearchPathElem, suffixIfPotentialMatch_partialPrefix) {
+ SearchPath::Prefix prefix { .s = "fooX" };
+ ASSERT_EQ(prefix.suffixIfPotentialMatch("foo"), std::nullopt);
+}
+
+TEST(SearchPathElem, suffixIfPotentialMatch_exactPrefix) {
+ SearchPath::Prefix prefix { .s = "foo" };
+ ASSERT_EQ(prefix.suffixIfPotentialMatch("foo"), std::optional { "" });
+}
+
+TEST(SearchPathElem, suffixIfPotentialMatch_multiKey) {
+ SearchPath::Prefix prefix { .s = "foo/bar" };
+ ASSERT_EQ(prefix.suffixIfPotentialMatch("foo/bar/baz"), std::optional { "baz" });
+}
+
+TEST(SearchPathElem, suffixIfPotentialMatch_trailingSlash) {
+ SearchPath::Prefix prefix { .s = "foo" };
+ ASSERT_EQ(prefix.suffixIfPotentialMatch("foo/"), std::optional { "" });
+}
+
+TEST(SearchPathElem, suffixIfPotentialMatch_trailingDoubleSlash) {
+ SearchPath::Prefix prefix { .s = "foo" };
+ ASSERT_EQ(prefix.suffixIfPotentialMatch("foo//"), std::optional { "/" });
+}
+
+TEST(SearchPathElem, suffixIfPotentialMatch_trailingPath) {
+ SearchPath::Prefix prefix { .s = "foo" };
+ ASSERT_EQ(prefix.suffixIfPotentialMatch("foo/bar/baz"), std::optional { "bar/baz" });
+}
+
+}
diff --git a/src/libexpr/tests/value/print.cc b/src/libexpr/tests/value/print.cc
new file mode 100644
index 000000000..5e96e12ec
--- /dev/null
+++ b/src/libexpr/tests/value/print.cc
@@ -0,0 +1,236 @@
+#include "tests/libexpr.hh"
+
+#include "value.hh"
+
+namespace nix {
+
+using namespace testing;
+
+struct ValuePrintingTests : LibExprTest
+{
+ template<class... A>
+ void test(Value v, std::string_view expected, A... args)
+ {
+ std::stringstream out;
+ v.print(state.symbols, out, args...);
+ ASSERT_EQ(out.str(), expected);
+ }
+};
+
+TEST_F(ValuePrintingTests, tInt)
+{
+ Value vInt;
+ vInt.mkInt(10);
+ test(vInt, "10");
+}
+
+TEST_F(ValuePrintingTests, tBool)
+{
+ Value vBool;
+ vBool.mkBool(true);
+ test(vBool, "true");
+}
+
+TEST_F(ValuePrintingTests, tString)
+{
+ Value vString;
+ vString.mkString("some-string");
+ test(vString, "\"some-string\"");
+}
+
+TEST_F(ValuePrintingTests, tPath)
+{
+ Value vPath;
+ vPath.mkString("/foo");
+ test(vPath, "\"/foo\"");
+}
+
+TEST_F(ValuePrintingTests, tNull)
+{
+ Value vNull;
+ vNull.mkNull();
+ test(vNull, "null");
+}
+
+TEST_F(ValuePrintingTests, tAttrs)
+{
+ Value vOne;
+ vOne.mkInt(1);
+
+ Value vTwo;
+ vTwo.mkInt(2);
+
+ BindingsBuilder builder(state, state.allocBindings(10));
+ builder.insert(state.symbols.create("one"), &vOne);
+ builder.insert(state.symbols.create("two"), &vTwo);
+
+ Value vAttrs;
+ vAttrs.mkAttrs(builder.finish());
+
+ test(vAttrs, "{ one = 1; two = 2; }");
+}
+
+TEST_F(ValuePrintingTests, tList)
+{
+ Value vOne;
+ vOne.mkInt(1);
+
+ Value vTwo;
+ vTwo.mkInt(2);
+
+ Value vList;
+ state.mkList(vList, 5);
+ vList.bigList.elems[0] = &vOne;
+ vList.bigList.elems[1] = &vTwo;
+ vList.bigList.size = 3;
+
+ test(vList, "[ 1 2 (nullptr) ]");
+}
+
+TEST_F(ValuePrintingTests, vThunk)
+{
+ Value vThunk;
+ vThunk.mkThunk(nullptr, nullptr);
+
+ test(vThunk, "<CODE>");
+}
+
+TEST_F(ValuePrintingTests, vApp)
+{
+ Value vApp;
+ vApp.mkApp(nullptr, nullptr);
+
+ test(vApp, "<CODE>");
+}
+
+TEST_F(ValuePrintingTests, vLambda)
+{
+ Value vLambda;
+ vLambda.mkLambda(nullptr, nullptr);
+
+ test(vLambda, "<LAMBDA>");
+}
+
+TEST_F(ValuePrintingTests, vPrimOp)
+{
+ Value vPrimOp;
+ vPrimOp.mkPrimOp(nullptr);
+
+ test(vPrimOp, "<PRIMOP>");
+}
+
+TEST_F(ValuePrintingTests, vPrimOpApp)
+{
+ Value vPrimOpApp;
+ vPrimOpApp.mkPrimOpApp(nullptr, nullptr);
+
+ test(vPrimOpApp, "<PRIMOP-APP>");
+}
+
+TEST_F(ValuePrintingTests, vExternal)
+{
+ struct MyExternal : ExternalValueBase
+ {
+ public:
+ std::string showType() const override
+ {
+ return "";
+ }
+ std::string typeOf() const override
+ {
+ return "";
+ }
+ virtual std::ostream & print(std::ostream & str) const override
+ {
+ str << "testing-external!";
+ return str;
+ }
+ } myExternal;
+ Value vExternal;
+ vExternal.mkExternal(&myExternal);
+
+ test(vExternal, "testing-external!");
+}
+
+TEST_F(ValuePrintingTests, vFloat)
+{
+ Value vFloat;
+ vFloat.mkFloat(2.0);
+
+ test(vFloat, "2");
+}
+
+TEST_F(ValuePrintingTests, vBlackhole)
+{
+ Value vBlackhole;
+ vBlackhole.mkBlackhole();
+ test(vBlackhole, "«potential infinite recursion»");
+}
+
+TEST_F(ValuePrintingTests, depthAttrs)
+{
+ Value vOne;
+ vOne.mkInt(1);
+
+ Value vTwo;
+ vTwo.mkInt(2);
+
+ BindingsBuilder builder(state, state.allocBindings(10));
+ builder.insert(state.symbols.create("one"), &vOne);
+ builder.insert(state.symbols.create("two"), &vTwo);
+
+ Value vAttrs;
+ vAttrs.mkAttrs(builder.finish());
+
+ BindingsBuilder builder2(state, state.allocBindings(10));
+ builder2.insert(state.symbols.create("one"), &vOne);
+ builder2.insert(state.symbols.create("two"), &vTwo);
+ builder2.insert(state.symbols.create("nested"), &vAttrs);
+
+ Value vNested;
+ vNested.mkAttrs(builder2.finish());
+
+ test(vNested, "{ nested = «too deep»; one = «too deep»; two = «too deep»; }", false, 1);
+ test(vNested, "{ nested = { one = «too deep»; two = «too deep»; }; one = 1; two = 2; }", false, 2);
+ test(vNested, "{ nested = { one = 1; two = 2; }; one = 1; two = 2; }", false, 3);
+ test(vNested, "{ nested = { one = 1; two = 2; }; one = 1; two = 2; }", false, 4);
+}
+
+TEST_F(ValuePrintingTests, depthList)
+{
+ Value vOne;
+ vOne.mkInt(1);
+
+ Value vTwo;
+ vTwo.mkInt(2);
+
+ BindingsBuilder builder(state, state.allocBindings(10));
+ builder.insert(state.symbols.create("one"), &vOne);
+ builder.insert(state.symbols.create("two"), &vTwo);
+
+ Value vAttrs;
+ vAttrs.mkAttrs(builder.finish());
+
+ BindingsBuilder builder2(state, state.allocBindings(10));
+ builder2.insert(state.symbols.create("one"), &vOne);
+ builder2.insert(state.symbols.create("two"), &vTwo);
+ builder2.insert(state.symbols.create("nested"), &vAttrs);
+
+ Value vNested;
+ vNested.mkAttrs(builder2.finish());
+
+ Value vList;
+ state.mkList(vList, 5);
+ vList.bigList.elems[0] = &vOne;
+ vList.bigList.elems[1] = &vTwo;
+ vList.bigList.elems[2] = &vNested;
+ vList.bigList.size = 3;
+
+ test(vList, "[ «too deep» «too deep» «too deep» ]", false, 1);
+ test(vList, "[ 1 2 { nested = «too deep»; one = «too deep»; two = «too deep»; } ]", false, 2);
+ test(vList, "[ 1 2 { nested = { one = «too deep»; two = «too deep»; }; one = 1; two = 2; } ]", false, 3);
+ test(vList, "[ 1 2 { nested = { one = 1; two = 2; }; one = 1; two = 2; } ]", false, 4);
+ test(vList, "[ 1 2 { nested = { one = 1; two = 2; }; one = 1; two = 2; } ]", false, 5);
+}
+
+} // namespace nix
diff --git a/src/libexpr/value.hh b/src/libexpr/value.hh
index 89c0c36fd..c44683e50 100644
--- a/src/libexpr/value.hh
+++ b/src/libexpr/value.hh
@@ -2,6 +2,7 @@
///@file
#include <cassert>
+#include <climits>
#include "symbol-table.hh"
#include "value/context.hh"
@@ -137,11 +138,11 @@ private:
friend std::string showType(const Value & v);
- void print(const SymbolTable & symbols, std::ostream & str, std::set<const void *> * seen) const;
+ void print(const SymbolTable &symbols, std::ostream &str, std::set<const void *> *seen, int depth) const;
public:
- void print(const SymbolTable & symbols, std::ostream & str, bool showRepeated = false) const;
+ void print(const SymbolTable &symbols, std::ostream &str, bool showRepeated = false, int depth = INT_MAX) const;
// Functions needed to distinguish the type
// These should be removed eventually, by putting the functionality that's
@@ -218,8 +219,11 @@ public:
/**
* Returns the normal type of a Value. This only returns nThunk if
* the Value hasn't been forceValue'd
+ *
+ * @param invalidIsThunk Instead of aborting on an invalid (probably
+ * 0, so uninitialized) internal type, return `nThunk`.
*/
- inline ValueType type() const
+ inline ValueType type(bool invalidIsThunk = false) const
{
switch (internalType) {
case tInt: return nInt;
@@ -234,7 +238,10 @@ public:
case tFloat: return nFloat;
case tThunk: case tApp: case tBlackhole: return nThunk;
}
- abort();
+ if (invalidIsThunk)
+ return nThunk;
+ else
+ abort();
}
/**
diff --git a/src/libfetchers/attrs.hh b/src/libfetchers/attrs.hh
index 1a14bb023..9f885a793 100644
--- a/src/libfetchers/attrs.hh
+++ b/src/libfetchers/attrs.hh
@@ -2,6 +2,7 @@
///@file
#include "types.hh"
+#include "hash.hh"
#include <variant>
diff --git a/src/libfetchers/fetchers.cc b/src/libfetchers/fetchers.cc
index 91db3a9eb..2860c1ceb 100644
--- a/src/libfetchers/fetchers.cc
+++ b/src/libfetchers/fetchers.cc
@@ -159,6 +159,12 @@ std::pair<Tree, Input> Input::fetch(ref<Store> store) const
input.to_string(), *prevLastModified);
}
+ if (auto prevRev = getRev()) {
+ if (input.getRev() != prevRev)
+ throw Error("'rev' attribute mismatch in input '%s', expected %s",
+ input.to_string(), prevRev->gitRev());
+ }
+
if (auto prevRevCount = getRevCount()) {
if (input.getRevCount() != prevRevCount)
throw Error("'revCount' attribute mismatch in input '%s', expected %d",
diff --git a/src/libfetchers/fetchers.hh b/src/libfetchers/fetchers.hh
index 498ad7e4d..d0738f619 100644
--- a/src/libfetchers/fetchers.hh
+++ b/src/libfetchers/fetchers.hh
@@ -158,6 +158,7 @@ struct DownloadFileResult
StorePath storePath;
std::string etag;
std::string effectiveUrl;
+ std::optional<std::string> immutableUrl;
};
DownloadFileResult downloadFile(
@@ -167,7 +168,14 @@ DownloadFileResult downloadFile(
bool locked,
const Headers & headers = {});
-std::pair<Tree, time_t> downloadTarball(
+struct DownloadTarballResult
+{
+ Tree tree;
+ time_t lastModified;
+ std::optional<std::string> immutableUrl;
+};
+
+DownloadTarballResult downloadTarball(
ref<Store> store,
const std::string & url,
const std::string & name,
diff --git a/src/libfetchers/git.cc b/src/libfetchers/git.cc
index 1da8c9609..be5842d53 100644
--- a/src/libfetchers/git.cc
+++ b/src/libfetchers/git.cc
@@ -62,6 +62,7 @@ std::optional<std::string> readHead(const Path & path)
.program = "git",
// FIXME: use 'HEAD' to avoid returning all refs
.args = {"ls-remote", "--symref", path},
+ .isInteractive = true,
});
if (status != 0) return std::nullopt;
@@ -242,6 +243,13 @@ std::pair<StorePath, Input> fetchFromWorkdir(ref<Store> store, Input & input, co
"lastModified",
workdirInfo.hasHead ? std::stoull(runProgram("git", true, { "-C", actualPath, "--git-dir", gitDir, "log", "-1", "--format=%ct", "--no-show-signature", "HEAD" })) : 0);
+ if (workdirInfo.hasHead) {
+ input.attrs.insert_or_assign("dirtyRev", chomp(
+ runProgram("git", true, { "-C", actualPath, "--git-dir", gitDir, "rev-parse", "--verify", "HEAD" })) + "-dirty");
+ input.attrs.insert_or_assign("dirtyShortRev", chomp(
+ runProgram("git", true, { "-C", actualPath, "--git-dir", gitDir, "rev-parse", "--verify", "--short", "HEAD" })) + "-dirty");
+ }
+
return {std::move(storePath), input};
}
} // end namespace
@@ -282,7 +290,7 @@ struct GitInputScheme : InputScheme
if (maybeGetStrAttr(attrs, "type") != "git") return {};
for (auto & [name, value] : attrs)
- if (name != "type" && name != "url" && name != "ref" && name != "rev" && name != "shallow" && name != "submodules" && name != "lastModified" && name != "revCount" && name != "narHash" && name != "allRefs" && name != "name")
+ if (name != "type" && name != "url" && name != "ref" && name != "rev" && name != "shallow" && name != "submodules" && name != "lastModified" && name != "revCount" && name != "narHash" && name != "allRefs" && name != "name" && name != "dirtyRev" && name != "dirtyShortRev")
throw Error("unsupported Git input attribute '%s'", name);
parseURL(getStrAttr(attrs, "url"));
@@ -350,7 +358,7 @@ struct GitInputScheme : InputScheme
args.push_back(destDir);
- runProgram("git", true, args);
+ runProgram("git", true, args, {}, true);
}
std::optional<Path> getSourcePath(const Input & input) override
@@ -555,7 +563,7 @@ struct GitInputScheme : InputScheme
: ref == "HEAD"
? *ref
: "refs/heads/" + *ref;
- runProgram("git", true, { "-C", repoDir, "--git-dir", gitDir, "fetch", "--quiet", "--force", "--", actualUrl, fmt("%s:%s", fetchRef, fetchRef) });
+ runProgram("git", true, { "-C", repoDir, "--git-dir", gitDir, "fetch", "--quiet", "--force", "--", actualUrl, fmt("%s:%s", fetchRef, fetchRef) }, {}, true);
} catch (Error & e) {
if (!pathExists(localRefFile)) throw;
warn("could not update local clone of Git repository '%s'; continuing with the most recent version", actualUrl);
@@ -622,7 +630,7 @@ struct GitInputScheme : InputScheme
// everything to ensure we get the rev.
Activity act(*logger, lvlTalkative, actUnknown, fmt("making temporary clone of '%s'", repoDir));
runProgram("git", true, { "-C", tmpDir, "fetch", "--quiet", "--force",
- "--update-head-ok", "--", repoDir, "refs/*:refs/*" });
+ "--update-head-ok", "--", repoDir, "refs/*:refs/*" }, {}, true);
}
runProgram("git", true, { "-C", tmpDir, "checkout", "--quiet", input.getRev()->gitRev() });
@@ -649,7 +657,7 @@ struct GitInputScheme : InputScheme
{
Activity act(*logger, lvlTalkative, actUnknown, fmt("fetching submodules of '%s'", actualUrl));
- runProgram("git", true, { "-C", tmpDir, "submodule", "--quiet", "update", "--init", "--recursive" });
+ runProgram("git", true, { "-C", tmpDir, "submodule", "--quiet", "update", "--init", "--recursive" }, {}, true);
}
filter = isNotDotGitDirectory;
diff --git a/src/libfetchers/github.cc b/src/libfetchers/github.cc
index 6c1d573ce..80598e7f8 100644
--- a/src/libfetchers/github.cc
+++ b/src/libfetchers/github.cc
@@ -207,21 +207,21 @@ struct GitArchiveInputScheme : InputScheme
auto url = getDownloadUrl(input);
- auto [tree, lastModified] = downloadTarball(store, url.url, input.getName(), true, url.headers);
+ auto result = downloadTarball(store, url.url, input.getName(), true, url.headers);
- input.attrs.insert_or_assign("lastModified", uint64_t(lastModified));
+ input.attrs.insert_or_assign("lastModified", uint64_t(result.lastModified));
getCache()->add(
store,
lockedAttrs,
{
{"rev", rev->gitRev()},
- {"lastModified", uint64_t(lastModified)}
+ {"lastModified", uint64_t(result.lastModified)}
},
- tree.storePath,
+ result.tree.storePath,
true);
- return {std::move(tree.storePath), input};
+ return {result.tree.storePath, input};
}
};
diff --git a/src/libfetchers/input-accessor.cc b/src/libfetchers/input-accessor.cc
index f9909c218..f37a8058b 100644
--- a/src/libfetchers/input-accessor.cc
+++ b/src/libfetchers/input-accessor.cc
@@ -75,22 +75,28 @@ SourcePath SourcePath::resolveSymlinks() const
int linksAllowed = 1024;
- for (auto & component : path) {
- res.path.push(component);
- while (true) {
- if (auto st = res.maybeLstat()) {
+ std::list<std::string> todo;
+ for (auto & c : path)
+ todo.push_back(std::string(c));
+
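+ // Process the path one component at a time; ".." pops the last resolved
+ // component, and symlink targets are spliced onto the front of the work
+ // list so that their components are resolved as well.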
+ while (!todo.empty()) {
+ auto c = *todo.begin();
+ todo.pop_front();
+ if (c == "" || c == ".")
+ ;
+ else if (c == "..")
+ res.path.pop();
+ else {
+ res.path.push(c);
+ if (auto st = res.maybeLstat(); st && st->type == InputAccessor::tSymlink) {
if (!linksAllowed--)
throw Error("infinite symlink recursion in path '%s'", path);
- if (st->type != InputAccessor::tSymlink) break;
auto target = res.readLink();
+ res.path.pop();
if (hasPrefix(target, "/"))
- res = CanonPath(target);
- else {
- res.path.pop();
- res.path.extend(CanonPath(target));
- }
- } else
- break;
+ res.path = CanonPath::root;
+ todo.splice(todo.begin(), tokenizeString<std::list<std::string>>(target, "/"));
+ }
}
}
diff --git a/src/libfetchers/tarball.cc b/src/libfetchers/tarball.cc
index 96fe5faca..e42aca6db 100644
--- a/src/libfetchers/tarball.cc
+++ b/src/libfetchers/tarball.cc
@@ -32,7 +32,8 @@ DownloadFileResult downloadFile(
return {
.storePath = std::move(cached->storePath),
.etag = getStrAttr(cached->infoAttrs, "etag"),
- .effectiveUrl = getStrAttr(cached->infoAttrs, "url")
+ .effectiveUrl = getStrAttr(cached->infoAttrs, "url"),
+ .immutableUrl = maybeGetStrAttr(cached->infoAttrs, "immutableUrl"),
};
};
@@ -55,12 +56,14 @@ DownloadFileResult downloadFile(
}
// FIXME: write to temporary file.
-
Attrs infoAttrs({
{"etag", res.etag},
{"url", res.effectiveUri},
});
+ if (res.immutableUrl)
+ infoAttrs.emplace("immutableUrl", *res.immutableUrl);
+
std::optional<StorePath> storePath;
if (res.cached) {
@@ -111,10 +114,11 @@ DownloadFileResult downloadFile(
.storePath = std::move(*storePath),
.etag = res.etag,
.effectiveUrl = res.effectiveUri,
+ .immutableUrl = res.immutableUrl,
};
}
-std::pair<Tree, time_t> downloadTarball(
+DownloadTarballResult downloadTarball(
ref<Store> store,
const std::string & url,
const std::string & name,
@@ -131,8 +135,9 @@ std::pair<Tree, time_t> downloadTarball(
if (cached && !cached->expired)
return {
- Tree { .actualPath = store->toRealPath(cached->storePath), .storePath = std::move(cached->storePath) },
- getIntAttr(cached->infoAttrs, "lastModified")
+ .tree = Tree { .actualPath = store->toRealPath(cached->storePath), .storePath = std::move(cached->storePath) },
+ .lastModified = (time_t) getIntAttr(cached->infoAttrs, "lastModified"),
+ .immutableUrl = maybeGetStrAttr(cached->infoAttrs, "immutableUrl"),
};
auto res = downloadFile(store, url, name, locked, headers);
@@ -160,6 +165,9 @@ std::pair<Tree, time_t> downloadTarball(
{"etag", res.etag},
});
+ if (res.immutableUrl)
+ infoAttrs.emplace("immutableUrl", *res.immutableUrl);
+
getCache()->add(
store,
inAttrs,
@@ -168,8 +176,9 @@ std::pair<Tree, time_t> downloadTarball(
locked);
return {
- Tree { .actualPath = store->toRealPath(*unpackedStorePath), .storePath = std::move(*unpackedStorePath) },
- lastModified,
+ .tree = Tree { .actualPath = store->toRealPath(*unpackedStorePath), .storePath = std::move(*unpackedStorePath) },
+ .lastModified = lastModified,
+ .immutableUrl = res.immutableUrl,
};
}
@@ -189,21 +198,33 @@ struct CurlInputScheme : InputScheme
virtual bool isValidURL(const ParsedURL & url) const = 0;
- std::optional<Input> inputFromURL(const ParsedURL & url) const override
+ std::optional<Input> inputFromURL(const ParsedURL & _url) const override
{
- if (!isValidURL(url))
+ if (!isValidURL(_url))
return std::nullopt;
Input input;
- auto urlWithoutApplicationScheme = url;
- urlWithoutApplicationScheme.scheme = parseUrlScheme(url.scheme).transport;
+ auto url = _url;
+
+ url.scheme = parseUrlScheme(url.scheme).transport;
- input.attrs.insert_or_assign("type", inputType());
- input.attrs.insert_or_assign("url", urlWithoutApplicationScheme.to_string());
auto narHash = url.query.find("narHash");
if (narHash != url.query.end())
input.attrs.insert_or_assign("narHash", narHash->second);
+
+ if (auto i = get(url.query, "rev"))
+ input.attrs.insert_or_assign("rev", *i);
+
+ if (auto i = get(url.query, "revCount"))
+ if (auto n = string2Int<uint64_t>(*i))
+ input.attrs.insert_or_assign("revCount", *n);
+
+ url.query.erase("rev");
+ url.query.erase("revCount");
+
+ input.attrs.insert_or_assign("type", inputType());
+ input.attrs.insert_or_assign("url", url.to_string());
return input;
}
@@ -212,7 +233,8 @@ struct CurlInputScheme : InputScheme
auto type = maybeGetStrAttr(attrs, "type");
if (type != inputType()) return {};
- std::set<std::string> allowedNames = {"type", "url", "narHash", "name", "unpack"};
+ // FIXME: some of these only apply to TarballInputScheme.
+ std::set<std::string> allowedNames = {"type", "url", "narHash", "name", "unpack", "rev", "revCount"};
for (auto & [name, value] : attrs)
if (!allowedNames.count(name))
throw Error("unsupported %s input attribute '%s'", *type, name);
@@ -275,10 +297,22 @@ struct TarballInputScheme : CurlInputScheme
: hasTarballExtension(url.path));
}
- std::pair<StorePath, Input> fetch(ref<Store> store, const Input & input) override
+ std::pair<StorePath, Input> fetch(ref<Store> store, const Input & _input) override
{
- auto tree = downloadTarball(store, getStrAttr(input.attrs, "url"), input.getName(), false).first;
- return {std::move(tree.storePath), input};
+ Input input(_input);
+ auto url = getStrAttr(input.attrs, "url");
+ auto result = downloadTarball(store, url, input.getName(), false);
+
+ if (result.immutableUrl) {
+ auto immutableInput = Input::fromURL(*result.immutableUrl);
+ // FIXME: would be nice to support arbitrary flakerefs
+ // here, e.g. git flakes.
+ if (immutableInput.getType() != "tarball")
+ throw Error("tarball 'Link' headers that redirect to non-tarball URLs are not supported");
+ input = immutableInput;
+ }
+
+ return {result.tree.storePath, std::move(input)};
}
};
diff --git a/src/libstore/build/derivation-goal.cc b/src/libstore/build/derivation-goal.cc
index 5b1c923cd..5e37f7ecb 100644
--- a/src/libstore/build/derivation-goal.cc
+++ b/src/libstore/build/derivation-goal.cc
@@ -9,6 +9,7 @@
#include "archive.hh"
#include "compression.hh"
#include "worker-protocol.hh"
+#include "worker-protocol-impl.hh"
#include "topo-sort.hh"
#include "callback.hh"
#include "local-store.hh" // TODO remove, along with remaining downcasts
@@ -1150,9 +1151,11 @@ HookReply DerivationGoal::tryBuildHook()
throw;
}
+ WorkerProto::WriteConn conn { hook->sink };
+
/* Tell the hook all the inputs that have to be copied to the
remote system. */
- worker_proto::write(worker.store, hook->sink, inputPaths);
+ WorkerProto::write(worker.store, conn, inputPaths);
/* Tell the hooks the missing outputs that have to be copied back
from the remote system. */
@@ -1163,7 +1166,7 @@ HookReply DerivationGoal::tryBuildHook()
if (buildMode != bmCheck && status.known && status.known->isValid()) continue;
missingOutputs.insert(outputName);
}
- worker_proto::write(worker.store, hook->sink, missingOutputs);
+ WorkerProto::write(worker.store, conn, missingOutputs);
}
hook->sink = FdSink();
diff --git a/src/libstore/build/entry-points.cc b/src/libstore/build/entry-points.cc
index 74eae0692..4aa4d6dca 100644
--- a/src/libstore/build/entry-points.cc
+++ b/src/libstore/build/entry-points.cc
@@ -31,11 +31,11 @@ void Store::buildPaths(const std::vector<DerivedPath> & reqs, BuildMode buildMod
}
if (failed.size() == 1 && ex) {
- ex->status = worker.exitStatus();
+ ex->status = worker.failingExitStatus();
throw std::move(*ex);
} else if (!failed.empty()) {
if (ex) logError(ex->info());
- throw Error(worker.exitStatus(), "build of %s failed", showPaths(failed));
+ throw Error(worker.failingExitStatus(), "build of %s failed", showPaths(failed));
}
}
@@ -102,15 +102,15 @@ void Store::ensurePath(const StorePath & path)
if (goal->exitCode != Goal::ecSuccess) {
if (goal->ex) {
- goal->ex->status = worker.exitStatus();
+ goal->ex->status = worker.failingExitStatus();
throw std::move(*goal->ex);
} else
- throw Error(worker.exitStatus(), "path '%s' does not exist and cannot be created", printStorePath(path));
+ throw Error(worker.failingExitStatus(), "path '%s' does not exist and cannot be created", printStorePath(path));
}
}
-void LocalStore::repairPath(const StorePath & path)
+void Store::repairPath(const StorePath & path)
{
Worker worker(*this, *this);
GoalPtr goal = worker.makePathSubstitutionGoal(path, Repair);
@@ -128,7 +128,7 @@ void LocalStore::repairPath(const StorePath & path)
goals.insert(worker.makeDerivationGoal(*info->deriver, OutputsSpec::All { }, bmRepair));
worker.run(goals);
} else
- throw Error(worker.exitStatus(), "cannot repair path '%s'", printStorePath(path));
+ throw Error(worker.failingExitStatus(), "cannot repair path '%s'", printStorePath(path));
}
}
diff --git a/src/libstore/build/hook-instance.cc b/src/libstore/build/hook-instance.cc
index 075ad554f..337c60bd4 100644
--- a/src/libstore/build/hook-instance.cc
+++ b/src/libstore/build/hook-instance.cc
@@ -5,14 +5,14 @@ namespace nix {
HookInstance::HookInstance()
{
- debug("starting build hook '%s'", settings.buildHook);
+ debug("starting build hook '%s'", concatStringsSep(" ", settings.buildHook.get()));
- auto buildHookArgs = tokenizeString<std::list<std::string>>(settings.buildHook.get());
+ auto buildHookArgs = settings.buildHook.get();
if (buildHookArgs.empty())
throw Error("'build-hook' setting is empty");
- auto buildHook = buildHookArgs.front();
+ auto buildHook = canonPath(buildHookArgs.front());
buildHookArgs.pop_front();
Strings args;
diff --git a/src/libstore/build/local-derivation-goal.cc b/src/libstore/build/local-derivation-goal.cc
index 9f21a711a..53e6998e8 100644
--- a/src/libstore/build/local-derivation-goal.cc
+++ b/src/libstore/build/local-derivation-goal.cc
@@ -4,13 +4,12 @@
#include "worker.hh"
#include "builtins.hh"
#include "builtins/buildenv.hh"
-#include "references.hh"
+#include "path-references.hh"
#include "finally.hh"
#include "util.hh"
#include "archive.hh"
#include "compression.hh"
#include "daemon.hh"
-#include "worker-protocol.hh"
#include "topo-sort.hh"
#include "callback.hh"
#include "json-utils.hh"
@@ -65,8 +64,9 @@ void handleDiffHook(
const Path & tryA, const Path & tryB,
const Path & drvPath, const Path & tmpDir)
{
- auto diffHook = settings.diffHook;
- if (diffHook != "" && settings.runDiffHook) {
+ auto & diffHookOpt = settings.diffHook.get();
+ if (diffHookOpt && settings.runDiffHook) {
+ auto & diffHook = *diffHookOpt;
try {
auto diffRes = runProgram(RunOptions {
.program = diffHook,
@@ -357,7 +357,7 @@ bool LocalDerivationGoal::cleanupDecideWhetherDiskFull()
for (auto & [_, status] : initialOutputs) {
if (!status.known) continue;
if (buildMode != bmCheck && status.known->isValid()) continue;
- auto p = worker.store.printStorePath(status.known->path);
+ auto p = worker.store.toRealPath(status.known->path);
if (pathExists(chrootRootDir + p))
renameFile((chrootRootDir + p), p);
}
@@ -395,8 +395,9 @@ static void linkOrCopy(const Path & from, const Path & to)
bind-mount in this case?
It can also fail with EPERM in BeegFS v7 and earlier versions
+ or fail with EXDEV in OpenAFS
which don't allow hard-links to other directories */
- if (errno != EMLINK && errno != EPERM)
+ if (errno != EMLINK && errno != EPERM && errno != EXDEV)
throw SysError("linking '%s' to '%s'", to, from);
copyPath(from, to);
}
@@ -1423,7 +1424,8 @@ void LocalDerivationGoal::startDaemon()
Store::Params params;
params["path-info-cache-size"] = "0";
params["store"] = worker.store.storeDir;
- params["root"] = getLocalStore().rootDir;
+ if (auto & optRoot = getLocalStore().rootDir.get())
+ params["root"] = *optRoot;
params["state"] = "/no-such-path";
params["log"] = "/no-such-path";
auto store = make_ref<RestrictedStore>(params,
@@ -1452,7 +1454,7 @@ void LocalDerivationGoal::startDaemon()
(struct sockaddr *) &remoteAddr, &remoteAddrLen);
if (!remote) {
if (errno == EINTR || errno == EAGAIN) continue;
- if (errno == EINVAL) break;
+ if (errno == EINVAL || errno == ECONNABORTED) break;
throw SysError("accepting connection");
}
@@ -1482,8 +1484,22 @@ void LocalDerivationGoal::startDaemon()
void LocalDerivationGoal::stopDaemon()
{
- if (daemonSocket && shutdown(daemonSocket.get(), SHUT_RDWR) == -1)
- throw SysError("shutting down daemon socket");
+ if (daemonSocket && shutdown(daemonSocket.get(), SHUT_RDWR) == -1) {
+ // According to the POSIX standard, the 'shutdown' function should
+ // return an ENOTCONN error when attempting to shut down a socket that
+ // hasn't been connected yet. This situation occurs when the 'accept'
+ // function is called on a socket without any accepted connections,
+ // leaving the socket unconnected. While Linux doesn't seem to produce
+ // an error for listening sockets that never accepted a connection, more
+ // POSIX-compliant operating systems like OpenBSD, macOS, and others do
+ // return the ENOTCONN error. Therefore, we handle this error here to
+ // avoid raising an exception for compliant behaviour.
+ if (errno == ENOTCONN) {
+ daemonSocket.close();
+ } else {
+ throw SysError("shutting down daemon socket");
+ }
+ }
if (daemonThread.joinable())
daemonThread.join();
@@ -1494,7 +1510,8 @@ void LocalDerivationGoal::stopDaemon()
thread.join();
daemonWorkerThreads.clear();
- daemonSocket = -1;
+ // release the socket.
+ daemonSocket.close();
}
@@ -1771,6 +1788,9 @@ void LocalDerivationGoal::runChild()
for (auto & path : { "/etc/resolv.conf", "/etc/services", "/etc/hosts" })
if (pathExists(path))
ss.push_back(path);
+
+ if (settings.caFile != "")
+ dirsInChroot.try_emplace("/etc/ssl/certs/ca-certificates.crt", settings.caFile, true);
}
for (auto & i : ss) dirsInChroot.emplace(i, i);
@@ -2371,18 +2391,21 @@ SingleDrvOutputs LocalDerivationGoal::registerOutputs()
continue;
auto references = *referencesOpt;
- auto rewriteOutput = [&]() {
+ auto rewriteOutput = [&](const StringMap & rewrites) {
/* Apply hash rewriting if necessary. */
- if (!outputRewrites.empty()) {
+ if (!rewrites.empty()) {
debug("rewriting hashes in '%1%'; cross fingers", actualPath);
- /* FIXME: this is in-memory. */
- StringSink sink;
- dumpPath(actualPath, sink);
+ /* FIXME: Is this actually streaming? */
+ auto source = sinkToSource([&](Sink & nextSink) {
+ RewritingSink rsink(rewrites, nextSink);
+ dumpPath(actualPath, rsink);
+ rsink.flush();
+ });
+ Path tmpPath = actualPath + ".tmp";
+ restorePath(tmpPath, *source);
deletePath(actualPath);
- sink.s = rewriteStrings(sink.s, outputRewrites);
- StringSource source(sink.s);
- restorePath(actualPath, source);
+ movePath(tmpPath, actualPath);
/* FIXME: set proper permissions in restorePath() so
we don't have to do another traversal. */
@@ -2431,7 +2454,7 @@ SingleDrvOutputs LocalDerivationGoal::registerOutputs()
"since recursive hashing is not enabled (one of outputHashMode={flat,text} is true)",
actualPath);
}
- rewriteOutput();
+ rewriteOutput(outputRewrites);
/* FIXME optimize and deduplicate with addToStore */
std::string oldHashPart { scratchPath->hashPart() };
HashModuloSink caSink { outputHash.hashType, oldHashPart };
@@ -2469,16 +2492,14 @@ SingleDrvOutputs LocalDerivationGoal::registerOutputs()
Hash::dummy,
};
if (*scratchPath != newInfo0.path) {
- // Also rewrite the output path
- auto source = sinkToSource([&](Sink & nextSink) {
- RewritingSink rsink2(oldHashPart, std::string(newInfo0.path.hashPart()), nextSink);
- dumpPath(actualPath, rsink2);
- rsink2.flush();
- });
- Path tmpPath = actualPath + ".tmp";
- restorePath(tmpPath, *source);
- deletePath(actualPath);
- movePath(tmpPath, actualPath);
+ // If the path has some self-references, we need to rewrite
+ // them.
+ // (note that this doesn't invalidate the ca hash we calculated
+ // above because it's computed *modulo the self-references*, so
+ // it already takes this rewrite into account).
+ rewriteOutput(
+ StringMap{{oldHashPart,
+ std::string(newInfo0.path.hashPart())}});
}
HashResult narHashAndSize = hashPath(htSHA256, actualPath);
@@ -2500,7 +2521,7 @@ SingleDrvOutputs LocalDerivationGoal::registerOutputs()
outputRewrites.insert_or_assign(
std::string { scratchPath->hashPart() },
std::string { requiredFinalPath.hashPart() });
- rewriteOutput();
+ rewriteOutput(outputRewrites);
auto narHashAndSize = hashPath(htSHA256, actualPath);
ValidPathInfo newInfo0 { requiredFinalPath, narHashAndSize.first };
newInfo0.narSize = narHashAndSize.second;
diff --git a/src/libstore/build/personality.cc b/src/libstore/build/personality.cc
index 4ad477869..1a6201758 100644
--- a/src/libstore/build/personality.cc
+++ b/src/libstore/build/personality.cc
@@ -21,7 +21,8 @@ void setPersonality(std::string_view system)
&& (std::string_view(SYSTEM) == "x86_64-linux"
|| (!strcmp(utsbuf.sysname, "Linux") && !strcmp(utsbuf.machine, "x86_64"))))
|| system == "armv7l-linux"
- || system == "armv6l-linux")
+ || system == "armv6l-linux"
+ || system == "armv5tel-linux")
{
if (personality(PER_LINUX32) == -1)
throw SysError("cannot set 32-bit personality");
diff --git a/src/libstore/build/worker.cc b/src/libstore/build/worker.cc
index ee334d54a..a9ca9cbbc 100644
--- a/src/libstore/build/worker.cc
+++ b/src/libstore/build/worker.cc
@@ -468,16 +468,9 @@ void Worker::waitForInput()
}
-unsigned int Worker::exitStatus()
+unsigned int Worker::failingExitStatus()
{
- /*
- * 1100100
- * ^^^^
- * |||`- timeout
- * ||`-- output hash mismatch
- * |`--- build failure
- * `---- not deterministic
- */
+ // See API docs in header for explanation
unsigned int mask = 0;
bool buildFailure = permanentFailure || timedOut || hashMismatch;
if (buildFailure)
diff --git a/src/libstore/build/worker.hh b/src/libstore/build/worker.hh
index 63624d910..5abceca0d 100644
--- a/src/libstore/build/worker.hh
+++ b/src/libstore/build/worker.hh
@@ -280,7 +280,28 @@ public:
*/
void waitForInput();
- unsigned int exitStatus();
+ /**
+ * The exit status in case of failure.
+ *
+ * In the case of a build failure, the returned value follows this
+ * bitmask:
+ *
+ * ```
+ * 0b1100100
+ * ^^^^
+ * |||`- timeout
+ * ||`-- output hash mismatch
+ * |`--- build failure
+ * `---- not deterministic
+ * ```
+ *
+ * In other words, the failure code is at least 100 (0b1100100), but
+ * might also be greater.
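+ *
+ * For example, per the bitmask above, a build failure that also timed
+ * out yields 0b1100100 | 0b1 = 0b1100101 = 101.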
+ *
+ * Otherwise (no build failure, but some other sort of failure by
+ * assumption), the returned value is 1.
+ */
+ unsigned int failingExitStatus();
/**
* Check whether the given valid path exists and has the right
diff --git a/src/libstore/daemon.cc b/src/libstore/daemon.cc
index 5083497a9..ad3dee1a2 100644
--- a/src/libstore/daemon.cc
+++ b/src/libstore/daemon.cc
@@ -1,6 +1,7 @@
#include "daemon.hh"
#include "monitor-fd.hh"
#include "worker-protocol.hh"
+#include "worker-protocol-impl.hh"
#include "build-result.hh"
#include "store-api.hh"
#include "store-cast.hh"
@@ -259,13 +260,13 @@ struct ClientSettings
}
};
-static std::vector<DerivedPath> readDerivedPaths(Store & store, unsigned int clientVersion, Source & from)
+static std::vector<DerivedPath> readDerivedPaths(Store & store, unsigned int clientVersion, WorkerProto::ReadConn conn)
{
std::vector<DerivedPath> reqs;
if (GET_PROTOCOL_MINOR(clientVersion) >= 30) {
- reqs = worker_proto::read(store, from, Phantom<std::vector<DerivedPath>> {});
+ reqs = WorkerProto::Serialise<std::vector<DerivedPath>>::read(store, conn);
} else {
- for (auto & s : readStrings<Strings>(from))
+ for (auto & s : readStrings<Strings>(conn.from))
reqs.push_back(parsePathWithOutputs(store, s).toDerivedPath());
}
return reqs;
@@ -273,11 +274,14 @@ static std::vector<DerivedPath> readDerivedPaths(Store & store, unsigned int cli
static void performOp(TunnelLogger * logger, ref<Store> store,
TrustedFlag trusted, RecursiveFlag recursive, unsigned int clientVersion,
- Source & from, BufferedSink & to, unsigned int op)
+ Source & from, BufferedSink & to, WorkerProto::Op op)
{
+ WorkerProto::ReadConn rconn { .from = from };
+ WorkerProto::WriteConn wconn { .to = to };
+
switch (op) {
- case wopIsValidPath: {
+ case WorkerProto::Op::IsValidPath: {
auto path = store->parseStorePath(readString(from));
logger->startWork();
bool result = store->isValidPath(path);
@@ -286,8 +290,8 @@ static void performOp(TunnelLogger * logger, ref<Store> store,
break;
}
- case wopQueryValidPaths: {
- auto paths = worker_proto::read(*store, from, Phantom<StorePathSet> {});
+ case WorkerProto::Op::QueryValidPaths: {
+ auto paths = WorkerProto::Serialise<StorePathSet>::read(*store, rconn);
SubstituteFlag substitute = NoSubstitute;
if (GET_PROTOCOL_MINOR(clientVersion) >= 27) {
@@ -300,11 +304,11 @@ static void performOp(TunnelLogger * logger, ref<Store> store,
}
auto res = store->queryValidPaths(paths, substitute);
logger->stopWork();
- worker_proto::write(*store, to, res);
+ WorkerProto::write(*store, wconn, res);
break;
}
- case wopHasSubstitutes: {
+ case WorkerProto::Op::HasSubstitutes: {
auto path = store->parseStorePath(readString(from));
logger->startWork();
StorePathSet paths; // FIXME
@@ -315,16 +319,16 @@ static void performOp(TunnelLogger * logger, ref<Store> store,
break;
}
- case wopQuerySubstitutablePaths: {
- auto paths = worker_proto::read(*store, from, Phantom<StorePathSet> {});
+ case WorkerProto::Op::QuerySubstitutablePaths: {
+ auto paths = WorkerProto::Serialise<StorePathSet>::read(*store, rconn);
logger->startWork();
auto res = store->querySubstitutablePaths(paths);
logger->stopWork();
- worker_proto::write(*store, to, res);
+ WorkerProto::write(*store, wconn, res);
break;
}
- case wopQueryPathHash: {
+ case WorkerProto::Op::QueryPathHash: {
auto path = store->parseStorePath(readString(from));
logger->startWork();
auto hash = store->queryPathInfo(path)->narHash;
@@ -333,27 +337,27 @@ static void performOp(TunnelLogger * logger, ref<Store> store,
break;
}
- case wopQueryReferences:
- case wopQueryReferrers:
- case wopQueryValidDerivers:
- case wopQueryDerivationOutputs: {
+ case WorkerProto::Op::QueryReferences:
+ case WorkerProto::Op::QueryReferrers:
+ case WorkerProto::Op::QueryValidDerivers:
+ case WorkerProto::Op::QueryDerivationOutputs: {
auto path = store->parseStorePath(readString(from));
logger->startWork();
StorePathSet paths;
- if (op == wopQueryReferences)
+ if (op == WorkerProto::Op::QueryReferences)
for (auto & i : store->queryPathInfo(path)->references)
paths.insert(i);
- else if (op == wopQueryReferrers)
+ else if (op == WorkerProto::Op::QueryReferrers)
store->queryReferrers(path, paths);
- else if (op == wopQueryValidDerivers)
+ else if (op == WorkerProto::Op::QueryValidDerivers)
paths = store->queryValidDerivers(path);
else paths = store->queryDerivationOutputs(path);
logger->stopWork();
- worker_proto::write(*store, to, paths);
+ WorkerProto::write(*store, wconn, paths);
break;
}
- case wopQueryDerivationOutputNames: {
+ case WorkerProto::Op::QueryDerivationOutputNames: {
auto path = store->parseStorePath(readString(from));
logger->startWork();
auto names = store->readDerivation(path).outputNames();
@@ -362,16 +366,16 @@ static void performOp(TunnelLogger * logger, ref<Store> store,
break;
}
- case wopQueryDerivationOutputMap: {
+ case WorkerProto::Op::QueryDerivationOutputMap: {
auto path = store->parseStorePath(readString(from));
logger->startWork();
auto outputs = store->queryPartialDerivationOutputMap(path);
logger->stopWork();
- worker_proto::write(*store, to, outputs);
+ WorkerProto::write(*store, wconn, outputs);
break;
}
- case wopQueryDeriver: {
+ case WorkerProto::Op::QueryDeriver: {
auto path = store->parseStorePath(readString(from));
logger->startWork();
auto info = store->queryPathInfo(path);
@@ -380,7 +384,7 @@ static void performOp(TunnelLogger * logger, ref<Store> store,
break;
}
- case wopQueryPathFromHashPart: {
+ case WorkerProto::Op::QueryPathFromHashPart: {
auto hashPart = readString(from);
logger->startWork();
auto path = store->queryPathFromHashPart(hashPart);
@@ -389,11 +393,11 @@ static void performOp(TunnelLogger * logger, ref<Store> store,
break;
}
- case wopAddToStore: {
+ case WorkerProto::Op::AddToStore: {
if (GET_PROTOCOL_MINOR(clientVersion) >= 25) {
auto name = readString(from);
auto camStr = readString(from);
- auto refs = worker_proto::read(*store, from, Phantom<StorePathSet> {});
+ auto refs = WorkerProto::Serialise<StorePathSet>::read(*store, rconn);
bool repairBool;
from >> repairBool;
auto repair = RepairFlag{repairBool};
@@ -475,7 +479,7 @@ static void performOp(TunnelLogger * logger, ref<Store> store,
break;
}
- case wopAddMultipleToStore: {
+ case WorkerProto::Op::AddMultipleToStore: {
bool repair, dontCheckSigs;
from >> repair >> dontCheckSigs;
if (!trusted && dontCheckSigs)
@@ -492,10 +496,10 @@ static void performOp(TunnelLogger * logger, ref<Store> store,
break;
}
- case wopAddTextToStore: {
+ case WorkerProto::Op::AddTextToStore: {
std::string suffix = readString(from);
std::string s = readString(from);
- auto refs = worker_proto::read(*store, from, Phantom<StorePathSet> {});
+ auto refs = WorkerProto::Serialise<StorePathSet>::read(*store, rconn);
logger->startWork();
auto path = store->addTextToStore(suffix, s, refs, NoRepair);
logger->stopWork();
@@ -503,7 +507,7 @@ static void performOp(TunnelLogger * logger, ref<Store> store,
break;
}
- case wopExportPath: {
+ case WorkerProto::Op::ExportPath: {
auto path = store->parseStorePath(readString(from));
readInt(from); // obsolete
logger->startWork();
@@ -514,7 +518,7 @@ static void performOp(TunnelLogger * logger, ref<Store> store,
break;
}
- case wopImportPaths: {
+ case WorkerProto::Op::ImportPaths: {
logger->startWork();
TunnelSource source(from, to);
auto paths = store->importPaths(source,
@@ -526,8 +530,8 @@ static void performOp(TunnelLogger * logger, ref<Store> store,
break;
}
- case wopBuildPaths: {
- auto drvs = readDerivedPaths(*store, clientVersion, from);
+ case WorkerProto::Op::BuildPaths: {
+ auto drvs = readDerivedPaths(*store, clientVersion, rconn);
BuildMode mode = bmNormal;
if (GET_PROTOCOL_MINOR(clientVersion) >= 15) {
mode = (BuildMode) readInt(from);
@@ -551,8 +555,8 @@ static void performOp(TunnelLogger * logger, ref<Store> store,
break;
}
- case wopBuildPathsWithResults: {
- auto drvs = readDerivedPaths(*store, clientVersion, from);
+ case WorkerProto::Op::BuildPathsWithResults: {
+ auto drvs = readDerivedPaths(*store, clientVersion, rconn);
BuildMode mode = bmNormal;
mode = (BuildMode) readInt(from);
@@ -567,12 +571,12 @@ static void performOp(TunnelLogger * logger, ref<Store> store,
auto results = store->buildPathsWithResults(drvs, mode);
logger->stopWork();
- worker_proto::write(*store, to, results);
+ WorkerProto::write(*store, wconn, results);
break;
}
- case wopBuildDerivation: {
+ case WorkerProto::Op::BuildDerivation: {
auto drvPath = store->parseStorePath(readString(from));
BasicDerivation drv;
readDerivation(from, *store, drv, Derivation::nameFromPath(drvPath));
@@ -644,12 +648,12 @@ static void performOp(TunnelLogger * logger, ref<Store> store,
DrvOutputs builtOutputs;
for (auto & [output, realisation] : res.builtOutputs)
builtOutputs.insert_or_assign(realisation.id, realisation);
- worker_proto::write(*store, to, builtOutputs);
+ WorkerProto::write(*store, wconn, builtOutputs);
}
break;
}
- case wopEnsurePath: {
+ case WorkerProto::Op::EnsurePath: {
auto path = store->parseStorePath(readString(from));
logger->startWork();
store->ensurePath(path);
@@ -658,7 +662,7 @@ static void performOp(TunnelLogger * logger, ref<Store> store,
break;
}
- case wopAddTempRoot: {
+ case WorkerProto::Op::AddTempRoot: {
auto path = store->parseStorePath(readString(from));
logger->startWork();
store->addTempRoot(path);
@@ -667,7 +671,7 @@ static void performOp(TunnelLogger * logger, ref<Store> store,
break;
}
- case wopAddIndirectRoot: {
+ case WorkerProto::Op::AddIndirectRoot: {
Path path = absPath(readString(from));
logger->startWork();
@@ -680,14 +684,14 @@ static void performOp(TunnelLogger * logger, ref<Store> store,
}
// Obsolete.
- case wopSyncWithGC: {
+ case WorkerProto::Op::SyncWithGC: {
logger->startWork();
logger->stopWork();
to << 1;
break;
}
- case wopFindRoots: {
+ case WorkerProto::Op::FindRoots: {
logger->startWork();
auto & gcStore = require<GcStore>(*store);
Roots roots = gcStore.findRoots(!trusted);
@@ -706,10 +710,10 @@ static void performOp(TunnelLogger * logger, ref<Store> store,
break;
}
- case wopCollectGarbage: {
+ case WorkerProto::Op::CollectGarbage: {
GCOptions options;
options.action = (GCOptions::GCAction) readInt(from);
- options.pathsToDelete = worker_proto::read(*store, from, Phantom<StorePathSet> {});
+ options.pathsToDelete = WorkerProto::Serialise<StorePathSet>::read(*store, rconn);
from >> options.ignoreLiveness >> options.maxFreed;
// obsolete fields
readInt(from);
@@ -730,7 +734,7 @@ static void performOp(TunnelLogger * logger, ref<Store> store,
break;
}
- case wopSetOptions: {
+ case WorkerProto::Op::SetOptions: {
ClientSettings clientSettings;
@@ -767,7 +771,7 @@ static void performOp(TunnelLogger * logger, ref<Store> store,
break;
}
- case wopQuerySubstitutablePathInfo: {
+ case WorkerProto::Op::QuerySubstitutablePathInfo: {
auto path = store->parseStorePath(readString(from));
logger->startWork();
SubstitutablePathInfos infos;
@@ -779,22 +783,22 @@ static void performOp(TunnelLogger * logger, ref<Store> store,
else {
to << 1
<< (i->second.deriver ? store->printStorePath(*i->second.deriver) : "");
- worker_proto::write(*store, to, i->second.references);
+ WorkerProto::write(*store, wconn, i->second.references);
to << i->second.downloadSize
<< i->second.narSize;
}
break;
}
- case wopQuerySubstitutablePathInfos: {
+ case WorkerProto::Op::QuerySubstitutablePathInfos: {
SubstitutablePathInfos infos;
StorePathCAMap pathsMap = {};
if (GET_PROTOCOL_MINOR(clientVersion) < 22) {
- auto paths = worker_proto::read(*store, from, Phantom<StorePathSet> {});
+ auto paths = WorkerProto::Serialise<StorePathSet>::read(*store, rconn);
for (auto & path : paths)
pathsMap.emplace(path, std::nullopt);
} else
- pathsMap = worker_proto::read(*store, from, Phantom<StorePathCAMap> {});
+ pathsMap = WorkerProto::Serialise<StorePathCAMap>::read(*store, rconn);
logger->startWork();
store->querySubstitutablePathInfos(pathsMap, infos);
logger->stopWork();
@@ -802,21 +806,21 @@ static void performOp(TunnelLogger * logger, ref<Store> store,
for (auto & i : infos) {
to << store->printStorePath(i.first)
<< (i.second.deriver ? store->printStorePath(*i.second.deriver) : "");
- worker_proto::write(*store, to, i.second.references);
+ WorkerProto::write(*store, wconn, i.second.references);
to << i.second.downloadSize << i.second.narSize;
}
break;
}
- case wopQueryAllValidPaths: {
+ case WorkerProto::Op::QueryAllValidPaths: {
logger->startWork();
auto paths = store->queryAllValidPaths();
logger->stopWork();
- worker_proto::write(*store, to, paths);
+ WorkerProto::write(*store, wconn, paths);
break;
}
- case wopQueryPathInfo: {
+ case WorkerProto::Op::QueryPathInfo: {
auto path = store->parseStorePath(readString(from));
std::shared_ptr<const ValidPathInfo> info;
logger->startWork();
@@ -837,14 +841,14 @@ static void performOp(TunnelLogger * logger, ref<Store> store,
break;
}
- case wopOptimiseStore:
+ case WorkerProto::Op::OptimiseStore:
logger->startWork();
store->optimiseStore();
logger->stopWork();
to << 1;
break;
- case wopVerifyStore: {
+ case WorkerProto::Op::VerifyStore: {
bool checkContents, repair;
from >> checkContents >> repair;
logger->startWork();
@@ -856,19 +860,17 @@ static void performOp(TunnelLogger * logger, ref<Store> store,
break;
}
- case wopAddSignatures: {
+ case WorkerProto::Op::AddSignatures: {
auto path = store->parseStorePath(readString(from));
StringSet sigs = readStrings<StringSet>(from);
logger->startWork();
- if (!trusted)
- throw Error("you are not privileged to add signatures");
store->addSignatures(path, sigs);
logger->stopWork();
to << 1;
break;
}
- case wopNarFromPath: {
+ case WorkerProto::Op::NarFromPath: {
auto path = store->parseStorePath(readString(from));
logger->startWork();
logger->stopWork();
@@ -876,7 +878,7 @@ static void performOp(TunnelLogger * logger, ref<Store> store,
break;
}
- case wopAddToStoreNar: {
+ case WorkerProto::Op::AddToStoreNar: {
bool repair, dontCheckSigs;
auto path = store->parseStorePath(readString(from));
auto deriver = readString(from);
@@ -884,7 +886,7 @@ static void performOp(TunnelLogger * logger, ref<Store> store,
ValidPathInfo info { path, narHash };
if (deriver != "")
info.deriver = store->parseStorePath(deriver);
- info.references = worker_proto::read(*store, from, Phantom<StorePathSet> {});
+ info.references = WorkerProto::Serialise<StorePathSet>::read(*store, rconn);
from >> info.registrationTime >> info.narSize >> info.ultimate;
info.sigs = readStrings<StringSet>(from);
info.ca = ContentAddress::parseOpt(readString(from));
@@ -928,21 +930,21 @@ static void performOp(TunnelLogger * logger, ref<Store> store,
break;
}
- case wopQueryMissing: {
- auto targets = readDerivedPaths(*store, clientVersion, from);
+ case WorkerProto::Op::QueryMissing: {
+ auto targets = readDerivedPaths(*store, clientVersion, rconn);
logger->startWork();
StorePathSet willBuild, willSubstitute, unknown;
uint64_t downloadSize, narSize;
store->queryMissing(targets, willBuild, willSubstitute, unknown, downloadSize, narSize);
logger->stopWork();
- worker_proto::write(*store, to, willBuild);
- worker_proto::write(*store, to, willSubstitute);
- worker_proto::write(*store, to, unknown);
+ WorkerProto::write(*store, wconn, willBuild);
+ WorkerProto::write(*store, wconn, willSubstitute);
+ WorkerProto::write(*store, wconn, unknown);
to << downloadSize << narSize;
break;
}
- case wopRegisterDrvOutput: {
+ case WorkerProto::Op::RegisterDrvOutput: {
logger->startWork();
if (GET_PROTOCOL_MINOR(clientVersion) < 31) {
auto outputId = DrvOutput::parse(readString(from));
@@ -950,14 +952,14 @@ static void performOp(TunnelLogger * logger, ref<Store> store,
store->registerDrvOutput(Realisation{
.id = outputId, .outPath = outputPath});
} else {
- auto realisation = worker_proto::read(*store, from, Phantom<Realisation>());
+ auto realisation = WorkerProto::Serialise<Realisation>::read(*store, rconn);
store->registerDrvOutput(realisation);
}
logger->stopWork();
break;
}
- case wopQueryRealisation: {
+ case WorkerProto::Op::QueryRealisation: {
logger->startWork();
auto outputId = DrvOutput::parse(readString(from));
auto info = store->queryRealisation(outputId);
@@ -965,16 +967,16 @@ static void performOp(TunnelLogger * logger, ref<Store> store,
if (GET_PROTOCOL_MINOR(clientVersion) < 31) {
std::set<StorePath> outPaths;
if (info) outPaths.insert(info->outPath);
- worker_proto::write(*store, to, outPaths);
+ WorkerProto::write(*store, wconn, outPaths);
} else {
std::set<Realisation> realisations;
if (info) realisations.insert(*info);
- worker_proto::write(*store, to, realisations);
+ WorkerProto::write(*store, wconn, realisations);
}
break;
}
- case wopAddBuildLog: {
+ case WorkerProto::Op::AddBuildLog: {
StorePath path{readString(from)};
logger->startWork();
if (!trusted)
@@ -991,6 +993,10 @@ static void performOp(TunnelLogger * logger, ref<Store> store,
break;
}
+ case WorkerProto::Op::QueryFailedPaths:
+ case WorkerProto::Op::ClearFailedPaths:
+ throw Error("Removed operation %1%", op);
+
default:
throw Error("invalid operation %1%", op);
}
@@ -1045,7 +1051,8 @@ void processConnection(
auto temp = trusted
? store->isTrustedClient()
: std::optional { NotTrusted };
- worker_proto::write(*store, to, temp);
+ WorkerProto::WriteConn wconn { .to = to };
+ WorkerProto::write(*store, wconn, temp);
}
/* Send startup error messages to the client. */
@@ -1058,9 +1065,9 @@ void processConnection(
/* Process client requests. */
while (true) {
- WorkerOp op;
+ WorkerProto::Op op;
try {
- op = (WorkerOp) readInt(from);
+ op = (enum WorkerProto::Op) readInt(from);
} catch (Interrupted & e) {
break;
} catch (EndOfFile & e) {
diff --git a/src/libstore/derivations.cc b/src/libstore/derivations.cc
index d56dc727b..6f63685d4 100644
--- a/src/libstore/derivations.cc
+++ b/src/libstore/derivations.cc
@@ -1,9 +1,11 @@
#include "derivations.hh"
+#include "downstream-placeholder.hh"
#include "store-api.hh"
#include "globals.hh"
#include "util.hh"
#include "split.hh"
#include "worker-protocol.hh"
+#include "worker-protocol-impl.hh"
#include "fs-accessor.hh"
#include <boost/container/small_vector.hpp>
#include <nlohmann/json.hpp>
@@ -748,7 +750,8 @@ Source & readDerivation(Source & in, const Store & store, BasicDerivation & drv,
drv.outputs.emplace(std::move(name), std::move(output));
}
- drv.inputSrcs = worker_proto::read(store, in, Phantom<StorePathSet> {});
+ drv.inputSrcs = WorkerProto::Serialise<StorePathSet>::read(store,
+ WorkerProto::ReadConn { .from = in });
in >> drv.platform >> drv.builder;
drv.args = readStrings<Strings>(in);
@@ -796,7 +799,9 @@ void writeDerivation(Sink & out, const Store & store, const BasicDerivation & dr
},
}, i.second.raw());
}
- worker_proto::write(store, out, drv.inputSrcs);
+ WorkerProto::write(store,
+ WorkerProto::WriteConn { .to = out },
+ drv.inputSrcs);
out << drv.platform << drv.builder << drv.args;
out << drv.env.size();
for (auto & i : drv.env)
@@ -810,13 +815,7 @@ std::string hashPlaceholder(const std::string_view outputName)
return "/" + hashString(htSHA256, concatStrings("nix-output:", outputName)).to_string(Base32, false);
}
-std::string downstreamPlaceholder(const Store & store, const StorePath & drvPath, std::string_view outputName)
-{
- auto drvNameWithExtension = drvPath.name();
- auto drvName = drvNameWithExtension.substr(0, drvNameWithExtension.size() - 4);
- auto clearText = "nix-upstream-output:" + std::string { drvPath.hashPart() } + ":" + outputPathName(drvName, outputName);
- return "/" + hashString(htSHA256, clearText).to_string(Base32, false);
-}
+
static void rewriteDerivation(Store & store, BasicDerivation & drv, const StringMap & rewrites)
@@ -880,7 +879,7 @@ std::optional<BasicDerivation> Derivation::tryResolve(
for (auto & outputName : inputOutputs) {
if (auto actualPath = get(inputDrvOutputs, { inputDrv, outputName })) {
inputRewrites.emplace(
- downstreamPlaceholder(store, inputDrv, outputName),
+ DownstreamPlaceholder::unknownCaOutput(inputDrv, outputName).render(),
store.printStorePath(*actualPath));
resolved.inputSrcs.insert(*actualPath);
} else {
diff --git a/src/libstore/derivations.hh b/src/libstore/derivations.hh
index 1e2143f31..fa79f77fd 100644
--- a/src/libstore/derivations.hh
+++ b/src/libstore/derivations.hh
@@ -6,6 +6,7 @@
#include "hash.hh"
#include "content-address.hh"
#include "repair-flag.hh"
+#include "derived-path.hh"
#include "sync.hh"
#include "comparator.hh"
@@ -495,17 +496,6 @@ void writeDerivation(Sink & out, const Store & store, const BasicDerivation & dr
*/
std::string hashPlaceholder(const std::string_view outputName);
-/**
- * This creates an opaque and almost certainly unique string
- * deterministically from a derivation path and output name.
- *
- * It is used as a placeholder to allow derivations to refer to
- * content-addressed paths whose content --- and thus the path
- * themselves --- isn't yet known. This occurs when a derivation has a
- * dependency which is a CA derivation.
- */
-std::string downstreamPlaceholder(const Store & store, const StorePath & drvPath, std::string_view outputName);
-
extern const Hash impureOutputHash;
}
diff --git a/src/libstore/downstream-placeholder.cc b/src/libstore/downstream-placeholder.cc
new file mode 100644
index 000000000..1752738f2
--- /dev/null
+++ b/src/libstore/downstream-placeholder.cc
@@ -0,0 +1,39 @@
+#include "downstream-placeholder.hh"
+#include "derivations.hh"
+
+namespace nix {
+
+std::string DownstreamPlaceholder::render() const
+{
+ return "/" + hash.to_string(Base32, false);
+}
+
+
+DownstreamPlaceholder DownstreamPlaceholder::unknownCaOutput(
+ const StorePath & drvPath,
+ std::string_view outputName)
+{
+ auto drvNameWithExtension = drvPath.name();
+ auto drvName = drvNameWithExtension.substr(0, drvNameWithExtension.size() - 4);
+ auto clearText = "nix-upstream-output:" + std::string { drvPath.hashPart() } + ":" + outputPathName(drvName, outputName);
+ return DownstreamPlaceholder {
+ hashString(htSHA256, clearText)
+ };
+}
+
+DownstreamPlaceholder DownstreamPlaceholder::unknownDerivation(
+ const DownstreamPlaceholder & placeholder,
+ std::string_view outputName,
+ const ExperimentalFeatureSettings & xpSettings)
+{
+ xpSettings.require(Xp::DynamicDerivations);
+ auto compressed = compressHash(placeholder.hash, 20);
+ auto clearText = "nix-computed-output:"
+ + compressed.to_string(Base32, false)
+ + ":" + std::string { outputName };
+ return DownstreamPlaceholder {
+ hashString(htSHA256, clearText)
+ };
+}
+
+}
diff --git a/src/libstore/downstream-placeholder.hh b/src/libstore/downstream-placeholder.hh
new file mode 100644
index 000000000..f0c0dee77
--- /dev/null
+++ b/src/libstore/downstream-placeholder.hh
@@ -0,0 +1,75 @@
+#pragma once
+///@file
+
+#include "hash.hh"
+#include "path.hh"
+
+namespace nix {
+
+/**
+ * Downstream Placeholders are opaque and almost certainly unique values
+ * used to allow derivations to refer to store objects which are yet to
+ * be built and for which we do not yet have store paths.
+ *
+ * They correspond to `DerivedPaths` that are not `DerivedPath::Opaque`,
+ * except for the cases involving input addressing or fixed outputs
+ * where we do know a store path for the derivation output in advance.
+ *
+ * Unlike `DerivationPath`, however, `DownstreamPlaceholder` is
+ * purposefully opaque and obfuscated. This is so they are hard to
+ * create by accident, and so substituting them (once we know what the
+ * path to the store object is) is unlikely to capture other text it
+ * shouldn't.
+ *
+ * We use them with `Derivation`: the `render()` method is called to
+ * render an opaque string which can be used in the derivation, and the
+ * resolving logic can substitute those strings for store paths when
+ * resolving `Derivation.inputDrvs` to `BasicDerivation.inputSrcs`.
+ */
+class DownstreamPlaceholder
+{
+ /**
+ * `DownstreamPlaceholder` is just a newtype of `Hash`.
+ * This is its only field.
+ */
+ Hash hash;
+
+ /**
+ * Newtype constructor
+ */
+ DownstreamPlaceholder(Hash hash) : hash(hash) { }
+
+public:
+ /**
+ * This creates an opaque and almost certainly unique string
+ * deterministically from the placeholder.
+ */
+ std::string render() const;
+
+ /**
+ * Create a placeholder for an unknown output of a content-addressed
+ * derivation.
+ *
+ * The derivation itself is known (we have a store path for it), but
+ * the output doesn't yet have a known store path.
+ */
+ static DownstreamPlaceholder unknownCaOutput(
+ const StorePath & drvPath,
+ std::string_view outputName);
+
+ /**
+ * Create a placeholder for the output of an unknown derivation.
+ *
+ * The derivation is not yet known because it is a dynamic
+ * derivation --- it is itself an output of another derivation ---
+ * and we just have (another) placeholder for it.
+ *
+ * @param xpSettings Stop-gap to avoid globals during unit tests.
+ */
+ static DownstreamPlaceholder unknownDerivation(
+ const DownstreamPlaceholder & drvPlaceholder,
+ std::string_view outputName,
+ const ExperimentalFeatureSettings & xpSettings = experimentalFeatureSettings);
+};
+
+}
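As a usage illustration only (not part of this patch), here is a minimal sketch of rendering such a placeholder, matching how `Derivation::tryResolve()` above now calls it; the `.drv` store path below is hypothetical:

```c++
#include "downstream-placeholder.hh"
#include "store-api.hh"

// Sketch: render the placeholder for the "out" output of a CA derivation.
std::string examplePlaceholder(const nix::Store & store)
{
    auto drvPath = store.parseStorePath(
        "/nix/store/g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-hello.drv"); // hypothetical
    // The result is "/" followed by a base-32 SHA-256 hash, which can be
    // spliced into a derivation and rewritten once the real path is known.
    return nix::DownstreamPlaceholder::unknownCaOutput(drvPath, "out").render();
}
```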
diff --git a/src/libstore/export-import.cc b/src/libstore/export-import.cc
index 4eb838b68..e866aeb42 100644
--- a/src/libstore/export-import.cc
+++ b/src/libstore/export-import.cc
@@ -2,6 +2,7 @@
#include "store-api.hh"
#include "archive.hh"
#include "worker-protocol.hh"
+#include "worker-protocol-impl.hh"
#include <algorithm>
@@ -45,7 +46,9 @@ void Store::exportPath(const StorePath & path, Sink & sink)
teeSink
<< exportMagic
<< printStorePath(path);
- worker_proto::write(*this, teeSink, info->references);
+ WorkerProto::write(*this,
+ WorkerProto::WriteConn { .to = teeSink },
+ info->references);
teeSink
<< (info->deriver ? printStorePath(*info->deriver) : "")
<< 0;
@@ -73,7 +76,8 @@ StorePaths Store::importPaths(Source & source, CheckSigsFlag checkSigs)
//Activity act(*logger, lvlInfo, "importing path '%s'", info.path);
- auto references = worker_proto::read(*this, source, Phantom<StorePathSet> {});
+ auto references = WorkerProto::Serialise<StorePathSet>::read(*this,
+ WorkerProto::ReadConn { .from = source });
auto deriver = readString(source);
auto narHash = hashString(htSHA256, saved.s);
diff --git a/src/libstore/filetransfer.cc b/src/libstore/filetransfer.cc
index 2346accbe..38b691279 100644
--- a/src/libstore/filetransfer.cc
+++ b/src/libstore/filetransfer.cc
@@ -186,9 +186,9 @@ struct curlFileTransfer : public FileTransfer
size_t realSize = size * nmemb;
std::string line((char *) contents, realSize);
printMsg(lvlVomit, "got header for '%s': %s", request.uri, trim(line));
+
static std::regex statusLine("HTTP/[^ ]+ +[0-9]+(.*)", std::regex::extended | std::regex::icase);
- std::smatch match;
- if (std::regex_match(line, match, statusLine)) {
+ if (std::smatch match; std::regex_match(line, match, statusLine)) {
result.etag = "";
result.data.clear();
result.bodySize = 0;
@@ -196,9 +196,11 @@ struct curlFileTransfer : public FileTransfer
acceptRanges = false;
encoding = "";
} else {
+
auto i = line.find(':');
if (i != std::string::npos) {
std::string name = toLower(trim(line.substr(0, i)));
+
if (name == "etag") {
result.etag = trim(line.substr(i + 1));
/* Hack to work around a GitHub bug: it sends
@@ -212,10 +214,22 @@ struct curlFileTransfer : public FileTransfer
debug("shutting down on 200 HTTP response with expected ETag");
return 0;
}
- } else if (name == "content-encoding")
+ }
+
+ else if (name == "content-encoding")
encoding = trim(line.substr(i + 1));
+
else if (name == "accept-ranges" && toLower(trim(line.substr(i + 1))) == "bytes")
acceptRanges = true;
+
+ else if (name == "link" || name == "x-amz-meta-link") {
+ auto value = trim(line.substr(i + 1));
+ static std::regex linkRegex("<([^>]*)>; rel=\"immutable\"", std::regex::extended | std::regex::icase);
+ if (std::smatch match; std::regex_match(value, match, linkRegex))
+ result.immutableUrl = match.str(1);
+ else
+ debug("got invalid link header '%s'", value);
+ }
}
}
return realSize;
@@ -345,7 +359,7 @@ struct curlFileTransfer : public FileTransfer
{
auto httpStatus = getHTTPStatus();
- char * effectiveUriCStr;
+ char * effectiveUriCStr = nullptr;
curl_easy_getinfo(req, CURLINFO_EFFECTIVE_URL, &effectiveUriCStr);
if (effectiveUriCStr)
result.effectiveUri = effectiveUriCStr;
diff --git a/src/libstore/filetransfer.hh b/src/libstore/filetransfer.hh
index 378c6ff78..a3b0dde1f 100644
--- a/src/libstore/filetransfer.hh
+++ b/src/libstore/filetransfer.hh
@@ -80,6 +80,10 @@ struct FileTransferResult
std::string effectiveUri;
std::string data;
uint64_t bodySize = 0;
+ /* An "immutable" URL for this resource (i.e. one whose contents
+ will never change), as returned by the `Link: <url>;
+ rel="immutable"` header. */
+ std::optional<std::string> immutableUrl;
};
class Store;
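A hedged sketch of how a caller might consume the new field, preferring the immutable URL when the server advertised one (the helper function is illustrative, not part of the patch):

```c++
#include "filetransfer.hh"

// Prefer the server-advertised immutable URL, if any, over the effective URI.
std::string lockedUrlFor(const nix::FileTransferResult & res)
{
    return res.immutableUrl.value_or(res.effectiveUri);
}
```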
diff --git a/src/libstore/gc.cc b/src/libstore/gc.cc
index 0038ec802..20720fb99 100644
--- a/src/libstore/gc.cc
+++ b/src/libstore/gc.cc
@@ -110,6 +110,11 @@ void LocalStore::createTempRootsFile()
void LocalStore::addTempRoot(const StorePath & path)
{
+ if (readOnly) {
+ debug("Read-only store doesn't support creating lock files for temp roots, but nothing can be deleted anyways.");
+ return;
+ }
+
createTempRootsFile();
/* Open/create the global GC lock file. */
@@ -563,7 +568,7 @@ void LocalStore::collectGarbage(const GCOptions & options, GCResults & results)
/* On macOS, accepted sockets inherit the
non-blocking flag from the server socket, so
explicitly make it blocking. */
- if (fcntl(fdServer.get(), F_SETFL, fcntl(fdServer.get(), F_GETFL) & ~O_NONBLOCK) == -1)
+ if (fcntl(fdClient.get(), F_SETFL, fcntl(fdClient.get(), F_GETFL) & ~O_NONBLOCK) == -1)
abort();
while (true) {
diff --git a/src/libstore/globals.cc b/src/libstore/globals.cc
index 4c66d08ee..5a4cb1824 100644
--- a/src/libstore/globals.cc
+++ b/src/libstore/globals.cc
@@ -77,7 +77,33 @@ Settings::Settings()
allowedImpureHostPrefixes = tokenizeString<StringSet>("/System/Library /usr/lib /dev /bin/sh");
#endif
- buildHook = getSelfExe().value_or("nix") + " __build-remote";
+ /* Set the build hook location
+
+ For builds we perform a self-invocation, so Nix has to be self-aware.
+ That is, it has to know where it is installed. We don't think it's sentient.
+
+ Normally, nix is installed according to `nixBinDir`, which is set at compile time,
+ but can be overridden. This makes for a great default that works even if this
+ code is linked as a library into some other program whose main is not aware
+ that it might need to be a build remote hook.
+
+ However, it may not have been installed at all. For example, if it's a static build,
+ there's a good chance that it has been moved out of its installation directory.
+ That makes `nixBinDir` useless. Instead, we'll query the OS for the path to the
+ current executable, using `getSelfExe()`.
+
+ As a last resort, we resort to `PATH`. Hopefully we find a `nix` there that's compatible.
+ If you're porting Nix to a new platform, that might be good enough for a while, but
+ you'll want to improve `getSelfExe()` to work on your platform.
+ */
+ std::string nixExePath = nixBinDir + "/nix";
+ if (!pathExists(nixExePath)) {
+ nixExePath = getSelfExe().value_or("nix");
+ }
+ buildHook = {
+ nixExePath,
+ "__build-remote",
+ };
}
void loadConfFile()
@@ -183,7 +209,7 @@ bool Settings::isWSL1()
Path Settings::getDefaultSSLCertFile()
{
for (auto & fn : {"/etc/ssl/certs/ca-certificates.crt", "/nix/var/nix/profiles/default/etc/ssl/certs/ca-bundle.crt"})
- if (pathExists(fn)) return fn;
+ if (pathAccessible(fn)) return fn;
return "";
}
diff --git a/src/libstore/globals.hh b/src/libstore/globals.hh
index a7cf36d83..a19b43086 100644
--- a/src/libstore/globals.hh
+++ b/src/libstore/globals.hh
@@ -236,7 +236,7 @@ public:
)",
{"build-timeout"}};
- PathSetting buildHook{this, true, "", "build-hook",
+ Setting<Strings> buildHook{this, {}, "build-hook",
R"(
The path to the helper program that executes remote builds.
@@ -575,8 +575,8 @@ public:
line.
)"};
- PathSetting diffHook{
- this, true, "", "diff-hook",
+ OptionalPathSetting diffHook{
+ this, std::nullopt, "diff-hook",
R"(
Absolute path to an executable capable of diffing build
results. The hook is executed if `run-diff-hook` is true, and the
@@ -710,32 +710,29 @@ public:
Strings{"https://cache.nixos.org/"},
"substituters",
R"(
- A list of [URLs of Nix stores](@docroot@/command-ref/new-cli/nix3-help-stores.md#store-url-format)
- to be used as substituters, separated by whitespace.
- Substituters are tried based on their Priority value, which each substituter can set
- independently. Lower value means higher priority.
- The default is `https://cache.nixos.org`, with a Priority of 40.
+ A list of [URLs of Nix stores](@docroot@/command-ref/new-cli/nix3-help-stores.md#store-url-format) to be used as substituters, separated by whitespace.
+ A substituter is an additional [store](@docroot@/glossary.md#gloss-store) from which Nix can obtain [store objects](@docroot@/glossary.md#gloss-store-object) instead of building them.
- At least one of the following conditions must be met for Nix to use
- a substituter:
+ Substituters are tried based on their priority value, which each substituter can set independently.
+ Lower value means higher priority.
+ The default is `https://cache.nixos.org`, which has a priority of 40.
- - the substituter is in the [`trusted-substituters`](#conf-trusted-substituters) list
- - the user calling Nix is in the [`trusted-users`](#conf-trusted-users) list
+ At least one of the following conditions must be met for Nix to use a substituter:
- In addition, each store path should be trusted as described
- in [`trusted-public-keys`](#conf-trusted-public-keys)
+ - The substituter is in the [`trusted-substituters`](#conf-trusted-substituters) list
+ - The user calling Nix is in the [`trusted-users`](#conf-trusted-users) list
+
+ In addition, each store path should be trusted as described in [`trusted-public-keys`](#conf-trusted-public-keys)
)",
{"binary-caches"}};
Setting<StringSet> trustedSubstituters{
this, {}, "trusted-substituters",
R"(
- A list of [URLs of Nix stores](@docroot@/command-ref/new-cli/nix3-help-stores.md#store-url-format),
- separated by whitespace. These are
- not used by default, but can be enabled by users of the Nix daemon
- by specifying `--option substituters urls` on the command
- line. Unprivileged users are only allowed to pass a subset of the
- URLs listed in `substituters` and `trusted-substituters`.
+ A list of [Nix store URLs](@docroot@/command-ref/new-cli/nix3-help-stores.md#store-url-format), separated by whitespace.
+ These are not used by default, but users of the Nix daemon can enable them by specifying [`substituters`](#conf-substituters).
+
+ Unprivileged users (those listed in [`allowed-users`](#conf-allowed-users) but not in [`trusted-users`](#conf-trusted-users)) can only pass URLs listed in `trusted-substituters` as `substituters`.
)",
{"trusted-binary-caches"}};
@@ -915,12 +912,11 @@ public:
this, {}, "hashed-mirrors",
R"(
A list of web servers used by `builtins.fetchurl` to obtain files by
- hash. The default is `http://tarballs.nixos.org/`. Given a hash type
- *ht* and a base-16 hash *h*, Nix will try to download the file from
- *hashed-mirror*/*ht*/*h*. This allows files to be downloaded even if
- they have disappeared from their original URI. For example, given
- the default mirror `http://tarballs.nixos.org/`, when building the
- derivation
+ hash. Given a hash type *ht* and a base-16 hash *h*, Nix will try to
+ download the file from *hashed-mirror*/*ht*/*h*. This allows files to
+ be downloaded even if they have disappeared from their original URI.
+ For example, given an example mirror `http://tarballs.nixos.org/`,
+ when building the derivation
```nix
builtins.fetchurl {
@@ -1014,6 +1010,18 @@ public:
| `~/.nix-profile` | `$XDG_STATE_HOME/nix/profile` |
| `~/.nix-defexpr` | `$XDG_STATE_HOME/nix/defexpr` |
| `~/.nix-channels` | `$XDG_STATE_HOME/nix/channels` |
+
+ If you already have Nix installed and are using [profiles](@docroot@/package-management/profiles.md) or [channels](@docroot@/package-management/channels.md), you should migrate manually when you enable this option.
+ If `$XDG_STATE_HOME` is not set, use `$HOME/.local/state/nix` instead of `$XDG_STATE_HOME/nix`.
+ This can be achieved with the following shell commands:
+
+ ```sh
+ nix_state_home=${XDG_STATE_HOME-$HOME/.local/state}/nix
+ mkdir -p $nix_state_home
+ mv $HOME/.nix-profile $nix_state_home/profile
+ mv $HOME/.nix-defexpr $nix_state_home/defexpr
+ mv $HOME/.nix-channels $nix_state_home/channels
+ ```
)"
};
};
diff --git a/src/libstore/legacy-ssh-store.cc b/src/libstore/legacy-ssh-store.cc
index 2012584e0..fa17d606d 100644
--- a/src/libstore/legacy-ssh-store.cc
+++ b/src/libstore/legacy-ssh-store.cc
@@ -7,6 +7,7 @@
#include "store-api.hh"
#include "path-with-outputs.hh"
#include "worker-protocol.hh"
+#include "worker-protocol-impl.hh"
#include "ssh.hh"
#include "derivations.hh"
#include "callback.hh"
@@ -47,6 +48,42 @@ struct LegacySSHStore : public virtual LegacySSHStoreConfig, public virtual Stor
FdSource from;
int remoteVersion;
bool good = true;
+
+ /**
+ * Coercion to `WorkerProto::ReadConn`. This makes it easy to use the
+ * factored-out worker protocol serializers with a
+ * `LegacySSHStore::Connection`.
+ *
+ * The worker protocol connection types are unidirectional, unlike
+ * this type.
+ *
+ * @todo Use server protocol serializers, not worker protocol
+ * serializers, once we have made that distinction.
+ */
+ operator WorkerProto::ReadConn ()
+ {
+ return WorkerProto::ReadConn {
+ .from = from,
+ };
+ }
+
+ /**
+ * Coercion to `WorkerProto::WriteConn`. This makes it easy to use the
+ * factored-out worker protocol serializers with a
+ * `LegacySSHStore::Connection`.
+ *
+ * The worker protocol connection types are unidirectional, unlike
+ * this type.
+ *
+ * @todo Use server protocol serializers, not worker protocol
+ * serializers, once we have made that distinction.
+ */
+ operator WorkerProto::WriteConn ()
+ {
+ return WorkerProto::WriteConn {
+ .to = to,
+ };
+ }
};
std::string host;
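The point of these conversion operators is that a bidirectional `Connection` can be passed directly where the unidirectional protocol connection types are expected, as in `WorkerProto::Serialise<StorePathSet>::read(*this, *conn)` further down. A self-contained toy illustration of the idiom (these are not the real Nix types):

```c++
#include <sstream>
#include <string>

struct ReadConn  { std::istream & from; };
struct WriteConn { std::ostream & to; };

// A bidirectional connection that converts implicitly to the
// unidirectional views expected by factored-out serializers.
struct Connection {
    std::stringstream from, to;
    operator ReadConn ()  { return ReadConn  { .from = from }; }
    operator WriteConn () { return WriteConn { .to = to }; }
};

static void readSomething(ReadConn conn)
{
    std::string s;
    conn.from >> s;
}

int main()
{
    Connection conn;
    readSomething(conn); // implicit conversion via operator ReadConn()
}
```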
@@ -133,7 +170,7 @@ struct LegacySSHStore : public virtual LegacySSHStoreConfig, public virtual Stor
debug("querying remote host '%s' for info on '%s'", host, printStorePath(path));
- conn->to << cmdQueryPathInfos << PathSet{printStorePath(path)};
+ conn->to << ServeProto::Command::QueryPathInfos << PathSet{printStorePath(path)};
conn->to.flush();
auto p = readString(conn->from);
@@ -146,7 +183,7 @@ struct LegacySSHStore : public virtual LegacySSHStoreConfig, public virtual Stor
auto deriver = readString(conn->from);
if (deriver != "")
info->deriver = parseStorePath(deriver);
- info->references = worker_proto::read(*this, conn->from, Phantom<StorePathSet> {});
+ info->references = WorkerProto::Serialise<StorePathSet>::read(*this, *conn);
readLongLong(conn->from); // download size
info->narSize = readLongLong(conn->from);
@@ -176,11 +213,11 @@ struct LegacySSHStore : public virtual LegacySSHStoreConfig, public virtual Stor
if (GET_PROTOCOL_MINOR(conn->remoteVersion) >= 5) {
conn->to
- << cmdAddToStoreNar
+ << ServeProto::Command::AddToStoreNar
<< printStorePath(info.path)
<< (info.deriver ? printStorePath(*info.deriver) : "")
<< info.narHash.to_string(Base16, false);
- worker_proto::write(*this, conn->to, info.references);
+ WorkerProto::write(*this, *conn, info.references);
conn->to
<< info.registrationTime
<< info.narSize
@@ -198,7 +235,7 @@ struct LegacySSHStore : public virtual LegacySSHStoreConfig, public virtual Stor
} else {
conn->to
- << cmdImportPaths
+ << ServeProto::Command::ImportPaths
<< 1;
try {
copyNAR(source, conn->to);
@@ -209,7 +246,7 @@ struct LegacySSHStore : public virtual LegacySSHStoreConfig, public virtual Stor
conn->to
<< exportMagic
<< printStorePath(info.path);
- worker_proto::write(*this, conn->to, info.references);
+ WorkerProto::write(*this, *conn, info.references);
conn->to
<< (info.deriver ? printStorePath(*info.deriver) : "")
<< 0
@@ -226,7 +263,7 @@ struct LegacySSHStore : public virtual LegacySSHStoreConfig, public virtual Stor
{
auto conn(connections->get());
- conn->to << cmdDumpStorePath << printStorePath(path);
+ conn->to << ServeProto::Command::DumpStorePath << printStorePath(path);
conn->to.flush();
copyNAR(conn->from, sink);
}
@@ -279,7 +316,7 @@ public:
auto conn(connections->get());
conn->to
- << cmdBuildDerivation
+ << ServeProto::Command::BuildDerivation
<< printStorePath(drvPath);
writeDerivation(conn->to, *this, drv);
@@ -294,7 +331,7 @@ public:
if (GET_PROTOCOL_MINOR(conn->remoteVersion) >= 3)
conn->from >> status.timesBuilt >> status.isNonDeterministic >> status.startTime >> status.stopTime;
if (GET_PROTOCOL_MINOR(conn->remoteVersion) >= 6) {
- auto builtOutputs = worker_proto::read(*this, conn->from, Phantom<DrvOutputs> {});
+ auto builtOutputs = WorkerProto::Serialise<DrvOutputs>::read(*this, *conn);
for (auto && [output, realisation] : builtOutputs)
status.builtOutputs.insert_or_assign(
std::move(output.outputName),
@@ -310,7 +347,7 @@ public:
auto conn(connections->get());
- conn->to << cmdBuildPaths;
+ conn->to << ServeProto::Command::BuildPaths;
Strings ss;
for (auto & p : drvPaths) {
auto sOrDrvPath = StorePathWithOutputs::tryFromDerivedPath(p);
@@ -344,6 +381,17 @@ public:
virtual ref<FSAccessor> getFSAccessor() override
{ unsupported("getFSAccessor"); }
+ /**
+ * The default instance would schedule the work on the client side, but
+ * for consistency with `buildPaths` and `buildDerivation` it should happen
+ * on the remote side.
+ *
+ * We make this fail for now so that we can implement this properly later
+ * without it being a breaking change.
+ */
+ void repairPath(const StorePath & path) override
+ { unsupported("repairPath"); }
+
void computeFSClosure(const StorePathSet & paths,
StorePathSet & out, bool flipDirection = false,
bool includeOutputs = false, bool includeDerivers = false) override
@@ -356,12 +404,12 @@ public:
auto conn(connections->get());
conn->to
- << cmdQueryClosure
+ << ServeProto::Command::QueryClosure
<< includeOutputs;
- worker_proto::write(*this, conn->to, paths);
+ WorkerProto::write(*this, *conn, paths);
conn->to.flush();
- for (auto & i : worker_proto::read(*this, conn->from, Phantom<StorePathSet> {}))
+ for (auto & i : WorkerProto::Serialise<StorePathSet>::read(*this, *conn))
out.insert(i);
}
@@ -371,13 +419,13 @@ public:
auto conn(connections->get());
conn->to
- << cmdQueryValidPaths
+ << ServeProto::Command::QueryValidPaths
<< false // lock
<< maybeSubstitute;
- worker_proto::write(*this, conn->to, paths);
+ WorkerProto::write(*this, *conn, paths);
conn->to.flush();
- return worker_proto::read(*this, conn->from, Phantom<StorePathSet> {});
+ return WorkerProto::Serialise<StorePathSet>::read(*this, *conn);
}
void connect() override
diff --git a/src/libstore/local-fs-store.hh b/src/libstore/local-fs-store.hh
index a03bb88f5..2ee2ef0c8 100644
--- a/src/libstore/local-fs-store.hh
+++ b/src/libstore/local-fs-store.hh
@@ -15,22 +15,22 @@ struct LocalFSStoreConfig : virtual StoreConfig
// it to omit the call to the Setting constructor. Clang works fine
// either way.
- const PathSetting rootDir{(StoreConfig*) this, true, "",
+ const OptionalPathSetting rootDir{(StoreConfig*) this, std::nullopt,
"root",
"Directory prefixed to all other paths."};
- const PathSetting stateDir{(StoreConfig*) this, false,
- rootDir != "" ? rootDir + "/nix/var/nix" : settings.nixStateDir,
+ const PathSetting stateDir{(StoreConfig*) this,
+ rootDir.get() ? *rootDir.get() + "/nix/var/nix" : settings.nixStateDir,
"state",
"Directory where Nix will store state."};
- const PathSetting logDir{(StoreConfig*) this, false,
- rootDir != "" ? rootDir + "/nix/var/log/nix" : settings.nixLogDir,
+ const PathSetting logDir{(StoreConfig*) this,
+ rootDir.get() ? *rootDir.get() + "/nix/var/log/nix" : settings.nixLogDir,
"log",
"directory where Nix will store log files."};
- const PathSetting realStoreDir{(StoreConfig*) this, false,
- rootDir != "" ? rootDir + "/nix/store" : storeDir, "real",
+ const PathSetting realStoreDir{(StoreConfig*) this,
+ rootDir.get() ? *rootDir.get() + "/nix/store" : storeDir, "real",
"Physical path of the Nix store."};
};
diff --git a/src/libstore/local-store.cc b/src/libstore/local-store.cc
index 7fb312c37..e69460e6c 100644
--- a/src/libstore/local-store.cc
+++ b/src/libstore/local-store.cc
@@ -190,7 +190,11 @@ LocalStore::LocalStore(const Params & params)
/* Create missing state directories if they don't already exist. */
createDirs(realStoreDir);
- makeStoreWritable();
+ if (readOnly) {
+ experimentalFeatureSettings.require(Xp::ReadOnlyLocalStore);
+ } else {
+ makeStoreWritable();
+ }
createDirs(linksDir);
Path profilesDir = stateDir + "/profiles";
createDirs(profilesDir);
@@ -204,8 +208,10 @@ LocalStore::LocalStore(const Params & params)
for (auto & perUserDir : {profilesDir + "/per-user", gcRootsDir + "/per-user"}) {
createDirs(perUserDir);
- if (chmod(perUserDir.c_str(), 0755) == -1)
- throw SysError("could not set permissions on '%s' to 755", perUserDir);
+ if (!readOnly) {
+ if (chmod(perUserDir.c_str(), 0755) == -1)
+ throw SysError("could not set permissions on '%s' to 755", perUserDir);
+ }
}
/* Optionally, create directories and set permissions for a
@@ -269,10 +275,12 @@ LocalStore::LocalStore(const Params & params)
/* Acquire the big fat lock in shared mode to make sure that no
schema upgrade is in progress. */
- Path globalLockPath = dbDir + "/big-lock";
- globalLock = openLockFile(globalLockPath.c_str(), true);
+ if (!readOnly) {
+ Path globalLockPath = dbDir + "/big-lock";
+ globalLock = openLockFile(globalLockPath.c_str(), true);
+ }
- if (!lockFile(globalLock.get(), ltRead, false)) {
+ if (!readOnly && !lockFile(globalLock.get(), ltRead, false)) {
printInfo("waiting for the big Nix store lock...");
lockFile(globalLock.get(), ltRead, true);
}
@@ -280,6 +288,14 @@ LocalStore::LocalStore(const Params & params)
/* Check the current database schema and if necessary do an
upgrade. */
int curSchema = getSchema();
+ if (readOnly && curSchema < nixSchemaVersion) {
+ debug("current schema version: %d", curSchema);
+ debug("supported schema version: %d", nixSchemaVersion);
+ throw Error(curSchema == 0 ?
+ "database does not exist, and cannot be created in read-only mode" :
+ "database schema needs migrating, but this cannot be done in read-only mode");
+ }
+
if (curSchema > nixSchemaVersion)
throw Error("current Nix store schema is version %1%, but I only support %2%",
curSchema, nixSchemaVersion);
@@ -344,7 +360,11 @@ LocalStore::LocalStore(const Params & params)
else openDB(*state, false);
if (experimentalFeatureSettings.isEnabled(Xp::CaDerivations)) {
- migrateCASchema(state->db, dbDir + "/ca-schema", globalLock);
+ if (!readOnly) {
+ migrateCASchema(state->db, dbDir + "/ca-schema", globalLock);
+ } else {
+ throw Error("need to migrate to content-addressed schema, but this cannot be done in read-only mode");
+ }
}
/* Prepare SQL statements. */
@@ -475,13 +495,20 @@ int LocalStore::getSchema()
void LocalStore::openDB(State & state, bool create)
{
- if (access(dbDir.c_str(), R_OK | W_OK))
+ if (create && readOnly) {
+ throw Error("cannot create database while in read-only mode");
+ }
+
+ if (access(dbDir.c_str(), R_OK | (readOnly ? 0 : W_OK)))
throw SysError("Nix database directory '%1%' is not writable", dbDir);
/* Open the Nix database. */
std::string dbPath = dbDir + "/db.sqlite";
auto & db(state.db);
- state.db = SQLite(dbPath, create);
+ auto openMode = readOnly ? SQLiteOpenMode::Immutable
+ : create ? SQLiteOpenMode::Normal
+ : SQLiteOpenMode::NoCreate;
+ state.db = SQLite(dbPath, openMode);
#ifdef __CYGWIN__
/* The cygwin version of sqlite3 has a patch which calls
diff --git a/src/libstore/local-store.hh b/src/libstore/local-store.hh
index 55add18dd..8a3b0b43f 100644
--- a/src/libstore/local-store.hh
+++ b/src/libstore/local-store.hh
@@ -46,6 +46,23 @@ struct LocalStoreConfig : virtual LocalFSStoreConfig
"require-sigs",
"Whether store paths copied into this store should have a trusted signature."};
+ Setting<bool> readOnly{(StoreConfig*) this,
+ false,
+ "read-only",
+ R"(
+ Allow this store to be opened when its [database](@docroot@/glossary.md#gloss-nix-database) is on a read-only filesystem.
+
+ Normally Nix will attempt to open the store database in read-write mode, even for querying (when write access is not needed), causing it to fail if the database is on a read-only filesystem.
+
+ Enable read-only mode to disable locking and open the SQLite database with the [`immutable` parameter](https://www.sqlite.org/c3ref/open.html) set.
+
+ > **Warning**
+ > Do not use this unless the filesystem is read-only.
+ >
+ > Using it when the filesystem is writable can cause incorrect query results or corruption errors if the database is changed by another process.
+ > While the filesystem the database resides on might appear to be read-only, consider whether another user or system might have write access to it.
+ )"};
+
const std::string name() override { return "Local Store"; }
std::string doc() override;
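A minimal sketch of using the new setting from C++ via store parameters (the wrapper function is illustrative; per the `LocalStore` constructor change above, the corresponding experimental feature must be enabled):

```c++
#include "store-api.hh"

// Open the local store in read-only mode: no global lock is taken and the
// SQLite database is opened with the immutable flag, so writes will fail.
void exampleReadOnlyQuery()
{
    using namespace nix;
    auto store = openStore("local", {{"read-only", "true"}});
    auto valid = store->queryAllValidPaths(); // queries work as usual
    (void) valid;
}
```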
@@ -240,8 +257,6 @@ public:
void vacuumDB();
- void repairPath(const StorePath & path) override;
-
void addSignatures(const StorePath & storePath, const StringSet & sigs) override;
/**
@@ -271,6 +286,10 @@ public:
private:
+ /**
+ * Retrieve the current version of the database schema.
+ * If the database does not exist yet, the version returned will be 0.
+ */
int getSchema();
void openDB(State & state, bool create);
diff --git a/src/libstore/make-content-addressed.cc b/src/libstore/make-content-addressed.cc
index 53fe04704..626a22480 100644
--- a/src/libstore/make-content-addressed.cc
+++ b/src/libstore/make-content-addressed.cc
@@ -80,4 +80,15 @@ std::map<StorePath, StorePath> makeContentAddressed(
return remappings;
}
+StorePath makeContentAddressed(
+ Store & srcStore,
+ Store & dstStore,
+ const StorePath & fromPath)
+{
+ auto remappings = makeContentAddressed(srcStore, dstStore, StorePathSet { fromPath });
+ auto i = remappings.find(fromPath);
+ assert(i != remappings.end());
+ return i->second;
+}
+
}
diff --git a/src/libstore/make-content-addressed.hh b/src/libstore/make-content-addressed.hh
index 2ce6ec7bc..60bb2b477 100644
--- a/src/libstore/make-content-addressed.hh
+++ b/src/libstore/make-content-addressed.hh
@@ -5,9 +5,20 @@
namespace nix {
+/** Rewrite a closure of store paths to be completely content addressed.
+ */
std::map<StorePath, StorePath> makeContentAddressed(
Store & srcStore,
Store & dstStore,
- const StorePathSet & storePaths);
+ const StorePathSet & rootPaths);
+
+/** Rewrite a closure of a store path to be completely content addressed.
+ *
+ * This is a convenience function for the case where you only have one root path.
+ */
+StorePath makeContentAddressed(
+ Store & srcStore,
+ Store & dstStore,
+ const StorePath & rootPath);
}
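A brief sketch of the new single-path convenience overload (the wrapper is illustrative): it rewrites the closure of one path and returns just that path's content-addressed counterpart.

```c++
#include "make-content-addressed.hh"
#include "store-api.hh"

nix::StorePath exampleRewrite(
    nix::Store & srcStore,
    nix::Store & dstStore,
    const nix::StorePath & path)
{
    // Copies the rewritten closure into dstStore and returns the new path.
    return nix::makeContentAddressed(srcStore, dstStore, path);
}
```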
diff --git a/src/libstore/path-info.cc b/src/libstore/path-info.cc
index e60d7abe0..981bbfb14 100644
--- a/src/libstore/path-info.cc
+++ b/src/libstore/path-info.cc
@@ -1,5 +1,7 @@
#include "path-info.hh"
#include "worker-protocol.hh"
+#include "worker-protocol-impl.hh"
+#include "store-api.hh"
namespace nix {
@@ -131,7 +133,8 @@ ValidPathInfo ValidPathInfo::read(Source & source, const Store & store, unsigned
auto narHash = Hash::parseAny(readString(source), htSHA256);
ValidPathInfo info(path, narHash);
if (deriver != "") info.deriver = store.parseStorePath(deriver);
- info.references = worker_proto::read(store, source, Phantom<StorePathSet> {});
+ info.references = WorkerProto::Serialise<StorePathSet>::read(store,
+ WorkerProto::ReadConn { .from = source });
source >> info.registrationTime >> info.narSize;
if (format >= 16) {
source >> info.ultimate;
@@ -152,7 +155,9 @@ void ValidPathInfo::write(
sink << store.printStorePath(path);
sink << (deriver ? store.printStorePath(*deriver) : "")
<< narHash.to_string(Base16, false);
- worker_proto::write(store, sink, references);
+ WorkerProto::write(store,
+ WorkerProto::WriteConn { .to = sink },
+ references);
sink << registrationTime << narSize;
if (format >= 16) {
sink << ultimate
diff --git a/src/libstore/path-references.cc b/src/libstore/path-references.cc
new file mode 100644
index 000000000..33cf66ce3
--- /dev/null
+++ b/src/libstore/path-references.cc
@@ -0,0 +1,73 @@
+#include "path-references.hh"
+#include "hash.hh"
+#include "util.hh"
+#include "archive.hh"
+
+#include <map>
+#include <cstdlib>
+#include <mutex>
+#include <algorithm>
+
+
+namespace nix {
+
+
+PathRefScanSink::PathRefScanSink(StringSet && hashes, std::map<std::string, StorePath> && backMap)
+ : RefScanSink(std::move(hashes))
+ , backMap(std::move(backMap))
+{ }
+
+PathRefScanSink PathRefScanSink::fromPaths(const StorePathSet & refs)
+{
+ StringSet hashes;
+ std::map<std::string, StorePath> backMap;
+
+ for (auto & i : refs) {
+ std::string hashPart(i.hashPart());
+ auto inserted = backMap.emplace(hashPart, i).second;
+ assert(inserted);
+ hashes.insert(hashPart);
+ }
+
+ return PathRefScanSink(std::move(hashes), std::move(backMap));
+}
+
+StorePathSet PathRefScanSink::getResultPaths()
+{
+ /* Map the hashes found back to their store paths. */
+ StorePathSet found;
+ for (auto & i : getResult()) {
+ auto j = backMap.find(i);
+ assert(j != backMap.end());
+ found.insert(j->second);
+ }
+
+ return found;
+}
+
+
+std::pair<StorePathSet, HashResult> scanForReferences(
+ const std::string & path,
+ const StorePathSet & refs)
+{
+ HashSink hashSink { htSHA256 };
+ auto found = scanForReferences(hashSink, path, refs);
+ auto hash = hashSink.finish();
+ return std::pair<StorePathSet, HashResult>(found, hash);
+}
+
+StorePathSet scanForReferences(
+ Sink & toTee,
+ const Path & path,
+ const StorePathSet & refs)
+{
+ PathRefScanSink refsSink = PathRefScanSink::fromPaths(refs);
+ TeeSink sink { refsSink, toTee };
+
+ /* Look for the hashes in the NAR dump of the path. */
+ dumpPath(path, sink);
+
+ return refsSink.getResultPaths();
+}
+
+}
diff --git a/src/libstore/path-references.hh b/src/libstore/path-references.hh
new file mode 100644
index 000000000..7b44e3261
--- /dev/null
+++ b/src/libstore/path-references.hh
@@ -0,0 +1,25 @@
+#pragma once
+
+#include "references.hh"
+#include "path.hh"
+
+namespace nix {
+
+std::pair<StorePathSet, HashResult> scanForReferences(const Path & path, const StorePathSet & refs);
+
+StorePathSet scanForReferences(Sink & toTee, const Path & path, const StorePathSet & refs);
+
+class PathRefScanSink : public RefScanSink
+{
+ std::map<std::string, StorePath> backMap;
+
+ PathRefScanSink(StringSet && hashes, std::map<std::string, StorePath> && backMap);
+
+public:
+
+ static PathRefScanSink fromPaths(const StorePathSet & refs);
+
+ StorePathSet getResultPaths();
+};
+
+}
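An illustrative sketch of the factored-out scanning API (the wrapper is hypothetical): the two-argument overload returns both the references found and the NAR hash computed in the same pass.

```c++
#include "path-references.hh"

std::pair<nix::StorePathSet, nix::HashResult>
exampleScan(const nix::Path & path, const nix::StorePathSet & candidates)
{
    // Only candidates whose hash parts occur in the NAR dump of `path` are
    // returned; the HashResult is the SHA-256 NAR hash and size.
    return nix::scanForReferences(path, candidates);
}
```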
diff --git a/src/libstore/path.cc b/src/libstore/path.cc
index 46be54281..552e83114 100644
--- a/src/libstore/path.cc
+++ b/src/libstore/path.cc
@@ -9,8 +9,8 @@ static void checkName(std::string_view path, std::string_view name)
if (name.empty())
throw BadStorePath("store path '%s' has an empty name", path);
if (name.size() > StorePath::MaxPathLen)
- throw BadStorePath("store path '%s' has a name longer than '%d characters",
- StorePath::MaxPathLen, path);
+ throw BadStorePath("store path '%s' has a name longer than %d characters",
+ path, StorePath::MaxPathLen);
// See nameRegexStr for the definition
for (auto c : name)
if (!((c >= '0' && c <= '9')
diff --git a/src/libstore/profiles.cc b/src/libstore/profiles.cc
index ba5c8583f..4e9955948 100644
--- a/src/libstore/profiles.cc
+++ b/src/libstore/profiles.cc
@@ -13,8 +13,10 @@
namespace nix {
-/* Parse a generation name of the format
- `<profilename>-<number>-link'. */
+/**
+ * Parse a generation name of the format
+ * `<profilename>-<number>-link'.
+ */
static std::optional<GenerationNumber> parseName(const std::string & profileName, const std::string & name)
{
if (name.substr(0, profileName.size() + 1) != profileName + "-") return {};
@@ -28,7 +30,6 @@ static std::optional<GenerationNumber> parseName(const std::string & profileName
}
-
std::pair<Generations, std::optional<GenerationNumber>> findGenerations(Path profile)
{
Generations gens;
@@ -61,15 +62,16 @@ std::pair<Generations, std::optional<GenerationNumber>> findGenerations(Path pro
}
-static void makeName(const Path & profile, GenerationNumber num,
- Path & outLink)
+/**
+ * Create a generation name that can be parsed by `parseName()`.
+ */
+static Path makeName(const Path & profile, GenerationNumber num)
{
- Path prefix = fmt("%1%-%2%", profile, num);
- outLink = prefix + "-link";
+ return fmt("%s-%s-link", profile, num);
}
-Path createGeneration(ref<LocalFSStore> store, Path profile, StorePath outPath)
+Path createGeneration(LocalFSStore & store, Path profile, StorePath outPath)
{
/* The new generation number should be higher than the
previous ones. */
@@ -79,7 +81,7 @@ Path createGeneration(ref<LocalFSStore> store, Path profile, StorePath outPath)
if (gens.size() > 0) {
Generation last = gens.back();
- if (readLink(last.path) == store->printStorePath(outPath)) {
+ if (readLink(last.path) == store.printStorePath(outPath)) {
/* We only create a new generation symlink if it differs
from the last one.
@@ -89,7 +91,7 @@ Path createGeneration(ref<LocalFSStore> store, Path profile, StorePath outPath)
return last.path;
}
- num = gens.back().number;
+ num = last.number;
} else {
num = 0;
}
@@ -100,9 +102,8 @@ Path createGeneration(ref<LocalFSStore> store, Path profile, StorePath outPath)
to the permanent roots (of which the GC would have a stale
view). If we didn't do it this way, the GC might remove the
user environment etc. we've just built. */
- Path generation;
- makeName(profile, num + 1, generation);
- store->addPermRoot(outPath, generation);
+ Path generation = makeName(profile, num + 1);
+ store.addPermRoot(outPath, generation);
return generation;
}
@@ -117,12 +118,19 @@ static void removeFile(const Path & path)
void deleteGeneration(const Path & profile, GenerationNumber gen)
{
- Path generation;
- makeName(profile, gen, generation);
+ Path generation = makeName(profile, gen);
removeFile(generation);
}
-
+/**
+ * Delete a generation with dry-run mode.
+ *
+ * Like `deleteGeneration()` but:
+ *
+ * - We log what we are going to do.
+ *
+ * - We only actually delete if `dryRun` is false.
+ */
static void deleteGeneration2(const Path & profile, GenerationNumber gen, bool dryRun)
{
if (dryRun)
@@ -150,27 +158,36 @@ void deleteGenerations(const Path & profile, const std::set<GenerationNumber> &
}
}
+/**
+ * Advance the iterator until the given predicate `cond` returns `true`.
+ */
+static inline void iterDropUntil(Generations & gens, auto && i, auto && cond)
+{
+ for (; i != gens.rend() && !cond(*i); ++i);
+}
+
void deleteGenerationsGreaterThan(const Path & profile, GenerationNumber max, bool dryRun)
{
+ if (max == 0)
+ throw Error("Must keep at least one generation, otherwise the current one would be deleted");
+
PathLocks lock;
lockProfile(lock, profile);
- bool fromCurGen = false;
- auto [gens, curGen] = findGenerations(profile);
- for (auto i = gens.rbegin(); i != gens.rend(); ++i) {
- if (i->number == curGen) {
- fromCurGen = true;
- max--;
- continue;
- }
- if (fromCurGen) {
- if (max) {
- max--;
- continue;
- }
- deleteGeneration2(profile, i->number, dryRun);
- }
- }
+ auto [gens, _curGen] = findGenerations(profile);
+ auto curGen = _curGen;
+
+ auto i = gens.rbegin();
+
+ // Find the current generation
+ iterDropUntil(gens, i, [&](auto & g) { return g.number == curGen; });
+
+ // Skip over `max` generations, preserving them
+ for (auto keep = 0; i != gens.rend() && keep < max; ++i, ++keep);
+
+ // Delete the rest
+ for (; i != gens.rend(); ++i)
+ deleteGeneration2(profile, i->number, dryRun);
}
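To make the new control flow concrete, here is a self-contained toy example (plain integers, not the real `Generations` type) of the keep/delete arithmetic: everything newer than the current generation is untouched, `max` generations counting the current one are kept, and the rest are deleted.

```c++
#include <cassert>
#include <list>
#include <vector>

int main()
{
    std::list<int> gens {1, 2, 3, 4, 5, 6, 7}; // ascending generation numbers
    int curGen = 5, max = 3;
    std::vector<int> deleted;

    auto i = gens.rbegin();
    for (; i != gens.rend() && *i != curGen; ++i);                    // iterDropUntil
    for (int keep = 0; i != gens.rend() && keep < max; ++i, ++keep);  // skip kept ones
    for (; i != gens.rend(); ++i) deleted.push_back(*i);              // delete the rest

    assert((deleted == std::vector<int>{2, 1})); // 7 and 6 untouched; 5, 4, 3 kept
}
```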
void deleteOldGenerations(const Path & profile, bool dryRun)
@@ -193,23 +210,33 @@ void deleteGenerationsOlderThan(const Path & profile, time_t t, bool dryRun)
auto [gens, curGen] = findGenerations(profile);
- bool canDelete = false;
- for (auto i = gens.rbegin(); i != gens.rend(); ++i)
- if (canDelete) {
- assert(i->creationTime < t);
- if (i->number != curGen)
- deleteGeneration2(profile, i->number, dryRun);
- } else if (i->creationTime < t) {
- /* We may now start deleting generations, but we don't
- delete this generation yet, because this generation was
- still the one that was active at the requested point in
- time. */
- canDelete = true;
- }
+ auto i = gens.rbegin();
+
+ // Predicate that the generation is older than the given time.
+ auto older = [&](auto & g) { return g.creationTime < t; };
+
+ // Find the first older generation, if one exists
+ iterDropUntil(gens, i, older);
+
+ /* Take the previous generation
+
+ We don't want to delete this one yet because it
+ existed at the requested point in time, and
+ we want to be able to roll back to it. */
+ if (i != gens.rend()) ++i;
+
+ // Delete all previous generations (unless current).
+ for (; i != gens.rend(); ++i) {
+ /* Creation dates and generation numbers should be monotonic, so lower
+ numbered generations should also be older. */
+ assert(older(*i));
+ if (i->number != curGen)
+ deleteGeneration2(profile, i->number, dryRun);
+ }
}
-void deleteGenerationsOlderThan(const Path & profile, std::string_view timeSpec, bool dryRun)
+time_t parseOlderThanTimeSpec(std::string_view timeSpec)
{
if (timeSpec.empty() || timeSpec[timeSpec.size() - 1] != 'd')
throw UsageError("invalid number of days specifier '%1%', expected something like '14d'", timeSpec);
@@ -221,9 +248,7 @@ void deleteGenerationsOlderThan(const Path & profile, std::string_view timeSpec,
if (!days || *days < 1)
throw UsageError("invalid number of days specifier '%1%'", timeSpec);
- time_t oldTime = curTime - *days * 24 * 3600;
-
- deleteGenerationsOlderThan(profile, oldTime, dryRun);
+ return curTime - *days * 24 * 3600;
}
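With the time-spec parsing split out, callers are expected to combine the two pieces roughly as follows (a hedged sketch; it assumes `parseOlderThanTimeSpec` is declared in `profiles.hh` alongside `deleteGenerationsOlderThan`):

```c++
#include "profiles.hh"

void exampleDeleteOld(const nix::Path & profile, bool dryRun)
{
    // "30d" means: everything older than 30 days before now.
    time_t cutoff = nix::parseOlderThanTimeSpec("30d");
    nix::deleteGenerationsOlderThan(profile, cutoff, dryRun);
}
```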
diff --git a/src/libstore/profiles.hh b/src/libstore/profiles.hh
index 4e1f42e83..193c0bf21 100644
--- a/src/libstore/profiles.hh
+++ b/src/libstore/profiles.hh
@@ -1,7 +1,11 @@
#pragma once
-///@file
+/**
+ * @file Implementation of Profiles.
+ *
+ * See the manual for additional information.
+ */
- #include "types.hh"
+#include "types.hh"
#include "pathlocks.hh"
#include <time.h>
@@ -12,41 +16,166 @@ namespace nix {
class StorePath;
+/**
+ * A positive number identifying a generation for a given profile.
+ *
+ * Generation numbers are assigned sequentially. Each new generation is
+ * assigned 1 + the current highest generation number.
+ */
typedef uint64_t GenerationNumber;
+/**
+ * A generation is a revision of a profile.
+ *
+ * Each generation is a mapping (key-value pair) from an identifier
+ * (`number`) to a store object (specified by `path`).
+ */
struct Generation
{
+ /**
+ * The number of a generation is its unique identifier within the
+ * profile.
+ */
GenerationNumber number;
+ /**
+ * The store path identifies the store object that is the contents
+ * of the generation.
+ *
+ * These store paths / objects are not unique to the generation
+ * within a profile. Nix tries to ensure successive generations have
+ * distinct contents to avoid bloat, but nothing stops two
+ * non-adjacent generations from having the same contents.
+ *
+ * @todo Use `StorePath` instead of `Path`?
+ */
Path path;
+
+ /**
+ * When the generation was created. This is extra metadata about the
+ * generation used to make garbage collecting old generations more
+ * convenient.
+ */
time_t creationTime;
};
+/**
+ * All the generations of a profile
+ */
typedef std::list<Generation> Generations;
/**
- * Returns the list of currently present generations for the specified
- * profile, sorted by generation number. Also returns the number of
- * the current generation.
+ * Find all generations for the given profile.
+ *
+ * @param profile A profile specified by its name and location combined
+ * into a path. E.g. if "foo" is the name of the profile, and "/bar/baz"
+ * is the directory it is in, then the path "/bar/baz/foo" would be the
+ * argument for this parameter.
+ *
+ * @return The pair of:
+ *
+ * - The list of currently present generations for the specified profile,
+ * sorted by ascending generation number.
+ *
+ * - The number of the current/active generation.
+ *
+ * Note that the current/active generation need not be the latest one.
*/
std::pair<Generations, std::optional<GenerationNumber>> findGenerations(Path profile);
class LocalFSStore;
-Path createGeneration(ref<LocalFSStore> store, Path profile, StorePath outPath);
+/**
+ * Create a new generation of the given profile
+ *
+ * If the previous generation (not the currently active one!) has a
+ * distinct store object, a fresh generation number is mapped to the
+ * given store object, referenced by path. Otherwise, the previous
+ * generation is reused instead of creating a new one.
+ *
+ * The behavior of reusing existing generations like this makes this
+ * procedure idempotent. It also avoids clutter.
+ */
+Path createGeneration(LocalFSStore & store, Path profile, StorePath outPath);
+/**
+ * Unconditionally delete a generation
+ *
+ * @param profile A profile specified by its name and location combined into a path.
+ *
+ * @param gen The generation number specifying exactly which generation
+ * to delete.
+ *
+ * Because there is no check of whether the generation to delete is
+ * active, this is somewhat unsafe.
+ *
+ * @todo Should we expose this at all?
+ */
void deleteGeneration(const Path & profile, GenerationNumber gen);
+/**
+ * Delete the given set of generations.
+ *
+ * @param profile The profile, specified by its name and location combined into a path, whose generations we want to delete.
+ *
+ * @param gensToDelete The generations to delete, specified by a set of
+ * numbers.
+ *
+ * @param dryRun Log what would be deleted instead of actually doing
+ * so.
+ *
+ * Trying to delete the currently active generation will fail, and cause
+ * no generations to be deleted.
+ */
void deleteGenerations(const Path & profile, const std::set<GenerationNumber> & gensToDelete, bool dryRun);
+/**
+ * Delete generations older than the current one, keeping only the
+ * `max` most recent generations counting back from (and including)
+ * the current generation.
+ *
+ * @param profile The profile, specified by its name and location combined into a path, whose generations we want to delete.
+ *
+ * @param max How many generations to keep, counting back from (and
+ * including) the current one. Must be at least 1, so that the current
+ * generation is never deleted.
+ *
+ * @param dryRun Log what would be deleted instead of actually doing
+ * so.
+ */
void deleteGenerationsGreaterThan(const Path & profile, GenerationNumber max, bool dryRun);
+/**
+ * Delete all generations other than the current one
+ *
+ * @param profile The profile, specified by its name and location combined into a path, whose generations we want to delete.
+ *
+ * @param dryRun Log what would be deleted instead of actually doing
+ * so.
+ */
void deleteOldGenerations(const Path & profile, bool dryRun);
+/**
+ * Delete generations older than `t`, except for the most recent one
+ * older than `t`.
+ *
+ * @param profile The profile, specified by its name and location combined into a path, whose generations we want to delete.
+ *
+ * @param dryRun Log what would be deleted instead of actually doing
+ * so.
+ */
void deleteGenerationsOlderThan(const Path & profile, time_t t, bool dryRun);
-void deleteGenerationsOlderThan(const Path & profile, std::string_view timeSpec, bool dryRun);
+/**
+ * Parse a time spec such as "14d" (a number of days) intended for
+ * `deleteGenerationsOlderThan()`.
+ *
+ * Throws an exception if `timeSpec` fails to parse.
+ */
+time_t parseOlderThanTimeSpec(std::string_view timeSpec);
+/**
+ * Small wrapper around `replaceSymlink` for replacing the current
+ * generation of a profile. Does not enforce proper structure.
+ *
+ * @todo Always use `switchGeneration()` instead, and delete this.
+ */
void switchLink(Path link, Path target);
/**
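
Editorial aside (illustration only, not code from this commit): the documented API above composes as follows; a hypothetical caller that prunes a profile and then lists what is left:

    #include <cstdio>
    #include "profiles.hh"

    // Sketch: keep the current generation plus the two before it,
    // delete anything older, then print the remaining generations.
    void pruneProfile(const nix::Path & profile)
    {
        // `max` counts the current generation itself, so 3 keeps the
        // current one and its two predecessors.
        nix::deleteGenerationsGreaterThan(profile, 3, /* dryRun */ false);

        auto [gens, curGen] = nix::findGenerations(profile);
        for (auto & g : gens)
            std::printf("generation %llu -> %s%s\n",
                (unsigned long long) g.number,
                g.path.c_str(),
                curGen && g.number == *curGen ? " (current)" : "");
    }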
diff --git a/src/libstore/remote-store-connection.hh b/src/libstore/remote-store-connection.hh
new file mode 100644
index 000000000..d32d91a60
--- /dev/null
+++ b/src/libstore/remote-store-connection.hh
@@ -0,0 +1,97 @@
+#include "remote-store.hh"
+#include "worker-protocol.hh"
+
+namespace nix {
+
+/**
+ * Bidirectional connection (send and receive) used by the Remote Store
+ * implementation.
+ *
+ * Contains `Source` and `Sink` for actual communication, along with
+ * other information learned when negotiating the connection.
+ */
+struct RemoteStore::Connection
+{
+ /**
+ * Send with this.
+ */
+ FdSink to;
+
+ /**
+ * Receive with this.
+ */
+ FdSource from;
+
+ /**
+ * Worker protocol version used for the connection.
+ *
+     * Despite its name, this appears to be the newest protocol version
+     * that both sides support. (If no such common version existed, we
+     * would fail to establish a connection and never produce a value of
+     * this type.)
+ */
+ unsigned int daemonVersion;
+
+ /**
+ * Whether the remote side trusts us or not.
+ *
+ * 3 values: "yes", "no", or `std::nullopt` for "unknown".
+ *
+ * Note that the "remote side" might not be just the end daemon, but
+ * also an intermediary forwarder that can make its own trusting
+ * decisions. This would be the intersection of all their trust
+ * decisions, since it takes only one link in the chain to start
+ * denying operations.
+ */
+ std::optional<TrustedFlag> remoteTrustsUs;
+
+ /**
+ * The version of the Nix daemon that is processing our requests.
+ *
+     * Note that it may or may not itself be communicating with another
+     * daemon, rather than being an "end" `LocalStore` or similar.
+ */
+ std::optional<std::string> daemonNixVersion;
+
+ /**
+ * Time this connection was established.
+ */
+ std::chrono::time_point<std::chrono::steady_clock> startTime;
+
+ /**
+     * Coercion to `WorkerProto::ReadConn`. This makes it easy to use
+     * the factored-out worker protocol serialisers with a
+     * `RemoteStore::Connection`.
+ *
+ * The worker protocol connection types are unidirectional, unlike
+ * this type.
+ */
+ operator WorkerProto::ReadConn ()
+ {
+ return WorkerProto::ReadConn {
+ .from = from,
+ };
+ }
+
+ /**
+     * Coercion to `WorkerProto::WriteConn`. This makes it easy to use
+     * the factored-out worker protocol serialisers with a
+     * `RemoteStore::Connection`.
+ *
+ * The worker protocol connection types are unidirectional, unlike
+ * this type.
+ */
+ operator WorkerProto::WriteConn ()
+ {
+ return WorkerProto::WriteConn {
+ .to = to,
+ };
+ }
+
+ virtual ~Connection();
+
+ virtual void closeWrite() = 0;
+
+ std::exception_ptr processStderr(Sink * sink = 0, Source * source = 0, bool flush = true);
+};
+
+}
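
Editorial illustration (not part of the new file): thanks to the coercion operators above, a `RemoteStore::Connection` can be handed directly to the factored-out serialisers. A hypothetical sketch; `exampleRoundTrip` and its arguments are invented names, and `worker-protocol-impl.hh` supplies the `std::set` serialiser:

    #include "remote-store-connection.hh"
    #include "worker-protocol-impl.hh"
    #include "store-api.hh"

    // Hypothetical round trip over an established connection.
    void exampleRoundTrip(nix::Store & store,
        nix::RemoteStore::Connection & conn,
        const nix::StorePathSet & paths)
    {
        // `conn` converts implicitly to WorkerProto::WriteConn here...
        nix::WorkerProto::write(store, conn, paths);
        conn.to.flush();

        // ...and to WorkerProto::ReadConn here.
        auto reply = nix::WorkerProto::Serialise<nix::StorePathSet>::read(store, conn);
        (void) reply;
    }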
diff --git a/src/libstore/remote-store.cc b/src/libstore/remote-store.cc
index 0ed17a6ce..1e2104e1f 100644
--- a/src/libstore/remote-store.cc
+++ b/src/libstore/remote-store.cc
@@ -5,7 +5,9 @@
#include "remote-fs-accessor.hh"
#include "build-result.hh"
#include "remote-store.hh"
+#include "remote-store-connection.hh"
#include "worker-protocol.hh"
+#include "worker-protocol-impl.hh"
#include "archive.hh"
#include "globals.hh"
#include "derivations.hh"
@@ -18,189 +20,6 @@
namespace nix {
-namespace worker_proto {
-
-std::string read(const Store & store, Source & from, Phantom<std::string> _)
-{
- return readString(from);
-}
-
-void write(const Store & store, Sink & out, const std::string & str)
-{
- out << str;
-}
-
-
-StorePath read(const Store & store, Source & from, Phantom<StorePath> _)
-{
- return store.parseStorePath(readString(from));
-}
-
-void write(const Store & store, Sink & out, const StorePath & storePath)
-{
- out << store.printStorePath(storePath);
-}
-
-
-std::optional<TrustedFlag> read(const Store & store, Source & from, Phantom<std::optional<TrustedFlag>> _)
-{
- auto temp = readNum<uint8_t>(from);
- switch (temp) {
- case 0:
- return std::nullopt;
- case 1:
- return { Trusted };
- case 2:
- return { NotTrusted };
- default:
- throw Error("Invalid trusted status from remote");
- }
-}
-
-void write(const Store & store, Sink & out, const std::optional<TrustedFlag> & optTrusted)
-{
- if (!optTrusted)
- out << (uint8_t)0;
- else {
- switch (*optTrusted) {
- case Trusted:
- out << (uint8_t)1;
- break;
- case NotTrusted:
- out << (uint8_t)2;
- break;
- default:
- assert(false);
- };
- }
-}
-
-
-ContentAddress read(const Store & store, Source & from, Phantom<ContentAddress> _)
-{
- return ContentAddress::parse(readString(from));
-}
-
-void write(const Store & store, Sink & out, const ContentAddress & ca)
-{
- out << renderContentAddress(ca);
-}
-
-
-DerivedPath read(const Store & store, Source & from, Phantom<DerivedPath> _)
-{
- auto s = readString(from);
- return DerivedPath::parseLegacy(store, s);
-}
-
-void write(const Store & store, Sink & out, const DerivedPath & req)
-{
- out << req.to_string_legacy(store);
-}
-
-
-Realisation read(const Store & store, Source & from, Phantom<Realisation> _)
-{
- std::string rawInput = readString(from);
- return Realisation::fromJSON(
- nlohmann::json::parse(rawInput),
- "remote-protocol"
- );
-}
-
-void write(const Store & store, Sink & out, const Realisation & realisation)
-{
- out << realisation.toJSON().dump();
-}
-
-
-DrvOutput read(const Store & store, Source & from, Phantom<DrvOutput> _)
-{
- return DrvOutput::parse(readString(from));
-}
-
-void write(const Store & store, Sink & out, const DrvOutput & drvOutput)
-{
- out << drvOutput.to_string();
-}
-
-
-KeyedBuildResult read(const Store & store, Source & from, Phantom<KeyedBuildResult> _)
-{
- auto path = worker_proto::read(store, from, Phantom<DerivedPath> {});
- auto br = worker_proto::read(store, from, Phantom<BuildResult> {});
- return KeyedBuildResult {
- std::move(br),
- /* .path = */ std::move(path),
- };
-}
-
-void write(const Store & store, Sink & to, const KeyedBuildResult & res)
-{
- worker_proto::write(store, to, res.path);
- worker_proto::write(store, to, static_cast<const BuildResult &>(res));
-}
-
-
-BuildResult read(const Store & store, Source & from, Phantom<BuildResult> _)
-{
- BuildResult res;
- res.status = (BuildResult::Status) readInt(from);
- from
- >> res.errorMsg
- >> res.timesBuilt
- >> res.isNonDeterministic
- >> res.startTime
- >> res.stopTime;
- auto builtOutputs = worker_proto::read(store, from, Phantom<DrvOutputs> {});
- for (auto && [output, realisation] : builtOutputs)
- res.builtOutputs.insert_or_assign(
- std::move(output.outputName),
- std::move(realisation));
- return res;
-}
-
-void write(const Store & store, Sink & to, const BuildResult & res)
-{
- to
- << res.status
- << res.errorMsg
- << res.timesBuilt
- << res.isNonDeterministic
- << res.startTime
- << res.stopTime;
- DrvOutputs builtOutputs;
- for (auto & [output, realisation] : res.builtOutputs)
- builtOutputs.insert_or_assign(realisation.id, realisation);
- worker_proto::write(store, to, builtOutputs);
-}
-
-
-std::optional<StorePath> read(const Store & store, Source & from, Phantom<std::optional<StorePath>> _)
-{
- auto s = readString(from);
- return s == "" ? std::optional<StorePath> {} : store.parseStorePath(s);
-}
-
-void write(const Store & store, Sink & out, const std::optional<StorePath> & storePathOpt)
-{
- out << (storePathOpt ? store.printStorePath(*storePathOpt) : "");
-}
-
-
-std::optional<ContentAddress> read(const Store & store, Source & from, Phantom<std::optional<ContentAddress>> _)
-{
- return ContentAddress::parseOpt(readString(from));
-}
-
-void write(const Store & store, Sink & out, const std::optional<ContentAddress> & caOpt)
-{
- out << (caOpt ? renderContentAddress(*caOpt) : "");
-}
-
-}
-
-
/* TODO: Separate these store impls into different files, give them better names */
RemoteStore::RemoteStore(const Params & params)
: RemoteStoreConfig(params)
@@ -283,7 +102,7 @@ void RemoteStore::initConnection(Connection & conn)
}
if (GET_PROTOCOL_MINOR(conn.daemonVersion) >= 35) {
- conn.remoteTrustsUs = worker_proto::read(*this, conn.from, Phantom<std::optional<TrustedFlag>> {});
+ conn.remoteTrustsUs = WorkerProto::Serialise<std::optional<TrustedFlag>>::read(*this, conn);
} else {
// We don't know the answer; protocol to old.
conn.remoteTrustsUs = std::nullopt;
@@ -302,7 +121,7 @@ void RemoteStore::initConnection(Connection & conn)
void RemoteStore::setOptions(Connection & conn)
{
- conn.to << wopSetOptions
+ conn.to << WorkerProto::Op::SetOptions
<< settings.keepFailed
<< settings.keepGoing
<< settings.tryFallback
@@ -367,6 +186,7 @@ struct ConnectionHandle
}
RemoteStore::Connection * operator -> () { return &*handle; }
+ RemoteStore::Connection & operator * () { return *handle; }
void processStderr(Sink * sink = 0, Source * source = 0, bool flush = true)
{
@@ -394,7 +214,7 @@ void RemoteStore::setOptions()
bool RemoteStore::isValidPathUncached(const StorePath & path)
{
auto conn(getConnection());
- conn->to << wopIsValidPath << printStorePath(path);
+ conn->to << WorkerProto::Op::IsValidPath << printStorePath(path);
conn.processStderr();
return readInt(conn->from);
}
@@ -409,13 +229,13 @@ StorePathSet RemoteStore::queryValidPaths(const StorePathSet & paths, Substitute
if (isValidPath(i)) res.insert(i);
return res;
} else {
- conn->to << wopQueryValidPaths;
- worker_proto::write(*this, conn->to, paths);
+ conn->to << WorkerProto::Op::QueryValidPaths;
+ WorkerProto::write(*this, *conn, paths);
if (GET_PROTOCOL_MINOR(conn->daemonVersion) >= 27) {
conn->to << (settings.buildersUseSubstitutes ? 1 : 0);
}
conn.processStderr();
- return worker_proto::read(*this, conn->from, Phantom<StorePathSet> {});
+ return WorkerProto::Serialise<StorePathSet>::read(*this, *conn);
}
}
@@ -423,9 +243,9 @@ StorePathSet RemoteStore::queryValidPaths(const StorePathSet & paths, Substitute
StorePathSet RemoteStore::queryAllValidPaths()
{
auto conn(getConnection());
- conn->to << wopQueryAllValidPaths;
+ conn->to << WorkerProto::Op::QueryAllValidPaths;
conn.processStderr();
- return worker_proto::read(*this, conn->from, Phantom<StorePathSet> {});
+ return WorkerProto::Serialise<StorePathSet>::read(*this, *conn);
}
@@ -435,16 +255,16 @@ StorePathSet RemoteStore::querySubstitutablePaths(const StorePathSet & paths)
if (GET_PROTOCOL_MINOR(conn->daemonVersion) < 12) {
StorePathSet res;
for (auto & i : paths) {
- conn->to << wopHasSubstitutes << printStorePath(i);
+ conn->to << WorkerProto::Op::HasSubstitutes << printStorePath(i);
conn.processStderr();
if (readInt(conn->from)) res.insert(i);
}
return res;
} else {
- conn->to << wopQuerySubstitutablePaths;
- worker_proto::write(*this, conn->to, paths);
+ conn->to << WorkerProto::Op::QuerySubstitutablePaths;
+ WorkerProto::write(*this, *conn, paths);
conn.processStderr();
- return worker_proto::read(*this, conn->from, Phantom<StorePathSet> {});
+ return WorkerProto::Serialise<StorePathSet>::read(*this, *conn);
}
}
@@ -459,14 +279,14 @@ void RemoteStore::querySubstitutablePathInfos(const StorePathCAMap & pathsMap, S
for (auto & i : pathsMap) {
SubstitutablePathInfo info;
- conn->to << wopQuerySubstitutablePathInfo << printStorePath(i.first);
+ conn->to << WorkerProto::Op::QuerySubstitutablePathInfo << printStorePath(i.first);
conn.processStderr();
unsigned int reply = readInt(conn->from);
if (reply == 0) continue;
auto deriver = readString(conn->from);
if (deriver != "")
info.deriver = parseStorePath(deriver);
- info.references = worker_proto::read(*this, conn->from, Phantom<StorePathSet> {});
+ info.references = WorkerProto::Serialise<StorePathSet>::read(*this, *conn);
info.downloadSize = readLongLong(conn->from);
info.narSize = readLongLong(conn->from);
infos.insert_or_assign(i.first, std::move(info));
@@ -474,14 +294,14 @@ void RemoteStore::querySubstitutablePathInfos(const StorePathCAMap & pathsMap, S
} else {
- conn->to << wopQuerySubstitutablePathInfos;
+ conn->to << WorkerProto::Op::QuerySubstitutablePathInfos;
if (GET_PROTOCOL_MINOR(conn->daemonVersion) < 22) {
StorePathSet paths;
for (auto & path : pathsMap)
paths.insert(path.first);
- worker_proto::write(*this, conn->to, paths);
+ WorkerProto::write(*this, *conn, paths);
} else
- worker_proto::write(*this, conn->to, pathsMap);
+ WorkerProto::write(*this, *conn, pathsMap);
conn.processStderr();
size_t count = readNum<size_t>(conn->from);
for (size_t n = 0; n < count; n++) {
@@ -489,7 +309,7 @@ void RemoteStore::querySubstitutablePathInfos(const StorePathCAMap & pathsMap, S
auto deriver = readString(conn->from);
if (deriver != "")
info.deriver = parseStorePath(deriver);
- info.references = worker_proto::read(*this, conn->from, Phantom<StorePathSet> {});
+ info.references = WorkerProto::Serialise<StorePathSet>::read(*this, *conn);
info.downloadSize = readLongLong(conn->from);
info.narSize = readLongLong(conn->from);
}
@@ -505,7 +325,7 @@ void RemoteStore::queryPathInfoUncached(const StorePath & path,
std::shared_ptr<const ValidPathInfo> info;
{
auto conn(getConnection());
- conn->to << wopQueryPathInfo << printStorePath(path);
+ conn->to << WorkerProto::Op::QueryPathInfo << printStorePath(path);
try {
conn.processStderr();
} catch (Error & e) {
@@ -530,9 +350,9 @@ void RemoteStore::queryReferrers(const StorePath & path,
StorePathSet & referrers)
{
auto conn(getConnection());
- conn->to << wopQueryReferrers << printStorePath(path);
+ conn->to << WorkerProto::Op::QueryReferrers << printStorePath(path);
conn.processStderr();
- for (auto & i : worker_proto::read(*this, conn->from, Phantom<StorePathSet> {}))
+ for (auto & i : WorkerProto::Serialise<StorePathSet>::read(*this, *conn))
referrers.insert(i);
}
@@ -540,9 +360,9 @@ void RemoteStore::queryReferrers(const StorePath & path,
StorePathSet RemoteStore::queryValidDerivers(const StorePath & path)
{
auto conn(getConnection());
- conn->to << wopQueryValidDerivers << printStorePath(path);
+ conn->to << WorkerProto::Op::QueryValidDerivers << printStorePath(path);
conn.processStderr();
- return worker_proto::read(*this, conn->from, Phantom<StorePathSet> {});
+ return WorkerProto::Serialise<StorePathSet>::read(*this, *conn);
}
@@ -552,9 +372,9 @@ StorePathSet RemoteStore::queryDerivationOutputs(const StorePath & path)
return Store::queryDerivationOutputs(path);
}
auto conn(getConnection());
- conn->to << wopQueryDerivationOutputs << printStorePath(path);
+ conn->to << WorkerProto::Op::QueryDerivationOutputs << printStorePath(path);
conn.processStderr();
- return worker_proto::read(*this, conn->from, Phantom<StorePathSet> {});
+ return WorkerProto::Serialise<StorePathSet>::read(*this, *conn);
}
@@ -562,9 +382,9 @@ std::map<std::string, std::optional<StorePath>> RemoteStore::queryPartialDerivat
{
if (GET_PROTOCOL_MINOR(getProtocol()) >= 0x16) {
auto conn(getConnection());
- conn->to << wopQueryDerivationOutputMap << printStorePath(path);
+ conn->to << WorkerProto::Op::QueryDerivationOutputMap << printStorePath(path);
conn.processStderr();
- return worker_proto::read(*this, conn->from, Phantom<std::map<std::string, std::optional<StorePath>>> {});
+ return WorkerProto::Serialise<std::map<std::string, std::optional<StorePath>>>::read(*this, *conn);
} else {
// Fallback for old daemon versions.
// For floating-CA derivations (and their co-dependencies) this is an
@@ -585,7 +405,7 @@ std::map<std::string, std::optional<StorePath>> RemoteStore::queryPartialDerivat
std::optional<StorePath> RemoteStore::queryPathFromHashPart(const std::string & hashPart)
{
auto conn(getConnection());
- conn->to << wopQueryPathFromHashPart << hashPart;
+ conn->to << WorkerProto::Op::QueryPathFromHashPart << hashPart;
conn.processStderr();
Path path = readString(conn->from);
if (path.empty()) return {};
@@ -607,10 +427,10 @@ ref<const ValidPathInfo> RemoteStore::addCAToStore(
if (GET_PROTOCOL_MINOR(conn->daemonVersion) >= 25) {
conn->to
- << wopAddToStore
+ << WorkerProto::Op::AddToStore
<< name
<< caMethod.render(hashType);
- worker_proto::write(*this, conn->to, references);
+ WorkerProto::write(*this, *conn, references);
conn->to << repair;
// The dump source may invoke the store, so we need to make some room.
@@ -634,13 +454,13 @@ ref<const ValidPathInfo> RemoteStore::addCAToStore(
throw UnimplementedError("When adding text-hashed data called '%s', only SHA-256 is supported but '%s' was given",
name, printHashType(hashType));
std::string s = dump.drain();
- conn->to << wopAddTextToStore << name << s;
- worker_proto::write(*this, conn->to, references);
+ conn->to << WorkerProto::Op::AddTextToStore << name << s;
+ WorkerProto::write(*this, *conn, references);
conn.processStderr();
},
[&](const FileIngestionMethod & fim) -> void {
conn->to
- << wopAddToStore
+ << WorkerProto::Op::AddToStore
<< name
<< ((hashType == htSHA256 && fim == FileIngestionMethod::Recursive) ? 0 : 1) /* backwards compatibility hack */
<< (fim == FileIngestionMethod::Recursive ? 1 : 0)
@@ -692,7 +512,7 @@ void RemoteStore::addToStore(const ValidPathInfo & info, Source & source,
auto conn(getConnection());
if (GET_PROTOCOL_MINOR(conn->daemonVersion) < 18) {
- conn->to << wopImportPaths;
+ conn->to << WorkerProto::Op::ImportPaths;
auto source2 = sinkToSource([&](Sink & sink) {
sink << 1 // == path follows
@@ -701,7 +521,7 @@ void RemoteStore::addToStore(const ValidPathInfo & info, Source & source,
sink
<< exportMagic
<< printStorePath(info.path);
- worker_proto::write(*this, sink, info.references);
+ WorkerProto::write(*this, *conn, info.references);
sink
<< (info.deriver ? printStorePath(*info.deriver) : "")
<< 0 // == no legacy signature
@@ -711,16 +531,16 @@ void RemoteStore::addToStore(const ValidPathInfo & info, Source & source,
conn.processStderr(0, source2.get());
- auto importedPaths = worker_proto::read(*this, conn->from, Phantom<StorePathSet> {});
+ auto importedPaths = WorkerProto::Serialise<StorePathSet>::read(*this, *conn);
assert(importedPaths.size() <= 1);
}
else {
- conn->to << wopAddToStoreNar
+ conn->to << WorkerProto::Op::AddToStoreNar
<< printStorePath(info.path)
<< (info.deriver ? printStorePath(*info.deriver) : "")
<< info.narHash.to_string(Base16, false);
- worker_proto::write(*this, conn->to, info.references);
+ WorkerProto::write(*this, *conn, info.references);
conn->to << info.registrationTime << info.narSize
<< info.ultimate << info.sigs << renderContentAddress(info.ca)
<< repair << !checkSigs;
@@ -764,7 +584,7 @@ void RemoteStore::addMultipleToStore(
if (GET_PROTOCOL_MINOR(getConnection()->daemonVersion) >= 32) {
auto conn(getConnection());
conn->to
- << wopAddMultipleToStore
+ << WorkerProto::Op::AddMultipleToStore
<< repair
<< !checkSigs;
conn.withFramedSink([&](Sink & sink) {
@@ -788,12 +608,12 @@ StorePath RemoteStore::addTextToStore(
void RemoteStore::registerDrvOutput(const Realisation & info)
{
auto conn(getConnection());
- conn->to << wopRegisterDrvOutput;
+ conn->to << WorkerProto::Op::RegisterDrvOutput;
if (GET_PROTOCOL_MINOR(conn->daemonVersion) < 31) {
conn->to << info.id.to_string();
conn->to << std::string(info.outPath.to_string());
} else {
- worker_proto::write(*this, conn->to, info);
+ WorkerProto::write(*this, *conn, info);
}
conn.processStderr();
}
@@ -809,20 +629,20 @@ void RemoteStore::queryRealisationUncached(const DrvOutput & id,
return callback(nullptr);
}
- conn->to << wopQueryRealisation;
+ conn->to << WorkerProto::Op::QueryRealisation;
conn->to << id.to_string();
conn.processStderr();
auto real = [&]() -> std::shared_ptr<const Realisation> {
if (GET_PROTOCOL_MINOR(conn->daemonVersion) < 31) {
- auto outPaths = worker_proto::read(
- *this, conn->from, Phantom<std::set<StorePath>> {});
+ auto outPaths = WorkerProto::Serialise<std::set<StorePath>>::read(
+ *this, *conn);
if (outPaths.empty())
return nullptr;
return std::make_shared<const Realisation>(Realisation { .id = id, .outPath = *outPaths.begin() });
} else {
- auto realisations = worker_proto::read(
- *this, conn->from, Phantom<std::set<Realisation>> {});
+ auto realisations = WorkerProto::Serialise<std::set<Realisation>>::read(
+ *this, *conn);
if (realisations.empty())
return nullptr;
return std::make_shared<const Realisation>(*realisations.begin());
@@ -833,10 +653,10 @@ void RemoteStore::queryRealisationUncached(const DrvOutput & id,
} catch (...) { return callback.rethrow(); }
}
-static void writeDerivedPaths(RemoteStore & store, ConnectionHandle & conn, const std::vector<DerivedPath> & reqs)
+static void writeDerivedPaths(RemoteStore & store, RemoteStore::Connection & conn, const std::vector<DerivedPath> & reqs)
{
- if (GET_PROTOCOL_MINOR(conn->daemonVersion) >= 30) {
- worker_proto::write(store, conn->to, reqs);
+ if (GET_PROTOCOL_MINOR(conn.daemonVersion) >= 30) {
+ WorkerProto::write(store, conn, reqs);
} else {
Strings ss;
for (auto & p : reqs) {
@@ -848,12 +668,12 @@ static void writeDerivedPaths(RemoteStore & store, ConnectionHandle & conn, cons
[&](const StorePath & drvPath) {
throw Error("trying to request '%s', but daemon protocol %d.%d is too old (< 1.29) to request a derivation file",
store.printStorePath(drvPath),
- GET_PROTOCOL_MAJOR(conn->daemonVersion),
- GET_PROTOCOL_MINOR(conn->daemonVersion));
+ GET_PROTOCOL_MAJOR(conn.daemonVersion),
+ GET_PROTOCOL_MINOR(conn.daemonVersion));
},
}, sOrDrvPath);
}
- conn->to << ss;
+ conn.to << ss;
}
}
@@ -877,9 +697,9 @@ void RemoteStore::buildPaths(const std::vector<DerivedPath> & drvPaths, BuildMod
copyDrvsFromEvalStore(drvPaths, evalStore);
auto conn(getConnection());
- conn->to << wopBuildPaths;
+ conn->to << WorkerProto::Op::BuildPaths;
assert(GET_PROTOCOL_MINOR(conn->daemonVersion) >= 13);
- writeDerivedPaths(*this, conn, drvPaths);
+ writeDerivedPaths(*this, *conn, drvPaths);
if (GET_PROTOCOL_MINOR(conn->daemonVersion) >= 15)
conn->to << buildMode;
else
@@ -902,11 +722,11 @@ std::vector<KeyedBuildResult> RemoteStore::buildPathsWithResults(
auto & conn = *conn_;
if (GET_PROTOCOL_MINOR(conn->daemonVersion) >= 34) {
- conn->to << wopBuildPathsWithResults;
- writeDerivedPaths(*this, conn, paths);
+ conn->to << WorkerProto::Op::BuildPathsWithResults;
+ writeDerivedPaths(*this, *conn, paths);
conn->to << buildMode;
conn.processStderr();
- return worker_proto::read(*this, conn->from, Phantom<std::vector<KeyedBuildResult>> {});
+ return WorkerProto::Serialise<std::vector<KeyedBuildResult>>::read(*this, *conn);
} else {
// Avoid deadlock.
conn_.reset();
@@ -978,7 +798,7 @@ BuildResult RemoteStore::buildDerivation(const StorePath & drvPath, const BasicD
BuildMode buildMode)
{
auto conn(getConnection());
- conn->to << wopBuildDerivation << printStorePath(drvPath);
+ conn->to << WorkerProto::Op::BuildDerivation << printStorePath(drvPath);
writeDerivation(conn->to, *this, drv);
conn->to << buildMode;
conn.processStderr();
@@ -989,7 +809,7 @@ BuildResult RemoteStore::buildDerivation(const StorePath & drvPath, const BasicD
conn->from >> res.timesBuilt >> res.isNonDeterministic >> res.startTime >> res.stopTime;
}
if (GET_PROTOCOL_MINOR(conn->daemonVersion) >= 28) {
- auto builtOutputs = worker_proto::read(*this, conn->from, Phantom<DrvOutputs> {});
+ auto builtOutputs = WorkerProto::Serialise<DrvOutputs>::read(*this, *conn);
for (auto && [output, realisation] : builtOutputs)
res.builtOutputs.insert_or_assign(
std::move(output.outputName),
@@ -1002,7 +822,7 @@ BuildResult RemoteStore::buildDerivation(const StorePath & drvPath, const BasicD
void RemoteStore::ensurePath(const StorePath & path)
{
auto conn(getConnection());
- conn->to << wopEnsurePath << printStorePath(path);
+ conn->to << WorkerProto::Op::EnsurePath << printStorePath(path);
conn.processStderr();
readInt(conn->from);
}
@@ -1011,7 +831,7 @@ void RemoteStore::ensurePath(const StorePath & path)
void RemoteStore::addTempRoot(const StorePath & path)
{
auto conn(getConnection());
- conn->to << wopAddTempRoot << printStorePath(path);
+ conn->to << WorkerProto::Op::AddTempRoot << printStorePath(path);
conn.processStderr();
readInt(conn->from);
}
@@ -1020,7 +840,7 @@ void RemoteStore::addTempRoot(const StorePath & path)
void RemoteStore::addIndirectRoot(const Path & path)
{
auto conn(getConnection());
- conn->to << wopAddIndirectRoot << path;
+ conn->to << WorkerProto::Op::AddIndirectRoot << path;
conn.processStderr();
readInt(conn->from);
}
@@ -1029,7 +849,7 @@ void RemoteStore::addIndirectRoot(const Path & path)
Roots RemoteStore::findRoots(bool censor)
{
auto conn(getConnection());
- conn->to << wopFindRoots;
+ conn->to << WorkerProto::Op::FindRoots;
conn.processStderr();
size_t count = readNum<size_t>(conn->from);
Roots result;
@@ -1047,8 +867,8 @@ void RemoteStore::collectGarbage(const GCOptions & options, GCResults & results)
auto conn(getConnection());
conn->to
- << wopCollectGarbage << options.action;
- worker_proto::write(*this, conn->to, options.pathsToDelete);
+ << WorkerProto::Op::CollectGarbage << options.action;
+ WorkerProto::write(*this, *conn, options.pathsToDelete);
conn->to << options.ignoreLiveness
<< options.maxFreed
/* removed options */
@@ -1070,7 +890,7 @@ void RemoteStore::collectGarbage(const GCOptions & options, GCResults & results)
void RemoteStore::optimiseStore()
{
auto conn(getConnection());
- conn->to << wopOptimiseStore;
+ conn->to << WorkerProto::Op::OptimiseStore;
conn.processStderr();
readInt(conn->from);
}
@@ -1079,7 +899,7 @@ void RemoteStore::optimiseStore()
bool RemoteStore::verifyStore(bool checkContents, RepairFlag repair)
{
auto conn(getConnection());
- conn->to << wopVerifyStore << checkContents << repair;
+ conn->to << WorkerProto::Op::VerifyStore << checkContents << repair;
conn.processStderr();
return readInt(conn->from);
}
@@ -1088,7 +908,7 @@ bool RemoteStore::verifyStore(bool checkContents, RepairFlag repair)
void RemoteStore::addSignatures(const StorePath & storePath, const StringSet & sigs)
{
auto conn(getConnection());
- conn->to << wopAddSignatures << printStorePath(storePath) << sigs;
+ conn->to << WorkerProto::Op::AddSignatures << printStorePath(storePath) << sigs;
conn.processStderr();
readInt(conn->from);
}
@@ -1104,12 +924,12 @@ void RemoteStore::queryMissing(const std::vector<DerivedPath> & targets,
// Don't hold the connection handle in the fallback case
// to prevent a deadlock.
goto fallback;
- conn->to << wopQueryMissing;
- writeDerivedPaths(*this, conn, targets);
+ conn->to << WorkerProto::Op::QueryMissing;
+ writeDerivedPaths(*this, *conn, targets);
conn.processStderr();
- willBuild = worker_proto::read(*this, conn->from, Phantom<StorePathSet> {});
- willSubstitute = worker_proto::read(*this, conn->from, Phantom<StorePathSet> {});
- unknown = worker_proto::read(*this, conn->from, Phantom<StorePathSet> {});
+ willBuild = WorkerProto::Serialise<StorePathSet>::read(*this, *conn);
+ willSubstitute = WorkerProto::Serialise<StorePathSet>::read(*this, *conn);
+ unknown = WorkerProto::Serialise<StorePathSet>::read(*this, *conn);
conn->from >> downloadSize >> narSize;
return;
}
@@ -1123,7 +943,7 @@ void RemoteStore::queryMissing(const std::vector<DerivedPath> & targets,
void RemoteStore::addBuildLog(const StorePath & drvPath, std::string_view log)
{
auto conn(getConnection());
- conn->to << wopAddBuildLog << drvPath.to_string();
+ conn->to << WorkerProto::Op::AddBuildLog << drvPath.to_string();
StringSource source(log);
conn.withFramedSink([&](Sink & sink) {
source.drainInto(sink);
@@ -1175,7 +995,7 @@ RemoteStore::Connection::~Connection()
void RemoteStore::narFromPath(const StorePath & path, Sink & sink)
{
auto conn(connections->get());
- conn->to << wopNarFromPath << printStorePath(path);
+ conn->to << WorkerProto::Op::NarFromPath << printStorePath(path);
conn->processStderr();
copyNAR(conn->from, sink);
}
diff --git a/src/libstore/remote-store.hh b/src/libstore/remote-store.hh
index 82e4656ab..cb7a71acf 100644
--- a/src/libstore/remote-store.hh
+++ b/src/libstore/remote-store.hh
@@ -137,6 +137,17 @@ public:
bool verifyStore(bool checkContents, RepairFlag repair) override;
+ /**
+     * The default implementation would schedule the work on the client
+     * side, but for consistency with `buildPaths` and `buildDerivation`
+     * it should happen on the remote side.
+     *
+     * We make this fail for now so that we can implement it properly
+     * later without it being a breaking change.
+ */
+ void repairPath(const StorePath & path) override
+ { unsupported("repairPath"); }
+
void addSignatures(const StorePath & storePath, const StringSet & sigs) override;
void queryMissing(const std::vector<DerivedPath> & targets,
@@ -155,21 +166,7 @@ public:
void flushBadConnections();
- struct Connection
- {
- FdSink to;
- FdSource from;
- unsigned int daemonVersion;
- std::optional<TrustedFlag> remoteTrustsUs;
- std::optional<std::string> daemonNixVersion;
- std::chrono::time_point<std::chrono::steady_clock> startTime;
-
- virtual ~Connection();
-
- virtual void closeWrite() = 0;
-
- std::exception_ptr processStderr(Sink * sink = 0, Source * source = 0, bool flush = true);
- };
+ struct Connection;
ref<Connection> openConnectionWrapper();
diff --git a/src/libstore/serve-protocol.hh b/src/libstore/serve-protocol.hh
index 553fd3a09..7e43b3969 100644
--- a/src/libstore/serve-protocol.hh
+++ b/src/libstore/serve-protocol.hh
@@ -10,16 +10,52 @@ namespace nix {
#define GET_PROTOCOL_MAJOR(x) ((x) & 0xff00)
#define GET_PROTOCOL_MINOR(x) ((x) & 0x00ff)
-typedef enum {
- cmdQueryValidPaths = 1,
- cmdQueryPathInfos = 2,
- cmdDumpStorePath = 3,
- cmdImportPaths = 4,
- cmdExportPaths = 5,
- cmdBuildPaths = 6,
- cmdQueryClosure = 7,
- cmdBuildDerivation = 8,
- cmdAddToStoreNar = 9,
-} ServeCommand;
+/**
+ * The "serve protocol", used by ssh:// stores.
+ *
+ * This `struct` is basically just a `namespace`; we use a type rather
+ * than a namespace just so we can use it as a template argument.
+ */
+struct ServeProto
+{
+ /**
+ * Enumeration of all the request types for the protocol.
+ */
+ enum struct Command : uint64_t;
+};
+
+enum struct ServeProto::Command : uint64_t
+{
+ QueryValidPaths = 1,
+ QueryPathInfos = 2,
+ DumpStorePath = 3,
+ ImportPaths = 4,
+ ExportPaths = 5,
+ BuildPaths = 6,
+ QueryClosure = 7,
+ BuildDerivation = 8,
+ AddToStoreNar = 9,
+};
+
+/**
+ * Convenience for sending operation codes.
+ *
+ * @todo Probably switch to using a `ServeProto::Serialise` mechanism
+ * instead. This was not done at this time in order to reduce churn.
+ */
+inline Sink & operator << (Sink & sink, ServeProto::Command op)
+{
+ return sink << (uint64_t) op;
+}
+
+/**
+ * Convenience for debugging.
+ *
+ * @todo Perhaps render known opcodes more nicely.
+ */
+inline std::ostream & operator << (std::ostream & s, ServeProto::Command op)
+{
+ return s << (uint64_t) op;
+}
}
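
Editorial sketch (not from the diff): a call site in the legacy SSH store would send one of these commands roughly as shown below; `requestDump`, `to` and `storePath` are hypothetical names here:

    #include "serialise.hh"
    #include "serve-protocol.hh"

    // `to` is the FdSink of a connection to `nix-store --serve`.
    void requestDump(nix::FdSink & to, const std::string & storePath)
    {
        // operator<< above writes the enum as its uint64_t wire value.
        to << nix::ServeProto::Command::DumpStorePath << storePath;
        to.flush();
    }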
diff --git a/src/libstore/sqlite.cc b/src/libstore/sqlite.cc
index df334c23c..7c8decb74 100644
--- a/src/libstore/sqlite.cc
+++ b/src/libstore/sqlite.cc
@@ -1,6 +1,7 @@
#include "sqlite.hh"
#include "globals.hh"
#include "util.hh"
+#include "url.hh"
#include <sqlite3.h>
@@ -50,15 +51,17 @@ static void traceSQL(void * x, const char * sql)
notice("SQL<[%1%]>", sql);
};
-SQLite::SQLite(const Path & path, bool create)
+SQLite::SQLite(const Path & path, SQLiteOpenMode mode)
{
// useSQLiteWAL also indicates what virtual file system we need. Using
// `unix-dotfile` is needed on NFS file systems and on Windows' Subsystem
// for Linux (WSL) where useSQLiteWAL should be false by default.
const char *vfs = settings.useSQLiteWAL ? 0 : "unix-dotfile";
- int flags = SQLITE_OPEN_READWRITE;
- if (create) flags |= SQLITE_OPEN_CREATE;
- int ret = sqlite3_open_v2(path.c_str(), &db, flags, vfs);
+ bool immutable = mode == SQLiteOpenMode::Immutable;
+ int flags = immutable ? SQLITE_OPEN_READONLY : SQLITE_OPEN_READWRITE;
+ if (mode == SQLiteOpenMode::Normal) flags |= SQLITE_OPEN_CREATE;
+ auto uri = "file:" + percentEncode(path) + "?immutable=" + (immutable ? "1" : "0");
+ int ret = sqlite3_open_v2(uri.c_str(), &db, SQLITE_OPEN_URI | flags, vfs);
if (ret != SQLITE_OK) {
const char * err = sqlite3_errstr(ret);
throw Error("cannot open SQLite database '%s': %s", path, err);
diff --git a/src/libstore/sqlite.hh b/src/libstore/sqlite.hh
index 6e14852cb..0c08267f7 100644
--- a/src/libstore/sqlite.hh
+++ b/src/libstore/sqlite.hh
@@ -11,6 +11,27 @@ struct sqlite3_stmt;
namespace nix {
+enum class SQLiteOpenMode {
+ /**
+ * Open the database in read-write mode.
+ * If the database does not exist, it will be created.
+ */
+ Normal,
+ /**
+ * Open the database in read-write mode.
+ * Fails with an error if the database does not exist.
+ */
+ NoCreate,
+ /**
+ * Open the database in immutable mode.
+ * In addition to the database being read-only,
+     * no WAL or journal files will be created by SQLite.
+ * Use this mode if the database is on a read-only filesystem.
+ * Fails with an error if the database does not exist.
+ */
+ Immutable,
+};
+
/**
* RAII wrapper to close a SQLite database automatically.
*/
@@ -18,7 +39,7 @@ struct SQLite
{
sqlite3 * db = 0;
SQLite() { }
- SQLite(const Path & path, bool create = true);
+ SQLite(const Path & path, SQLiteOpenMode mode = SQLiteOpenMode::Normal);
SQLite(const SQLite & from) = delete;
SQLite& operator = (const SQLite & from) = delete;
SQLite& operator = (SQLite && from) { db = from.db; from.db = 0; return *this; }
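
Editorial example (not in the patch): a store running off a read-only file system could now open the database immutably; the function name is invented and the path below is the usual location, shown purely for illustration:

    #include "sqlite.hh"

    void queryReadOnlyDb()
    {
        // Immutable: read-only, and SQLite creates no WAL/journal files.
        nix::SQLite db("/nix/var/nix/db/db.sqlite", nix::SQLiteOpenMode::Immutable);
        // ... prepare and run statements as usual ...
    }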
diff --git a/src/libstore/ssh-store.cc b/src/libstore/ssh-store.cc
index 962221ad2..0200076c0 100644
--- a/src/libstore/ssh-store.cc
+++ b/src/libstore/ssh-store.cc
@@ -1,6 +1,7 @@
#include "ssh-store-config.hh"
#include "store-api.hh"
#include "remote-store.hh"
+#include "remote-store-connection.hh"
#include "remote-fs-accessor.hh"
#include "archive.hh"
#include "worker-protocol.hh"
diff --git a/src/libstore/ssh.cc b/src/libstore/ssh.cc
index 6f6deda51..fae99d75b 100644
--- a/src/libstore/ssh.cc
+++ b/src/libstore/ssh.cc
@@ -41,6 +41,11 @@ void SSHMaster::addCommonSSHOpts(Strings & args)
args.push_back("-oLocalCommand=echo started");
}
+bool SSHMaster::isMasterRunning() {
+ auto res = runProgram(RunOptions {.program = "ssh", .args = {"-O", "check", host}, .mergeStderrToStdout = true});
+ return res.first == 0;
+}
+
std::unique_ptr<SSHMaster::Connection> SSHMaster::startCommand(const std::string & command)
{
Path socketPath = startMaster();
@@ -97,7 +102,7 @@ std::unique_ptr<SSHMaster::Connection> SSHMaster::startCommand(const std::string
// Wait for the SSH connection to be established,
// So that we don't overwrite the password prompt with our progress bar.
- if (!fakeSSH && !useMaster) {
+ if (!fakeSSH && !useMaster && !isMasterRunning()) {
std::string reply;
try {
reply = readLine(out.readSide.get());
@@ -133,6 +138,8 @@ Path SSHMaster::startMaster()
logger->pause();
Finally cleanup = [&]() { logger->resume(); };
+ bool wasMasterRunning = isMasterRunning();
+
state->sshMaster = startProcess([&]() {
restoreProcessContext();
@@ -152,13 +159,15 @@ Path SSHMaster::startMaster()
out.writeSide = -1;
- std::string reply;
- try {
- reply = readLine(out.readSide.get());
- } catch (EndOfFile & e) { }
+ if (!wasMasterRunning) {
+ std::string reply;
+ try {
+ reply = readLine(out.readSide.get());
+ } catch (EndOfFile & e) { }
- if (reply != "started")
- throw Error("failed to start SSH master connection to '%s'", host);
+ if (reply != "started")
+ throw Error("failed to start SSH master connection to '%s'", host);
+ }
return state->socketPath;
}
diff --git a/src/libstore/ssh.hh b/src/libstore/ssh.hh
index c86a8a986..94b952af9 100644
--- a/src/libstore/ssh.hh
+++ b/src/libstore/ssh.hh
@@ -28,6 +28,7 @@ private:
Sync<State> state_;
void addCommonSSHOpts(Strings & args);
+ bool isMasterRunning();
public:
diff --git a/src/libstore/store-api.hh b/src/libstore/store-api.hh
index bad610014..14a862eef 100644
--- a/src/libstore/store-api.hh
+++ b/src/libstore/store-api.hh
@@ -114,7 +114,7 @@ struct StoreConfig : public Config
return "";
}
- const PathSetting storeDir_{this, false, settings.nixStore,
+ const PathSetting storeDir_{this, settings.nixStore,
"store",
R"(
Logical location of the Nix store, usually
@@ -679,8 +679,7 @@ public:
* Repair the contents of the given path by redownloading it using
* a substituter (if available).
*/
- virtual void repairPath(const StorePath & path)
- { unsupported("repairPath"); }
+ virtual void repairPath(const StorePath & path);
/**
* Add signatures to the specified store path. The signatures are
diff --git a/src/libstore/tests/downstream-placeholder.cc b/src/libstore/tests/downstream-placeholder.cc
new file mode 100644
index 000000000..ec3e1000f
--- /dev/null
+++ b/src/libstore/tests/downstream-placeholder.cc
@@ -0,0 +1,33 @@
+#include <gtest/gtest.h>
+
+#include "downstream-placeholder.hh"
+
+namespace nix {
+
+TEST(DownstreamPlaceholder, unknownCaOutput) {
+ ASSERT_EQ(
+ DownstreamPlaceholder::unknownCaOutput(
+ StorePath { "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo.drv" },
+ "out").render(),
+ "/0c6rn30q4frawknapgwq386zq358m8r6msvywcvc89n6m5p2dgbz");
+}
+
+TEST(DownstreamPlaceholder, unknownDerivation) {
+ /**
+ * We set these in tests rather than the regular globals so we don't have
+ * to worry about race conditions if the tests run concurrently.
+ */
+ ExperimentalFeatureSettings mockXpSettings;
+ mockXpSettings.set("experimental-features", "dynamic-derivations ca-derivations");
+
+ ASSERT_EQ(
+ DownstreamPlaceholder::unknownDerivation(
+ DownstreamPlaceholder::unknownCaOutput(
+ StorePath { "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo.drv.drv" },
+ "out"),
+ "out",
+ mockXpSettings).render(),
+ "/0gn6agqxjyyalf0dpihgyf49xq5hqxgw100f0wydnj6yqrhqsb3w");
+}
+
+}
diff --git a/src/libstore/uds-remote-store.cc b/src/libstore/uds-remote-store.cc
index 0fb7c38e9..69dae2da5 100644
--- a/src/libstore/uds-remote-store.cc
+++ b/src/libstore/uds-remote-store.cc
@@ -13,6 +13,14 @@
namespace nix {
+std::string UDSRemoteStoreConfig::doc()
+{
+ return
+ #include "uds-remote-store.md"
+ ;
+}
+
+
UDSRemoteStore::UDSRemoteStore(const Params & params)
: StoreConfig(params)
, LocalFSStoreConfig(params)
diff --git a/src/libstore/uds-remote-store.hh b/src/libstore/uds-remote-store.hh
index bd1dcb67c..2bd6517fa 100644
--- a/src/libstore/uds-remote-store.hh
+++ b/src/libstore/uds-remote-store.hh
@@ -2,6 +2,7 @@
///@file
#include "remote-store.hh"
+#include "remote-store-connection.hh"
#include "local-fs-store.hh"
namespace nix {
@@ -17,12 +18,7 @@ struct UDSRemoteStoreConfig : virtual LocalFSStoreConfig, virtual RemoteStoreCon
const std::string name() override { return "Local Daemon Store"; }
- std::string doc() override
- {
- return
- #include "uds-remote-store.md"
- ;
- }
+ std::string doc() override;
};
class UDSRemoteStore : public virtual UDSRemoteStoreConfig, public virtual LocalFSStore, public virtual RemoteStore
diff --git a/src/libstore/worker-protocol-impl.hh b/src/libstore/worker-protocol-impl.hh
new file mode 100644
index 000000000..d3d2792ff
--- /dev/null
+++ b/src/libstore/worker-protocol-impl.hh
@@ -0,0 +1,78 @@
+#pragma once
+/**
+ * @file
+ *
+ * Template implementations (as opposed to mere declarations).
+ *
+ * This file is an example of the "impl.hh" pattern. See the
+ * contributing guide.
+ */
+
+#include "worker-protocol.hh"
+
+namespace nix {
+
+template<typename T>
+std::vector<T> WorkerProto::Serialise<std::vector<T>>::read(const Store & store, WorkerProto::ReadConn conn)
+{
+ std::vector<T> resSet;
+ auto size = readNum<size_t>(conn.from);
+ while (size--) {
+ resSet.push_back(WorkerProto::Serialise<T>::read(store, conn));
+ }
+ return resSet;
+}
+
+template<typename T>
+void WorkerProto::Serialise<std::vector<T>>::write(const Store & store, WorkerProto::WriteConn conn, const std::vector<T> & resSet)
+{
+ conn.to << resSet.size();
+ for (auto & key : resSet) {
+ WorkerProto::Serialise<T>::write(store, conn, key);
+ }
+}
+
+template<typename T>
+std::set<T> WorkerProto::Serialise<std::set<T>>::read(const Store & store, WorkerProto::ReadConn conn)
+{
+ std::set<T> resSet;
+ auto size = readNum<size_t>(conn.from);
+ while (size--) {
+ resSet.insert(WorkerProto::Serialise<T>::read(store, conn));
+ }
+ return resSet;
+}
+
+template<typename T>
+void WorkerProto::Serialise<std::set<T>>::write(const Store & store, WorkerProto::WriteConn conn, const std::set<T> & resSet)
+{
+ conn.to << resSet.size();
+ for (auto & key : resSet) {
+ WorkerProto::Serialise<T>::write(store, conn, key);
+ }
+}
+
+template<typename K, typename V>
+std::map<K, V> WorkerProto::Serialise<std::map<K, V>>::read(const Store & store, WorkerProto::ReadConn conn)
+{
+ std::map<K, V> resMap;
+ auto size = readNum<size_t>(conn.from);
+ while (size--) {
+ auto k = WorkerProto::Serialise<K>::read(store, conn);
+ auto v = WorkerProto::Serialise<V>::read(store, conn);
+ resMap.insert_or_assign(std::move(k), std::move(v));
+ }
+ return resMap;
+}
+
+template<typename K, typename V>
+void WorkerProto::Serialise<std::map<K, V>>::write(const Store & store, WorkerProto::WriteConn conn, const std::map<K, V> & resMap)
+{
+ conn.to << resMap.size();
+ for (auto & i : resMap) {
+ WorkerProto::Serialise<K>::write(store, conn, i.first);
+ WorkerProto::Serialise<V>::write(store, conn, i.second);
+ }
+}
+
+}
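
Editorial note: because these container serialisers recurse through `WorkerProto::Serialise<T>`, nested types need no extra code. A hypothetical example (the function and its names are invented for illustration):

    #include <map>
    #include "store-api.hh"
    #include "worker-protocol.hh"
    #include "worker-protocol-impl.hh"

    // Serialise a map of output names to store paths; this uses the
    // std::map serialiser above, which recurses into the std::string
    // and StorePath serialisers defined in worker-protocol.cc.
    void writeOutputs(const nix::Store & store,
        nix::WorkerProto::WriteConn conn,
        const std::map<std::string, nix::StorePath> & outputs)
    {
        nix::WorkerProto::write(store, conn, outputs);
    }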
diff --git a/src/libstore/worker-protocol.cc b/src/libstore/worker-protocol.cc
new file mode 100644
index 000000000..a23130743
--- /dev/null
+++ b/src/libstore/worker-protocol.cc
@@ -0,0 +1,193 @@
+#include "serialise.hh"
+#include "util.hh"
+#include "path-with-outputs.hh"
+#include "store-api.hh"
+#include "build-result.hh"
+#include "worker-protocol.hh"
+#include "worker-protocol-impl.hh"
+#include "archive.hh"
+#include "derivations.hh"
+
+#include <nlohmann/json.hpp>
+
+namespace nix {
+
+std::string WorkerProto::Serialise<std::string>::read(const Store & store, WorkerProto::ReadConn conn)
+{
+ return readString(conn.from);
+}
+
+void WorkerProto::Serialise<std::string>::write(const Store & store, WorkerProto::WriteConn conn, const std::string & str)
+{
+ conn.to << str;
+}
+
+
+StorePath WorkerProto::Serialise<StorePath>::read(const Store & store, WorkerProto::ReadConn conn)
+{
+ return store.parseStorePath(readString(conn.from));
+}
+
+void WorkerProto::Serialise<StorePath>::write(const Store & store, WorkerProto::WriteConn conn, const StorePath & storePath)
+{
+ conn.to << store.printStorePath(storePath);
+}
+
+
+std::optional<TrustedFlag> WorkerProto::Serialise<std::optional<TrustedFlag>>::read(const Store & store, WorkerProto::ReadConn conn)
+{
+ auto temp = readNum<uint8_t>(conn.from);
+ switch (temp) {
+ case 0:
+ return std::nullopt;
+ case 1:
+ return { Trusted };
+ case 2:
+ return { NotTrusted };
+ default:
+ throw Error("Invalid trusted status from remote");
+ }
+}
+
+void WorkerProto::Serialise<std::optional<TrustedFlag>>::write(const Store & store, WorkerProto::WriteConn conn, const std::optional<TrustedFlag> & optTrusted)
+{
+ if (!optTrusted)
+ conn.to << (uint8_t)0;
+ else {
+ switch (*optTrusted) {
+ case Trusted:
+ conn.to << (uint8_t)1;
+ break;
+ case NotTrusted:
+ conn.to << (uint8_t)2;
+ break;
+ default:
+ assert(false);
+ };
+ }
+}
+
+
+ContentAddress WorkerProto::Serialise<ContentAddress>::read(const Store & store, WorkerProto::ReadConn conn)
+{
+ return ContentAddress::parse(readString(conn.from));
+}
+
+void WorkerProto::Serialise<ContentAddress>::write(const Store & store, WorkerProto::WriteConn conn, const ContentAddress & ca)
+{
+ conn.to << renderContentAddress(ca);
+}
+
+
+DerivedPath WorkerProto::Serialise<DerivedPath>::read(const Store & store, WorkerProto::ReadConn conn)
+{
+ auto s = readString(conn.from);
+ return DerivedPath::parseLegacy(store, s);
+}
+
+void WorkerProto::Serialise<DerivedPath>::write(const Store & store, WorkerProto::WriteConn conn, const DerivedPath & req)
+{
+ conn.to << req.to_string_legacy(store);
+}
+
+
+Realisation WorkerProto::Serialise<Realisation>::read(const Store & store, WorkerProto::ReadConn conn)
+{
+ std::string rawInput = readString(conn.from);
+ return Realisation::fromJSON(
+ nlohmann::json::parse(rawInput),
+ "remote-protocol"
+ );
+}
+
+void WorkerProto::Serialise<Realisation>::write(const Store & store, WorkerProto::WriteConn conn, const Realisation & realisation)
+{
+ conn.to << realisation.toJSON().dump();
+}
+
+
+DrvOutput WorkerProto::Serialise<DrvOutput>::read(const Store & store, WorkerProto::ReadConn conn)
+{
+ return DrvOutput::parse(readString(conn.from));
+}
+
+void WorkerProto::Serialise<DrvOutput>::write(const Store & store, WorkerProto::WriteConn conn, const DrvOutput & drvOutput)
+{
+ conn.to << drvOutput.to_string();
+}
+
+
+KeyedBuildResult WorkerProto::Serialise<KeyedBuildResult>::read(const Store & store, WorkerProto::ReadConn conn)
+{
+ auto path = WorkerProto::Serialise<DerivedPath>::read(store, conn);
+ auto br = WorkerProto::Serialise<BuildResult>::read(store, conn);
+ return KeyedBuildResult {
+ std::move(br),
+ /* .path = */ std::move(path),
+ };
+}
+
+void WorkerProto::Serialise<KeyedBuildResult>::write(const Store & store, WorkerProto::WriteConn conn, const KeyedBuildResult & res)
+{
+ WorkerProto::write(store, conn, res.path);
+ WorkerProto::write(store, conn, static_cast<const BuildResult &>(res));
+}
+
+
+BuildResult WorkerProto::Serialise<BuildResult>::read(const Store & store, WorkerProto::ReadConn conn)
+{
+ BuildResult res;
+ res.status = (BuildResult::Status) readInt(conn.from);
+ conn.from
+ >> res.errorMsg
+ >> res.timesBuilt
+ >> res.isNonDeterministic
+ >> res.startTime
+ >> res.stopTime;
+ auto builtOutputs = WorkerProto::Serialise<DrvOutputs>::read(store, conn);
+ for (auto && [output, realisation] : builtOutputs)
+ res.builtOutputs.insert_or_assign(
+ std::move(output.outputName),
+ std::move(realisation));
+ return res;
+}
+
+void WorkerProto::Serialise<BuildResult>::write(const Store & store, WorkerProto::WriteConn conn, const BuildResult & res)
+{
+ conn.to
+ << res.status
+ << res.errorMsg
+ << res.timesBuilt
+ << res.isNonDeterministic
+ << res.startTime
+ << res.stopTime;
+ DrvOutputs builtOutputs;
+ for (auto & [output, realisation] : res.builtOutputs)
+ builtOutputs.insert_or_assign(realisation.id, realisation);
+ WorkerProto::write(store, conn, builtOutputs);
+}
+
+
+std::optional<StorePath> WorkerProto::Serialise<std::optional<StorePath>>::read(const Store & store, WorkerProto::ReadConn conn)
+{
+ auto s = readString(conn.from);
+ return s == "" ? std::optional<StorePath> {} : store.parseStorePath(s);
+}
+
+void WorkerProto::Serialise<std::optional<StorePath>>::write(const Store & store, WorkerProto::WriteConn conn, const std::optional<StorePath> & storePathOpt)
+{
+ conn.to << (storePathOpt ? store.printStorePath(*storePathOpt) : "");
+}
+
+
+std::optional<ContentAddress> WorkerProto::Serialise<std::optional<ContentAddress>>::read(const Store & store, WorkerProto::ReadConn conn)
+{
+ return ContentAddress::parseOpt(readString(conn.from));
+}
+
+void WorkerProto::Serialise<std::optional<ContentAddress>>::write(const Store & store, WorkerProto::WriteConn conn, const std::optional<ContentAddress> & caOpt)
+{
+ conn.to << (caOpt ? renderContentAddress(*caOpt) : "");
+}
+
+}
diff --git a/src/libstore/worker-protocol.hh b/src/libstore/worker-protocol.hh
index 34b2fc17b..ff762c924 100644
--- a/src/libstore/worker-protocol.hh
+++ b/src/libstore/worker-protocol.hh
@@ -1,7 +1,6 @@
#pragma once
///@file
-#include "store-api.hh"
#include "serialise.hh"
namespace nix {
@@ -15,57 +14,6 @@ namespace nix {
#define GET_PROTOCOL_MINOR(x) ((x) & 0x00ff)
-/**
- * Enumeration of all the request types for the "worker protocol", used
- * by unix:// and ssh-ng:// stores.
- */
-typedef enum {
- wopIsValidPath = 1,
- wopHasSubstitutes = 3,
- wopQueryPathHash = 4, // obsolete
- wopQueryReferences = 5, // obsolete
- wopQueryReferrers = 6,
- wopAddToStore = 7,
- wopAddTextToStore = 8, // obsolete since 1.25, Nix 3.0. Use wopAddToStore
- wopBuildPaths = 9,
- wopEnsurePath = 10,
- wopAddTempRoot = 11,
- wopAddIndirectRoot = 12,
- wopSyncWithGC = 13,
- wopFindRoots = 14,
- wopExportPath = 16, // obsolete
- wopQueryDeriver = 18, // obsolete
- wopSetOptions = 19,
- wopCollectGarbage = 20,
- wopQuerySubstitutablePathInfo = 21,
- wopQueryDerivationOutputs = 22, // obsolete
- wopQueryAllValidPaths = 23,
- wopQueryFailedPaths = 24,
- wopClearFailedPaths = 25,
- wopQueryPathInfo = 26,
- wopImportPaths = 27, // obsolete
- wopQueryDerivationOutputNames = 28, // obsolete
- wopQueryPathFromHashPart = 29,
- wopQuerySubstitutablePathInfos = 30,
- wopQueryValidPaths = 31,
- wopQuerySubstitutablePaths = 32,
- wopQueryValidDerivers = 33,
- wopOptimiseStore = 34,
- wopVerifyStore = 35,
- wopBuildDerivation = 36,
- wopAddSignatures = 37,
- wopNarFromPath = 38,
- wopAddToStoreNar = 39,
- wopQueryMissing = 40,
- wopQueryDerivationOutputMap = 41,
- wopRegisterDrvOutput = 42,
- wopQueryRealisation = 43,
- wopAddMultipleToStore = 44,
- wopAddBuildLog = 45,
- wopBuildPathsWithResults = 46,
-} WorkerOp;
-
-
#define STDERR_NEXT 0x6f6c6d67
#define STDERR_READ 0x64617461 // data needed from source
#define STDERR_WRITE 0x64617416 // data for sink
@@ -79,45 +27,208 @@ typedef enum {
class Store;
struct Source;
+// items being serialised
+struct DerivedPath;
+struct DrvOutput;
+struct Realisation;
+struct BuildResult;
+struct KeyedBuildResult;
+enum TrustedFlag : bool;
+
+
/**
- * Used to guide overloading
+ * The "worker protocol", used by unix:// and ssh-ng:// stores.
*
- * See https://en.cppreference.com/w/cpp/language/adl for the broader
- * concept of what is going on here.
+ * This `struct` is basically just a `namespace`; we use a type rather
+ * than a namespace just so we can use it as a template argument.
*/
-template<typename T>
-struct Phantom {};
+struct WorkerProto
+{
+ /**
+ * Enumeration of all the request types for the protocol.
+ */
+ enum struct Op : uint64_t;
+
+ /**
+ * A unidirectional read connection, to be used by the read half of the
+ * canonical serializers below.
+ *
+ * This currently is just a `Source &`, but more fields will be added
+ * later.
+ */
+ struct ReadConn {
+ Source & from;
+ };
+
+ /**
+ * A unidirectional write connection, to be used by the write half of the
+ * canonical serializers below.
+ *
+ * This currently is just a `Sink &`, but more fields will be added
+ * later.
+ */
+ struct WriteConn {
+ Sink & to;
+ };
+
+ /**
+ * Data type for canonical pairs of serialisers for the worker protocol.
+ *
+ * See https://en.cppreference.com/w/cpp/language/adl for the broader
+ * concept of what is going on here.
+ */
+ template<typename T>
+ struct Serialise;
+ // This is the definition of `Serialise` we *want* to put here, but
+ // do not do so.
+ //
+ // The problem is that if we do so, C++ will think we have
+    // serialisers for *all* types. We don't, of course, but that won't
+ // cause an error until link time. That makes for long debug cycles
+ // when there is a missing serialiser.
+ //
+ // By not defining it globally, and instead letting individual
+ // serialisers specialise the type, we get back the compile-time
+ // errors we would like. When no serialiser exists, C++ sees an
+ // abstract "incomplete" type with no definition, and any attempt to
+ // use `to` or `from` static methods is a compile-time error because
+ // they don't exist on an incomplete type.
+ //
+ // This makes for a quicker debug cycle, as desired.
+#if 0
+ {
+ static T read(const Store & store, ReadConn conn);
+ static void write(const Store & store, WriteConn conn, const T & t);
+ };
+#endif
+
+ /**
+ * Wrapper function around `WorkerProto::Serialise<T>::write` that allows us to
+ * infer the type instead of having to write it down explicitly.
+ */
+ template<typename T>
+ static void write(const Store & store, WriteConn conn, const T & t)
+ {
+ WorkerProto::Serialise<T>::write(store, conn, t);
+ }
+};
+enum struct WorkerProto::Op : uint64_t
+{
+ IsValidPath = 1,
+ HasSubstitutes = 3,
+ QueryPathHash = 4, // obsolete
+ QueryReferences = 5, // obsolete
+ QueryReferrers = 6,
+ AddToStore = 7,
+ AddTextToStore = 8, // obsolete since 1.25, Nix 3.0. Use WorkerProto::Op::AddToStore
+ BuildPaths = 9,
+ EnsurePath = 10,
+ AddTempRoot = 11,
+ AddIndirectRoot = 12,
+ SyncWithGC = 13,
+ FindRoots = 14,
+ ExportPath = 16, // obsolete
+ QueryDeriver = 18, // obsolete
+ SetOptions = 19,
+ CollectGarbage = 20,
+ QuerySubstitutablePathInfo = 21,
+ QueryDerivationOutputs = 22, // obsolete
+ QueryAllValidPaths = 23,
+ QueryFailedPaths = 24,
+ ClearFailedPaths = 25,
+ QueryPathInfo = 26,
+ ImportPaths = 27, // obsolete
+ QueryDerivationOutputNames = 28, // obsolete
+ QueryPathFromHashPart = 29,
+ QuerySubstitutablePathInfos = 30,
+ QueryValidPaths = 31,
+ QuerySubstitutablePaths = 32,
+ QueryValidDerivers = 33,
+ OptimiseStore = 34,
+ VerifyStore = 35,
+ BuildDerivation = 36,
+ AddSignatures = 37,
+ NarFromPath = 38,
+ AddToStoreNar = 39,
+ QueryMissing = 40,
+ QueryDerivationOutputMap = 41,
+ RegisterDrvOutput = 42,
+ QueryRealisation = 43,
+ AddMultipleToStore = 44,
+ AddBuildLog = 45,
+ BuildPathsWithResults = 46,
+};
-namespace worker_proto {
-/* FIXME maybe move more stuff inside here */
+/**
+ * Convenience for sending operation codes.
+ *
+ * @todo Switch to using `WorkerProto::Serialise` instead. This was not
+ * done at the time, in order to keep the churn down.
+ */
+inline Sink & operator << (Sink & sink, WorkerProto::Op op)
+{
+ return sink << (uint64_t) op;
+}
-#define MAKE_WORKER_PROTO(TEMPLATE, T) \
- TEMPLATE T read(const Store & store, Source & from, Phantom< T > _); \
- TEMPLATE void write(const Store & store, Sink & out, const T & str)
+/**
+ * Convenience for debugging.
+ *
+ * @todo Perhaps render known opcodes more nicely.
+ */
+inline std::ostream & operator << (std::ostream & s, WorkerProto::Op op)
+{
+ return s << (uint64_t) op;
+}
-MAKE_WORKER_PROTO(, std::string);
-MAKE_WORKER_PROTO(, StorePath);
-MAKE_WORKER_PROTO(, ContentAddress);
-MAKE_WORKER_PROTO(, DerivedPath);
-MAKE_WORKER_PROTO(, Realisation);
-MAKE_WORKER_PROTO(, DrvOutput);
-MAKE_WORKER_PROTO(, BuildResult);
-MAKE_WORKER_PROTO(, KeyedBuildResult);
-MAKE_WORKER_PROTO(, std::optional<TrustedFlag>);
+/**
+ * Declare a canonical serialiser pair for the worker protocol.
+ *
+ * We specialise the struct merely to indicate that we are implementing
+ * the function for the given type.
+ *
+ * Some sort of `template<...>` must be written at the use site for this to
+ * be legal specialization syntax. See below for what that looks like in
+ * practice.
+ */
+#define MAKE_WORKER_PROTO(T) \
+ struct WorkerProto::Serialise< T > { \
+ static T read(const Store & store, WorkerProto::ReadConn conn); \
+ static void write(const Store & store, WorkerProto::WriteConn conn, const T & t); \
+ };
+
+template<>
+MAKE_WORKER_PROTO(std::string);
+template<>
+MAKE_WORKER_PROTO(StorePath);
+template<>
+MAKE_WORKER_PROTO(ContentAddress);
+template<>
+MAKE_WORKER_PROTO(DerivedPath);
+template<>
+MAKE_WORKER_PROTO(Realisation);
+template<>
+MAKE_WORKER_PROTO(DrvOutput);
+template<>
+MAKE_WORKER_PROTO(BuildResult);
+template<>
+MAKE_WORKER_PROTO(KeyedBuildResult);
+template<>
+MAKE_WORKER_PROTO(std::optional<TrustedFlag>);
-MAKE_WORKER_PROTO(template<typename T>, std::vector<T>);
-MAKE_WORKER_PROTO(template<typename T>, std::set<T>);
+template<typename T>
+MAKE_WORKER_PROTO(std::vector<T>);
+template<typename T>
+MAKE_WORKER_PROTO(std::set<T>);
-#define X_ template<typename K, typename V>
-#define Y_ std::map<K, V>
-MAKE_WORKER_PROTO(X_, Y_);
+template<typename K, typename V>
+#define X_ std::map<K, V>
+MAKE_WORKER_PROTO(X_);
#undef X_
-#undef Y_
/**
* These use the empty string for the null case, relying on the fact
- * that the underlying types never serialize to the empty string.
+ * that the underlying types never serialise to the empty string.
*
* We do this instead of a generic std::optional<T> instance because
 * ordinal tags (0 or 1, here) are a bit of a compatibility hazard. For
@@ -129,72 +240,9 @@ MAKE_WORKER_PROTO(X_, Y_);
* worker protocol harder to implement in other languages where such
* specializations may not be allowed.
*/
-MAKE_WORKER_PROTO(, std::optional<StorePath>);
-MAKE_WORKER_PROTO(, std::optional<ContentAddress>);
-
-template<typename T>
-std::vector<T> read(const Store & store, Source & from, Phantom<std::vector<T>> _)
-{
- std::vector<T> resSet;
- auto size = readNum<size_t>(from);
- while (size--) {
- resSet.push_back(read(store, from, Phantom<T> {}));
- }
- return resSet;
-}
-
-template<typename T>
-void write(const Store & store, Sink & out, const std::vector<T> & resSet)
-{
- out << resSet.size();
- for (auto & key : resSet) {
- write(store, out, key);
- }
-}
-
-template<typename T>
-std::set<T> read(const Store & store, Source & from, Phantom<std::set<T>> _)
-{
- std::set<T> resSet;
- auto size = readNum<size_t>(from);
- while (size--) {
- resSet.insert(read(store, from, Phantom<T> {}));
- }
- return resSet;
-}
-
-template<typename T>
-void write(const Store & store, Sink & out, const std::set<T> & resSet)
-{
- out << resSet.size();
- for (auto & key : resSet) {
- write(store, out, key);
- }
-}
-
-template<typename K, typename V>
-std::map<K, V> read(const Store & store, Source & from, Phantom<std::map<K, V>> _)
-{
- std::map<K, V> resMap;
- auto size = readNum<size_t>(from);
- while (size--) {
- auto k = read(store, from, Phantom<K> {});
- auto v = read(store, from, Phantom<V> {});
- resMap.insert_or_assign(std::move(k), std::move(v));
- }
- return resMap;
-}
-
-template<typename K, typename V>
-void write(const Store & store, Sink & out, const std::map<K, V> & resMap)
-{
- out << resMap.size();
- for (auto & i : resMap) {
- write(store, out, i.first);
- write(store, out, i.second);
- }
-}
-
-}
+template<>
+MAKE_WORKER_PROTO(std::optional<StorePath>);
+template<>
+MAKE_WORKER_PROTO(std::optional<ContentAddress>);
}
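
For illustration, the declare-only `Serialise` primary template above is what turns a missing serialiser into a compile-time error rather than a link-time one. A standalone sketch of the same pattern, with simplified names that are not part of this patch:

```c++
#include <cstdint>
#include <iostream>
#include <sstream>

// Primary template: declared, never defined. Using Serialise<T> for a
// type without its own specialisation fails to compile.
template<typename T>
struct Serialise;

// One concrete specialisation, analogous to MAKE_WORKER_PROTO(StorePath).
template<>
struct Serialise<uint64_t>
{
    static uint64_t read(std::istream & from)
    {
        uint64_t n;
        from >> n;
        return n;
    }
    static void write(std::ostream & to, uint64_t n)
    {
        to << n << ' ';
    }
};

int main()
{
    std::stringstream buf;
    Serialise<uint64_t>::write(buf, 42);
    std::cout << Serialise<uint64_t>::read(buf) << "\n"; // prints 42
    // Serialise<double>::write(buf, 1.0); // error: incomplete type
}
```

The `#if 0` block in the header documents the intended interface without actually defining the primary template.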
diff --git a/src/libutil/abstract-setting-to-json.hh b/src/libutil/abstract-setting-to-json.hh
index 7b6c3fcb5..d506dfb74 100644
--- a/src/libutil/abstract-setting-to-json.hh
+++ b/src/libutil/abstract-setting-to-json.hh
@@ -3,6 +3,7 @@
#include <nlohmann/json.hpp>
#include "config.hh"
+#include "json-utils.hh"
namespace nix {
template<typename T>
diff --git a/src/libutil/args.cc b/src/libutil/args.cc
index 081dbeb28..3cf3ed9ca 100644
--- a/src/libutil/args.cc
+++ b/src/libutil/args.cc
@@ -1,10 +1,9 @@
#include "args.hh"
#include "hash.hh"
+#include "json-utils.hh"
#include <glob.h>
-#include <nlohmann/json.hpp>
-
namespace nix {
void Args::addFlag(Flag && flag_)
@@ -247,11 +246,7 @@ nlohmann::json Args::toJSON()
j["arity"] = flag->handler.arity;
if (!flag->labels.empty())
j["labels"] = flag->labels;
- // TODO With C++23 use `std::optional::tranform`
- if (auto & xp = flag->experimentalFeature)
- j["experimental-feature"] = showExperimentalFeature(*xp);
- else
- j["experimental-feature"] = nullptr;
+ j["experimental-feature"] = flag->experimentalFeature;
flags[name] = std::move(j);
}
@@ -416,11 +411,7 @@ nlohmann::json MultiCommand::toJSON()
cat["id"] = command->category();
cat["description"] = trim(categories[command->category()]);
j["category"] = std::move(cat);
- // TODO With C++23 use `std::optional::tranform`
- if (auto xp = command->experimentalFeature())
- cat["experimental-feature"] = showExperimentalFeature(*xp);
- else
- cat["experimental-feature"] = nullptr;
+ cat["experimental-feature"] = command->experimentalFeature();
cmds[name] = std::move(j);
}
diff --git a/src/libutil/config-impl.hh b/src/libutil/config-impl.hh
index b6cae5ec3..b9639e761 100644
--- a/src/libutil/config-impl.hh
+++ b/src/libutil/config-impl.hh
@@ -4,6 +4,9 @@
*
* Template implementations (as opposed to mere declarations).
*
+ * This file is an example of the "impl.hh" pattern. See the
+ * contributing guide.
+ *
* One only needs to include this when one is declaring a
* `BaseClass<CustomType>` setting, or as derived class of such an
* instantiation.
@@ -50,8 +53,11 @@ template<> void BaseSetting<std::set<ExperimentalFeature>>::appendOrSet(std::set
template<typename T>
void BaseSetting<T>::appendOrSet(T && newValue, bool append)
{
- static_assert(!trait::appendable, "using default `appendOrSet` implementation with an appendable type");
+ static_assert(
+ !trait::appendable,
+ "using default `appendOrSet` implementation with an appendable type");
assert(!append);
+
value = std::move(newValue);
}
@@ -68,4 +74,60 @@ void BaseSetting<T>::set(const std::string & str, bool append)
}
}
+template<> void BaseSetting<bool>::convertToArg(Args & args, const std::string & category);
+
+template<typename T>
+void BaseSetting<T>::convertToArg(Args & args, const std::string & category)
+{
+ args.addFlag({
+ .longName = name,
+ .description = fmt("Set the `%s` setting.", name),
+ .category = category,
+ .labels = {"value"},
+ .handler = {[this](std::string s) { overridden = true; set(s); }},
+ .experimentalFeature = experimentalFeature,
+ });
+
+ if (isAppendable())
+ args.addFlag({
+ .longName = "extra-" + name,
+ .description = fmt("Append to the `%s` setting.", name),
+ .category = category,
+ .labels = {"value"},
+ .handler = {[this](std::string s) { overridden = true; set(s, true); }},
+ .experimentalFeature = experimentalFeature,
+ });
+}
+
+#define DECLARE_CONFIG_SERIALISER(TY) \
+ template<> TY BaseSetting< TY >::parse(const std::string & str) const; \
+ template<> std::string BaseSetting< TY >::to_string() const;
+
+DECLARE_CONFIG_SERIALISER(std::string)
+DECLARE_CONFIG_SERIALISER(std::optional<std::string>)
+DECLARE_CONFIG_SERIALISER(bool)
+DECLARE_CONFIG_SERIALISER(Strings)
+DECLARE_CONFIG_SERIALISER(StringSet)
+DECLARE_CONFIG_SERIALISER(StringMap)
+DECLARE_CONFIG_SERIALISER(std::set<ExperimentalFeature>)
+
+template<typename T>
+T BaseSetting<T>::parse(const std::string & str) const
+{
+ static_assert(std::is_integral<T>::value, "Integer required.");
+
+ if (auto n = string2Int<T>(str))
+ return *n;
+ else
+ throw UsageError("setting '%s' has invalid value '%s'", name, str);
+}
+
+template<typename T>
+std::string BaseSetting<T>::to_string() const
+{
+ static_assert(std::is_integral<T>::value, "Integer required.");
+
+ return std::to_string(value);
+}
+
}
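
To see why the `DECLARE_CONFIG_SERIALISER` declarations coexist with a `static_assert`-guarded generic `parse`, here is a standalone sketch of the same split, simplified and not part of this patch: explicit specialisations cover the known non-integral types, and only integral types fall through to the generic.

```c++
#include <iostream>
#include <string>
#include <type_traits>

// Generic parser: only integral setting types fall through to this,
// mirroring the BaseSetting<T>::parse fallback above.
template<typename T>
T parseSetting(const std::string & str)
{
    static_assert(std::is_integral<T>::value, "Integer required.");
    return static_cast<T>(std::stoll(str));
}

// Non-integral types get explicit specialisations, declared in the
// header and defined in the .cc file in the real code.
template<>
std::string parseSetting<std::string>(const std::string & str)
{
    return str;
}

int main()
{
    std::cout << parseSetting<unsigned>("123") << "\n";    // 123
    std::cout << parseSetting<std::string>("abc") << "\n"; // abc
}
```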
diff --git a/src/libutil/config.cc b/src/libutil/config.cc
index 085a884dc..38d406e8a 100644
--- a/src/libutil/config.cc
+++ b/src/libutil/config.cc
@@ -219,29 +219,6 @@ void AbstractSetting::convertToArg(Args & args, const std::string & category)
{
}
-template<typename T>
-void BaseSetting<T>::convertToArg(Args & args, const std::string & category)
-{
- args.addFlag({
- .longName = name,
- .description = fmt("Set the `%s` setting.", name),
- .category = category,
- .labels = {"value"},
- .handler = {[this](std::string s) { overridden = true; set(s); }},
- .experimentalFeature = experimentalFeature,
- });
-
- if (isAppendable())
- args.addFlag({
- .longName = "extra-" + name,
- .description = fmt("Append to the `%s` setting.", name),
- .category = category,
- .labels = {"value"},
- .handler = {[this](std::string s) { overridden = true; set(s, true); }},
- .experimentalFeature = experimentalFeature,
- });
-}
-
template<> std::string BaseSetting<std::string>::parse(const std::string & str) const
{
return str;
@@ -252,21 +229,17 @@ template<> std::string BaseSetting<std::string>::to_string() const
return value;
}
-template<typename T>
-T BaseSetting<T>::parse(const std::string & str) const
+template<> std::optional<std::string> BaseSetting<std::optional<std::string>>::parse(const std::string & str) const
{
- static_assert(std::is_integral<T>::value, "Integer required.");
- if (auto n = string2Int<T>(str))
- return *n;
+ if (str == "")
+ return std::nullopt;
else
- throw UsageError("setting '%s' has invalid value '%s'", name, str);
+ return { str };
}
-template<typename T>
-std::string BaseSetting<T>::to_string() const
+template<> std::string BaseSetting<std::optional<std::string>>::to_string() const
{
- static_assert(std::is_integral<T>::value, "Integer required.");
- return std::to_string(value);
+ return value ? *value : "";
}
template<> bool BaseSetting<bool>::parse(const std::string & str) const
@@ -403,17 +376,27 @@ template class BaseSetting<StringSet>;
template class BaseSetting<StringMap>;
template class BaseSetting<std::set<ExperimentalFeature>>;
-Path PathSetting::parse(const std::string & str) const
+static Path parsePath(const AbstractSetting & s, const std::string & str)
{
- if (str == "") {
- if (allowEmpty)
- return "";
- else
- throw UsageError("setting '%s' cannot be empty", name);
- } else
+ if (str == "")
+ throw UsageError("setting '%s' is a path and paths cannot be empty", s.name);
+ else
return canonPath(str);
}
+Path PathSetting::parse(const std::string & str) const
+{
+ return parsePath(*this, str);
+}
+
+std::optional<Path> OptionalPathSetting::parse(const std::string & str) const
+{
+ if (str == "")
+ return std::nullopt;
+ else
+ return parsePath(*this, str);
+}
+
bool GlobalConfig::set(const std::string & name, const std::string & value)
{
for (auto & config : *configRegistrations)
diff --git a/src/libutil/config.hh b/src/libutil/config.hh
index 2675baed7..cc8532587 100644
--- a/src/libutil/config.hh
+++ b/src/libutil/config.hh
@@ -353,21 +353,20 @@ public:
/**
* A special setting for Paths. These are automatically canonicalised
* (e.g. "/foo//bar/" becomes "/foo/bar").
+ *
+ * It is mandatory to specify a path; i.e. the empty string is not
+ * permitted.
*/
class PathSetting : public BaseSetting<Path>
{
- bool allowEmpty;
-
public:
PathSetting(Config * options,
- bool allowEmpty,
const Path & def,
const std::string & name,
const std::string & description,
const std::set<std::string> & aliases = {})
: BaseSetting<Path>(def, true, name, description, aliases)
- , allowEmpty(allowEmpty)
{
options->addSetting(this);
}
@@ -379,6 +378,30 @@ public:
void operator =(const Path & v) { this->assign(v); }
};
+/**
+ * Like `PathSetting`, but the absence of a path is also allowed.
+ *
+ * `std::optional` is used instead of the empty string for clarity.
+ */
+class OptionalPathSetting : public BaseSetting<std::optional<Path>>
+{
+public:
+
+ OptionalPathSetting(Config * options,
+ const std::optional<Path> & def,
+ const std::string & name,
+ const std::string & description,
+ const std::set<std::string> & aliases = {})
+ : BaseSetting<std::optional<Path>>(def, true, name, description, aliases)
+ {
+ options->addSetting(this);
+ }
+
+ std::optional<Path> parse(const std::string & str) const override;
+
+ void operator =(const std::optional<Path> & v) { this->assign(v); }
+};
+
struct GlobalConfig : public AbstractConfig
{
typedef std::vector<Config*> ConfigRegistrations;
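
A minimal sketch of the behavioural difference between the two classes above: `PathSetting::parse` rejects the empty string, while `OptionalPathSetting::parse` maps it to `std::nullopt`. The helpers below stand in for the real parsers and are not part of this patch.

```c++
#include <iostream>
#include <optional>
#include <stdexcept>
#include <string>

// Stand-in for PathSetting::parse: an empty value is an error.
std::string parseRequiredPath(const std::string & str)
{
    if (str.empty())
        throw std::runtime_error("setting is a path and paths cannot be empty");
    return str; // the real code canonicalises the path here
}

// Stand-in for OptionalPathSetting::parse: an empty value means "unset".
std::optional<std::string> parseOptionalPath(const std::string & str)
{
    if (str.empty())
        return std::nullopt;
    return parseRequiredPath(str);
}

int main()
{
    std::cout << parseRequiredPath("/foo/bar") << "\n";
    std::cout << (parseOptionalPath("") ? "present" : "absent") << "\n"; // absent
}
```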
diff --git a/src/libutil/experimental-features.cc b/src/libutil/experimental-features.cc
index ad0ec0427..7c4112d32 100644
--- a/src/libutil/experimental-features.cc
+++ b/src/libutil/experimental-features.cc
@@ -12,7 +12,7 @@ struct ExperimentalFeatureDetails
std::string_view description;
};
-constexpr std::array<ExperimentalFeatureDetails, 13> xpFeatureDetails = {{
+constexpr std::array<ExperimentalFeatureDetails, 15> xpFeatureDetails = {{
{
.tag = Xp::CaDerivations,
.name = "ca-derivations",
@@ -50,6 +50,8 @@ constexpr std::array<ExperimentalFeatureDetails, 13> xpFeatureDetails = {{
or other impure derivations can rely on impure derivations. Finally,
an impure derivation cannot also be
[content-addressed](#xp-feature-ca-derivations).
+
+ This is a more explicit alternative to using [`builtins.currentTime`](@docroot@/language/builtin-constants.md#builtins-currentTime).
)",
},
{
@@ -207,6 +209,23 @@ constexpr std::array<ExperimentalFeatureDetails, 13> xpFeatureDetails = {{
- "text hashing" derivation outputs, so we can build .drv
files.
+
+ - dependencies in derivations on the outputs of
+ derivations that are themselves derivations outputs.
+ )",
+ },
+ {
+ .tag = Xp::ParseTomlTimestamps,
+ .name = "parse-toml-timestamps",
+ .description = R"(
+ Allow parsing of timestamps in builtins.fromTOML.
+ )",
+ },
+ {
+ .tag = Xp::ReadOnlyLocalStore,
+ .name = "read-only-local-store",
+ .description = R"(
+ Allow the use of the `read-only` parameter in [local store](@docroot@/command-ref/new-cli/nix3-help-stores.md#local-store) URIs.
)",
},
}};
@@ -243,7 +262,7 @@ std::string_view showExperimentalFeature(const ExperimentalFeature tag)
return xpFeatureDetails[(size_t)tag].name;
}
-nlohmann::json documentExperimentalFeatures()
+nlohmann::json documentExperimentalFeatures()
{
StringMap res;
for (auto & xpFeature : xpFeatureDetails)
diff --git a/src/libutil/experimental-features.hh b/src/libutil/experimental-features.hh
index 409100592..faf2e9398 100644
--- a/src/libutil/experimental-features.hh
+++ b/src/libutil/experimental-features.hh
@@ -3,7 +3,7 @@
#include "comparator.hh"
#include "error.hh"
-#include "nlohmann/json_fwd.hpp"
+#include "json-utils.hh"
#include "types.hh"
namespace nix {
@@ -30,6 +30,8 @@ enum struct ExperimentalFeature
DiscardReferences,
DaemonTrustOverride,
DynamicDerivations,
+ ParseTomlTimestamps,
+ ReadOnlyLocalStore,
};
/**
@@ -92,4 +94,10 @@ public:
void to_json(nlohmann::json &, const ExperimentalFeature &);
void from_json(const nlohmann::json &, ExperimentalFeature &);
+/**
+ * It is always rendered as a string
+ */
+template<>
+struct json_avoids_null<ExperimentalFeature> : std::true_type {};
+
}
diff --git a/src/libutil/filesystem.cc b/src/libutil/filesystem.cc
index 56be76ecc..11cc0c0e7 100644
--- a/src/libutil/filesystem.cc
+++ b/src/libutil/filesystem.cc
@@ -63,30 +63,19 @@ std::pair<AutoCloseFD, Path> createTempFile(const Path & prefix)
return {std::move(fd), tmpl};
}
-void createSymlink(const Path & target, const Path & link,
- std::optional<time_t> mtime)
+void createSymlink(const Path & target, const Path & link)
{
if (symlink(target.c_str(), link.c_str()))
throw SysError("creating symlink from '%1%' to '%2%'", link, target);
- if (mtime) {
- struct timeval times[2];
- times[0].tv_sec = *mtime;
- times[0].tv_usec = 0;
- times[1].tv_sec = *mtime;
- times[1].tv_usec = 0;
- if (lutimes(link.c_str(), times))
- throw SysError("setting time of symlink '%s'", link);
- }
}
-void replaceSymlink(const Path & target, const Path & link,
- std::optional<time_t> mtime)
+void replaceSymlink(const Path & target, const Path & link)
{
for (unsigned int n = 0; true; n++) {
Path tmp = canonPath(fmt("%s/.%d_%s", dirOf(link), n, baseNameOf(link)));
try {
- createSymlink(target, tmp, mtime);
+ createSymlink(target, tmp);
} catch (SysError & e) {
if (e.errNo == EEXIST) continue;
throw;
diff --git a/src/libutil/json-utils.cc b/src/libutil/json-utils.cc
new file mode 100644
index 000000000..d7220e71d
--- /dev/null
+++ b/src/libutil/json-utils.cc
@@ -0,0 +1,19 @@
+#include "json-utils.hh"
+
+namespace nix {
+
+const nlohmann::json * get(const nlohmann::json & map, const std::string & key)
+{
+ auto i = map.find(key);
+ if (i == map.end()) return nullptr;
+ return &*i;
+}
+
+nlohmann::json * get(nlohmann::json & map, const std::string & key)
+{
+ auto i = map.find(key);
+ if (i == map.end()) return nullptr;
+ return &*i;
+}
+
+}
diff --git a/src/libutil/json-utils.hh b/src/libutil/json-utils.hh
index eb00e954f..5e63c1af4 100644
--- a/src/libutil/json-utils.hh
+++ b/src/libutil/json-utils.hh
@@ -2,21 +2,77 @@
///@file
#include <nlohmann/json.hpp>
+#include <list>
namespace nix {
-const nlohmann::json * get(const nlohmann::json & map, const std::string & key)
-{
- auto i = map.find(key);
- if (i == map.end()) return nullptr;
- return &*i;
-}
+const nlohmann::json * get(const nlohmann::json & map, const std::string & key);
+
+nlohmann::json * get(nlohmann::json & map, const std::string & key);
+
+/**
+ * For `adl_serializer<std::optional<T>>` below, we need to track what
+ * types are not already using `null`. Only for them can we use `null`
+ * to represent `std::nullopt`.
+ */
+template<typename T>
+struct json_avoids_null;
+
+/**
+ * Handle numbers in default impl
+ */
+template<typename T>
+struct json_avoids_null : std::bool_constant<std::is_integral<T>::value> {};
+
+template<>
+struct json_avoids_null<std::nullptr_t> : std::false_type {};
+
+template<>
+struct json_avoids_null<bool> : std::true_type {};
+
+template<>
+struct json_avoids_null<std::string> : std::true_type {};
+
+template<typename T>
+struct json_avoids_null<std::vector<T>> : std::true_type {};
+
+template<typename T>
+struct json_avoids_null<std::list<T>> : std::true_type {};
+
+template<typename K, typename V>
+struct json_avoids_null<std::map<K, V>> : std::true_type {};
-nlohmann::json * get(nlohmann::json & map, const std::string & key)
-{
- auto i = map.find(key);
- if (i == map.end()) return nullptr;
- return &*i;
}
+namespace nlohmann {
+
+/**
+ * This "instance" is widely requested, see
+ * https://github.com/nlohmann/json/issues/1749, but momentum has stalled
+ * out. Writing it here in Nix as a stop-gap.
+ *
+ * We need to make sure the underlying type does not use `null` for this to
+ * round trip. We do that with a static assert.
+ */
+template<typename T>
+struct adl_serializer<std::optional<T>> {
+ static std::optional<T> from_json(const json & json) {
+ static_assert(
+ nix::json_avoids_null<T>::value,
+ "null is already in use for underlying type's JSON");
+ return json.is_null()
+ ? std::nullopt
+ : std::optional { adl_serializer<T>::from_json(json) };
+ }
+ static void to_json(json & json, std::optional<T> t) {
+ static_assert(
+ nix::json_avoids_null<T>::value,
+ "null is already in use for underlying type's JSON");
+ if (t)
+ adl_serializer<T>::to_json(json, *t);
+ else
+ json = nullptr;
+ }
+};
+
}
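
A small usage sketch of the `adl_serializer<std::optional<T>>` instance above, assuming this patch's `json-utils.hh` is on the include path and a recent nlohmann/json; it is not part of the patch itself.

```c++
#include <cassert>
#include <optional>
#include <string>

#include "json-utils.hh" // from this patch

int main()
{
    // nullopt is encoded as JSON null; the static_assert on
    // json_avoids_null<std::string> guarantees null is free for that.
    nlohmann::json j = std::optional<std::string>{"hello"};
    nlohmann::json none = std::optional<std::string>{};

    assert(j.get<std::string>() == "hello");
    assert(none.is_null());
    assert(!none.get<std::optional<std::string>>().has_value());
}
```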
diff --git a/src/libstore/references.cc b/src/libutil/references.cc
index 345f4528b..7f59b4c09 100644
--- a/src/libstore/references.cc
+++ b/src/libutil/references.cc
@@ -6,6 +6,7 @@
#include <map>
#include <cstdlib>
#include <mutex>
+#include <algorithm>
namespace nix {
@@ -66,69 +67,20 @@ void RefScanSink::operator () (std::string_view data)
}
-PathRefScanSink::PathRefScanSink(StringSet && hashes, std::map<std::string, StorePath> && backMap)
- : RefScanSink(std::move(hashes))
- , backMap(std::move(backMap))
-{ }
-
-PathRefScanSink PathRefScanSink::fromPaths(const StorePathSet & refs)
+RewritingSink::RewritingSink(const std::string & from, const std::string & to, Sink & nextSink)
+ : RewritingSink({{from, to}}, nextSink)
{
- StringSet hashes;
- std::map<std::string, StorePath> backMap;
-
- for (auto & i : refs) {
- std::string hashPart(i.hashPart());
- auto inserted = backMap.emplace(hashPart, i).second;
- assert(inserted);
- hashes.insert(hashPart);
- }
-
- return PathRefScanSink(std::move(hashes), std::move(backMap));
}
-StorePathSet PathRefScanSink::getResultPaths()
+RewritingSink::RewritingSink(const StringMap & rewrites, Sink & nextSink)
+ : rewrites(rewrites), nextSink(nextSink)
{
- /* Map the hashes found back to their store paths. */
- StorePathSet found;
- for (auto & i : getResult()) {
- auto j = backMap.find(i);
- assert(j != backMap.end());
- found.insert(j->second);
+ std::string::size_type maxRewriteSize = 0;
+ for (auto & [from, to] : rewrites) {
+ assert(from.size() == to.size());
+ maxRewriteSize = std::max(maxRewriteSize, from.size());
}
-
- return found;
-}
-
-
-std::pair<StorePathSet, HashResult> scanForReferences(
- const std::string & path,
- const StorePathSet & refs)
-{
- HashSink hashSink { htSHA256 };
- auto found = scanForReferences(hashSink, path, refs);
- auto hash = hashSink.finish();
- return std::pair<StorePathSet, HashResult>(found, hash);
-}
-
-StorePathSet scanForReferences(
- Sink & toTee,
- const Path & path,
- const StorePathSet & refs)
-{
- PathRefScanSink refsSink = PathRefScanSink::fromPaths(refs);
- TeeSink sink { refsSink, toTee };
-
- /* Look for the hashes in the NAR dump of the path. */
- dumpPath(path, sink);
-
- return refsSink.getResultPaths();
-}
-
-
-RewritingSink::RewritingSink(const std::string & from, const std::string & to, Sink & nextSink)
- : from(from), to(to), nextSink(nextSink)
-{
- assert(from.size() == to.size());
+ this->maxRewriteSize = maxRewriteSize;
}
void RewritingSink::operator () (std::string_view data)
@@ -136,13 +88,13 @@ void RewritingSink::operator () (std::string_view data)
std::string s(prev);
s.append(data);
- size_t j = 0;
- while ((j = s.find(from, j)) != std::string::npos) {
- matches.push_back(pos + j);
- s.replace(j, from.size(), to);
- }
+ s = rewriteStrings(s, rewrites);
- prev = s.size() < from.size() ? s : std::string(s, s.size() - from.size() + 1, from.size() - 1);
+ prev = s.size() < maxRewriteSize
+ ? s
+ : maxRewriteSize == 0
+ ? ""
+ : std::string(s, s.size() - maxRewriteSize + 1, maxRewriteSize - 1);
auto consumed = s.size() - prev.size();
diff --git a/src/libstore/references.hh b/src/libutil/references.hh
index 52d71b333..f0baeffe1 100644
--- a/src/libstore/references.hh
+++ b/src/libutil/references.hh
@@ -2,14 +2,9 @@
///@file
#include "hash.hh"
-#include "path.hh"
namespace nix {
-std::pair<StorePathSet, HashResult> scanForReferences(const Path & path, const StorePathSet & refs);
-
-StorePathSet scanForReferences(Sink & toTee, const Path & path, const StorePathSet & refs);
-
class RefScanSink : public Sink
{
StringSet hashes;
@@ -28,28 +23,18 @@ public:
void operator () (std::string_view data) override;
};
-class PathRefScanSink : public RefScanSink
-{
- std::map<std::string, StorePath> backMap;
-
- PathRefScanSink(StringSet && hashes, std::map<std::string, StorePath> && backMap);
-
-public:
-
- static PathRefScanSink fromPaths(const StorePathSet & refs);
-
- StorePathSet getResultPaths();
-};
-
struct RewritingSink : Sink
{
- std::string from, to, prev;
+ const StringMap rewrites;
+ std::string::size_type maxRewriteSize;
+ std::string prev;
Sink & nextSink;
uint64_t pos = 0;
std::vector<uint64_t> matches;
RewritingSink(const std::string & from, const std::string & to, Sink & nextSink);
+ RewritingSink(const StringMap & rewrites, Sink & nextSink);
void operator () (std::string_view data) override;
diff --git a/src/libutil/tests/references.cc b/src/libutil/tests/references.cc
new file mode 100644
index 000000000..a517d9aa1
--- /dev/null
+++ b/src/libutil/tests/references.cc
@@ -0,0 +1,46 @@
+#include "references.hh"
+#include <gtest/gtest.h>
+
+namespace nix {
+
+using std::string;
+
+struct RewriteParams {
+ string originalString, finalString;
+ StringMap rewrites;
+
+ friend std::ostream& operator<<(std::ostream& os, const RewriteParams& bar) {
+ StringSet strRewrites;
+ for (auto & [from, to] : bar.rewrites)
+ strRewrites.insert(from + "->" + to);
+ return os <<
+ "OriginalString: " << bar.originalString << std::endl <<
+ "Rewrites: " << concatStringsSep(",", strRewrites) << std::endl <<
+ "Expected result: " << bar.finalString;
+ }
+};
+
+class RewriteTest : public ::testing::TestWithParam<RewriteParams> {
+};
+
+TEST_P(RewriteTest, IdentityRewriteIsIdentity) {
+ RewriteParams param = GetParam();
+ StringSink rewritten;
+ auto rewriter = RewritingSink(param.rewrites, rewritten);
+ rewriter(param.originalString);
+ rewriter.flush();
+ ASSERT_EQ(rewritten.s, param.finalString);
+}
+
+INSTANTIATE_TEST_CASE_P(
+ references,
+ RewriteTest,
+ ::testing::Values(
+ RewriteParams{ "foooo", "baroo", {{"foo", "bar"}, {"bar", "baz"}}},
+ RewriteParams{ "foooo", "bazoo", {{"fou", "bar"}, {"foo", "baz"}}},
+ RewriteParams{ "foooo", "foooo", {}}
+ )
+);
+
+}
+
diff --git a/src/libutil/tests/tests.cc b/src/libutil/tests/tests.cc
index 250e83a38..f3c1e8248 100644
--- a/src/libutil/tests/tests.cc
+++ b/src/libutil/tests/tests.cc
@@ -202,7 +202,7 @@ namespace nix {
}
TEST(pathExists, bogusPathDoesNotExist) {
- ASSERT_FALSE(pathExists("/home/schnitzel/darmstadt/pommes"));
+ ASSERT_FALSE(pathExists("/schnitzel/darmstadt/pommes"));
}
/* ----------------------------------------------------------------------------
diff --git a/src/libutil/util.cc b/src/libutil/util.cc
index 21d1c8dcd..26f9dc8a8 100644
--- a/src/libutil/util.cc
+++ b/src/libutil/util.cc
@@ -266,6 +266,17 @@ bool pathExists(const Path & path)
return false;
}
+bool pathAccessible(const Path & path)
+{
+ try {
+ return pathExists(path);
+ } catch (SysError & e) {
+ // swallow EPERM
+ if (e.errNo == EPERM) return false;
+ throw;
+ }
+}
+
Path readLink(const Path & path)
{
@@ -1141,9 +1152,9 @@ std::vector<char *> stringsToCharPtrs(const Strings & ss)
}
std::string runProgram(Path program, bool searchPath, const Strings & args,
- const std::optional<std::string> & input)
+ const std::optional<std::string> & input, bool isInteractive)
{
- auto res = runProgram(RunOptions {.program = program, .searchPath = searchPath, .args = args, .input = input});
+ auto res = runProgram(RunOptions {.program = program, .searchPath = searchPath, .args = args, .input = input, .isInteractive = isInteractive});
if (!statusOk(res.first))
throw ExecError(res.first, "program '%1%' %2%", program, statusToString(res.first));
@@ -1193,6 +1204,16 @@ void runProgram2(const RunOptions & options)
// case), so we can't use it if we alter the environment
processOptions.allowVfork = !options.environment;
+ std::optional<Finally<std::function<void()>>> resumeLoggerDefer;
+ if (options.isInteractive) {
+ logger->pause();
+ resumeLoggerDefer.emplace(
+ []() {
+ logger->resume();
+ }
+ );
+ }
+
/* Fork. */
Pid pid = startProcess([&]() {
if (options.environment)
@@ -1832,6 +1853,7 @@ void setStackSize(size_t stackSize)
#if __linux__
static AutoCloseFD fdSavedMountNamespace;
+static AutoCloseFD fdSavedRoot;
#endif
void saveMountNamespace()
@@ -1839,10 +1861,11 @@ void saveMountNamespace()
#if __linux__
static std::once_flag done;
std::call_once(done, []() {
- AutoCloseFD fd = open("/proc/self/ns/mnt", O_RDONLY);
- if (!fd)
+ fdSavedMountNamespace = open("/proc/self/ns/mnt", O_RDONLY);
+ if (!fdSavedMountNamespace)
throw SysError("saving parent mount namespace");
- fdSavedMountNamespace = std::move(fd);
+
+ fdSavedRoot = open("/proc/self/root", O_RDONLY);
});
#endif
}
@@ -1855,9 +1878,16 @@ void restoreMountNamespace()
if (fdSavedMountNamespace && setns(fdSavedMountNamespace.get(), CLONE_NEWNS) == -1)
throw SysError("restoring parent mount namespace");
- if (chdir(savedCwd.c_str()) == -1) {
- throw SysError("restoring cwd");
+
+ if (fdSavedRoot) {
+ if (fchdir(fdSavedRoot.get()))
+ throw SysError("chdir into saved root");
+ if (chroot("."))
+ throw SysError("chroot into saved root");
}
+
+ if (chdir(savedCwd.c_str()) == -1)
+ throw SysError("restoring cwd");
} catch (Error & e) {
debug(e.msg());
}
diff --git a/src/libutil/util.hh b/src/libutil/util.hh
index 040fed68f..b302d6f45 100644
--- a/src/libutil/util.hh
+++ b/src/libutil/util.hh
@@ -121,6 +121,14 @@ struct stat lstat(const Path & path);
bool pathExists(const Path & path);
/**
+ * A version of pathExists that returns false on a permission error.
+ * Useful for inferring default paths across directories that might not
+ * be readable.
+ * @return true iff the given path can be accessed and exists
+ */
+bool pathAccessible(const Path & path);
+
+/**
* Read the contents (target) of a symbolic link. The result is not
* in any way canonicalised.
*/
@@ -248,14 +256,12 @@ inline Paths createDirs(PathView path)
/**
* Create a symlink.
*/
-void createSymlink(const Path & target, const Path & link,
- std::optional<time_t> mtime = {});
+void createSymlink(const Path & target, const Path & link);
/**
* Atomically create or replace a symlink.
*/
-void replaceSymlink(const Path & target, const Path & link,
- std::optional<time_t> mtime = {});
+void replaceSymlink(const Path & target, const Path & link);
void renameFile(const Path & src, const Path & dst);
@@ -415,7 +421,7 @@ pid_t startProcess(std::function<void()> fun, const ProcessOptions & options = P
*/
std::string runProgram(Path program, bool searchPath = false,
const Strings & args = Strings(),
- const std::optional<std::string> & input = {});
+ const std::optional<std::string> & input = {}, bool isInteractive = false);
struct RunOptions
{
@@ -430,6 +436,7 @@ struct RunOptions
Source * standardIn = nullptr;
Sink * standardOut = nullptr;
bool mergeStderrToStdout = false;
+ bool isInteractive = false;
};
std::pair<int, std::string> runProgram(RunOptions && options);
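
A hypothetical call using the new `isInteractive` parameter, assuming this patch's `util.hh`; the program and arguments are made up for illustration.

```c++
#include <iostream>
#include <optional>

#include "util.hh" // from this patch

int main()
{
    // Pausing the logger while the child runs is what isInteractive is for,
    // e.g. so an ssh password prompt is not garbled by progress output.
    auto output = nix::runProgram(
        "ssh", true, {"example.org", "--", "true"},
        std::nullopt, /* isInteractive */ true);
    std::cout << output;
}
```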
diff --git a/src/nix-channel/nix-channel.cc b/src/nix-channel/nix-channel.cc
index 740737ffe..c1c8edd1d 100755
--- a/src/nix-channel/nix-channel.cc
+++ b/src/nix-channel/nix-channel.cc
@@ -177,6 +177,7 @@ static int main_nix_channel(int argc, char ** argv)
cRemove,
cList,
cUpdate,
+ cListGenerations,
cRollback
} cmd = cNone;
std::vector<std::string> args;
@@ -193,6 +194,8 @@ static int main_nix_channel(int argc, char ** argv)
cmd = cList;
} else if (*arg == "--update") {
cmd = cUpdate;
+ } else if (*arg == "--list-generations") {
+ cmd = cListGenerations;
} else if (*arg == "--rollback") {
cmd = cRollback;
} else {
@@ -237,6 +240,11 @@ static int main_nix_channel(int argc, char ** argv)
case cUpdate:
update(StringSet(args.begin(), args.end()));
break;
+ case cListGenerations:
+ if (!args.empty())
+ throw UsageError("'--list-generations' expects no arguments");
+ std::cout << runProgram(settings.nixBinDir + "/nix-env", false, {"--profile", profile, "--list-generations"}) << std::flush;
+ break;
case cRollback:
if (args.size() > 1)
throw UsageError("'--rollback' has at most one argument");
diff --git a/src/nix-collect-garbage/nix-collect-garbage.cc b/src/nix-collect-garbage/nix-collect-garbage.cc
index 3cc57af4e..70af53b28 100644
--- a/src/nix-collect-garbage/nix-collect-garbage.cc
+++ b/src/nix-collect-garbage/nix-collect-garbage.cc
@@ -41,9 +41,10 @@ void removeOldGenerations(std::string dir)
}
if (link.find("link") != std::string::npos) {
printInfo("removing old generations of profile %s", path);
- if (deleteOlderThan != "")
- deleteGenerationsOlderThan(path, deleteOlderThan, dryRun);
- else
+ if (deleteOlderThan != "") {
+ auto t = parseOlderThanTimeSpec(deleteOlderThan);
+ deleteGenerationsOlderThan(path, t, dryRun);
+ } else
deleteOldGenerations(path, dryRun);
}
} else if (type == DT_DIR) {
@@ -77,7 +78,12 @@ static int main_nix_collect_garbage(int argc, char * * argv)
return true;
});
- if (removeOld) removeOldGenerations(profilesDir());
+ if (removeOld) {
+ std::set<Path> dirsToClean = {
+ profilesDir(), settings.nixStateDir + "/profiles", dirOf(getDefaultProfile())};
+ for (auto & dir : dirsToClean)
+ removeOldGenerations(dir);
+ }
// Run the actual garbage collector.
if (!dryRun) {
diff --git a/src/nix-env/nix-env.cc b/src/nix-env/nix-env.cc
index 5e94f2d14..91b073b49 100644
--- a/src/nix-env/nix-env.cc
+++ b/src/nix-env/nix-env.cc
@@ -772,7 +772,7 @@ static void opSet(Globals & globals, Strings opFlags, Strings opArgs)
debug("switching to new user environment");
Path generation = createGeneration(
- ref<LocalFSStore>(store2),
+ *store2,
globals.profile,
drv.queryOutPath());
switchLink(globals.profile, generation);
@@ -1356,13 +1356,14 @@ static void opDeleteGenerations(Globals & globals, Strings opFlags, Strings opAr
if (opArgs.size() == 1 && opArgs.front() == "old") {
deleteOldGenerations(globals.profile, globals.dryRun);
} else if (opArgs.size() == 1 && opArgs.front().find('d') != std::string::npos) {
- deleteGenerationsOlderThan(globals.profile, opArgs.front(), globals.dryRun);
+ auto t = parseOlderThanTimeSpec(opArgs.front());
+ deleteGenerationsOlderThan(globals.profile, t, globals.dryRun);
} else if (opArgs.size() == 1 && opArgs.front().find('+') != std::string::npos) {
if (opArgs.front().size() < 2)
throw Error("invalid number of generations '%1%'", opArgs.front());
auto str_max = opArgs.front().substr(1);
auto max = string2Int<GenerationNumber>(str_max);
- if (!max || *max == 0)
+ if (!max)
throw Error("invalid number of generations to keep '%1%'", opArgs.front());
deleteGenerationsGreaterThan(globals.profile, *max, globals.dryRun);
} else {
diff --git a/src/nix-env/user-env.cc b/src/nix-env/user-env.cc
index 9e916abc4..d12d70f33 100644
--- a/src/nix-env/user-env.cc
+++ b/src/nix-env/user-env.cc
@@ -158,7 +158,7 @@ bool createUserEnv(EvalState & state, DrvInfos & elems,
}
debug("switching to new user environment");
- Path generation = createGeneration(ref<LocalFSStore>(store2), profile, topLevelOut);
+ Path generation = createGeneration(*store2, profile, topLevelOut);
switchLink(profile, generation);
}
diff --git a/src/nix-store/nix-store.cc b/src/nix-store/nix-store.cc
index 40f30eb63..caa0248f1 100644
--- a/src/nix-store/nix-store.cc
+++ b/src/nix-store/nix-store.cc
@@ -12,6 +12,7 @@
#include "shared.hh"
#include "util.hh"
#include "worker-protocol.hh"
+#include "worker-protocol-impl.hh"
#include "graphml.hh"
#include "legacy.hh"
#include "path-with-outputs.hh"
@@ -806,6 +807,9 @@ static void opServe(Strings opFlags, Strings opArgs)
out.flush();
unsigned int clientVersion = readInt(in);
+ WorkerProto::ReadConn rconn { .from = in };
+ WorkerProto::WriteConn wconn { .to = out };
+
auto getBuildSettings = [&]() {
// FIXME: changing options here doesn't work if we're
// building through the daemon.
@@ -837,19 +841,19 @@ static void opServe(Strings opFlags, Strings opArgs)
};
while (true) {
- ServeCommand cmd;
+ ServeProto::Command cmd;
try {
- cmd = (ServeCommand) readInt(in);
+ cmd = (ServeProto::Command) readInt(in);
} catch (EndOfFile & e) {
break;
}
switch (cmd) {
- case cmdQueryValidPaths: {
+ case ServeProto::Command::QueryValidPaths: {
bool lock = readInt(in);
bool substitute = readInt(in);
- auto paths = worker_proto::read(*store, in, Phantom<StorePathSet> {});
+ auto paths = WorkerProto::Serialise<StorePathSet>::read(*store, rconn);
if (lock && writeAllowed)
for (auto & path : paths)
store->addTempRoot(path);
@@ -858,19 +862,19 @@ static void opServe(Strings opFlags, Strings opArgs)
store->substitutePaths(paths);
}
- worker_proto::write(*store, out, store->queryValidPaths(paths));
+ WorkerProto::write(*store, wconn, store->queryValidPaths(paths));
break;
}
- case cmdQueryPathInfos: {
- auto paths = worker_proto::read(*store, in, Phantom<StorePathSet> {});
+ case ServeProto::Command::QueryPathInfos: {
+ auto paths = WorkerProto::Serialise<StorePathSet>::read(*store, rconn);
// !!! Maybe we want a queryPathInfos?
for (auto & i : paths) {
try {
auto info = store->queryPathInfo(i);
out << store->printStorePath(info->path)
<< (info->deriver ? store->printStorePath(*info->deriver) : "");
- worker_proto::write(*store, out, info->references);
+ WorkerProto::write(*store, wconn, info->references);
// !!! Maybe we want compression?
out << info->narSize // downloadSize
<< info->narSize;
@@ -885,24 +889,24 @@ static void opServe(Strings opFlags, Strings opArgs)
break;
}
- case cmdDumpStorePath:
+ case ServeProto::Command::DumpStorePath:
store->narFromPath(store->parseStorePath(readString(in)), out);
break;
- case cmdImportPaths: {
+ case ServeProto::Command::ImportPaths: {
if (!writeAllowed) throw Error("importing paths is not allowed");
store->importPaths(in, NoCheckSigs); // FIXME: should we skip sig checking?
out << 1; // indicate success
break;
}
- case cmdExportPaths: {
+ case ServeProto::Command::ExportPaths: {
readInt(in); // obsolete
- store->exportPaths(worker_proto::read(*store, in, Phantom<StorePathSet> {}), out);
+ store->exportPaths(WorkerProto::Serialise<StorePathSet>::read(*store, rconn), out);
break;
}
- case cmdBuildPaths: {
+ case ServeProto::Command::BuildPaths: {
if (!writeAllowed) throw Error("building paths is not allowed");
@@ -923,7 +927,7 @@ static void opServe(Strings opFlags, Strings opArgs)
break;
}
- case cmdBuildDerivation: { /* Used by hydra-queue-runner. */
+ case ServeProto::Command::BuildDerivation: { /* Used by hydra-queue-runner. */
if (!writeAllowed) throw Error("building paths is not allowed");
@@ -944,22 +948,22 @@ static void opServe(Strings opFlags, Strings opArgs)
DrvOutputs builtOutputs;
for (auto & [output, realisation] : status.builtOutputs)
builtOutputs.insert_or_assign(realisation.id, realisation);
- worker_proto::write(*store, out, builtOutputs);
+ WorkerProto::write(*store, wconn, builtOutputs);
}
break;
}
- case cmdQueryClosure: {
+ case ServeProto::Command::QueryClosure: {
bool includeOutputs = readInt(in);
StorePathSet closure;
- store->computeFSClosure(worker_proto::read(*store, in, Phantom<StorePathSet> {}),
+ store->computeFSClosure(WorkerProto::Serialise<StorePathSet>::read(*store, rconn),
closure, false, includeOutputs);
- worker_proto::write(*store, out, closure);
+ WorkerProto::write(*store, wconn, closure);
break;
}
- case cmdAddToStoreNar: {
+ case ServeProto::Command::AddToStoreNar: {
if (!writeAllowed) throw Error("importing paths is not allowed");
auto path = readString(in);
@@ -970,7 +974,7 @@ static void opServe(Strings opFlags, Strings opArgs)
};
if (deriver != "")
info.deriver = store->parseStorePath(deriver);
- info.references = worker_proto::read(*store, in, Phantom<StorePathSet> {});
+ info.references = WorkerProto::Serialise<StorePathSet>::read(*store, rconn);
in >> info.registrationTime >> info.narSize >> info.ultimate;
info.sigs = readStrings<StringSet>(in);
info.ca = ContentAddress::parseOpt(readString(in));
diff --git a/src/nix/app.cc b/src/nix/app.cc
index fd4569bb4..e678b54f0 100644
--- a/src/nix/app.cc
+++ b/src/nix/app.cc
@@ -7,6 +7,7 @@
#include "names.hh"
#include "command.hh"
#include "derivations.hh"
+#include "downstream-placeholder.hh"
namespace nix {
@@ -23,7 +24,7 @@ StringPairs resolveRewrites(
if (auto drvDep = std::get_if<BuiltPathBuilt>(&dep.path))
for (auto & [ outputName, outputPath ] : drvDep->outputs)
res.emplace(
- downstreamPlaceholder(store, drvDep->drvPath, outputName),
+ DownstreamPlaceholder::unknownCaOutput(drvDep->drvPath, outputName).render(),
store.printStorePath(outputPath)
);
return res;
diff --git a/src/nix/build.md b/src/nix/build.md
index ee414dc86..0fbb39cc3 100644
--- a/src/nix/build.md
+++ b/src/nix/build.md
@@ -44,7 +44,7 @@ R""(
`release.nix`:
```console
- # nix build -f release.nix build.x86_64-linux
+ # nix build --file release.nix build.x86_64-linux
```
* Build a NixOS system configuration from a flake, and make a profile
diff --git a/src/nix/copy.md b/src/nix/copy.md
index 25e0ddadc..199006436 100644
--- a/src/nix/copy.md
+++ b/src/nix/copy.md
@@ -15,7 +15,7 @@ R""(
SSH:
```console
- # nix copy -s --to ssh://server /run/current-system
+ # nix copy --substitute-on-destination --to ssh://server /run/current-system
```
The `-s` flag causes the remote machine to try to substitute missing
diff --git a/src/nix/daemon.cc b/src/nix/daemon.cc
index c1a91c63d..1511f9e6e 100644
--- a/src/nix/daemon.cc
+++ b/src/nix/daemon.cc
@@ -4,6 +4,7 @@
#include "shared.hh"
#include "local-store.hh"
#include "remote-store.hh"
+#include "remote-store-connection.hh"
#include "util.hh"
#include "serialise.hh"
#include "archive.hh"
@@ -24,6 +25,7 @@
#include <sys/stat.h>
#include <sys/socket.h>
#include <sys/un.h>
+#include <sys/select.h>
#include <errno.h>
#include <pwd.h>
#include <grp.h>
@@ -54,19 +56,16 @@ struct AuthorizationSettings : Config {
Setting<Strings> trustedUsers{
this, {"root"}, "trusted-users",
R"(
- A list of names of users (separated by whitespace) that have
- additional rights when connecting to the Nix daemon, such as the
- ability to specify additional binary caches, or to import unsigned
- NARs. You can also specify groups by prefixing them with `@`; for
- instance, `@wheel` means all users in the `wheel` group. The default
- is `root`.
+ A list of user names, separated by whitespace.
+ These users will have additional rights when connecting to the Nix daemon, such as the ability to specify additional [substituters](#conf-substituters), or to import unsigned [NARs](@docroot@/glossary.md#gloss-nar).
+
+ You can also specify groups by prefixing names with `@`.
+ For instance, `@wheel` means all users in the `wheel` group.
> **Warning**
>
- > Adding a user to `trusted-users` is essentially equivalent to
- > giving that user root access to the system. For example, the user
- > can set `sandbox-paths` and thereby obtain read access to
- > directories that are otherwise inacessible to them.
+ > Adding a user to `trusted-users` is essentially equivalent to giving that user root access to the system.
+ > For example, the user can access or replace store path contents that are critical for system security.
)"};
/**
@@ -75,12 +74,16 @@ struct AuthorizationSettings : Config {
Setting<Strings> allowedUsers{
this, {"*"}, "allowed-users",
R"(
- A list of names of users (separated by whitespace) that are allowed
- to connect to the Nix daemon. As with the `trusted-users` option,
- you can specify groups by prefixing them with `@`. Also, you can
- allow all users by specifying `*`. The default is `*`.
+ A list of user names, separated by whitespace.
+ These users are allowed to connect to the Nix daemon.
+
+ You can specify groups by prefixing names with `@`.
+ For instance, `@wheel` means all users in the `wheel` group.
+ Also, you can allow all users by specifying `*`.
- Note that trusted users are always allowed to connect.
+ > **Note**
+ >
+ > Trusted users (set in [`trusted-users`](#conf-trusted-users)) can always connect to the Nix daemon.
)"};
};
diff --git a/src/nix/develop.md b/src/nix/develop.md
index c49b39669..1b5a8aeba 100644
--- a/src/nix/develop.md
+++ b/src/nix/develop.md
@@ -69,7 +69,7 @@ R""(
* Run a series of script commands:
```console
- # nix develop --command bash -c "mkdir build && cmake .. && make"
+ # nix develop --command bash --command "mkdir build && cmake .. && make"
```
# Description
diff --git a/src/nix/eval.md b/src/nix/eval.md
index 3b510737a..48d5aa597 100644
--- a/src/nix/eval.md
+++ b/src/nix/eval.md
@@ -18,7 +18,7 @@ R""(
* Evaluate a Nix expression from a file:
```console
- # nix eval -f ./my-nixpkgs hello.name
+ # nix eval --file ./my-nixpkgs hello.name
```
* Get the current version of the `nixpkgs` flake:
diff --git a/src/nix/flake-check.md b/src/nix/flake-check.md
index 07031c909..c8307f8d8 100644
--- a/src/nix/flake-check.md
+++ b/src/nix/flake-check.md
@@ -68,6 +68,6 @@ The following flake output attributes must be
In addition, the `hydraJobs` output is evaluated in the same way as
Hydra's `hydra-eval-jobs` (i.e. as a arbitrarily deeply nested
attribute set of derivations). Similarly, the
-`legacyPackages`.*system* output is evaluated like `nix-env -qa`.
+`legacyPackages`.*system* output is evaluated like `nix-env --query --available`.
)""
diff --git a/src/nix/flake.cc b/src/nix/flake.cc
index 3db655aeb..b5f5d0cac 100644
--- a/src/nix/flake.cc
+++ b/src/nix/flake.cc
@@ -179,6 +179,8 @@ struct CmdFlakeMetadata : FlakeCommand, MixJSON
j["locked"] = fetchers::attrsToJSON(flake.lockedRef.toAttrs());
if (auto rev = flake.lockedRef.input.getRev())
j["revision"] = rev->to_string(Base16, false);
+ if (auto dirtyRev = fetchers::maybeGetStrAttr(flake.lockedRef.toAttrs(), "dirtyRev"))
+ j["dirtyRevision"] = *dirtyRev;
if (auto revCount = flake.lockedRef.input.getRevCount())
j["revCount"] = *revCount;
if (auto lastModified = flake.lockedRef.input.getLastModified())
@@ -204,6 +206,10 @@ struct CmdFlakeMetadata : FlakeCommand, MixJSON
logger->cout(
ANSI_BOLD "Revision:" ANSI_NORMAL " %s",
rev->to_string(Base16, false));
+ if (auto dirtyRev = fetchers::maybeGetStrAttr(flake.lockedRef.toAttrs(), "dirtyRev"))
+ logger->cout(
+ ANSI_BOLD "Revision:" ANSI_NORMAL " %s",
+ *dirtyRev);
if (auto revCount = flake.lockedRef.input.getRevCount())
logger->cout(
ANSI_BOLD "Revisions:" ANSI_NORMAL " %s",
@@ -259,6 +265,7 @@ struct CmdFlakeInfo : CmdFlakeMetadata
struct CmdFlakeCheck : FlakeCommand
{
bool build = true;
+ bool checkAllSystems = false;
CmdFlakeCheck()
{
@@ -267,6 +274,11 @@ struct CmdFlakeCheck : FlakeCommand
.description = "Do not build checks.",
.handler = {&build, false}
});
+ addFlag({
+ .longName = "all-systems",
+ .description = "Check the outputs for all systems.",
+ .handler = {&checkAllSystems, true}
+ });
}
std::string description() override
@@ -292,6 +304,7 @@ struct CmdFlakeCheck : FlakeCommand
lockFlags.applyNixConfig = true;
auto flake = lockFlake();
+ auto localSystem = std::string(settings.thisSystem.get());
bool hasErrors = false;
auto reportError = [&](const Error & e) {
@@ -307,6 +320,8 @@ struct CmdFlakeCheck : FlakeCommand
}
};
+ std::set<std::string> omittedSystems;
+
// FIXME: rewrite to use EvalCache.
auto resolve = [&] (PosIdx p) {
@@ -327,6 +342,15 @@ struct CmdFlakeCheck : FlakeCommand
reportError(Error("'%s' is not a valid system type, at %s", system, resolve(pos)));
};
+ auto checkSystemType = [&](const std::string & system, const PosIdx pos) {
+ if (!checkAllSystems && system != localSystem) {
+ omittedSystems.insert(system);
+ return false;
+ } else {
+ return true;
+ }
+ };
+
auto checkDerivation = [&](const std::string & attrPath, Value & v, const PosIdx pos) -> std::optional<StorePath> {
try {
auto drvInfo = getDerivation(*state, v, false);
@@ -362,8 +386,10 @@ struct CmdFlakeCheck : FlakeCommand
auto checkOverlay = [&](const std::string & attrPath, Value & v, const PosIdx pos) {
try {
state->forceValue(v, pos);
- if (!v.isLambda()
- || v.lambda.fun->hasFormals()
+ if (!v.isLambda()) {
+ throw Error("overlay is not a function, but %s instead", showType(v));
+ }
+ if (v.lambda.fun->hasFormals()
|| !argHasName(v.lambda.fun->arg, "final"))
throw Error("overlay does not take an argument named 'final'");
auto body = dynamic_cast<ExprLambda *>(v.lambda.fun->body);
@@ -509,16 +535,18 @@ struct CmdFlakeCheck : FlakeCommand
for (auto & attr : *vOutput.attrs) {
const auto & attr_name = state->symbols[attr.name];
checkSystemName(attr_name, attr.pos);
- state->forceAttrs(*attr.value, attr.pos, "");
- for (auto & attr2 : *attr.value->attrs) {
- auto drvPath = checkDerivation(
- fmt("%s.%s.%s", name, attr_name, state->symbols[attr2.name]),
- *attr2.value, attr2.pos);
- if (drvPath && attr_name == settings.thisSystem.get()) {
- drvPaths.push_back(DerivedPath::Built {
- .drvPath = *drvPath,
- .outputs = OutputsSpec::All { },
- });
+ if (checkSystemType(attr_name, attr.pos)) {
+ state->forceAttrs(*attr.value, attr.pos, "");
+ for (auto & attr2 : *attr.value->attrs) {
+ auto drvPath = checkDerivation(
+ fmt("%s.%s.%s", name, attr_name, state->symbols[attr2.name]),
+ *attr2.value, attr2.pos);
+ if (drvPath && attr_name == settings.thisSystem.get()) {
+ drvPaths.push_back(DerivedPath::Built {
+ .drvPath = *drvPath,
+ .outputs = OutputsSpec::All { },
+ });
+ }
}
}
}
@@ -529,9 +557,11 @@ struct CmdFlakeCheck : FlakeCommand
for (auto & attr : *vOutput.attrs) {
const auto & attr_name = state->symbols[attr.name];
checkSystemName(attr_name, attr.pos);
- checkApp(
- fmt("%s.%s", name, attr_name),
- *attr.value, attr.pos);
+ if (checkSystemType(attr_name, attr.pos)) {
+ checkApp(
+ fmt("%s.%s", name, attr_name),
+ *attr.value, attr.pos);
+ };
}
}
@@ -540,11 +570,13 @@ struct CmdFlakeCheck : FlakeCommand
for (auto & attr : *vOutput.attrs) {
const auto & attr_name = state->symbols[attr.name];
checkSystemName(attr_name, attr.pos);
- state->forceAttrs(*attr.value, attr.pos, "");
- for (auto & attr2 : *attr.value->attrs)
- checkDerivation(
- fmt("%s.%s.%s", name, attr_name, state->symbols[attr2.name]),
- *attr2.value, attr2.pos);
+ if (checkSystemType(attr_name, attr.pos)) {
+ state->forceAttrs(*attr.value, attr.pos, "");
+ for (auto & attr2 : *attr.value->attrs)
+ checkDerivation(
+ fmt("%s.%s.%s", name, attr_name, state->symbols[attr2.name]),
+ *attr2.value, attr2.pos);
+ };
}
}
@@ -553,11 +585,13 @@ struct CmdFlakeCheck : FlakeCommand
for (auto & attr : *vOutput.attrs) {
const auto & attr_name = state->symbols[attr.name];
checkSystemName(attr_name, attr.pos);
- state->forceAttrs(*attr.value, attr.pos, "");
- for (auto & attr2 : *attr.value->attrs)
- checkApp(
- fmt("%s.%s.%s", name, attr_name, state->symbols[attr2.name]),
- *attr2.value, attr2.pos);
+ if (checkSystemType(attr_name, attr.pos)) {
+ state->forceAttrs(*attr.value, attr.pos, "");
+ for (auto & attr2 : *attr.value->attrs)
+ checkApp(
+ fmt("%s.%s.%s", name, attr_name, state->symbols[attr2.name]),
+ *attr2.value, attr2.pos);
+ };
}
}
@@ -566,9 +600,11 @@ struct CmdFlakeCheck : FlakeCommand
for (auto & attr : *vOutput.attrs) {
const auto & attr_name = state->symbols[attr.name];
checkSystemName(attr_name, attr.pos);
- checkDerivation(
- fmt("%s.%s", name, attr_name),
- *attr.value, attr.pos);
+ if (checkSystemType(attr_name, attr.pos)) {
+ checkDerivation(
+ fmt("%s.%s", name, attr_name),
+ *attr.value, attr.pos);
+ };
}
}
@@ -577,9 +613,11 @@ struct CmdFlakeCheck : FlakeCommand
for (auto & attr : *vOutput.attrs) {
const auto & attr_name = state->symbols[attr.name];
checkSystemName(attr_name, attr.pos);
- checkApp(
- fmt("%s.%s", name, attr_name),
- *attr.value, attr.pos);
+ if (checkSystemType(attr_name, attr.pos) ) {
+ checkApp(
+ fmt("%s.%s", name, attr_name),
+ *attr.value, attr.pos);
+ };
}
}
@@ -587,6 +625,7 @@ struct CmdFlakeCheck : FlakeCommand
state->forceAttrs(vOutput, pos, "");
for (auto & attr : *vOutput.attrs) {
checkSystemName(state->symbols[attr.name], attr.pos);
+ checkSystemType(state->symbols[attr.name], attr.pos);
// FIXME: do getDerivations?
}
}
@@ -636,9 +675,11 @@ struct CmdFlakeCheck : FlakeCommand
for (auto & attr : *vOutput.attrs) {
const auto & attr_name = state->symbols[attr.name];
checkSystemName(attr_name, attr.pos);
- checkBundler(
- fmt("%s.%s", name, attr_name),
- *attr.value, attr.pos);
+ if (checkSystemType(attr_name, attr.pos)) {
+ checkBundler(
+ fmt("%s.%s", name, attr_name),
+ *attr.value, attr.pos);
+ };
}
}
@@ -647,12 +688,14 @@ struct CmdFlakeCheck : FlakeCommand
for (auto & attr : *vOutput.attrs) {
const auto & attr_name = state->symbols[attr.name];
checkSystemName(attr_name, attr.pos);
- state->forceAttrs(*attr.value, attr.pos, "");
- for (auto & attr2 : *attr.value->attrs) {
- checkBundler(
- fmt("%s.%s.%s", name, attr_name, state->symbols[attr2.name]),
- *attr2.value, attr2.pos);
- }
+ if (checkSystemType(attr_name, attr.pos)) {
+ state->forceAttrs(*attr.value, attr.pos, "");
+ for (auto & attr2 : *attr.value->attrs) {
+ checkBundler(
+ fmt("%s.%s.%s", name, attr_name, state->symbols[attr2.name]),
+ *attr2.value, attr2.pos);
+ }
+ };
}
}
@@ -685,7 +728,15 @@ struct CmdFlakeCheck : FlakeCommand
}
if (hasErrors)
throw Error("some errors were encountered during the evaluation");
- }
+
+ if (!omittedSystems.empty()) {
+ warn(
+ "The check omitted these incompatible systems: %s\n"
+ "Use '--all-systems' to check all.",
+ concatStringsSep(", ", omittedSystems)
+ );
+ };
+ };
};
static Strings defaultTemplateAttrPathsPrefixes{"templates."};
diff --git a/src/nix/flake.md b/src/nix/flake.md
index 456fd0ea1..92f477917 100644
--- a/src/nix/flake.md
+++ b/src/nix/flake.md
@@ -71,8 +71,6 @@ inputs.nixpkgs = {
Here are some examples of flake references in their URL-like representation:
-* `.`: The flake in the current directory.
-* `/home/alice/src/patchelf`: A flake in some other directory.
* `nixpkgs`: The `nixpkgs` entry in the flake registry.
* `nixpkgs/a3a3dda3bacf61e8a39258a0ed9c924eeca8e293`: The `nixpkgs`
entry in the flake registry, with its Git revision overridden to a
@@ -93,6 +91,23 @@ Here are some examples of flake references in their URL-like representation:
* `https://github.com/NixOS/patchelf/archive/master.tar.gz`: A tarball
flake.
+## Path-like syntax
+
+Flakes corresponding to a local path can also be referred to by a direct path reference, either `/absolute/path/to/the/flake` or `./relative/path/to/the/flake` (note that the leading `./` is mandatory for relative paths to avoid any ambiguity).
+
+The semantics of such a path are as follows:
+
+* If the directory is part of a Git repository, then the input will be treated as a `git+file:` URL, otherwise it will be treated as a `path:` URL;
+* If the directory doesn't contain a `flake.nix` file, then Nix will search for such a file upwards in the file system hierarchy until it finds any of:
+ 1. The Git repository root, or
+ 2. The filesystem root (/), or
+ 3. A folder on a different mount point.
+
+### Examples
+
+* `.`: The flake to which the current directory belongs.
+* `/home/alice/src/patchelf`: A flake in some other directory.
+
## Flake reference attributes
The following generic flake reference attributes are supported:
diff --git a/src/nix/main.cc b/src/nix/main.cc
index ce0bed2a3..650c79d14 100644
--- a/src/nix/main.cc
+++ b/src/nix/main.cc
@@ -352,7 +352,7 @@ void mainWrapped(int argc, char * * argv)
return;
}
- if (argc == 2 && std::string(argv[1]) == "__dump-builtins") {
+ if (argc == 2 && std::string(argv[1]) == "__dump-language") {
experimentalFeatureSettings.experimentalFeatures = {
Xp::Flakes,
Xp::FetchClosure,
@@ -360,17 +360,34 @@ void mainWrapped(int argc, char * * argv)
evalSettings.pureEval = false;
EvalState state({}, openStore("dummy://"));
auto res = nlohmann::json::object();
- auto builtins = state.baseEnv.values[0]->attrs;
- for (auto & builtin : *builtins) {
- auto b = nlohmann::json::object();
- if (!builtin.value->isPrimOp()) continue;
- auto primOp = builtin.value->primOp;
- if (!primOp->doc) continue;
- b["arity"] = primOp->arity;
- b["args"] = primOp->args;
- b["doc"] = trim(stripIndentation(primOp->doc));
- res[state.symbols[builtin.name]] = std::move(b);
- }
+ res["builtins"] = ({
+ auto builtinsJson = nlohmann::json::object();
+ auto builtins = state.baseEnv.values[0]->attrs;
+ for (auto & builtin : *builtins) {
+ auto b = nlohmann::json::object();
+ if (!builtin.value->isPrimOp()) continue;
+ auto primOp = builtin.value->primOp;
+ if (!primOp->doc) continue;
+ b["arity"] = primOp->arity;
+ b["args"] = primOp->args;
+ b["doc"] = trim(stripIndentation(primOp->doc));
+ b["experimental-feature"] = primOp->experimentalFeature;
+ builtinsJson[state.symbols[builtin.name]] = std::move(b);
+ }
+ std::move(builtinsJson);
+ });
+ res["constants"] = ({
+ auto constantsJson = nlohmann::json::object();
+ for (auto & [name, info] : state.constantInfos) {
+ auto c = nlohmann::json::object();
+ if (!info.doc) continue;
+ c["doc"] = trim(stripIndentation(info.doc));
+ c["type"] = showType(info.type, false);
+ c["impure-only"] = info.impureOnly;
+ constantsJson[name] = std::move(c);
+ }
+ std::move(constantsJson);
+ });
logger->cout("%s", res);
return;
}
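The reorganized dump is a single JSON object with `builtins` and `constants` keys, populated as in the hunk above. A small consumer sketch (assuming the dump is piped in on stdin and nlohmann/json is available; the field names mirror those set above):

```c++
#include <iostream>
#include <nlohmann/json.hpp>

// Parse the output of `nix __dump-language` and list what it documents.
int main()
{
    auto doc = nlohmann::json::parse(std::cin);
    for (auto & [name, b] : doc.at("builtins").items())
        std::cout << "builtin  " << name << " (arity " << b.at("arity") << ")\n";
    for (auto & [name, c] : doc.at("constants").items())
        std::cout << "constant " << name << " : " << c.at("type").get<std::string>() << "\n";
}
```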
diff --git a/src/nix/nar-ls.md b/src/nix/nar-ls.md
index d373f9715..5a03c5d82 100644
--- a/src/nix/nar-ls.md
+++ b/src/nix/nar-ls.md
@@ -5,7 +5,7 @@ R""(
* To list a specific file in a NAR:
```console
- # nix nar ls -l ./hello.nar /bin/hello
+ # nix nar ls --long ./hello.nar /bin/hello
-r-xr-xr-x 38184 hello
```
@@ -13,7 +13,7 @@ R""(
format:
```console
- # nix nar ls --json -R ./hello.nar /bin
+ # nix nar ls --json --recursive ./hello.nar /bin
{"type":"directory","entries":{"hello":{"type":"regular","size":38184,"executable":true,"narOffset":400}}}
```
diff --git a/src/nix/nix.md b/src/nix/nix.md
index 1ef6c7fcd..e0f459d6b 100644
--- a/src/nix/nix.md
+++ b/src/nix/nix.md
@@ -63,7 +63,7 @@ The following types of installable are supported by most commands:
- [Nix file](#nix-file), optionally qualified by an attribute path
- [Nix expression](#nix-expression), optionally qualified by an attribute path
-For most commands, if no installable is specified, `.` as assumed.
+For most commands, if no installable is specified, `.` is assumed.
That is, Nix will operate on the default flake output attribute of the flake in the current directory.
### Flake output attribute
@@ -102,6 +102,7 @@ way:
available in the flake. If this is undesirable, specify `path:<directory>` explicitly;
For example, if `/foo/bar` is a git repository with the following structure:
+
```
.
└── baz
@@ -197,7 +198,7 @@ operate are determined as follows:
of all outputs of the `glibc` package in the binary cache:
```console
- # nix path-info -S --eval-store auto --store https://cache.nixos.org 'nixpkgs#glibc^*'
+ # nix path-info --closure-size --eval-store auto --store https://cache.nixos.org 'nixpkgs#glibc^*'
/nix/store/g02b1lpbddhymmcjb923kf0l7s9nww58-glibc-2.33-123 33208200
/nix/store/851dp95qqiisjifi639r0zzg5l465ny4-glibc-2.33-123-bin 36142896
/nix/store/kdgs3q6r7xdff1p7a9hnjr43xw2404z7-glibc-2.33-123-debug 155787312
@@ -208,7 +209,7 @@ operate are determined as follows:
and likewise, using a store path to a "drv" file to specify the derivation:
```console
- # nix path-info -S '/nix/store/gzaflydcr6sb3567hap9q6srzx8ggdgg-glibc-2.33-78.drv^*'
+ # nix path-info --closure-size '/nix/store/gzaflydcr6sb3567hap9q6srzx8ggdgg-glibc-2.33-78.drv^*'
```
* If you didn't specify the desired outputs, but the derivation has an
diff --git a/src/nix/path-info.md b/src/nix/path-info.md
index 6ad23a02e..2dda866d0 100644
--- a/src/nix/path-info.md
+++ b/src/nix/path-info.md
@@ -13,7 +13,7 @@ R""(
closure, sorted by size:
```console
- # nix path-info -rS /run/current-system | sort -nk2
+ # nix path-info --recursive --closure-size /run/current-system | sort -nk2
/nix/store/hl5xwp9kdrd1zkm0idm3kkby9q66z404-empty 96
/nix/store/27324qvqhnxj3rncazmxc4mwy79kz8ha-nameservers 112
@@ -25,7 +25,7 @@ R""(
readable sizes:
```console
- # nix path-info -rsSh nixpkgs#rustc
+ # nix path-info --recursive --size --closure-size --human-readable nixpkgs#rustc
/nix/store/01rrgsg5zk3cds0xgdsq40zpk6g51dz9-ncurses-6.2-dev 386.7K 69.1M
/nix/store/0q783wnvixpqz6dxjp16nw296avgczam-libpfm-4.11.0 5.9M 37.4M
@@ -34,7 +34,7 @@ R""(
* Check the existence of a path in a binary cache:
```console
- # nix path-info -r /nix/store/blzxgyvrk32ki6xga10phr4sby2xf25q-geeqie-1.5.1 --store https://cache.nixos.org/
+ # nix path-info --recursive /nix/store/blzxgyvrk32ki6xga10phr4sby2xf25q-geeqie-1.5.1 --store https://cache.nixos.org/
path '/nix/store/blzxgyvrk32ki6xga10phr4sby2xf25q-geeqie-1.5.1' is not valid
```
@@ -57,7 +57,7 @@ R""(
size:
```console
- # nix path-info --json --all -S \
+ # nix path-info --json --all --closure-size \
| jq 'map(select(.closureSize > 1e9)) | sort_by(.closureSize) | map([.path, .closureSize])'
[
…,
diff --git a/src/nix/profile-list.md b/src/nix/profile-list.md
index fa786162f..5d7fcc0ec 100644
--- a/src/nix/profile-list.md
+++ b/src/nix/profile-list.md
@@ -6,26 +6,48 @@ R""(
```console
# nix profile list
- 0 flake:nixpkgs#legacyPackages.x86_64-linux.spotify github:NixOS/nixpkgs/c23db78bbd474c4d0c5c3c551877523b4a50db06#legacyPackages.x86_64-linux.spotify /nix/store/akpdsid105phbbvknjsdh7hl4v3fhjkr-spotify-1.1.46.916.g416cacf1
- 1 flake:nixpkgs#legacyPackages.x86_64-linux.zoom-us github:NixOS/nixpkgs/c23db78bbd474c4d0c5c3c551877523b4a50db06#legacyPackages.x86_64-linux.zoom-us /nix/store/89pmjmbih5qpi7accgacd17ybpgp4xfm-zoom-us-5.4.53350.1027
- 2 flake:blender-bin#packages.x86_64-linux.default github:edolstra/nix-warez/d09d7eea893dcb162e89bc67f6dc1ced14abfc27?dir=blender#packages.x86_64-linux.default /nix/store/zfgralhqjnam662kqsgq6isjw8lhrflz-blender-bin-2.91.0
+ Index: 0
+ Flake attribute: legacyPackages.x86_64-linux.gdb
+ Original flake URL: flake:nixpkgs
+ Locked flake URL: github:NixOS/nixpkgs/7b38b03d76ab71bdc8dc325e3f6338d984cc35ca
+ Store paths: /nix/store/indzcw5wvlhx6vwk7k4iq29q15chvr3d-gdb-11.1
+
+ Index: 1
+ Flake attribute: packages.x86_64-linux.default
+ Original flake URL: flake:blender-bin
+ Locked flake URL: github:edolstra/nix-warez/91f2ffee657bf834e4475865ae336e2379282d34?dir=blender
+ Store paths: /nix/store/i798sxl3j40wpdi1rgf391id1b5klw7g-blender-bin-3.1.2
```
+ Note that you can unambiguously rebuild a package from a profile
+ through its locked flake URL and flake attribute. For example,
+
+ ```console
+ # nix build github:edolstra/nix-warez/91f2ffee657bf834e4475865ae336e2379282d34?dir=blender#packages.x86_64-linux.default
+ ```
+
+ will rebuild the package with index 1 shown above.
+
# Description
This command shows what packages are currently installed in a
-profile. The output consists of one line per package, with the
-following fields:
+profile. For each installed package, it shows the following
+information:
+
+* `Index`: An integer that can be used to unambiguously identify the
+ package in invocations of `nix profile remove` and `nix profile
+ upgrade`.
-* An integer that can be used to unambiguously identify the package in
- invocations of `nix profile remove` and `nix profile upgrade`.
+* `Flake attribute`: The flake output attribute path that provides the
+ package (e.g. `packages.x86_64-linux.hello`).
-* The original ("unlocked") flake reference and output attribute path
- used at installation time.
+* `Original flake URL`: The original ("unlocked") flake reference
+ specified by the user when the package was first installed via `nix
+ profile install`.
-* The locked flake reference to which the unlocked flake reference was
- resolved.
+* `Locked flake URL`: The locked flake reference to which the original
+ flake reference was resolved.
-* The store path(s) of the package.
+* `Store paths`: The store path(s) of the package.
)""
diff --git a/src/nix/profile.cc b/src/nix/profile.cc
index fd63b3519..b833b5192 100644
--- a/src/nix/profile.cc
+++ b/src/nix/profile.cc
@@ -21,7 +21,7 @@ struct ProfileElementSource
{
FlakeRef originalRef;
// FIXME: record original attrpath.
- FlakeRef resolvedRef;
+ FlakeRef lockedRef;
std::string attrPath;
ExtendedOutputsSpec outputs;
@@ -31,6 +31,11 @@ struct ProfileElementSource
std::tuple(originalRef.to_string(), attrPath, outputs) <
std::tuple(other.originalRef.to_string(), other.attrPath, other.outputs);
}
+
+ std::string to_string() const
+ {
+ return fmt("%s#%s%s", originalRef, attrPath, outputs.to_string());
+ }
};
const int defaultPriority = 5;
@@ -42,16 +47,30 @@ struct ProfileElement
bool active = true;
int priority = defaultPriority;
- std::string describe() const
+ std::string identifier() const
{
if (source)
- return fmt("%s#%s%s", source->originalRef, source->attrPath, source->outputs.to_string());
+ return source->to_string();
StringSet names;
for (auto & path : storePaths)
names.insert(DrvName(path.name()).name);
return concatStringsSep(", ", names);
}
+ /**
+ * Return the set of installables corresponding to the current element:
+ * either its single flake reference, or its plain store paths
+ */
+ std::set<std::string> toInstallables(Store & store)
+ {
+ if (source)
+ return {source->to_string()};
+ StringSet rawPaths;
+ for (auto & path : storePaths)
+ rawPaths.insert(store.printStorePath(path));
+ return rawPaths;
+ }
+
std::string versions() const
{
StringSet versions;
@@ -62,7 +81,7 @@ struct ProfileElement
bool operator < (const ProfileElement & other) const
{
- return std::tuple(describe(), storePaths) < std::tuple(other.describe(), other.storePaths);
+ return std::tuple(identifier(), storePaths) < std::tuple(other.identifier(), other.storePaths);
}
void updateStorePaths(
@@ -149,7 +168,7 @@ struct ProfileManifest
}
}
- std::string toJSON(Store & store) const
+ nlohmann::json toJSON(Store & store) const
{
auto array = nlohmann::json::array();
for (auto & element : elements) {
@@ -162,7 +181,7 @@ struct ProfileManifest
obj["priority"] = element.priority;
if (element.source) {
obj["originalUrl"] = element.source->originalRef.to_string();
- obj["url"] = element.source->resolvedRef.to_string();
+ obj["url"] = element.source->lockedRef.to_string();
obj["attrPath"] = element.source->attrPath;
obj["outputs"] = element.source->outputs;
}
@@ -171,7 +190,7 @@ struct ProfileManifest
nlohmann::json json;
json["version"] = 2;
json["elements"] = array;
- return json.dump();
+ return json;
}
StorePath build(ref<Store> store)
@@ -191,7 +210,7 @@ struct ProfileManifest
buildProfile(tempDir, std::move(pkgs));
- writeFile(tempDir + "/manifest.json", toJSON(*store));
+ writeFile(tempDir + "/manifest.json", toJSON(*store).dump());
/* Add the symlink tree to the store. */
StringSink sink;
@@ -237,13 +256,13 @@ struct ProfileManifest
bool changes = false;
while (i != prevElems.end() || j != curElems.end()) {
- if (j != curElems.end() && (i == prevElems.end() || i->describe() > j->describe())) {
- logger->cout("%s%s: ∅ -> %s", indent, j->describe(), j->versions());
+ if (j != curElems.end() && (i == prevElems.end() || i->identifier() > j->identifier())) {
+ logger->cout("%s%s: ∅ -> %s", indent, j->identifier(), j->versions());
changes = true;
++j;
}
- else if (i != prevElems.end() && (j == curElems.end() || i->describe() < j->describe())) {
- logger->cout("%s%s: %s -> ∅", indent, i->describe(), i->versions());
+ else if (i != prevElems.end() && (j == curElems.end() || i->identifier() < j->identifier())) {
+ logger->cout("%s%s: %s -> ∅", indent, i->identifier(), i->versions());
changes = true;
++i;
}
@@ -251,7 +270,7 @@ struct ProfileManifest
auto v1 = i->versions();
auto v2 = j->versions();
if (v1 != v2) {
- logger->cout("%s%s: %s -> %s", indent, i->describe(), v1, v2);
+ logger->cout("%s%s: %s -> %s", indent, i->identifier(), v1, v2);
changes = true;
}
++i;
@@ -330,7 +349,7 @@ struct CmdProfileInstall : InstallablesCommand, MixDefaultProfile
if (auto * info2 = dynamic_cast<ExtraPathInfoFlake *>(&*info)) {
element.source = ProfileElementSource {
.originalRef = info2->flake.originalRef,
- .resolvedRef = info2->flake.resolvedRef,
+ .lockedRef = info2->flake.lockedRef,
.attrPath = info2->value.attrPath,
.outputs = info2->value.extendedOutputsSpec,
};
@@ -363,10 +382,10 @@ struct CmdProfileInstall : InstallablesCommand, MixDefaultProfile
auto profileElement = *it;
for (auto & storePath : profileElement.storePaths) {
if (conflictError.fileA.starts_with(store->printStorePath(storePath))) {
- return std::pair(conflictError.fileA, profileElement.source->originalRef);
+ return std::pair(conflictError.fileA, profileElement.toInstallables(*store));
}
if (conflictError.fileB.starts_with(store->printStorePath(storePath))) {
- return std::pair(conflictError.fileB, profileElement.source->originalRef);
+ return std::pair(conflictError.fileB, profileElement.toInstallables(*store));
}
}
}
@@ -375,9 +394,9 @@ struct CmdProfileInstall : InstallablesCommand, MixDefaultProfile
// There are 2 conflicting files. We need to find out which one is from the already installed package and
// which one is from the new package that is being installed.
// The first matching package is the one that was already installed (original).
- auto [originalConflictingFilePath, originalConflictingRef] = findRefByFilePath(manifest.elements.begin(), manifest.elements.end());
+ auto [originalConflictingFilePath, originalConflictingRefs] = findRefByFilePath(manifest.elements.begin(), manifest.elements.end());
// The last matching package is the one that was going to be installed (new).
- auto [newConflictingFilePath, newConflictingRef] = findRefByFilePath(manifest.elements.rbegin(), manifest.elements.rend());
+ auto [newConflictingFilePath, newConflictingRefs] = findRefByFilePath(manifest.elements.rbegin(), manifest.elements.rend());
throw Error(
"An existing package already provides the following file:\n"
@@ -403,8 +422,8 @@ struct CmdProfileInstall : InstallablesCommand, MixDefaultProfile
" nix profile install %4% --priority %7%\n",
originalConflictingFilePath,
newConflictingFilePath,
- originalConflictingRef.to_string(),
- newConflictingRef.to_string(),
+ concatStringsSep(" ", originalConflictingRefs),
+ concatStringsSep(" ", newConflictingRefs),
conflictError.priority,
conflictError.priority - 1,
conflictError.priority + 1
@@ -491,7 +510,7 @@ struct CmdProfileRemove : virtual EvalCommand, MixDefaultProfile, MixProfileElem
if (!matches(*store, element, i, matchers)) {
newManifest.elements.push_back(std::move(element));
} else {
- notice("removing '%s'", element.describe());
+ notice("removing '%s'", element.identifier());
}
}
@@ -569,14 +588,14 @@ struct CmdProfileUpgrade : virtual SourceExprCommand, MixDefaultProfile, MixProf
assert(infop);
auto & info = *infop;
- if (element.source->resolvedRef == info.flake.resolvedRef) continue;
+ if (element.source->lockedRef == info.flake.lockedRef) continue;
printInfo("upgrading '%s' from flake '%s' to '%s'",
- element.source->attrPath, element.source->resolvedRef, info.flake.resolvedRef);
+ element.source->attrPath, element.source->lockedRef, info.flake.lockedRef);
element.source = ProfileElementSource {
.originalRef = installable->flakeRef,
- .resolvedRef = info.flake.resolvedRef,
+ .lockedRef = info.flake.lockedRef,
.attrPath = info.value.attrPath,
.outputs = installable->extendedOutputsSpec,
};
@@ -616,7 +635,7 @@ struct CmdProfileUpgrade : virtual SourceExprCommand, MixDefaultProfile, MixProf
}
};
-struct CmdProfileList : virtual EvalCommand, virtual StoreCommand, MixDefaultProfile
+struct CmdProfileList : virtual EvalCommand, virtual StoreCommand, MixDefaultProfile, MixJSON
{
std::string description() override
{
@@ -634,12 +653,22 @@ struct CmdProfileList : virtual EvalCommand, virtual StoreCommand, MixDefaultPro
{
ProfileManifest manifest(*getEvalState(), *profile);
- for (size_t i = 0; i < manifest.elements.size(); ++i) {
- auto & element(manifest.elements[i]);
- logger->cout("%d %s %s %s", i,
- element.source ? element.source->originalRef.to_string() + "#" + element.source->attrPath + element.source->outputs.to_string() : "-",
- element.source ? element.source->resolvedRef.to_string() + "#" + element.source->attrPath + element.source->outputs.to_string() : "-",
- concatStringsSep(" ", store->printStorePathSet(element.storePaths)));
+ if (json) {
+ std::cout << manifest.toJSON(*store).dump() << "\n";
+ } else {
+ for (size_t i = 0; i < manifest.elements.size(); ++i) {
+ auto & element(manifest.elements[i]);
+ if (i) logger->cout("");
+ logger->cout("Index: " ANSI_BOLD "%s" ANSI_NORMAL "%s",
+ i,
+ element.active ? "" : " " ANSI_RED "(inactive)" ANSI_NORMAL);
+ if (element.source) {
+ logger->cout("Flake attribute: %s%s", element.source->attrPath, element.source->outputs.to_string());
+ logger->cout("Original flake URL: %s", element.source->originalRef.to_string());
+ logger->cout("Locked flake URL: %s", element.source->lockedRef.to_string());
+ }
+ logger->cout("Store paths: %s", concatStringsSep(" ", store->printStorePathSet(element.storePaths)));
+ }
}
}
};
@@ -787,9 +816,10 @@ struct CmdProfileWipeHistory : virtual StoreCommand, MixDefaultProfile, MixDryRu
void run(ref<Store> store) override
{
- if (minAge)
- deleteGenerationsOlderThan(*profile, *minAge, dryRun);
- else
+ if (minAge) {
+ auto t = parseOlderThanTimeSpec(*minAge);
+ deleteGenerationsOlderThan(*profile, t, dryRun);
+ } else
deleteOldGenerations(*profile, dryRun);
}
};
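The wipe-history hunk above now parses the age argument once, via `parseOlderThanTimeSpec`, before walking the generations. As a rough idea of what such a parser does, here is a sketch under the assumption that the spec is a number of days such as `30d`; the function name below is invented and the real parser may accept other formats:

```c++
#include <chrono>
#include <ctime>
#include <stdexcept>
#include <string>

// Sketch: turn a spec like "30d" into an absolute cutoff time.
std::time_t parseOlderThanSketch(const std::string & spec)
{
    if (spec.empty() || spec.back() != 'd')
        throw std::invalid_argument("expected a spec like '30d'");
    int days = std::stoi(spec.substr(0, spec.size() - 1));
    using namespace std::chrono;
    return system_clock::to_time_t(system_clock::now() - hours(24) * days);
}
```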
diff --git a/src/nix/search.md b/src/nix/search.md
index 4caa90654..0c5d22549 100644
--- a/src/nix/search.md
+++ b/src/nix/search.md
@@ -52,12 +52,12 @@ R""(
* Search for packages containing `neovim` but hide ones containing either `gui` or `python`:
```console
- # nix search nixpkgs neovim -e 'python|gui'
+ # nix search nixpkgs neovim --exclude 'python|gui'
```
or
```console
- # nix search nixpkgs neovim -e 'python' -e 'gui'
+ # nix search nixpkgs neovim --exclude 'python' --exclude 'gui'
```
# Description
diff --git a/src/nix/shell.md b/src/nix/shell.md
index 13a389103..1668104b1 100644
--- a/src/nix/shell.md
+++ b/src/nix/shell.md
@@ -19,26 +19,26 @@ R""(
* Run GNU Hello:
```console
- # nix shell nixpkgs#hello -c hello --greeting 'Hi everybody!'
+ # nix shell nixpkgs#hello --command hello --greeting 'Hi everybody!'
Hi everybody!
```
* Run multiple commands in a shell environment:
```console
- # nix shell nixpkgs#gnumake -c sh -c "cd src && make"
+ # nix shell nixpkgs#gnumake --command sh -c "cd src && make"
```
* Run GNU Hello in a chroot store:
```console
- # nix shell --store ~/my-nix nixpkgs#hello -c hello
+ # nix shell --store ~/my-nix nixpkgs#hello --command hello
```
* Start a shell providing GNU Hello in a chroot store:
```console
- # nix shell --store ~/my-nix nixpkgs#hello nixpkgs#bashInteractive -c bash
+ # nix shell --store ~/my-nix nixpkgs#hello nixpkgs#bashInteractive --command bash
```
Note that it's necessary to specify `bash` explicitly because your
diff --git a/src/nix/store-ls.md b/src/nix/store-ls.md
index 836efce42..14c4627c9 100644
--- a/src/nix/store-ls.md
+++ b/src/nix/store-ls.md
@@ -5,7 +5,7 @@ R""(
* To list the contents of a store path in a binary cache:
```console
- # nix store ls --store https://cache.nixos.org/ -lR /nix/store/0i2jd68mp5g6h2sa5k9c85rb80sn8hi9-hello-2.10
+ # nix store ls --store https://cache.nixos.org/ --long --recursive /nix/store/0i2jd68mp5g6h2sa5k9c85rb80sn8hi9-hello-2.10
dr-xr-xr-x 0 ./bin
-r-xr-xr-x 38184 ./bin/hello
dr-xr-xr-x 0 ./share
@@ -15,7 +15,7 @@ R""(
* To show information about a specific file in a binary cache:
```console
- # nix store ls --store https://cache.nixos.org/ -l /nix/store/0i2jd68mp5g6h2sa5k9c85rb80sn8hi9-hello-2.10/bin/hello
+ # nix store ls --store https://cache.nixos.org/ --long /nix/store/0i2jd68mp5g6h2sa5k9c85rb80sn8hi9-hello-2.10/bin/hello
-r-xr-xr-x 38184 hello
```
diff --git a/src/nix/upgrade-nix.cc b/src/nix/upgrade-nix.cc
index 3997c98bf..d05c23fb7 100644
--- a/src/nix/upgrade-nix.cc
+++ b/src/nix/upgrade-nix.cc
@@ -146,7 +146,7 @@ struct CmdUpgradeNix : MixDryRun, StoreCommand
auto req = FileTransferRequest(storePathsUrl);
auto res = getFileTransfer()->download(req);
- auto state = std::make_unique<EvalState>(Strings(), store);
+ auto state = std::make_unique<EvalState>(SearchPath{}, store);
auto v = state->allocValue();
state->eval(state->parseExprFromString(res.data, state->rootPath(CanonPath("/no-such-path"))), *v);
Bindings & bindings(*state->allocBindings(0));
diff --git a/src/nix/upgrade-nix.md b/src/nix/upgrade-nix.md
index 08757aebd..cce88c397 100644
--- a/src/nix/upgrade-nix.md
+++ b/src/nix/upgrade-nix.md
@@ -11,7 +11,7 @@ R""(
* Upgrade Nix in a specific profile:
```console
- # nix upgrade-nix -p ~alice/.local/state/nix/profiles/profile
+ # nix upgrade-nix --profile ~alice/.local/state/nix/profiles/profile
```
# Description
diff --git a/src/nix/verify.md b/src/nix/verify.md
index cc1122c02..e1d55eab4 100644
--- a/src/nix/verify.md
+++ b/src/nix/verify.md
@@ -12,7 +12,7 @@ R""(
signatures:
```console
- # nix store verify -r -n2 --no-contents $(type -p firefox)
+ # nix store verify --recursive --sigs-needed 2 --no-contents $(type -p firefox)
```
* Verify a store path in the binary cache `https://cache.nixos.org/`: