Diffstat (limited to 'src')
-rw-r--r--  src/build-remote/build-remote.cc | 16
-rw-r--r--  src/libcmd/command.cc | 23
-rw-r--r--  src/libcmd/command.hh | 26
-rw-r--r--  src/libcmd/installables.cc | 70
-rw-r--r--  src/libcmd/local.mk | 4
-rw-r--r--  src/libcmd/markdown.cc | 8
-rw-r--r--  src/libexpr/attr-set.hh | 8
-rw-r--r--  src/libexpr/eval.cc | 443
-rw-r--r--  src/libexpr/eval.hh | 43
-rw-r--r--  src/libexpr/flake/config.cc | 23
-rw-r--r--  src/libexpr/flake/flake.cc | 166
-rw-r--r--  src/libexpr/flake/flake.hh | 12
-rw-r--r--  src/libexpr/flake/flakeref.cc | 6
-rw-r--r--  src/libexpr/get-drvs.cc | 22
-rw-r--r--  src/libexpr/lexer.l | 95
-rw-r--r--  src/libexpr/local.mk | 4
-rw-r--r--  src/libexpr/nixexpr.cc | 57
-rw-r--r--  src/libexpr/nixexpr.hh | 52
-rw-r--r--  src/libexpr/parser.y | 86
-rw-r--r--  src/libexpr/primops.cc | 422
-rw-r--r--  src/libexpr/primops.hh | 4
-rw-r--r--  src/libexpr/primops/context.cc | 7
-rw-r--r--  src/libexpr/primops/fetchMercurial.cc | 5
-rw-r--r--  src/libexpr/primops/fetchTree.cc | 141
-rw-r--r--  src/libexpr/value-to-json.cc | 26
-rw-r--r--  src/libexpr/value-to-json.hh | 4
-rw-r--r--  src/libexpr/value-to-xml.cc | 31
-rw-r--r--  src/libexpr/value-to-xml.hh | 4
-rw-r--r--  src/libexpr/value.hh | 35
-rw-r--r--  src/libfetchers/fetchers.hh | 3
-rw-r--r--  src/libfetchers/git.cc | 45
-rw-r--r--  src/libfetchers/github.cc | 10
-rw-r--r--  src/libfetchers/local.mk | 2
-rw-r--r--  src/libfetchers/mercurial.cc | 44
-rw-r--r--  src/libfetchers/path.cc | 26
-rw-r--r--  src/libmain/local.mk | 4
-rw-r--r--  src/libmain/progress-bar.cc | 18
-rw-r--r--  src/libmain/shared.cc | 48
-rw-r--r--  src/libmain/shared.hh | 1
-rw-r--r--  src/libstore/binary-cache-store.cc | 71
-rw-r--r--  src/libstore/binary-cache-store.hh | 15
-rw-r--r--  src/libstore/build/derivation-goal.cc | 30
-rw-r--r--  src/libstore/build/drv-output-substitution-goal.cc | 44
-rw-r--r--  src/libstore/build/drv-output-substitution-goal.hh | 14
-rw-r--r--  src/libstore/build/entry-points.cc | 9
-rw-r--r--  src/libstore/build/local-derivation-goal.cc | 126
-rw-r--r--  src/libstore/build/worker.cc | 6
-rw-r--r--  src/libstore/content-address.cc | 22
-rw-r--r--  src/libstore/daemon.cc | 62
-rw-r--r--  src/libstore/derivations.cc | 46
-rw-r--r--  src/libstore/derivations.hh | 4
-rw-r--r--  src/libstore/derived-path.cc | 10
-rw-r--r--  src/libstore/dummy-store.cc | 5
-rw-r--r--  src/libstore/filetransfer.cc | 21
-rw-r--r--  src/libstore/gc.cc | 763
-rw-r--r--  src/libstore/globals.cc | 20
-rw-r--r--  src/libstore/globals.hh | 19
-rw-r--r--  src/libstore/http-binary-cache-store.cc | 4
-rw-r--r--  src/libstore/legacy-ssh-store.cc | 30
-rw-r--r--  src/libstore/local-store.cc | 86
-rw-r--r--  src/libstore/local-store.hh | 36
-rw-r--r--  src/libstore/local.mk | 4
-rw-r--r--  src/libstore/machines.cc | 96
-rw-r--r--  src/libstore/misc.cc | 9
-rw-r--r--  src/libstore/names.cc | 2
-rw-r--r--  src/libstore/names.hh | 2
-rw-r--r--  src/libstore/path-with-outputs.cc | 4
-rw-r--r--  src/libstore/pathlocks.cc | 13
-rw-r--r--  src/libstore/pathlocks.hh | 14
-rw-r--r--  src/libstore/profiles.cc | 37
-rw-r--r--  src/libstore/profiles.hh | 9
-rw-r--r--  src/libstore/references.cc | 93
-rw-r--r--  src/libstore/references.hh | 24
-rw-r--r--  src/libstore/remote-store.cc | 82
-rw-r--r--  src/libstore/remote-store.hh | 12
-rw-r--r--  src/libstore/s3-binary-cache-store.cc | 6
-rw-r--r--  src/libstore/sandbox-defaults.sb | 5
-rw-r--r--  src/libstore/serve-protocol.hh | 2
-rw-r--r--  src/libstore/sqlite.cc | 7
-rw-r--r--  src/libstore/ssh-store.cc | 5
-rw-r--r--  src/libstore/store-api.cc | 119
-rw-r--r--  src/libstore/store-api.hh | 50
-rw-r--r--  src/libstore/tests/local.mk | 15
-rw-r--r--  src/libstore/tests/machines.cc | 169
-rw-r--r--  src/libstore/tests/references.cc | 45
-rw-r--r--  src/libstore/tests/test-data/machines.bad_format | 1
-rw-r--r--  src/libstore/tests/test-data/machines.valid | 3
-rw-r--r--  src/libstore/uds-remote-store.cc | 28
-rw-r--r--  src/libstore/uds-remote-store.hh | 6
-rw-r--r--  src/libutil/ansicolor.hh | 2
-rw-r--r--  src/libutil/archive.cc | 4
-rw-r--r--  src/libutil/args.cc | 1
-rw-r--r--  src/libutil/args.hh | 16
-rw-r--r--  src/libutil/compression.cc | 24
-rw-r--r--  src/libutil/compression.hh | 4
-rw-r--r--  src/libutil/config.cc | 32
-rw-r--r--  src/libutil/config.hh | 3
-rw-r--r--  src/libutil/error.cc | 10
-rw-r--r--  src/libutil/experimental-features.cc | 59
-rw-r--r--  src/libutil/experimental-features.hh | 56
-rw-r--r--  src/libutil/fmt.hh | 2
-rw-r--r--  src/libutil/local.mk | 2
-rw-r--r--  src/libutil/logging.cc | 4
-rw-r--r--  src/libutil/logging.hh | 5
-rw-r--r--  src/libutil/ref.hh | 45
-rw-r--r--  src/libutil/serialise.cc | 3
-rw-r--r--  src/libutil/tarfile.cc | 33
-rw-r--r--  src/libutil/tarfile.hh | 3
-rw-r--r--  src/libutil/tests/logging.cc | 4
-rw-r--r--  src/libutil/tests/tests.cc | 17
-rw-r--r--  src/libutil/util.cc | 159
-rw-r--r--  src/libutil/util.hh | 63
-rwxr-xr-x  src/nix-build/nix-build.cc | 14
-rw-r--r--  src/nix-env/nix-env.cc | 225
-rw-r--r--  src/nix-env/user-env.cc | 4
-rw-r--r--  src/nix-instantiate/nix-instantiate.cc | 4
-rw-r--r--  src/nix-store/nix-store.cc | 3
-rw-r--r--  src/nix/build.cc | 6
-rw-r--r--  src/nix/bundle.cc | 2
-rw-r--r--  src/nix/copy.cc | 2
-rw-r--r--  src/nix/daemon.cc | 6
-rw-r--r--  src/nix/develop.cc | 25
-rw-r--r--  src/nix/develop.md | 1
-rw-r--r--  src/nix/eval.cc | 2
-rw-r--r--  src/nix/flake-check.md | 18
-rw-r--r--  src/nix/flake-show.md | 3
-rw-r--r--  src/nix/flake.cc | 158
-rw-r--r--  src/nix/flake.md | 2
-rw-r--r--  src/nix/local.mk | 4
-rw-r--r--  src/nix/log.cc | 4
-rw-r--r--  src/nix/main.cc | 61
-rw-r--r--  src/nix/make-content-addressable.cc | 2
-rw-r--r--  src/nix/path-info.cc | 2
-rw-r--r--  src/nix/path-info.md | 2
-rw-r--r--  src/nix/prefetch.cc | 22
-rw-r--r--  src/nix/profile-history.md | 4
-rw-r--r--  src/nix/profile-rollback.md | 26
-rw-r--r--  src/nix/profile-wipe-history.md | 20
-rw-r--r--  src/nix/profile.cc | 92
-rw-r--r--  src/nix/realisation.cc | 4
-rw-r--r--  src/nix/registry.cc | 1
-rw-r--r--  src/nix/registry.md | 4
-rw-r--r--  src/nix/repl.cc | 94
-rw-r--r--  src/nix/repl.md | 7
-rw-r--r--  src/nix/run.cc | 2
-rw-r--r--  src/nix/search.cc | 1
-rw-r--r--  src/nix/show-derivation.cc | 8
-rw-r--r--  src/nix/sigs.cc | 7
-rw-r--r--  src/nix/store-delete.cc | 2
-rw-r--r--  src/nix/store-repair.cc | 2
-rw-r--r--  src/nix/verify.cc | 2
151 files changed, 3743 insertions, 2005 deletions
diff --git a/src/build-remote/build-remote.cc b/src/build-remote/build-remote.cc
index 0559aeaf4..9d541b45d 100644
--- a/src/build-remote/build-remote.cc
+++ b/src/build-remote/build-remote.cc
@@ -18,6 +18,7 @@
#include "derivations.hh"
#include "local-store.hh"
#include "legacy.hh"
+#include "experimental-features.hh"
using namespace nix;
using std::cin;
@@ -130,11 +131,14 @@ static int main_build_remote(int argc, char * * argv)
for (auto & m : machines) {
debug("considering building on remote machine '%s'", m.storeUri);
- if (m.enabled && std::find(m.systemTypes.begin(),
- m.systemTypes.end(),
- neededSystem) != m.systemTypes.end() &&
+ if (m.enabled
+ && (neededSystem == "builtin"
+ || std::find(m.systemTypes.begin(),
+ m.systemTypes.end(),
+ neededSystem) != m.systemTypes.end()) &&
m.allSupported(requiredFeatures) &&
- m.mandatoryMet(requiredFeatures)) {
+ m.mandatoryMet(requiredFeatures))
+ {
rightType = true;
AutoCloseFD free;
uint64_t load = 0;
@@ -295,7 +299,7 @@ connected:
std::set<Realisation> missingRealisations;
StorePathSet missingPaths;
- if (settings.isExperimentalFeatureEnabled("ca-derivations") && !derivationHasKnownOutputPaths(drv.type())) {
+ if (settings.isExperimentalFeatureEnabled(Xp::CaDerivations) && !derivationHasKnownOutputPaths(drv.type())) {
for (auto & outputName : wantedOutputs) {
auto thisOutputHash = outputHashes.at(outputName);
auto thisOutputId = DrvOutput{ thisOutputHash, outputName };
@@ -327,7 +331,7 @@ connected:
for (auto & realisation : missingRealisations) {
// Should hold, because if the feature isn't enabled the set
// of missing realisations should be empty
- settings.requireExperimentalFeature("ca-derivations");
+ settings.requireExperimentalFeature(Xp::CaDerivations);
store->registerDrvOutput(realisation);
}
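The hunks above swap the old string-based experimental-feature checks for the typed Xp enum from experimental-features.hh. A minimal sketch of a caller gated on that flag, under the same assumption as the code above that missing realisations only exist when the feature is on (maybeRegisterRealisations is an illustrative name, not part of this change):

    #include <set>
    #include "globals.hh"
    #include "store-api.hh"
    #include "realisation.hh"
    #include "experimental-features.hh"

    // Sketch: only touch CA-derivation realisations when the feature is enabled.
    void maybeRegisterRealisations(nix::Store & store,
        const std::set<nix::Realisation> & missingRealisations)
    {
        using namespace nix;
        if (!settings.isExperimentalFeatureEnabled(Xp::CaDerivations))
            return;                               // feature off: nothing to register
        for (auto & realisation : missingRealisations)
            store.registerDrvOutput(realisation); // same call as in the hunk above
    }
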
diff --git a/src/libcmd/command.cc b/src/libcmd/command.cc
index 2daf43aa7..fd3edfc46 100644
--- a/src/libcmd/command.cc
+++ b/src/libcmd/command.cc
@@ -120,7 +120,7 @@ void BuiltPathsCommand::run(ref<Store> store)
// XXX: This only computes the store path closure, ignoring
// intermediate realisations
StorePathSet pathsRoots, pathsClosure;
- for (auto & root: paths) {
+ for (auto & root : paths) {
auto rootFromThis = root.outPaths();
pathsRoots.insert(rootFromThis.begin(), rootFromThis.end());
}
@@ -138,17 +138,20 @@ StorePathsCommand::StorePathsCommand(bool recursive)
{
}
-void StorePathsCommand::run(ref<Store> store, BuiltPaths paths)
+void StorePathsCommand::run(ref<Store> store, BuiltPaths && paths)
{
- StorePaths storePaths;
- for (auto& builtPath : paths)
- for (auto& p : builtPath.outPaths())
- storePaths.push_back(p);
+ StorePathSet storePaths;
+ for (auto & builtPath : paths)
+ for (auto & p : builtPath.outPaths())
+ storePaths.insert(p);
- run(store, std::move(storePaths));
+ auto sorted = store->topoSortPaths(storePaths);
+ std::reverse(sorted.begin(), sorted.end());
+
+ run(store, std::move(sorted));
}
-void StorePathCommand::run(ref<Store> store, std::vector<StorePath> storePaths)
+void StorePathCommand::run(ref<Store> store, std::vector<StorePath> && storePaths)
{
if (storePaths.size() != 1)
throw UsageError("this command requires exactly one store path");
@@ -200,10 +203,10 @@ void MixProfile::updateProfile(const BuiltPaths & buildables)
for (auto & buildable : buildables) {
std::visit(overloaded {
- [&](BuiltPath::Opaque bo) {
+ [&](const BuiltPath::Opaque & bo) {
result.push_back(bo.path);
},
- [&](BuiltPath::Built bfd) {
+ [&](const BuiltPath::Built & bfd) {
for (auto & output : bfd.outputs) {
result.push_back(output.second);
}
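StorePathsCommand::run now collects outputs into a StorePathSet and hands subcommands a topologically sorted, reversed vector instead of an arbitrary list. A sketch of that ordering step in isolation (dependencyOrder is an illustrative name):

    #include <algorithm>
    #include "store-api.hh"

    // Sketch: the ordering the new run() builds before invoking a subcommand:
    // topoSortPaths() followed by std::reverse, as in the hunk above.
    std::vector<nix::StorePath> dependencyOrder(
        nix::ref<nix::Store> store, const nix::StorePathSet & storePaths)
    {
        auto sorted = store->topoSortPaths(storePaths);
        std::reverse(sorted.begin(), sorted.end());
        return sorted;
    }
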
diff --git a/src/libcmd/command.hh b/src/libcmd/command.hh
index f3625ed0d..07f398468 100644
--- a/src/libcmd/command.hh
+++ b/src/libcmd/command.hh
@@ -108,6 +108,8 @@ enum class Realise {
exists. */
Derivation,
/* Evaluate in dry-run mode. Postcondition: nothing. */
+ // FIXME: currently unused, but could be revived if we can
+ // evaluate derivations in-memory.
Nothing
};
@@ -167,7 +169,7 @@ public:
using StoreCommand::run;
- virtual void run(ref<Store> store, BuiltPaths paths) = 0;
+ virtual void run(ref<Store> store, BuiltPaths && paths) = 0;
void run(ref<Store> store) override;
@@ -180,9 +182,9 @@ struct StorePathsCommand : public BuiltPathsCommand
using BuiltPathsCommand::run;
- virtual void run(ref<Store> store, std::vector<StorePath> storePaths) = 0;
+ virtual void run(ref<Store> store, std::vector<StorePath> && storePaths) = 0;
- void run(ref<Store> store, BuiltPaths paths) override;
+ void run(ref<Store> store, BuiltPaths && paths) override;
};
/* A command that operates on exactly one store path. */
@@ -192,7 +194,7 @@ struct StorePathCommand : public StorePathsCommand
virtual void run(ref<Store> store, const StorePath & storePath) = 0;
- void run(ref<Store> store, std::vector<StorePath> storePaths) override;
+ void run(ref<Store> store, std::vector<StorePath> && storePaths) override;
};
/* A helper class for registering commands globally. */
@@ -223,15 +225,18 @@ static RegisterCommand registerCommand2(std::vector<std::string> && name)
return RegisterCommand(std::move(name), [](){ return make_ref<T>(); });
}
-BuiltPaths build(ref<Store> evalStore, ref<Store> store, Realise mode,
- std::vector<std::shared_ptr<Installable>> installables, BuildMode bMode = bmNormal);
+BuiltPaths build(
+ ref<Store> evalStore,
+ ref<Store> store, Realise mode,
+ const std::vector<std::shared_ptr<Installable>> & installables,
+ BuildMode bMode = bmNormal);
std::set<StorePath> toStorePaths(
ref<Store> evalStore,
ref<Store> store,
Realise mode,
OperateOn operateOn,
- std::vector<std::shared_ptr<Installable>> installables);
+ const std::vector<std::shared_ptr<Installable>> & installables);
StorePath toStorePath(
ref<Store> evalStore,
@@ -240,8 +245,9 @@ StorePath toStorePath(
OperateOn operateOn,
std::shared_ptr<Installable> installable);
-std::set<StorePath> toDerivations(ref<Store> store,
- std::vector<std::shared_ptr<Installable>> installables,
+std::set<StorePath> toDerivations(
+ ref<Store> store,
+ const std::vector<std::shared_ptr<Installable>> & installables,
bool useDeriver = false);
BuiltPaths toBuiltPaths(
@@ -249,7 +255,7 @@ BuiltPaths toBuiltPaths(
ref<Store> store,
Realise mode,
OperateOn operateOn,
- std::vector<std::shared_ptr<Installable>> installables);
+ const std::vector<std::shared_ptr<Installable>> & installables);
/* Helper function to generate args that invoke $EDITOR on
filename:lineno. */
diff --git a/src/libcmd/installables.cc b/src/libcmd/installables.cc
index e3ce564b0..ef200b1d2 100644
--- a/src/libcmd/installables.cc
+++ b/src/libcmd/installables.cc
@@ -291,6 +291,9 @@ void completeFlakeRefWithFragment(
void completeFlakeRef(ref<Store> store, std::string_view prefix)
{
+ if (!settings.isExperimentalFeatureEnabled(Xp::Flakes))
+ return;
+
if (prefix == "")
completions->add(".");
@@ -654,6 +657,17 @@ std::vector<std::shared_ptr<Installable>> SourceExprCommand::parseInstallables(
for (auto & s : ss) {
std::exception_ptr ex;
+ if (s.find('/') != std::string::npos) {
+ try {
+ result.push_back(std::make_shared<InstallableStorePath>(store, store->followLinksToStorePath(s)));
+ continue;
+ } catch (BadStorePath &) {
+ } catch (...) {
+ if (!ex)
+ ex = std::current_exception();
+ }
+ }
+
try {
auto [flakeRef, fragment] = parseFlakeRefWithFragment(s, absPath("."));
result.push_back(std::make_shared<InstallableFlake>(
@@ -668,25 +682,7 @@ std::vector<std::shared_ptr<Installable>> SourceExprCommand::parseInstallables(
ex = std::current_exception();
}
- if (s.find('/') != std::string::npos) {
- try {
- result.push_back(std::make_shared<InstallableStorePath>(store, store->followLinksToStorePath(s)));
- continue;
- } catch (BadStorePath &) {
- } catch (...) {
- if (!ex)
- ex = std::current_exception();
- }
- }
-
std::rethrow_exception(ex);
-
- /*
- throw Error(
- pathExists(s)
- ? "path '%s' is not a flake or a store path"
- : "don't know how to handle argument '%s'", s);
- */
}
}
@@ -704,13 +700,13 @@ std::shared_ptr<Installable> SourceExprCommand::parseInstallable(
BuiltPaths getBuiltPaths(ref<Store> evalStore, ref<Store> store, const DerivedPaths & hopefullyBuiltPaths)
{
BuiltPaths res;
- for (auto & b : hopefullyBuiltPaths)
+ for (const auto & b : hopefullyBuiltPaths)
std::visit(
overloaded{
- [&](DerivedPath::Opaque bo) {
+ [&](const DerivedPath::Opaque & bo) {
res.push_back(BuiltPath::Opaque{bo.path});
},
- [&](DerivedPath::Built bfd) {
+ [&](const DerivedPath::Built & bfd) {
OutputPathMap outputs;
auto drv = evalStore->readDerivation(bfd.drvPath);
auto outputHashes = staticOutputHashes(*evalStore, drv); // FIXME: expensive
@@ -721,7 +717,7 @@ BuiltPaths getBuiltPaths(ref<Store> evalStore, ref<Store> store, const DerivedPa
"the derivation '%s' doesn't have an output named '%s'",
store->printStorePath(bfd.drvPath), output);
if (settings.isExperimentalFeatureEnabled(
- "ca-derivations")) {
+ Xp::CaDerivations)) {
auto outputId =
DrvOutput{outputHashes.at(output), output};
auto realisation =
@@ -729,7 +725,7 @@ BuiltPaths getBuiltPaths(ref<Store> evalStore, ref<Store> store, const DerivedPa
if (!realisation)
throw Error(
"cannot operate on an output of unbuilt "
- "content-addresed derivation '%s'",
+ "content-addressed derivation '%s'",
outputId.to_string());
outputs.insert_or_assign(
output, realisation->outPath);
@@ -750,8 +746,12 @@ BuiltPaths getBuiltPaths(ref<Store> evalStore, ref<Store> store, const DerivedPa
return res;
}
-BuiltPaths build(ref<Store> evalStore, ref<Store> store, Realise mode,
- std::vector<std::shared_ptr<Installable>> installables, BuildMode bMode)
+BuiltPaths build(
+ ref<Store> evalStore,
+ ref<Store> store,
+ Realise mode,
+ const std::vector<std::shared_ptr<Installable>> & installables,
+ BuildMode bMode)
{
if (mode == Realise::Nothing)
settings.readOnlyMode = true;
@@ -763,7 +763,7 @@ BuiltPaths build(ref<Store> evalStore, ref<Store> store, Realise mode,
pathsToBuild.insert(pathsToBuild.end(), b.begin(), b.end());
}
- if (mode == Realise::Nothing)
+ if (mode == Realise::Nothing || mode == Realise::Derivation)
printMissing(store, pathsToBuild, lvlError);
else if (mode == Realise::Outputs)
store->buildPaths(pathsToBuild, bMode, evalStore);
@@ -776,7 +776,7 @@ BuiltPaths toBuiltPaths(
ref<Store> store,
Realise mode,
OperateOn operateOn,
- std::vector<std::shared_ptr<Installable>> installables)
+ const std::vector<std::shared_ptr<Installable>> & installables)
{
if (operateOn == OperateOn::Output)
return build(evalStore, store, mode, installables);
@@ -795,7 +795,7 @@ StorePathSet toStorePaths(
ref<Store> evalStore,
ref<Store> store,
Realise mode, OperateOn operateOn,
- std::vector<std::shared_ptr<Installable>> installables)
+ const std::vector<std::shared_ptr<Installable>> & installables)
{
StorePathSet outPaths;
for (auto & path : toBuiltPaths(evalStore, store, mode, operateOn, installables)) {
@@ -819,15 +819,17 @@ StorePath toStorePath(
return *paths.begin();
}
-StorePathSet toDerivations(ref<Store> store,
- std::vector<std::shared_ptr<Installable>> installables, bool useDeriver)
+StorePathSet toDerivations(
+ ref<Store> store,
+ const std::vector<std::shared_ptr<Installable>> & installables,
+ bool useDeriver)
{
StorePathSet drvPaths;
- for (auto & i : installables)
- for (auto & b : i->toDerivedPaths())
+ for (const auto & i : installables)
+ for (const auto & b : i->toDerivedPaths())
std::visit(overloaded {
- [&](DerivedPath::Opaque bo) {
+ [&](const DerivedPath::Opaque & bo) {
if (!useDeriver)
throw Error("argument '%s' did not evaluate to a derivation", i->what());
auto derivers = store->queryValidDerivers(bo.path);
@@ -836,7 +838,7 @@ StorePathSet toDerivations(ref<Store> store,
// FIXME: use all derivers?
drvPaths.insert(*derivers.begin());
},
- [&](DerivedPath::Built bfd) {
+ [&](const DerivedPath::Built & bfd) {
drvPaths.insert(bfd.drvPath);
},
}, b.raw());
diff --git a/src/libcmd/local.mk b/src/libcmd/local.mk
index 431d8ec06..8b0662753 100644
--- a/src/libcmd/local.mk
+++ b/src/libcmd/local.mk
@@ -8,8 +8,8 @@ libcmd_SOURCES := $(wildcard $(d)/*.cc)
libcmd_CXXFLAGS += -I src/libutil -I src/libstore -I src/libexpr -I src/libmain -I src/libfetchers
-libcmd_LDFLAGS = -llowdown -pthread
+libcmd_LDFLAGS += -llowdown -pthread
libcmd_LIBS = libstore libutil libexpr libmain libfetchers
-$(eval $(call install-file-in, $(d)/nix-cmd.pc, $(prefix)/lib/pkgconfig, 0644))
+$(eval $(call install-file-in, $(d)/nix-cmd.pc, $(libdir)/pkgconfig, 0644))
diff --git a/src/libcmd/markdown.cc b/src/libcmd/markdown.cc
index d25113d93..29bb4d31e 100644
--- a/src/libcmd/markdown.cc
+++ b/src/libcmd/markdown.cc
@@ -12,7 +12,7 @@ std::string renderMarkdownToTerminal(std::string_view markdown)
struct lowdown_opts opts {
.type = LOWDOWN_TERM,
.maxdepth = 20,
- .cols = std::min(getWindowSize().second, (unsigned short) 80),
+ .cols = std::max(getWindowSize().second, (unsigned short) 80),
.hmargin = 0,
.vmargin = 0,
.feat = LOWDOWN_COMMONMARK | LOWDOWN_FENCED | LOWDOWN_DEFLIST | LOWDOWN_TABLES,
@@ -25,7 +25,7 @@ std::string renderMarkdownToTerminal(std::string_view markdown)
Finally freeDoc([&]() { lowdown_doc_free(doc); });
size_t maxn = 0;
- auto node = lowdown_doc_parse(doc, &maxn, markdown.data(), markdown.size());
+ auto node = lowdown_doc_parse(doc, &maxn, markdown.data(), markdown.size(), nullptr);
if (!node)
throw Error("cannot parse Markdown document");
Finally freeNode([&]() { lowdown_node_free(node); });
@@ -40,11 +40,11 @@ std::string renderMarkdownToTerminal(std::string_view markdown)
throw Error("cannot allocate Markdown output buffer");
Finally freeBuffer([&]() { lowdown_buf_free(buf); });
- int rndr_res = lowdown_term_rndr(buf, nullptr, renderer, node);
+ int rndr_res = lowdown_term_rndr(buf, renderer, node);
if (!rndr_res)
throw Error("allocation error while rendering Markdown");
- return std::string(buf->data, buf->size);
+ return filterANSIEscapes(std::string(buf->data, buf->size), !shouldANSI());
}
}
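Besides adapting to the newer lowdown entry points, the renderer now strips ANSI escape sequences when colour output is not wanted. A sketch of just that post-processing step, using the libutil helpers the hunk above relies on:

    #include <string>
    #include "util.hh"   // filterANSIEscapes, shouldANSI

    // Sketch: keep colours only when the terminal accepts ANSI output,
    // otherwise filter every escape sequence out of the rendered text.
    std::string postProcessRendered(const std::string & rendered)
    {
        return nix::filterANSIEscapes(rendered, !nix::shouldANSI());
    }
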
diff --git a/src/libexpr/attr-set.hh b/src/libexpr/attr-set.hh
index 1da8d91df..7d6ffc9f3 100644
--- a/src/libexpr/attr-set.hh
+++ b/src/libexpr/attr-set.hh
@@ -17,8 +17,8 @@ struct Attr
{
Symbol name;
Value * value;
- Pos * pos;
- Attr(Symbol name, Value * value, Pos * pos = &noPos)
+ ptr<Pos> pos;
+ Attr(Symbol name, Value * value, ptr<Pos> pos = ptr(&noPos))
: name(name), value(value), pos(pos) { };
Attr() : pos(&noPos) { };
bool operator < (const Attr & a) const
@@ -35,13 +35,13 @@ class Bindings
{
public:
typedef uint32_t size_t;
- Pos *pos;
+ ptr<Pos> pos;
private:
size_t size_, capacity_;
Attr attrs[0];
- Bindings(size_t capacity) : size_(0), capacity_(capacity) { }
+ Bindings(size_t capacity) : pos(&noPos), size_(0), capacity_(capacity) { }
Bindings(const Bindings & bindings) = delete;
public:
diff --git a/src/libexpr/eval.cc b/src/libexpr/eval.cc
index 327f7e974..b987e1888 100644
--- a/src/libexpr/eval.cc
+++ b/src/libexpr/eval.cc
@@ -119,8 +119,8 @@ void printValue(std::ostream & str, std::set<const Value *> & active, const Valu
case tList2:
case tListN:
str << "[ ";
- for (unsigned int n = 0; n < v.listSize(); ++n) {
- printValue(str, active, *v.listElems()[n]);
+ for (auto v2 : v.listItems()) {
+ printValue(str, active, *v2);
str << " ";
}
str << "]";
@@ -445,12 +445,12 @@ EvalState::EvalState(
StorePathSet closure;
store->computeFSClosure(store->toStorePath(r.second).first, closure);
for (auto & path : closure)
- allowedPaths->insert(store->printStorePath(path));
+ allowPath(path);
} catch (InvalidPath &) {
- allowedPaths->insert(r.second);
+ allowPath(r.second);
}
} else
- allowedPaths->insert(r.second);
+ allowPath(r.second);
}
}
@@ -465,6 +465,35 @@ EvalState::~EvalState()
}
+void EvalState::requireExperimentalFeatureOnEvaluation(
+ const ExperimentalFeature & feature,
+ const std::string_view fName,
+ const Pos & pos)
+{
+ if (!settings.isExperimentalFeatureEnabled(feature)) {
+ throw EvalError({
+ .msg = hintfmt(
+ "Cannot call '%2%' because experimental Nix feature '%1%' is disabled. You can enable it via '--extra-experimental-features %1%'.",
+ feature,
+ fName
+ ),
+ .errPos = pos
+ });
+ }
+}
+
+void EvalState::allowPath(const Path & path)
+{
+ if (allowedPaths)
+ allowedPaths->insert(path);
+}
+
+void EvalState::allowPath(const StorePath & storePath)
+{
+ if (allowedPaths)
+ allowedPaths->insert(store->toRealPath(storePath));
+}
+
Path EvalState::checkSourcePath(const Path & path_)
{
if (!allowedPaths) return path_;
@@ -490,8 +519,12 @@ Path EvalState::checkSourcePath(const Path & path_)
}
}
- if (!found)
- throw RestrictedPathError("access to path '%1%' is forbidden in restricted mode", abspath);
+ if (!found) {
+ auto modeInformation = evalSettings.pureEval
+ ? "in pure eval mode (use '--impure' to override)"
+ : "in restricted mode";
+ throw RestrictedPathError("access to absolute path '%1%' is forbidden %2%", abspath, modeInformation);
+ }
/* Resolve symlinks. */
debug(format("checking access to '%s'") % abspath);
@@ -504,7 +537,7 @@ Path EvalState::checkSourcePath(const Path & path_)
}
}
- throw RestrictedPathError("access to path '%1%' is forbidden in restricted mode", path);
+ throw RestrictedPathError("access to canonical path '%1%' is forbidden in restricted mode", path);
}
@@ -554,14 +587,20 @@ Value * EvalState::addConstant(const string & name, Value & v)
{
Value * v2 = allocValue();
*v2 = v;
- staticBaseEnv.vars[symbols.create(name)] = baseEnvDispl;
- baseEnv.values[baseEnvDispl++] = v2;
- string name2 = string(name, 0, 2) == "__" ? string(name, 2) : name;
- baseEnv.values[0]->attrs->push_back(Attr(symbols.create(name2), v2));
+ addConstant(name, v2);
return v2;
}
+void EvalState::addConstant(const string & name, Value * v)
+{
+ staticBaseEnv.vars.emplace_back(symbols.create(name), baseEnvDispl);
+ baseEnv.values[baseEnvDispl++] = v;
+ string name2 = string(name, 0, 2) == "__" ? string(name, 2) : name;
+ baseEnv.values[0]->attrs->push_back(Attr(symbols.create(name2), v));
+}
+
+
Value * EvalState::addPrimOp(const string & name,
size_t arity, PrimOpFun primOp)
{
@@ -580,7 +619,7 @@ Value * EvalState::addPrimOp(const string & name,
Value * v = allocValue();
v->mkPrimOp(new PrimOp { .fun = primOp, .arity = arity, .name = sym });
- staticBaseEnv.vars[symbols.create(name)] = baseEnvDispl;
+ staticBaseEnv.vars.emplace_back(symbols.create(name), baseEnvDispl);
baseEnv.values[baseEnvDispl++] = v;
baseEnv.values[0]->attrs->push_back(Attr(sym, v));
return v;
@@ -606,7 +645,7 @@ Value * EvalState::addPrimOp(PrimOp && primOp)
Value * v = allocValue();
v->mkPrimOp(new PrimOp(std::move(primOp)));
- staticBaseEnv.vars[envName] = baseEnvDispl;
+ staticBaseEnv.vars.emplace_back(envName, baseEnvDispl);
baseEnv.values[baseEnvDispl++] = v;
baseEnv.values[0]->attrs->push_back(Attr(primOp.name, v));
return v;
@@ -756,7 +795,7 @@ void mkPath(Value & v, const char * s)
inline Value * EvalState::lookupVar(Env * env, const ExprVar & var, bool noEval)
{
- for (size_t l = var.level; l; --l, env = env->up) ;
+ for (auto l = var.level; l; --l, env = env->up) ;
if (!var.fromWith) return env->values[var.displ];
@@ -770,7 +809,7 @@ inline Value * EvalState::lookupVar(Env * env, const ExprVar & var, bool noEval)
}
Bindings::iterator j = env->values[0]->attrs->find(var.name);
if (j != env->values[0]->attrs->end()) {
- if (countCalls && j->pos) attrSelects[*j->pos]++;
+ if (countCalls) attrSelects[*j->pos]++;
return j->value;
}
if (!env->prevWith)
@@ -825,9 +864,9 @@ void EvalState::mkThunk_(Value & v, Expr * expr)
}
-void EvalState::mkPos(Value & v, Pos * pos)
+void EvalState::mkPos(Value & v, ptr<Pos> pos)
{
- if (pos && pos->file.set()) {
+ if (pos->file.set()) {
mkAttrs(v, 3);
mkString(*allocAttr(v, sFile), pos->file);
mkInt(*allocAttr(v, sLine), pos->line);
@@ -895,23 +934,41 @@ void EvalState::evalFile(const Path & path_, Value & v, bool mustBeTrivial)
return;
}
- Path path2 = resolveExprPath(path);
- if ((i = fileEvalCache.find(path2)) != fileEvalCache.end()) {
+ Path resolvedPath = resolveExprPath(path);
+ if ((i = fileEvalCache.find(resolvedPath)) != fileEvalCache.end()) {
v = i->second;
return;
}
- printTalkative("evaluating file '%1%'", path2);
+ printTalkative("evaluating file '%1%'", resolvedPath);
Expr * e = nullptr;
- auto j = fileParseCache.find(path2);
+ auto j = fileParseCache.find(resolvedPath);
if (j != fileParseCache.end())
e = j->second;
if (!e)
- e = parseExprFromFile(checkSourcePath(path2));
+ e = parseExprFromFile(checkSourcePath(resolvedPath));
+
+ cacheFile(path, resolvedPath, e, v, mustBeTrivial);
+}
+
+
+void EvalState::resetFileCache()
+{
+ fileEvalCache.clear();
+ fileParseCache.clear();
+}
- fileParseCache[path2] = e;
+
+void EvalState::cacheFile(
+ const Path & path,
+ const Path & resolvedPath,
+ Expr * e,
+ Value & v,
+ bool mustBeTrivial)
+{
+ fileParseCache[resolvedPath] = e;
try {
// Enforce that 'flake.nix' is a direct attrset, not a
@@ -921,19 +978,12 @@ void EvalState::evalFile(const Path & path_, Value & v, bool mustBeTrivial)
throw EvalError("file '%s' must be an attribute set", path);
eval(e, v);
} catch (Error & e) {
- addErrorTrace(e, "while evaluating the file '%1%':", path2);
+ addErrorTrace(e, "while evaluating the file '%1%':", resolvedPath);
throw;
}
- fileEvalCache[path2] = v;
- if (path != path2) fileEvalCache[path] = v;
-}
-
-
-void EvalState::resetFileCache()
-{
- fileEvalCache.clear();
- fileParseCache.clear();
+ fileEvalCache[resolvedPath] = v;
+ if (path != resolvedPath) fileEvalCache[path] = v;
}
@@ -1018,7 +1068,7 @@ void ExprAttrs::eval(EvalState & state, Env & env, Value & v)
/* The recursive attributes are evaluated in the new
environment, while the inherited attributes are evaluated
in the original environment. */
- size_t displ = 0;
+ Displacement displ = 0;
for (auto & i : attrs) {
Value * vAttr;
if (hasOverrides && !i.second.inherited) {
@@ -1027,7 +1077,7 @@ void ExprAttrs::eval(EvalState & state, Env & env, Value & v)
} else
vAttr = i.second.e->maybeThunk(state, i.second.inherited ? env : env2);
env2.values[displ++] = vAttr;
- v.attrs->push_back(Attr(i.first, vAttr, &i.second.pos));
+ v.attrs->push_back(Attr(i.first, vAttr, ptr(&i.second.pos)));
}
/* If the rec contains an attribute called `__overrides', then
@@ -1059,7 +1109,7 @@ void ExprAttrs::eval(EvalState & state, Env & env, Value & v)
else
for (auto & i : attrs)
- v.attrs->push_back(Attr(i.first, i.second.e->maybeThunk(state, env), &i.second.pos));
+ v.attrs->push_back(Attr(i.first, i.second.e->maybeThunk(state, env), ptr(&i.second.pos)));
/* Dynamic attrs apply *after* rec and __overrides. */
for (auto & i : dynamicAttrs) {
@@ -1076,11 +1126,11 @@ void ExprAttrs::eval(EvalState & state, Env & env, Value & v)
i.valueExpr->setName(nameSym);
/* Keep sorted order so find can catch duplicates */
- v.attrs->push_back(Attr(nameSym, i.valueExpr->maybeThunk(state, *dynamicEnv), &i.pos));
+ v.attrs->push_back(Attr(nameSym, i.valueExpr->maybeThunk(state, *dynamicEnv), ptr(&i.pos)));
v.attrs->sort(); // FIXME: inefficient
}
- v.attrs->pos = &pos;
+ v.attrs->pos = ptr(&pos);
}
@@ -1094,7 +1144,7 @@ void ExprLet::eval(EvalState & state, Env & env, Value & v)
/* The recursive attributes are evaluated in the new environment,
while the inherited attributes are evaluated in the original
environment. */
- size_t displ = 0;
+ Displacement displ = 0;
for (auto & i : attrs->attrs)
env2.values[displ++] = i.second.e->maybeThunk(state, i.second.inherited ? env : env2);
@@ -1105,8 +1155,8 @@ void ExprLet::eval(EvalState & state, Env & env, Value & v)
void ExprList::eval(EvalState & state, Env & env, Value & v)
{
state.mkList(v, elems.size());
- for (size_t n = 0; n < elems.size(); ++n)
- v.listElems()[n] = elems[n]->maybeThunk(state, env);
+ for (auto [n, v2] : enumerate(v.listItems()))
+ const_cast<Value * &>(v2) = elems[n]->maybeThunk(state, env);
}
@@ -1138,7 +1188,7 @@ static string showAttrPath(EvalState & state, Env & env, const AttrPath & attrPa
void ExprSelect::eval(EvalState & state, Env & env, Value & v)
{
Value vTmp;
- Pos * pos2 = 0;
+ ptr<Pos> pos2(&noPos);
Value * vAttrs = &vTmp;
e->eval(state, env, vTmp);
@@ -1164,13 +1214,13 @@ void ExprSelect::eval(EvalState & state, Env & env, Value & v)
}
vAttrs = j->value;
pos2 = j->pos;
- if (state.countCalls && pos2) state.attrSelects[*pos2]++;
+ if (state.countCalls) state.attrSelects[*pos2]++;
}
- state.forceValue(*vAttrs, ( pos2 != NULL ? *pos2 : this->pos ) );
+ state.forceValue(*vAttrs, (*pos2 != noPos ? *pos2 : this->pos ) );
} catch (Error & e) {
- if (pos2 && pos2->file != state.sDerivationNix)
+ if (*pos2 != noPos && pos2->file != state.sDerivationNix)
addErrorTrace(e, *pos2, "while evaluating the attribute '%1%'",
showAttrPath(state, env, attrPath));
throw;
@@ -1211,144 +1261,184 @@ void ExprLambda::eval(EvalState & state, Env & env, Value & v)
}
-void ExprApp::eval(EvalState & state, Env & env, Value & v)
+void EvalState::callFunction(Value & fun, size_t nrArgs, Value * * args, Value & vRes, const Pos & pos)
{
- /* FIXME: vFun prevents GCC from doing tail call optimisation. */
- Value vFun;
- e1->eval(state, env, vFun);
- state.callFunction(vFun, *(e2->maybeThunk(state, env)), v, pos);
-}
+ auto trace = evalSettings.traceFunctionCalls ? std::make_unique<FunctionCallTrace>(pos) : nullptr;
+ forceValue(fun, pos);
-void EvalState::callPrimOp(Value & fun, Value & arg, Value & v, const Pos & pos)
-{
- /* Figure out the number of arguments still needed. */
- size_t argsDone = 0;
- Value * primOp = &fun;
- while (primOp->isPrimOpApp()) {
- argsDone++;
- primOp = primOp->primOpApp.left;
- }
- assert(primOp->isPrimOp());
- auto arity = primOp->primOp->arity;
- auto argsLeft = arity - argsDone;
-
- if (argsLeft == 1) {
- /* We have all the arguments, so call the primop. */
-
- /* Put all the arguments in an array. */
- Value * vArgs[arity];
- auto n = arity - 1;
- vArgs[n--] = &arg;
- for (Value * arg = &fun; arg->isPrimOpApp(); arg = arg->primOpApp.left)
- vArgs[n--] = arg->primOpApp.right;
-
- /* And call the primop. */
- nrPrimOpCalls++;
- if (countCalls) primOpCalls[primOp->primOp->name]++;
- primOp->primOp->fun(*this, pos, vArgs, v);
- } else {
- Value * fun2 = allocValue();
- *fun2 = fun;
- v.mkPrimOpApp(fun2, &arg);
- }
-}
+ Value vCur(fun);
-void EvalState::callFunction(Value & fun, Value & arg, Value & v, const Pos & pos)
-{
- auto trace = evalSettings.traceFunctionCalls ? std::make_unique<FunctionCallTrace>(pos) : nullptr;
+ auto makeAppChain = [&]()
+ {
+ vRes = vCur;
+ for (size_t i = 0; i < nrArgs; ++i) {
+ auto fun2 = allocValue();
+ *fun2 = vRes;
+ vRes.mkPrimOpApp(fun2, args[i]);
+ }
+ };
- forceValue(fun, pos);
+ Attr * functor;
- if (fun.isPrimOp() || fun.isPrimOpApp()) {
- callPrimOp(fun, arg, v, pos);
- return;
- }
+ while (nrArgs > 0) {
- if (fun.type() == nAttrs) {
- auto found = fun.attrs->find(sFunctor);
- if (found != fun.attrs->end()) {
- /* fun may be allocated on the stack of the calling function,
- * but for functors we may keep a reference, so heap-allocate
- * a copy and use that instead.
- */
- auto & fun2 = *allocValue();
- fun2 = fun;
- /* !!! Should we use the attr pos here? */
- Value v2;
- callFunction(*found->value, fun2, v2, pos);
- return callFunction(v2, arg, v, pos);
- }
- }
+ if (vCur.isLambda()) {
- if (!fun.isLambda())
- throwTypeError(pos, "attempt to call something which is not a function but %1%", fun);
+ ExprLambda & lambda(*vCur.lambda.fun);
- ExprLambda & lambda(*fun.lambda.fun);
+ auto size =
+ (lambda.arg.empty() ? 0 : 1) +
+ (lambda.hasFormals() ? lambda.formals->formals.size() : 0);
+ Env & env2(allocEnv(size));
+ env2.up = vCur.lambda.env;
- auto size =
- (lambda.arg.empty() ? 0 : 1) +
- (lambda.matchAttrs ? lambda.formals->formals.size() : 0);
- Env & env2(allocEnv(size));
- env2.up = fun.lambda.env;
+ Displacement displ = 0;
- size_t displ = 0;
+ if (!lambda.hasFormals())
+ env2.values[displ++] = args[0];
- if (!lambda.matchAttrs)
- env2.values[displ++] = &arg;
+ else {
+ forceAttrs(*args[0], pos);
- else {
- forceAttrs(arg, pos);
-
- if (!lambda.arg.empty())
- env2.values[displ++] = &arg;
-
- /* For each formal argument, get the actual argument. If
- there is no matching actual argument but the formal
- argument has a default, use the default. */
- size_t attrsUsed = 0;
- for (auto & i : lambda.formals->formals) {
- Bindings::iterator j = arg.attrs->find(i.name);
- if (j == arg.attrs->end()) {
- if (!i.def) throwTypeError(pos, "%1% called without required argument '%2%'",
- lambda, i.name);
- env2.values[displ++] = i.def->maybeThunk(*this, env2);
+ if (!lambda.arg.empty())
+ env2.values[displ++] = args[0];
+
+ /* For each formal argument, get the actual argument. If
+ there is no matching actual argument but the formal
+ argument has a default, use the default. */
+ size_t attrsUsed = 0;
+ for (auto & i : lambda.formals->formals) {
+ auto j = args[0]->attrs->get(i.name);
+ if (!j) {
+ if (!i.def) throwTypeError(pos, "%1% called without required argument '%2%'",
+ lambda, i.name);
+ env2.values[displ++] = i.def->maybeThunk(*this, env2);
+ } else {
+ attrsUsed++;
+ env2.values[displ++] = j->value;
+ }
+ }
+
+ /* Check that each actual argument is listed as a formal
+ argument (unless the attribute match specifies a `...'). */
+ if (!lambda.formals->ellipsis && attrsUsed != args[0]->attrs->size()) {
+ /* Nope, so show the first unexpected argument to the
+ user. */
+ for (auto & i : *args[0]->attrs)
+ if (lambda.formals->argNames.find(i.name) == lambda.formals->argNames.end())
+ throwTypeError(pos, "%1% called with unexpected argument '%2%'", lambda, i.name);
+ abort(); // can't happen
+ }
+ }
+
+ nrFunctionCalls++;
+ if (countCalls) incrFunctionCall(&lambda);
+
+ /* Evaluate the body. */
+ try {
+ lambda.body->eval(*this, env2, vCur);
+ } catch (Error & e) {
+ if (loggerSettings.showTrace.get()) {
+ addErrorTrace(e, lambda.pos, "while evaluating %s",
+ (lambda.name.set()
+ ? "'" + (string) lambda.name + "'"
+ : "anonymous lambda"));
+ addErrorTrace(e, pos, "from call site%s", "");
+ }
+ throw;
+ }
+
+ nrArgs--;
+ args += 1;
+ }
+
+ else if (vCur.isPrimOp()) {
+
+ size_t argsLeft = vCur.primOp->arity;
+
+ if (nrArgs < argsLeft) {
+ /* We don't have enough arguments, so create a tPrimOpApp chain. */
+ makeAppChain();
+ return;
} else {
- attrsUsed++;
- env2.values[displ++] = j->value;
+ /* We have all the arguments, so call the primop. */
+ nrPrimOpCalls++;
+ if (countCalls) primOpCalls[vCur.primOp->name]++;
+ vCur.primOp->fun(*this, pos, args, vCur);
+
+ nrArgs -= argsLeft;
+ args += argsLeft;
+ }
+ }
+
+ else if (vCur.isPrimOpApp()) {
+ /* Figure out the number of arguments still needed. */
+ size_t argsDone = 0;
+ Value * primOp = &vCur;
+ while (primOp->isPrimOpApp()) {
+ argsDone++;
+ primOp = primOp->primOpApp.left;
+ }
+ assert(primOp->isPrimOp());
+ auto arity = primOp->primOp->arity;
+ auto argsLeft = arity - argsDone;
+
+ if (nrArgs < argsLeft) {
+ /* We still don't have enough arguments, so extend the tPrimOpApp chain. */
+ makeAppChain();
+ return;
+ } else {
+ /* We have all the arguments, so call the primop with
+ the previous and new arguments. */
+
+ Value * vArgs[arity];
+ auto n = argsDone;
+ for (Value * arg = &vCur; arg->isPrimOpApp(); arg = arg->primOpApp.left)
+ vArgs[--n] = arg->primOpApp.right;
+
+ for (size_t i = 0; i < argsLeft; ++i)
+ vArgs[argsDone + i] = args[i];
+
+ nrPrimOpCalls++;
+ if (countCalls) primOpCalls[primOp->primOp->name]++;
+ primOp->primOp->fun(*this, pos, vArgs, vCur);
+
+ nrArgs -= argsLeft;
+ args += argsLeft;
}
}
- /* Check that each actual argument is listed as a formal
- argument (unless the attribute match specifies a `...'). */
- if (!lambda.formals->ellipsis && attrsUsed != arg.attrs->size()) {
- /* Nope, so show the first unexpected argument to the
- user. */
- for (auto & i : *arg.attrs)
- if (lambda.formals->argNames.find(i.name) == lambda.formals->argNames.end())
- throwTypeError(pos, "%1% called with unexpected argument '%2%'", lambda, i.name);
- abort(); // can't happen
+ else if (vCur.type() == nAttrs && (functor = vCur.attrs->get(sFunctor))) {
+ /* 'vCur' may be allocated on the stack of the calling
+ function, but for functors we may keep a reference, so
+ heap-allocate a copy and use that instead. */
+ Value * args2[] = {allocValue(), args[0]};
+ *args2[0] = vCur;
+ /* !!! Should we use the attr pos here? */
+ callFunction(*functor->value, 2, args2, vCur, pos);
+ nrArgs--;
+ args++;
}
+
+ else
+ throwTypeError(pos, "attempt to call something which is not a function but %1%", vCur);
}
- nrFunctionCalls++;
- if (countCalls) incrFunctionCall(&lambda);
+ vRes = vCur;
+}
- /* Evaluate the body. This is conditional on showTrace, because
- catching exceptions makes this function not tail-recursive. */
- if (loggerSettings.showTrace.get())
- try {
- lambda.body->eval(*this, env2, v);
- } catch (Error & e) {
- addErrorTrace(e, lambda.pos, "while evaluating %s",
- (lambda.name.set()
- ? "'" + (string) lambda.name + "'"
- : "anonymous lambda"));
- addErrorTrace(e, pos, "from call site%s", "");
- throw;
- }
- else
- fun.lambda.fun->body->eval(*this, env2, v);
+
+void ExprCall::eval(EvalState & state, Env & env, Value & v)
+{
+ Value vFun;
+ fun->eval(state, env, vFun);
+
+ Value * vArgs[args.size()];
+ for (size_t i = 0; i < args.size(); ++i)
+ vArgs[i] = args[i]->maybeThunk(state, env);
+
+ state.callFunction(vFun, args.size(), vArgs, v, pos);
}
@@ -1374,7 +1464,7 @@ void EvalState::autoCallFunction(Bindings & args, Value & fun, Value & res)
}
}
- if (!fun.isLambda() || !fun.lambda.fun->matchAttrs) {
+ if (!fun.isLambda() || !fun.lambda.fun->hasFormals()) {
res = fun;
return;
}
@@ -1576,7 +1666,6 @@ void ExprConcatStrings::eval(EvalState & state, Env & env, Value & v)
and none of the strings are allowed to have contexts. */
if (first) {
firstType = vTmp.type();
- first = false;
}
if (firstType == nInt) {
@@ -1597,7 +1686,12 @@ void ExprConcatStrings::eval(EvalState & state, Env & env, Value & v)
} else
throwEvalError(pos, "cannot add %1% to a float", showType(vTmp));
} else
- s << state.coerceToString(pos, vTmp, context, false, firstType == nString);
+ /* skip canonization of first path, which would only be not
+ canonized in the first place if it's coming from a ./${foo} type
+ path */
+ s << state.coerceToString(pos, vTmp, context, false, firstType == nString, !first);
+
+ first = false;
}
if (firstType == nInt)
@@ -1616,7 +1710,7 @@ void ExprConcatStrings::eval(EvalState & state, Env & env, Value & v)
void ExprPos::eval(EvalState & state, Env & env, Value & v)
{
- state.mkPos(v, &pos);
+ state.mkPos(v, ptr(&pos));
}
@@ -1642,8 +1736,8 @@ void EvalState::forceValueDeep(Value & v)
}
else if (v.isList()) {
- for (size_t n = 0; n < v.listSize(); ++n)
- recurse(*v.listElems()[n]);
+ for (auto v2 : v.listItems())
+ recurse(*v2);
}
};
@@ -1786,7 +1880,7 @@ std::optional<string> EvalState::tryAttrsToString(const Pos & pos, Value & v,
}
string EvalState::coerceToString(const Pos & pos, Value & v, PathSet & context,
- bool coerceMore, bool copyToStore)
+ bool coerceMore, bool copyToStore, bool canonicalizePath)
{
forceValue(v, pos);
@@ -1798,7 +1892,7 @@ string EvalState::coerceToString(const Pos & pos, Value & v, PathSet & context,
}
if (v.type() == nPath) {
- Path path(canonPath(v.path));
+ Path path(canonicalizePath ? canonPath(v.path) : v.path);
return copyToStore ? copyPathToStore(context, path) : path;
}
@@ -1827,12 +1921,12 @@ string EvalState::coerceToString(const Pos & pos, Value & v, PathSet & context,
if (v.isList()) {
string result;
- for (size_t n = 0; n < v.listSize(); ++n) {
- result += coerceToString(pos, *v.listElems()[n],
+ for (auto [n, v2] : enumerate(v.listItems())) {
+ result += coerceToString(pos, *v2,
context, coerceMore, copyToStore);
if (n < v.listSize() - 1
/* !!! not quite correct */
- && (!v.listElems()[n]->isList() || v.listElems()[n]->listSize() != 0))
+ && (!v2->isList() || v2->listSize() != 0))
result += " ";
}
return result;
@@ -1857,6 +1951,7 @@ string EvalState::copyPathToStore(PathSet & context, const Path & path)
? store->computeStorePathForPath(std::string(baseNameOf(path)), checkSourcePath(path)).first
: store->addToStore(std::string(baseNameOf(path)), checkSourcePath(path), FileIngestionMethod::Recursive, htSHA256, defaultPathFilter, repair);
dstPath = store->printStorePath(p);
+ allowPath(p);
srcToStore.insert_or_assign(path, std::move(p));
printMsg(lvlChatty, "copied source '%1%' -> '%2%'", path, dstPath);
}
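callFunction is now n-ary: it receives an array of argument values and applies them in a loop, building tPrimOpApp chains only when a primop still lacks arguments. A hedged sketch of calling a curried two-argument Nix function through the new interface (applyTwo is an illustrative helper, not part of this diff):

    #include "eval.hh"

    // Sketch: apply a curried two-argument function f to a and b in a single
    // call, instead of two nested single-argument callFunction invocations.
    void applyTwo(nix::EvalState & state, nix::Value & f,
        nix::Value & a, nix::Value & b, nix::Value & result)
    {
        nix::Value * args[] = { &a, &b };
        state.callFunction(f, 2, args, result, nix::noPos);
    }
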
diff --git a/src/libexpr/eval.hh b/src/libexpr/eval.hh
index 6f3474854..1aab8e166 100644
--- a/src/libexpr/eval.hh
+++ b/src/libexpr/eval.hh
@@ -5,6 +5,7 @@
#include "nixexpr.hh"
#include "symbol-table.hh"
#include "config.hh"
+#include "experimental-features.hh"
#include <map>
#include <optional>
@@ -100,6 +101,8 @@ public:
/* Store used to build stuff. */
const ref<Store> buildStore;
+ RootValue vCallFlake = nullptr;
+ RootValue vImportedDrvToDerivation = nullptr;
private:
SrcToStore srcToStore;
@@ -138,10 +141,25 @@ public:
std::shared_ptr<Store> buildStore = nullptr);
~EvalState();
+ void requireExperimentalFeatureOnEvaluation(
+ const ExperimentalFeature &,
+ const std::string_view fName,
+ const Pos & pos
+ );
+
void addToSearchPath(const string & s);
SearchPath getSearchPath() { return searchPath; }
+ /* Allow access to a path. */
+ void allowPath(const Path & path);
+
+ /* Allow access to a store path. Note that this gets remapped to
+ the real store path if `store` is a chroot store. */
+ void allowPath(const StorePath & storePath);
+
+ /* Check whether access to a path is allowed and throw an error if
+ not. Otherwise return the canonicalised path. */
Path checkSourcePath(const Path & path);
void checkURI(const std::string & uri);
@@ -170,6 +188,14 @@ public:
trivial (i.e. doesn't require arbitrary computation). */
void evalFile(const Path & path, Value & v, bool mustBeTrivial = false);
+ /* Like `cacheFile`, but with an already parsed expression. */
+ void cacheFile(
+ const Path & path,
+ const Path & resolvedPath,
+ Expr * e,
+ Value & v,
+ bool mustBeTrivial = false);
+
void resetFileCache();
/* Look up a file in the search path. */
@@ -224,7 +250,8 @@ public:
booleans and lists to a string. If `copyToStore' is set,
referenced paths are copied to the Nix store as a side effect. */
string coerceToString(const Pos & pos, Value & v, PathSet & context,
- bool coerceMore = false, bool copyToStore = true);
+ bool coerceMore = false, bool copyToStore = true,
+ bool canonicalizePath = true);
string copyPathToStore(PathSet & context, const Path & path);
@@ -250,6 +277,8 @@ private:
Value * addConstant(const string & name, Value & v);
+ void addConstant(const string & name, Value * v);
+
Value * addPrimOp(const string & name,
size_t arity, PrimOpFun primOp);
@@ -289,8 +318,14 @@ public:
bool isFunctor(Value & fun);
- void callFunction(Value & fun, Value & arg, Value & v, const Pos & pos);
- void callPrimOp(Value & fun, Value & arg, Value & v, const Pos & pos);
+ // FIXME: use std::span
+ void callFunction(Value & fun, size_t nrArgs, Value * * args, Value & vRes, const Pos & pos);
+
+ void callFunction(Value & fun, Value & arg, Value & vRes, const Pos & pos)
+ {
+ Value * args[] = {&arg};
+ callFunction(fun, 1, args, vRes, pos);
+ }
/* Automatically call a function for which each argument has a
default value or has a binding in the `args' map. */
@@ -308,7 +343,7 @@ public:
void mkList(Value & v, size_t length);
void mkAttrs(Value & v, size_t capacity);
void mkThunk_(Value & v, Expr * expr);
- void mkPos(Value & v, Pos * pos);
+ void mkPos(Value & v, ptr<Pos> pos);
void concatLists(Value & v, size_t nrLists, Value * * lists, const Pos & pos);
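The header also gains requireExperimentalFeatureOnEvaluation, which primops use to fail with a helpful message when a feature flag is off. A sketch of a primop gated that way, mirroring prim_getFlake further down in this diff (prim_example and builtins.example are illustrative names):

    #include "eval.hh"

    // Sketch: refuse to evaluate unless the 'flakes' feature is enabled,
    // then proceed as an ordinary unary primop.
    static void prim_example(nix::EvalState & state, const nix::Pos & pos,
        nix::Value * * args, nix::Value & v)
    {
        state.requireExperimentalFeatureOnEvaluation(nix::Xp::Flakes, "builtins.example", pos);
        state.forceValue(*args[0], pos);
        v = *args[0];
    }
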
diff --git a/src/libexpr/flake/config.cc b/src/libexpr/flake/config.cc
index 41b6f78ed..c03f4106c 100644
--- a/src/libexpr/flake/config.cc
+++ b/src/libexpr/flake/config.cc
@@ -1,4 +1,5 @@
#include "flake.hh"
+#include "globals.hh"
#include <nlohmann/json.hpp>
@@ -52,21 +53,19 @@ void ConfigFile::apply()
auto trustedList = readTrustedList();
bool trusted = false;
-
- if (auto saved = get(get(trustedList, name).value_or(std::map<std::string, bool>()), valueS)) {
+ if (nix::settings.acceptFlakeConfig){
+ trusted = true;
+ } else if (auto saved = get(get(trustedList, name).value_or(std::map<std::string, bool>()), valueS)) {
trusted = *saved;
+ warn("Using saved setting for '%s = %s' from ~/.local/share/nix/trusted-settings.json.", name,valueS);
} else {
// FIXME: filter ANSI escapes, newlines, \r, etc.
- if (std::tolower(logger->ask(fmt("do you want to allow configuration setting '%s' to be set to '" ANSI_RED "%s" ANSI_NORMAL "' (y/N)?", name, valueS)).value_or('n')) != 'y') {
- if (std::tolower(logger->ask("do you want to permanently mark this value as untrusted (y/N)?").value_or('n')) == 'y') {
- trustedList[name][valueS] = false;
- writeTrustedList(trustedList);
- }
- } else {
- if (std::tolower(logger->ask("do you want to permanently mark this value as trusted (y/N)?").value_or('n')) == 'y') {
- trustedList[name][valueS] = trusted = true;
- writeTrustedList(trustedList);
- }
+ if (std::tolower(logger->ask(fmt("do you want to allow configuration setting '%s' to be set to '" ANSI_RED "%s" ANSI_NORMAL "' (y/N)?", name, valueS)).value_or('n')) == 'y') {
+ trusted = true;
+ }
+ if (std::tolower(logger->ask(fmt("do you want to permanently mark this value as %s (y/N)?", trusted ? "trusted": "untrusted" )).value_or('n')) == 'y') {
+ trustedList[name][valueS] = trusted;
+ writeTrustedList(trustedList);
}
}
diff --git a/src/libexpr/flake/flake.cc b/src/libexpr/flake/flake.cc
index a2f100cbd..33d253eee 100644
--- a/src/libexpr/flake/flake.cc
+++ b/src/libexpr/flake/flake.cc
@@ -64,8 +64,7 @@ static std::tuple<fetchers::Tree, FlakeRef, FlakeRef> fetchOrSubstituteTree(
debug("got tree '%s' from '%s'",
state.store->printStorePath(tree.storePath), lockedRef);
- if (state.allowedPaths)
- state.allowedPaths->insert(tree.actualPath);
+ state.allowPath(tree.storePath);
assert(!originalRef.input.getNarHash() || tree.storePath == originalRef.input.computeStorePath(*state.store));
@@ -89,10 +88,12 @@ static void expectType(EvalState & state, ValueType type,
}
static std::map<FlakeId, FlakeInput> parseFlakeInputs(
- EvalState & state, Value * value, const Pos & pos);
+ EvalState & state, Value * value, const Pos & pos,
+ const std::optional<Path> & baseDir);
static FlakeInput parseFlakeInput(EvalState & state,
- const std::string & inputName, Value * value, const Pos & pos)
+ const std::string & inputName, Value * value, const Pos & pos,
+ const std::optional<Path> & baseDir)
{
expectType(state, nAttrs, *value, pos);
@@ -116,7 +117,7 @@ static FlakeInput parseFlakeInput(EvalState & state,
expectType(state, nBool, *attr.value, *attr.pos);
input.isFlake = attr.value->boolean;
} else if (attr.name == sInputs) {
- input.overrides = parseFlakeInputs(state, attr.value, *attr.pos);
+ input.overrides = parseFlakeInputs(state, attr.value, *attr.pos, baseDir);
} else if (attr.name == sFollows) {
expectType(state, nString, *attr.value, *attr.pos);
input.follows = parseInputPath(attr.value->string.s);
@@ -154,7 +155,7 @@ static FlakeInput parseFlakeInput(EvalState & state,
if (!attrs.empty())
throw Error("unexpected flake input attribute '%s', at %s", attrs.begin()->first, pos);
if (url)
- input.ref = parseFlakeRef(*url, {}, true);
+ input.ref = parseFlakeRef(*url, baseDir, true);
}
if (!input.follows && !input.ref)
@@ -164,7 +165,8 @@ static FlakeInput parseFlakeInput(EvalState & state,
}
static std::map<FlakeId, FlakeInput> parseFlakeInputs(
- EvalState & state, Value * value, const Pos & pos)
+ EvalState & state, Value * value, const Pos & pos,
+ const std::optional<Path> & baseDir)
{
std::map<FlakeId, FlakeInput> inputs;
@@ -175,7 +177,8 @@ static std::map<FlakeId, FlakeInput> parseFlakeInputs(
parseFlakeInput(state,
inputAttr.name,
inputAttr.value,
- *inputAttr.pos));
+ *inputAttr.pos,
+ baseDir));
}
return inputs;
@@ -191,7 +194,8 @@ static Flake getFlake(
state, originalRef, allowLookup, flakeCache);
// Guard against symlink attacks.
- auto flakeFile = canonPath(sourceInfo.actualPath + "/" + lockedRef.subdir + "/flake.nix");
+ auto flakeDir = canonPath(sourceInfo.actualPath + "/" + lockedRef.subdir);
+ auto flakeFile = canonPath(flakeDir + "/flake.nix");
if (!isInDir(flakeFile, sourceInfo.actualPath))
throw Error("'flake.nix' file of flake '%s' escapes from '%s'",
lockedRef, state.store->printStorePath(sourceInfo.storePath));
@@ -219,14 +223,14 @@ static Flake getFlake(
auto sInputs = state.symbols.create("inputs");
if (auto inputs = vInfo.attrs->get(sInputs))
- flake.inputs = parseFlakeInputs(state, inputs->value, *inputs->pos);
+ flake.inputs = parseFlakeInputs(state, inputs->value, *inputs->pos, flakeDir);
auto sOutputs = state.symbols.create("outputs");
if (auto outputs = vInfo.attrs->get(sOutputs)) {
expectType(state, nFunction, *outputs->value, *outputs->pos);
- if (outputs->value->isLambda() && outputs->value->lambda.fun->matchAttrs) {
+ if (outputs->value->isLambda() && outputs->value->lambda.fun->hasFormals()) {
for (auto & formal : outputs->value->lambda.fun->formals->formals) {
if (formal.name != state.sSelf)
flake.inputs.emplace(formal.name, FlakeInput {
@@ -253,8 +257,7 @@ static Flake getFlake(
flake.config.settings.insert({setting.name, state.forceBool(*setting.value, *setting.pos)});
else if (setting.value->type() == nList) {
std::vector<std::string> ss;
- for (unsigned int n = 0; n < setting.value->listSize(); ++n) {
- auto elem = setting.value->listElems()[n];
+ for (auto elem : setting.value->listItems()) {
if (elem->type() != nString)
throw TypeError("list element in flake configuration setting '%s' is %s while a string is expected",
setting.name, showType(*setting.value));
@@ -293,7 +296,7 @@ LockedFlake lockFlake(
const FlakeRef & topRef,
const LockFlags & lockFlags)
{
- settings.requireExperimentalFeature("flakes");
+ settings.requireExperimentalFeature(Xp::Flakes);
FlakeCache flakeCache;
@@ -303,7 +306,7 @@ LockedFlake lockFlake(
if (lockFlags.applyNixConfig) {
flake.config.apply();
- // FIXME: send new config to the daemon.
+ state.store->setOptions();
}
try {
@@ -325,25 +328,38 @@ LockedFlake lockFlake(
std::vector<FlakeRef> parents;
+ struct LockParent {
+ /* The path to this parent. */
+ InputPath path;
+
+ /* Whether we are currently inside a top-level lockfile
+ (inputs absolute) or subordinate lockfile (inputs
+ relative). */
+ bool absolute;
+ };
+
std::function<void(
const FlakeInputs & flakeInputs,
std::shared_ptr<Node> node,
const InputPath & inputPathPrefix,
- std::shared_ptr<const Node> oldNode)>
+ std::shared_ptr<const Node> oldNode,
+ const LockParent & parent,
+ const Path & parentPath)>
computeLocks;
computeLocks = [&](
const FlakeInputs & flakeInputs,
std::shared_ptr<Node> node,
const InputPath & inputPathPrefix,
- std::shared_ptr<const Node> oldNode)
+ std::shared_ptr<const Node> oldNode,
+ const LockParent & parent,
+ const Path & parentPath)
{
debug("computing lock file node '%s'", printInputPath(inputPathPrefix));
/* Get the overrides (i.e. attributes of the form
'inputs.nixops.inputs.nixpkgs.url = ...'). */
- // FIXME: check this
- for (auto & [id, input] : flake.inputs) {
+ for (auto & [id, input] : flakeInputs) {
for (auto & [idOverride, inputOverride] : input.overrides) {
auto inputPath(inputPathPrefix);
inputPath.push_back(id);
@@ -379,15 +395,19 @@ LockedFlake lockFlake(
path we haven't processed yet. */
if (input.follows) {
InputPath target;
- if (hasOverride || input.absolute)
- /* 'follows' from an override is relative to the
- root of the graph. */
+
+ if (parent.absolute && !hasOverride) {
target = *input.follows;
- else {
- /* Otherwise, it's relative to the current flake. */
- target = inputPathPrefix;
+ } else {
+ if (hasOverride) {
+ target = inputPathPrefix;
+ target.pop_back();
+ } else
+ target = parent.path;
+
for (auto & i : *input.follows) target.push_back(i);
}
+
debug("input '%s' follows '%s'", inputPathS, printInputPath(target));
node->inputs.insert_or_assign(id, target);
continue;
@@ -425,22 +445,18 @@ LockedFlake lockFlake(
update it. */
auto lb = lockFlags.inputUpdates.lower_bound(inputPath);
- auto hasChildUpdate =
+ auto mustRefetch =
lb != lockFlags.inputUpdates.end()
&& lb->size() > inputPath.size()
&& std::equal(inputPath.begin(), inputPath.end(), lb->begin());
- if (hasChildUpdate) {
- auto inputFlake = getFlake(
- state, oldLock->lockedRef, false, flakeCache);
- computeLocks(inputFlake.inputs, childNode, inputPath, oldLock);
- } else {
+ FlakeInputs fakeInputs;
+
+ if (!mustRefetch) {
/* No need to fetch this flake, we can be
lazy. However there may be new overrides on the
inputs of this flake, so we need to check
those. */
- FlakeInputs fakeInputs;
-
for (auto & i : oldLock->inputs) {
if (auto lockedNode = std::get_if<0>(&i.second)) {
fakeInputs.emplace(i.first, FlakeInput {
@@ -448,16 +464,33 @@ LockedFlake lockFlake(
.isFlake = (*lockedNode)->isFlake,
});
} else if (auto follows = std::get_if<1>(&i.second)) {
+ auto o = input.overrides.find(i.first);
+ // If the override disappeared, we have to refetch the flake,
+ // since some of the inputs may not be present in the lockfile.
+ if (o == input.overrides.end()) {
+ mustRefetch = true;
+ // There's no point populating the rest of the fake inputs,
+ // since we'll refetch the flake anyways.
+ break;
+ }
fakeInputs.emplace(i.first, FlakeInput {
.follows = *follows,
- .absolute = true
});
}
}
-
- computeLocks(fakeInputs, childNode, inputPath, oldLock);
}
+ LockParent newParent {
+ .path = inputPath,
+ .absolute = false
+ };
+
+ computeLocks(
+ mustRefetch
+ ? getFlake(state, oldLock->lockedRef, false, flakeCache).inputs
+ : fakeInputs,
+ childNode, inputPath, oldLock, newParent, parentPath);
+
} else {
/* We need to create a new lock file entry. So fetch
this input. */
@@ -467,7 +500,15 @@ LockedFlake lockFlake(
throw Error("cannot update flake input '%s' in pure mode", inputPathS);
if (input.isFlake) {
- auto inputFlake = getFlake(state, *input.ref, useRegistries, flakeCache);
+ Path localPath = parentPath;
+ FlakeRef localRef = *input.ref;
+
+ // If this input is a path, recurse it down.
+ // This allows us to resolve path inputs relative to the current flake.
+ if (localRef.input.getType() == "path")
+ localPath = absPath(*input.ref->input.getSourcePath(), parentPath);
+
+ auto inputFlake = getFlake(state, localRef, useRegistries, flakeCache);
/* Note: in case of an --override-input, we use
the *original* ref (input2.ref) for the
@@ -488,6 +529,13 @@ LockedFlake lockFlake(
parents.push_back(*input.ref);
Finally cleanup([&]() { parents.pop_back(); });
+ // Follows paths from existing inputs in the top-level lockfile are absolute,
+ // whereas paths in subordinate lockfiles are relative to those lockfiles.
+ LockParent newParent {
+ .path = inputPath,
+ .absolute = oldLock ? true : false
+ };
+
/* Recursively process the inputs of this
flake. Also, unless we already have this flake
in the top-level lock file, use this flake's
@@ -497,7 +545,8 @@ LockedFlake lockFlake(
oldLock
? std::dynamic_pointer_cast<const Node>(oldLock)
: LockFile::read(
- inputFlake.sourceInfo->actualPath + "/" + inputFlake.lockedRef.subdir + "/flake.lock").root);
+ inputFlake.sourceInfo->actualPath + "/" + inputFlake.lockedRef.subdir + "/flake.lock").root,
+ newParent, localPath);
}
else {
@@ -515,9 +564,17 @@ LockedFlake lockFlake(
}
};
+ LockParent parent {
+ .path = {},
+ .absolute = true
+ };
+
+ // Use the flake's own source path as the base for resolving relative path inputs.
+ auto parentPath = canonPath(flake.sourceInfo->actualPath + "/" + flake.lockedRef.subdir);
+
computeLocks(
flake.inputs, newLockFile.root, {},
- lockFlags.recreateLockFile ? nullptr : oldLockFile.root);
+ lockFlags.recreateLockFile ? nullptr : oldLockFile.root, parent, parentPath);
for (auto & i : lockFlags.inputOverrides)
if (!overridesUsed.count(i.first))
@@ -593,8 +650,10 @@ LockedFlake lockFlake(
}
} else
throw Error("cannot write modified lock file of flake '%s' (use '--no-write-lock-file' to ignore)", topRef);
- } else
+ } else {
warn("not writing modified lock file of flake '%s':\n%s", topRef, chomp(diff));
+ flake.forceDirty = true;
+ }
}
return LockedFlake { .flake = std::move(flake), .lockFile = std::move(newLockFile) };
@@ -617,26 +676,32 @@ void callFlake(EvalState & state,
mkString(*vLocks, lockedFlake.lockFile.to_string());
- emitTreeAttrs(state, *lockedFlake.flake.sourceInfo, lockedFlake.flake.lockedRef.input, *vRootSrc);
+ emitTreeAttrs(
+ state,
+ *lockedFlake.flake.sourceInfo,
+ lockedFlake.flake.lockedRef.input,
+ *vRootSrc,
+ false,
+ lockedFlake.flake.forceDirty);
mkString(*vRootSubdir, lockedFlake.flake.lockedRef.subdir);
- static RootValue vCallFlake = nullptr;
-
- if (!vCallFlake) {
- vCallFlake = allocRootValue(state.allocValue());
+ if (!state.vCallFlake) {
+ state.vCallFlake = allocRootValue(state.allocValue());
state.eval(state.parseExprFromString(
#include "call-flake.nix.gen.hh"
- , "/"), **vCallFlake);
+ , "/"), **state.vCallFlake);
}
- state.callFunction(**vCallFlake, *vLocks, *vTmp1, noPos);
+ state.callFunction(**state.vCallFlake, *vLocks, *vTmp1, noPos);
state.callFunction(*vTmp1, *vRootSrc, *vTmp2, noPos);
state.callFunction(*vTmp2, *vRootSubdir, vRes, noPos);
}
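Caching vCallFlake on the EvalState instead of in a function-local static presumably ties the compiled call-flake.nix wrapper to one evaluator's lifetime rather than to the process. A toy sketch of that per-instance lazy-initialisation pattern (Evaluator and Value are invented stand-ins, not the real classes):

```cpp
#include <iostream>
#include <memory>
#include <string>

struct Value { std::string body; };

struct Evaluator
{
    std::shared_ptr<Value> vCallFlake;   // owned by this evaluator, not global

    Value & callFlakeWrapper()
    {
        // Build the wrapper on first use, then reuse it for this evaluator only.
        if (!vCallFlake)
            vCallFlake = std::make_shared<Value>(Value{"<compiled call-flake.nix>"});
        return *vCallFlake;
    }
};

int main()
{
    Evaluator a, b;
    // Each evaluator builds and caches its own copy; nothing is shared between them.
    std::cout << (&a.callFlakeWrapper() != &b.callFlakeWrapper()) << '\n'; // 1
}
```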
static void prim_getFlake(EvalState & state, const Pos & pos, Value * * args, Value & v)
{
+ state.requireExperimentalFeatureOnEvaluation(Xp::Flakes, "builtins.getFlake", pos);
+
auto flakeRefS = state.forceStringNoCtx(*args[0], pos);
auto flakeRef = parseFlakeRef(flakeRefS, {}, true);
if (evalSettings.pureEval && !flakeRef.input.isImmutable())
@@ -646,13 +711,13 @@ static void prim_getFlake(EvalState & state, const Pos & pos, Value * * args, Va
lockFlake(state, flakeRef,
LockFlags {
.updateLockFile = false,
- .useRegistries = !evalSettings.pureEval && !settings.useRegistries,
+ .useRegistries = !evalSettings.pureEval && settings.useRegistries,
.allowMutable = !evalSettings.pureEval,
}),
v);
}
-static RegisterPrimOp r2("__getFlake", 1, prim_getFlake, "flakes");
+static RegisterPrimOp r2("__getFlake", 1, prim_getFlake);
}
@@ -662,8 +727,9 @@ Fingerprint LockedFlake::getFingerprint() const
// and we haven't changed it, then it's sufficient to use
// flake.sourceInfo.storePath for the fingerprint.
return hashString(htSHA256,
- fmt("%s;%d;%d;%s",
+ fmt("%s;%s;%d;%d;%s",
flake.sourceInfo->storePath.to_string(),
+ flake.lockedRef.subdir,
flake.lockedRef.input.getRevCount().value_or(0),
flake.lockedRef.input.getLastModified().value_or(0),
lockFile));
diff --git a/src/libexpr/flake/flake.hh b/src/libexpr/flake/flake.hh
index 15fd394f8..524b18af1 100644
--- a/src/libexpr/flake/flake.hh
+++ b/src/libexpr/flake/flake.hh
@@ -43,7 +43,6 @@ struct FlakeInput
std::optional<FlakeRef> ref;
bool isFlake = true; // true = process flake to get outputs, false = (fetched) static source path
std::optional<InputPath> follows;
- bool absolute = false; // whether 'follows' is relative to the flake root
FlakeInputs overrides;
};
@@ -59,9 +58,10 @@ struct ConfigFile
/* The contents of a flake.nix file. */
struct Flake
{
- FlakeRef originalRef; // the original flake specification (by the user)
- FlakeRef resolvedRef; // registry references and caching resolved to the specific underlying flake
- FlakeRef lockedRef; // the specific local store result of invoking the fetcher
+ FlakeRef originalRef; // the original flake specification (by the user)
+ FlakeRef resolvedRef; // registry references and caching resolved to the specific underlying flake
+ FlakeRef lockedRef; // the specific local store result of invoking the fetcher
+ bool forceDirty = false; // pretend that 'lockedRef' is dirty
std::optional<std::string> description;
std::shared_ptr<const fetchers::Tree> sourceInfo;
FlakeInputs inputs;
@@ -141,6 +141,8 @@ void emitTreeAttrs(
EvalState & state,
const fetchers::Tree & tree,
const fetchers::Input & input,
- Value & v, bool emptyRevFallback = false);
+ Value & v,
+ bool emptyRevFallback = false,
+ bool forceDirty = false);
}
diff --git a/src/libexpr/flake/flakeref.cc b/src/libexpr/flake/flakeref.cc
index 833e8a776..29128d789 100644
--- a/src/libexpr/flake/flakeref.cc
+++ b/src/libexpr/flake/flakeref.cc
@@ -172,8 +172,12 @@ std::pair<FlakeRef, std::string> parseFlakeRefWithFragment(
auto parsedURL = parseURL(url);
std::string fragment;
std::swap(fragment, parsedURL.fragment);
+
+ auto input = Input::fromURL(parsedURL);
+ input.parent = baseDir;
+
return std::make_pair(
- FlakeRef(Input::fromURL(parsedURL), get(parsedURL.query, "dir").value_or("")),
+ FlakeRef(std::move(input), get(parsedURL.query, "dir").value_or("")),
fragment);
}
}
diff --git a/src/libexpr/get-drvs.cc b/src/libexpr/get-drvs.cc
index f774e6493..ed4c47fbb 100644
--- a/src/libexpr/get-drvs.cc
+++ b/src/libexpr/get-drvs.cc
@@ -102,9 +102,9 @@ DrvInfo::Outputs DrvInfo::queryOutputs(bool onlyOutputsToInstall)
state->forceList(*i->value, *i->pos);
/* For each output... */
- for (unsigned int j = 0; j < i->value->listSize(); ++j) {
+ for (auto elem : i->value->listItems()) {
/* Evaluate the corresponding set. */
- string name = state->forceStringNoCtx(*i->value->listElems()[j], *i->pos);
+ string name = state->forceStringNoCtx(*elem, *i->pos);
Bindings::iterator out = attrs->find(state->symbols.create(name));
if (out == attrs->end()) continue; // FIXME: throw error?
state->forceAttrs(*out->value);
@@ -128,9 +128,9 @@ DrvInfo::Outputs DrvInfo::queryOutputs(bool onlyOutputsToInstall)
/* ^ this shows during `nix-env -i` right under the bad derivation */
if (!outTI->isList()) throw errMsg;
Outputs result;
- for (auto i = outTI->listElems(); i != outTI->listElems() + outTI->listSize(); ++i) {
- if ((*i)->type() != nString) throw errMsg;
- auto out = outputs.find((*i)->string.s);
+ for (auto elem : outTI->listItems()) {
+ if (elem->type() != nString) throw errMsg;
+ auto out = outputs.find(elem->string.s);
if (out == outputs.end()) throw errMsg;
result.insert(*out);
}
@@ -174,8 +174,8 @@ bool DrvInfo::checkMeta(Value & v)
{
state->forceValue(v);
if (v.type() == nList) {
- for (unsigned int n = 0; n < v.listSize(); ++n)
- if (!checkMeta(*v.listElems()[n])) return false;
+ for (auto elem : v.listItems())
+ if (!checkMeta(*elem)) return false;
return true;
}
else if (v.type() == nAttrs) {
@@ -364,10 +364,10 @@ static void getDerivations(EvalState & state, Value & vIn,
}
else if (v.type() == nList) {
- for (unsigned int n = 0; n < v.listSize(); ++n) {
- string pathPrefix2 = addToPath(pathPrefix, (format("%1%") % n).str());
- if (getDerivation(state, *v.listElems()[n], pathPrefix2, drvs, done, ignoreAssertionFailures))
- getDerivations(state, *v.listElems()[n], pathPrefix2, autoArgs, drvs, done, ignoreAssertionFailures);
+ for (auto [n, elem] : enumerate(v.listItems())) {
+ string pathPrefix2 = addToPath(pathPrefix, fmt("%d", n));
+ if (getDerivation(state, *elem, pathPrefix2, drvs, done, ignoreAssertionFailures))
+ getDerivations(state, *elem, pathPrefix2, autoArgs, drvs, done, ignoreAssertionFailures);
}
}
diff --git a/src/libexpr/lexer.l b/src/libexpr/lexer.l
index 27975dc9e..c18877e29 100644
--- a/src/libexpr/lexer.l
+++ b/src/libexpr/lexer.l
@@ -9,6 +9,9 @@
%s DEFAULT
%x STRING
%x IND_STRING
+%x INPATH
+%x INPATH_SLASH
+%x PATH_START
%{
@@ -25,6 +28,8 @@ using namespace nix;
namespace nix {
+// Backup of the previous location, used to recover after yyless(0).
+YYLTYPE prev_yylloc;
static void initLoc(YYLTYPE * loc)
{
@@ -35,6 +40,8 @@ static void initLoc(YYLTYPE * loc)
static void adjustLoc(YYLTYPE * loc, const char * s, size_t len)
{
+ prev_yylloc = *loc;
+
loc->first_line = loc->last_line;
loc->first_column = loc->last_column;
@@ -57,6 +64,7 @@ static void adjustLoc(YYLTYPE * loc, const char * s, size_t len)
}
+// FIXME: optimize
static Expr * unescapeStr(SymbolTable & symbols, const char * s, size_t length)
{
string t;
@@ -97,9 +105,12 @@ ANY .|\n
ID [a-zA-Z\_][a-zA-Z0-9\_\'\-]*
INT [0-9]+
FLOAT (([1-9][0-9]*\.[0-9]*)|(0?\.[0-9]+))([Ee][+-]?[0-9]+)?
-PATH [a-zA-Z0-9\.\_\-\+]*(\/[a-zA-Z0-9\.\_\-\+]+)+\/?
-HPATH \~(\/[a-zA-Z0-9\.\_\-\+]+)+\/?
-SPATH \<[a-zA-Z0-9\.\_\-\+]+(\/[a-zA-Z0-9\.\_\-\+]+)*\>
+PATH_CHAR [a-zA-Z0-9\.\_\-\+]
+PATH {PATH_CHAR}*(\/{PATH_CHAR}+)+\/?
+PATH_SEG {PATH_CHAR}*\/
+HPATH \~(\/{PATH_CHAR}+)+\/?
+HPATH_START \~\/
+SPATH \<{PATH_CHAR}+(\/{PATH_CHAR}+)*\>
URI [a-zA-Z][a-zA-Z0-9\+\-\.]*\:[a-zA-Z0-9\%\/\?\:\@\&\=\+\$\,\-\_\.\!\~\*\']+
@@ -200,17 +211,75 @@ or { return OR_KW; }
return IND_STR;
}
+{PATH_SEG}\$\{ |
+{HPATH_START}\$\{ {
+ PUSH_STATE(PATH_START);
+ yyless(0);
+ *yylloc = prev_yylloc;
+}
+
+<PATH_START>{PATH_SEG} {
+ POP_STATE();
+ PUSH_STATE(INPATH_SLASH);
+ yylval->path = strdup(yytext);
+ return PATH;
+}
+
+<PATH_START>{HPATH_START} {
+ POP_STATE();
+ PUSH_STATE(INPATH_SLASH);
+ yylval->path = strdup(yytext);
+ return HPATH;
+}
+
+{PATH} {
+ if (yytext[yyleng-1] == '/')
+ PUSH_STATE(INPATH_SLASH);
+ else
+ PUSH_STATE(INPATH);
+ yylval->path = strdup(yytext);
+ return PATH;
+}
+{HPATH} {
+ if (yytext[yyleng-1] == '/')
+ PUSH_STATE(INPATH_SLASH);
+ else
+ PUSH_STATE(INPATH);
+ yylval->path = strdup(yytext);
+ return HPATH;
+}
+
+<INPATH,INPATH_SLASH>\$\{ {
+ POP_STATE();
+ PUSH_STATE(INPATH);
+ PUSH_STATE(DEFAULT);
+ return DOLLAR_CURLY;
+}
+<INPATH,INPATH_SLASH>{PATH}|{PATH_SEG}|{PATH_CHAR}+ {
+ POP_STATE();
+ if (yytext[yyleng-1] == '/')
+ PUSH_STATE(INPATH_SLASH);
+ else
+ PUSH_STATE(INPATH);
+ yylval->e = new ExprString(data->symbols.create(string(yytext)));
+ return STR;
+}
+<INPATH>{ANY} |
+<INPATH><<EOF>> {
+ /* If we encounter a non-path character, we tell the parser that the path has
+ ended by returning a PATH_END token, and re-parse this character in the
+ default context (it may be ')', ';', or something of that sort). */
+ POP_STATE();
+ yyless(0);
+ *yylloc = prev_yylloc;
+ return PATH_END;
+}
+
+<INPATH_SLASH>{ANY} |
+<INPATH_SLASH><<EOF>> {
+ throw ParseError("path has a trailing slash");
+}
-{PATH} { if (yytext[yyleng-1] == '/')
- throw ParseError("path '%s' has a trailing slash", yytext);
- yylval->path = strdup(yytext);
- return PATH;
- }
-{HPATH} { if (yytext[yyleng-1] == '/')
- throw ParseError("path '%s' has a trailing slash", yytext);
- yylval->path = strdup(yytext);
- return HPATH;
- }
{SPATH} { yylval->path = strdup(yytext); return SPATH; }
{URI} { yylval->uri = strdup(yytext); return URI; }
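The new PATH_START/INPATH/INPATH_SLASH start conditions let a path literal carry `${...}` interpolations: the leading segment comes back as a PATH (or HPATH) token, later literal segments as STR tokens, each interpolation as DOLLAR_CURLY, and a synthetic PATH_END marks where the path stops. A rough, hand-written illustration of the expected token stream for one input (token names follow the grammar; this is not output from the real lexer):

```cpp
#include <iostream>
#include <string>
#include <vector>

int main()
{
    std::string input = "./foo/${bar}/baz.nix";
    std::vector<std::string> tokens = {
        "PATH(\"./foo/\")",    // leading literal segment (trailing '/' kept)
        "DOLLAR_CURLY",        // start of the interpolated expression
        "ID(bar)",             // ...the expression itself, lexed in DEFAULT...
        "'}'",                 // end of interpolation
        "STR(\"/baz.nix\")",   // literal tail, lexed in the INPATH state
        "PATH_END",            // emitted once a non-path character follows
    };
    for (auto & t : tokens) std::cout << t << '\n';
}
```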
diff --git a/src/libexpr/local.mk b/src/libexpr/local.mk
index 17b83aafd..016631647 100644
--- a/src/libexpr/local.mk
+++ b/src/libexpr/local.mk
@@ -15,7 +15,7 @@ libexpr_CXXFLAGS += -I src/libutil -I src/libstore -I src/libfetchers -I src/lib
libexpr_LIBS = libutil libstore libfetchers
-libexpr_LDFLAGS = -lboost_context -pthread
+libexpr_LDFLAGS += -lboost_context -pthread
ifdef HOST_LINUX
libexpr_LDFLAGS += -ldl
endif
@@ -35,7 +35,7 @@ $(d)/lexer-tab.cc $(d)/lexer-tab.hh: $(d)/lexer.l
clean-files += $(d)/parser-tab.cc $(d)/parser-tab.hh $(d)/lexer-tab.cc $(d)/lexer-tab.hh
-$(eval $(call install-file-in, $(d)/nix-expr.pc, $(prefix)/lib/pkgconfig, 0644))
+$(eval $(call install-file-in, $(d)/nix-expr.pc, $(libdir)/pkgconfig, 0644))
$(foreach i, $(wildcard src/libexpr/flake/*.hh), \
$(eval $(call install-file-in, $(i), $(includedir)/nix/flake, 0644)))
diff --git a/src/libexpr/nixexpr.cc b/src/libexpr/nixexpr.cc
index 492b819e7..57c2f6e44 100644
--- a/src/libexpr/nixexpr.cc
+++ b/src/libexpr/nixexpr.cc
@@ -124,7 +124,7 @@ void ExprList::show(std::ostream & str) const
void ExprLambda::show(std::ostream & str) const
{
str << "(";
- if (matchAttrs) {
+ if (hasFormals()) {
str << "{ ";
bool first = true;
for (auto & i : formals->formals) {
@@ -143,6 +143,16 @@ void ExprLambda::show(std::ostream & str) const
str << ": " << *body << ")";
}
+void ExprCall::show(std::ostream & str) const
+{
+ str << '(' << *fun;
+ for (auto e : args) {
+ str << ' ';
+ str << *e;
+ }
+ str << ')';
+}
+
void ExprLet::show(std::ostream & str) const
{
str << "(let ";
@@ -263,13 +273,13 @@ void ExprVar::bindVars(const StaticEnv & env)
/* Check whether the variable appears in the environment. If so,
set its level and displacement. */
const StaticEnv * curEnv;
- unsigned int level;
+ Level level;
int withLevel = -1;
for (curEnv = &env, level = 0; curEnv; curEnv = curEnv->up, level++) {
if (curEnv->isWith) {
if (withLevel == -1) withLevel = level;
} else {
- StaticEnv::Vars::const_iterator i = curEnv->vars.find(name);
+ auto i = curEnv->find(name);
if (i != curEnv->vars.end()) {
fromWith = false;
this->level = level;
@@ -311,14 +321,16 @@ void ExprOpHasAttr::bindVars(const StaticEnv & env)
void ExprAttrs::bindVars(const StaticEnv & env)
{
const StaticEnv * dynamicEnv = &env;
- StaticEnv newEnv(false, &env);
+ StaticEnv newEnv(false, &env, recursive ? attrs.size() : 0);
if (recursive) {
dynamicEnv = &newEnv;
- unsigned int displ = 0;
+ Displacement displ = 0;
for (auto & i : attrs)
- newEnv.vars[i.first] = i.second.displ = displ++;
+ newEnv.vars.emplace_back(i.first, i.second.displ = displ++);
+
+ // No need to sort newEnv since attrs is in sorted order.
for (auto & i : attrs)
i.second.e->bindVars(i.second.inherited ? env : newEnv);
@@ -342,15 +354,20 @@ void ExprList::bindVars(const StaticEnv & env)
void ExprLambda::bindVars(const StaticEnv & env)
{
- StaticEnv newEnv(false, &env);
+ StaticEnv newEnv(
+ false, &env,
+ (hasFormals() ? formals->formals.size() : 0) +
+ (arg.empty() ? 0 : 1));
- unsigned int displ = 0;
+ Displacement displ = 0;
- if (!arg.empty()) newEnv.vars[arg] = displ++;
+ if (!arg.empty()) newEnv.vars.emplace_back(arg, displ++);
- if (matchAttrs) {
+ if (hasFormals()) {
for (auto & i : formals->formals)
- newEnv.vars[i.name] = displ++;
+ newEnv.vars.emplace_back(i.name, displ++);
+
+ newEnv.sort();
for (auto & i : formals->formals)
if (i.def) i.def->bindVars(newEnv);
@@ -359,13 +376,22 @@ void ExprLambda::bindVars(const StaticEnv & env)
body->bindVars(newEnv);
}
+void ExprCall::bindVars(const StaticEnv & env)
+{
+ fun->bindVars(env);
+ for (auto e : args)
+ e->bindVars(env);
+}
+
void ExprLet::bindVars(const StaticEnv & env)
{
- StaticEnv newEnv(false, &env);
+ StaticEnv newEnv(false, &env, attrs->attrs.size());
- unsigned int displ = 0;
+ Displacement displ = 0;
for (auto & i : attrs->attrs)
- newEnv.vars[i.first] = i.second.displ = displ++;
+ newEnv.vars.emplace_back(i.first, i.second.displ = displ++);
+
+ // No need to sort newEnv since attrs->attrs is in sorted order.
for (auto & i : attrs->attrs)
i.second.e->bindVars(i.second.inherited ? env : newEnv);
@@ -379,7 +405,7 @@ void ExprWith::bindVars(const StaticEnv & env)
level so that `lookupVar' can look up variables in the previous
`with' if this one doesn't contain the desired attribute. */
const StaticEnv * curEnv;
- unsigned int level;
+ Level level;
prevWith = 0;
for (curEnv = &env, level = 1; curEnv; curEnv = curEnv->up, level++)
if (curEnv->isWith) {
@@ -452,5 +478,4 @@ size_t SymbolTable::totalSize() const
return n;
}
-
}
diff --git a/src/libexpr/nixexpr.hh b/src/libexpr/nixexpr.hh
index 51a14cd59..13256272c 100644
--- a/src/libexpr/nixexpr.hh
+++ b/src/libexpr/nixexpr.hh
@@ -4,8 +4,6 @@
#include "symbol-table.hh"
#include "error.hh"
-#include <map>
-
namespace nix {
@@ -135,6 +133,9 @@ struct ExprPath : Expr
Value * maybeThunk(EvalState & state, Env & env);
};
+typedef uint32_t Level;
+typedef uint32_t Displacement;
+
struct ExprVar : Expr
{
Pos pos;
@@ -150,8 +151,8 @@ struct ExprVar : Expr
value is obtained by getting the attribute named `name' from
the set stored in the environment that is `level' levels up
from the current one.*/
- unsigned int level;
- unsigned int displ;
+ Level level;
+ Displacement displ;
ExprVar(const Symbol & name) : name(name) { };
ExprVar(const Pos & pos, const Symbol & name) : pos(pos), name(name) { };
@@ -185,7 +186,7 @@ struct ExprAttrs : Expr
bool inherited;
Expr * e;
Pos pos;
- unsigned int displ; // displacement
+ Displacement displ; // displacement
AttrDef(Expr * e, const Pos & pos, bool inherited=false)
: inherited(inherited), e(e), pos(pos) { };
AttrDef() { };
@@ -233,11 +234,10 @@ struct ExprLambda : Expr
Pos pos;
Symbol name;
Symbol arg;
- bool matchAttrs;
Formals * formals;
Expr * body;
- ExprLambda(const Pos & pos, const Symbol & arg, bool matchAttrs, Formals * formals, Expr * body)
- : pos(pos), arg(arg), matchAttrs(matchAttrs), formals(formals), body(body)
+ ExprLambda(const Pos & pos, const Symbol & arg, Formals * formals, Expr * body)
+ : pos(pos), arg(arg), formals(formals), body(body)
{
if (!arg.empty() && formals && formals->argNames.find(arg) != formals->argNames.end())
throw ParseError({
@@ -247,6 +247,18 @@ struct ExprLambda : Expr
};
void setName(Symbol & name);
string showNamePos() const;
+ inline bool hasFormals() const { return formals != nullptr; }
+ COMMON_METHODS
+};
+
+struct ExprCall : Expr
+{
+ Expr * fun;
+ std::vector<Expr *> args;
+ Pos pos;
+ ExprCall(const Pos & pos, Expr * fun, std::vector<Expr *> && args)
+ : fun(fun), args(args), pos(pos)
+ { }
COMMON_METHODS
};
@@ -308,7 +320,6 @@ struct ExprOpNot : Expr
void eval(EvalState & state, Env & env, Value & v); \
};
-MakeBinOp(ExprApp, "")
MakeBinOp(ExprOpEq, "==")
MakeBinOp(ExprOpNEq, "!=")
MakeBinOp(ExprOpAnd, "&&")
@@ -342,9 +353,28 @@ struct StaticEnv
{
bool isWith;
const StaticEnv * up;
- typedef std::map<Symbol, unsigned int> Vars;
+
+ // Note: these must be in sorted order.
+ typedef std::vector<std::pair<Symbol, Displacement>> Vars;
Vars vars;
- StaticEnv(bool isWith, const StaticEnv * up) : isWith(isWith), up(up) { };
+
+ StaticEnv(bool isWith, const StaticEnv * up, size_t expectedSize = 0) : isWith(isWith), up(up) {
+ vars.reserve(expectedSize);
+ };
+
+ void sort()
+ {
+ std::sort(vars.begin(), vars.end(),
+ [](const Vars::value_type & a, const Vars::value_type & b) { return a.first < b.first; });
+ }
+
+ Vars::const_iterator find(const Symbol & name) const
+ {
+ Vars::value_type key(name, 0);
+ auto i = std::lower_bound(vars.begin(), vars.end(), key);
+ if (i != vars.end() && i->first == name) return i;
+ return vars.end();
+ }
};
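StaticEnv now keeps its variables in a sorted std::vector instead of a std::map: callers emplace_back, sort() once if insertion order wasn't already sorted, and find() does a binary search. A small self-contained sketch of that pattern, with std::string standing in for Symbol (illustration only):

```cpp
#include <algorithm>
#include <cstdint>
#include <iostream>
#include <string>
#include <vector>

using Displacement = uint32_t;
using Vars = std::vector<std::pair<std::string, Displacement>>;

int main()
{
    Vars vars;
    vars.reserve(3);                       // expectedSize, as in the constructor
    vars.emplace_back("z", 0);             // insertion order may be unsorted...
    vars.emplace_back("a", 1);
    vars.emplace_back("m", 2);
    std::sort(vars.begin(), vars.end(),
        [](auto & a, auto & b) { return a.first < b.first; });  // ...so sort once

    // Binary search instead of std::map::find:
    auto find = [&](const std::string & name) {
        Vars::value_type key(name, 0);
        auto i = std::lower_bound(vars.begin(), vars.end(), key);
        return i != vars.end() && i->first == name ? int(i->second) : -1;
    };
    std::cout << find("m") << ' ' << find("q") << '\n';  // prints "2 -1"
}
```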
diff --git a/src/libexpr/parser.y b/src/libexpr/parser.y
index f948dde47..c1f4e72e0 100644
--- a/src/libexpr/parser.y
+++ b/src/libexpr/parser.y
@@ -33,11 +33,9 @@ namespace nix {
Symbol file;
FileOrigin origin;
std::optional<ErrorInfo> error;
- Symbol sLetBody;
ParseData(EvalState & state)
: state(state)
, symbols(state.symbols)
- , sLetBody(symbols.create("<let-body>"))
{ };
};
@@ -126,14 +124,14 @@ static void addAttr(ExprAttrs * attrs, AttrPath & attrPath,
auto j2 = jAttrs->attrs.find(ad.first);
if (j2 != jAttrs->attrs.end()) // Attr already defined in iAttrs, error.
dupAttr(ad.first, j2->second.pos, ad.second.pos);
- jAttrs->attrs[ad.first] = ad.second;
+ jAttrs->attrs.emplace(ad.first, ad.second);
}
} else {
dupAttr(attrPath, pos, j->second.pos);
}
} else {
// This attr path is not defined. Let's create it.
- attrs->attrs[i->symbol] = ExprAttrs::AttrDef(e, pos);
+ attrs->attrs.emplace(i->symbol, ExprAttrs::AttrDef(e, pos));
e->setName(i->symbol);
}
} else {
@@ -283,20 +281,20 @@ void yyerror(YYLTYPE * loc, yyscan_t scanner, ParseData * data, const char * err
}
%type <e> start expr expr_function expr_if expr_op
-%type <e> expr_app expr_select expr_simple
+%type <e> expr_select expr_simple expr_app
%type <list> expr_list
%type <attrs> binds
%type <formals> formals
%type <formal> formal
%type <attrNames> attrs attrpath
%type <string_parts> string_parts_interpolated ind_string_parts
-%type <e> string_parts string_attr
+%type <e> path_start string_parts string_attr
%type <id> attr
%token <id> ID ATTRPATH
%token <e> STR IND_STR
%token <n> INT
%token <nf> FLOAT
-%token <path> PATH HPATH SPATH
+%token <path> PATH HPATH SPATH PATH_END
%token <uri> URI
%token IF THEN ELSE ASSERT WITH LET IN REC INHERIT EQ NEQ AND OR IMPL OR_KW
%token DOLLAR_CURLY /* == ${ */
@@ -324,13 +322,13 @@ expr: expr_function;
expr_function
: ID ':' expr_function
- { $$ = new ExprLambda(CUR_POS, data->symbols.create($1), false, 0, $3); }
+ { $$ = new ExprLambda(CUR_POS, data->symbols.create($1), 0, $3); }
| '{' formals '}' ':' expr_function
- { $$ = new ExprLambda(CUR_POS, data->symbols.create(""), true, $2, $5); }
+ { $$ = new ExprLambda(CUR_POS, data->symbols.create(""), $2, $5); }
| '{' formals '}' '@' ID ':' expr_function
- { $$ = new ExprLambda(CUR_POS, data->symbols.create($5), true, $2, $7); }
+ { $$ = new ExprLambda(CUR_POS, data->symbols.create($5), $2, $7); }
| ID '@' '{' formals '}' ':' expr_function
- { $$ = new ExprLambda(CUR_POS, data->symbols.create($1), true, $4, $7); }
+ { $$ = new ExprLambda(CUR_POS, data->symbols.create($1), $4, $7); }
| ASSERT expr ';' expr_function
{ $$ = new ExprAssert(CUR_POS, $2, $4); }
| WITH expr ';' expr_function
@@ -353,13 +351,13 @@ expr_if
expr_op
: '!' expr_op %prec NOT { $$ = new ExprOpNot($2); }
- | '-' expr_op %prec NEGATE { $$ = new ExprApp(CUR_POS, new ExprApp(new ExprVar(data->symbols.create("__sub")), new ExprInt(0)), $2); }
+ | '-' expr_op %prec NEGATE { $$ = new ExprCall(CUR_POS, new ExprVar(data->symbols.create("__sub")), {new ExprInt(0), $2}); }
| expr_op EQ expr_op { $$ = new ExprOpEq($1, $3); }
| expr_op NEQ expr_op { $$ = new ExprOpNEq($1, $3); }
- | expr_op '<' expr_op { $$ = new ExprApp(CUR_POS, new ExprApp(new ExprVar(data->symbols.create("__lessThan")), $1), $3); }
- | expr_op LEQ expr_op { $$ = new ExprOpNot(new ExprApp(CUR_POS, new ExprApp(new ExprVar(data->symbols.create("__lessThan")), $3), $1)); }
- | expr_op '>' expr_op { $$ = new ExprApp(CUR_POS, new ExprApp(new ExprVar(data->symbols.create("__lessThan")), $3), $1); }
- | expr_op GEQ expr_op { $$ = new ExprOpNot(new ExprApp(CUR_POS, new ExprApp(new ExprVar(data->symbols.create("__lessThan")), $1), $3)); }
+ | expr_op '<' expr_op { $$ = new ExprCall(CUR_POS, new ExprVar(data->symbols.create("__lessThan")), {$1, $3}); }
+ | expr_op LEQ expr_op { $$ = new ExprOpNot(new ExprCall(CUR_POS, new ExprVar(data->symbols.create("__lessThan")), {$3, $1})); }
+ | expr_op '>' expr_op { $$ = new ExprCall(CUR_POS, new ExprVar(data->symbols.create("__lessThan")), {$3, $1}); }
+ | expr_op GEQ expr_op { $$ = new ExprOpNot(new ExprCall(CUR_POS, new ExprVar(data->symbols.create("__lessThan")), {$1, $3})); }
| expr_op AND expr_op { $$ = new ExprOpAnd(CUR_POS, $1, $3); }
| expr_op OR expr_op { $$ = new ExprOpOr(CUR_POS, $1, $3); }
| expr_op IMPL expr_op { $$ = new ExprOpImpl(CUR_POS, $1, $3); }
@@ -367,17 +365,22 @@ expr_op
| expr_op '?' attrpath { $$ = new ExprOpHasAttr($1, *$3); }
| expr_op '+' expr_op
{ $$ = new ExprConcatStrings(CUR_POS, false, new vector<Expr *>({$1, $3})); }
- | expr_op '-' expr_op { $$ = new ExprApp(CUR_POS, new ExprApp(new ExprVar(data->symbols.create("__sub")), $1), $3); }
- | expr_op '*' expr_op { $$ = new ExprApp(CUR_POS, new ExprApp(new ExprVar(data->symbols.create("__mul")), $1), $3); }
- | expr_op '/' expr_op { $$ = new ExprApp(CUR_POS, new ExprApp(new ExprVar(data->symbols.create("__div")), $1), $3); }
+ | expr_op '-' expr_op { $$ = new ExprCall(CUR_POS, new ExprVar(data->symbols.create("__sub")), {$1, $3}); }
+ | expr_op '*' expr_op { $$ = new ExprCall(CUR_POS, new ExprVar(data->symbols.create("__mul")), {$1, $3}); }
+ | expr_op '/' expr_op { $$ = new ExprCall(CUR_POS, new ExprVar(data->symbols.create("__div")), {$1, $3}); }
| expr_op CONCAT expr_op { $$ = new ExprOpConcatLists(CUR_POS, $1, $3); }
| expr_app
;
expr_app
- : expr_app expr_select
- { $$ = new ExprApp(CUR_POS, $1, $2); }
- | expr_select { $$ = $1; }
+ : expr_app expr_select {
+ if (auto e2 = dynamic_cast<ExprCall *>($1)) {
+ e2->args.push_back($2);
+ $$ = $1;
+ } else
+ $$ = new ExprCall(CUR_POS, $1, {$2});
+ }
+ | expr_select
;
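The rewritten expr_app rule folds a chain of juxtapositions into a single n-ary ExprCall rather than a tree of binary ExprApp nodes. A toy version of that flattening step (simplified AST types, no memory management; an editorial sketch, not the patch's code):

```cpp
#include <iostream>
#include <string>
#include <vector>

struct Expr { virtual ~Expr() = default; };
struct ExprVar : Expr { std::string name; ExprVar(std::string n) : name(std::move(n)) {} };
struct ExprCall : Expr
{
    Expr * fun;
    std::vector<Expr *> args;
    ExprCall(Expr * fun, std::vector<Expr *> args) : fun(fun), args(std::move(args)) {}
};

// Mirrors the semantic action: if the function part is already a call,
// append the new argument; otherwise start a new n-ary call node.
Expr * apply(Expr * fun, Expr * arg)
{
    if (auto call = dynamic_cast<ExprCall *>(fun)) {
        call->args.push_back(arg);
        return fun;
    }
    return new ExprCall(fun, {arg});
}

int main()
{
    // "f a b c" parses left to right: ((f a) b) c
    Expr * e = apply(apply(apply(new ExprVar("f"), new ExprVar("a")),
                           new ExprVar("b")), new ExprVar("c"));
    std::cout << dynamic_cast<ExprCall *>(e)->args.size() << '\n';  // 3: one call, three args
}
```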
expr_select
@@ -388,7 +391,7 @@ expr_select
| /* Backwards compatibility: because Nixpkgs has a rarely used
function named ‘or’, allow stuff like ‘map or [...]’. */
expr_simple OR_KW
- { $$ = new ExprApp(CUR_POS, $1, new ExprVar(CUR_POS, data->symbols.create("or"))); }
+ { $$ = new ExprCall(CUR_POS, $1, {new ExprVar(CUR_POS, data->symbols.create("or"))}); }
| expr_simple { $$ = $1; }
;
@@ -405,17 +408,20 @@ expr_simple
| IND_STRING_OPEN ind_string_parts IND_STRING_CLOSE {
$$ = stripIndentation(CUR_POS, data->symbols, *$2);
}
- | PATH { $$ = new ExprPath(absPath($1, data->basePath)); }
- | HPATH { $$ = new ExprPath(getHome() + string{$1 + 1}); }
+ | path_start PATH_END { $$ = $1; }
+ | path_start string_parts_interpolated PATH_END {
+ $2->insert($2->begin(), $1);
+ $$ = new ExprConcatStrings(CUR_POS, false, $2);
+ }
| SPATH {
string path($1 + 1, strlen($1) - 2);
- $$ = new ExprApp(CUR_POS,
- new ExprApp(new ExprVar(data->symbols.create("__findFile")),
- new ExprVar(data->symbols.create("__nixPath"))),
- new ExprString(data->symbols.create(path)));
+ $$ = new ExprCall(CUR_POS,
+ new ExprVar(data->symbols.create("__findFile")),
+ {new ExprVar(data->symbols.create("__nixPath")),
+ new ExprString(data->symbols.create(path))});
}
| URI {
- static bool noURLLiterals = settings.isExperimentalFeatureEnabled("no-url-literals");
+ static bool noURLLiterals = settings.isExperimentalFeatureEnabled(Xp::NoUrlLiterals);
if (noURLLiterals)
throw ParseError({
.msg = hintfmt("URL literals are disabled"),
@@ -452,6 +458,20 @@ string_parts_interpolated
}
;
+path_start
+ : PATH {
+ Path path(absPath($1, data->basePath));
+ /* Add the trailing '/' back to the first segment. */
+ if ($1[strlen($1)-1] == '/' && strlen($1) > 1)
+ path += "/";
+ $$ = new ExprPath(path);
+ }
+ | HPATH {
+ Path path(getHome() + string($1 + 1));
+ $$ = new ExprPath(path);
+ }
+ ;
+
ind_string_parts
: ind_string_parts IND_STR { $$ = $1; $1->push_back($2); }
| ind_string_parts DOLLAR_CURLY expr '}' { $$ = $1; $1->push_back($3); }
@@ -466,7 +486,7 @@ binds
if ($$->attrs.find(i.symbol) != $$->attrs.end())
dupAttr(i.symbol, makeCurPos(@3, data), $$->attrs[i.symbol].pos);
Pos pos = makeCurPos(@3, data);
- $$->attrs[i.symbol] = ExprAttrs::AttrDef(new ExprVar(CUR_POS, i.symbol), pos, true);
+ $$->attrs.emplace(i.symbol, ExprAttrs::AttrDef(new ExprVar(CUR_POS, i.symbol), pos, true));
}
}
| binds INHERIT '(' expr ')' attrs ';'
@@ -475,7 +495,7 @@ binds
for (auto & i : *$6) {
if ($$->attrs.find(i.symbol) != $$->attrs.end())
dupAttr(i.symbol, makeCurPos(@6, data), $$->attrs[i.symbol].pos);
- $$->attrs[i.symbol] = ExprAttrs::AttrDef(new ExprSelect(CUR_POS, $4, i.symbol), makeCurPos(@6, data));
+ $$->attrs.emplace(i.symbol, ExprAttrs::AttrDef(new ExprSelect(CUR_POS, $4, i.symbol), makeCurPos(@6, data)));
}
}
| { $$ = new ExprAttrs(makeCurPos(@0, data)); }
@@ -735,7 +755,7 @@ std::pair<bool, std::string> EvalState::resolveSearchPathElem(const SearchPathEl
res = { true, path };
else {
logWarning({
- .msg = hintfmt("warning: Nix search path entry '%1%' does not exist, ignoring", elem.second)
+ .msg = hintfmt("Nix search path entry '%1%' does not exist, ignoring", elem.second)
});
res = { false, "" };
}
diff --git a/src/libexpr/primops.cc b/src/libexpr/primops.cc
index bfe41c9fa..66af373d7 100644
--- a/src/libexpr/primops.cc
+++ b/src/libexpr/primops.cc
@@ -52,16 +52,13 @@ void EvalState::realiseContext(const PathSet & context)
if (drvs.empty()) return;
if (!evalSettings.enableImportFromDerivation)
- throw EvalError("attempted to realize '%1%' during evaluation but 'allow-import-from-derivation' is false",
+ throw Error(
+ "cannot build '%1%' during evaluation because the option 'allow-import-from-derivation' is disabled",
store->printStorePath(drvs.begin()->drvPath));
- /* For performance, prefetch all substitute info. */
- StorePathSet willBuild, willSubstitute, unknown;
- uint64_t downloadSize, narSize;
+ /* Build/substitute the context. */
std::vector<DerivedPath> buildReqs;
for (auto & d : drvs) buildReqs.emplace_back(DerivedPath { d });
- store->queryMissing(buildReqs, willBuild, willSubstitute, unknown, downloadSize, narSize);
-
store->buildPaths(buildReqs);
/* Add the output of this derivations to the allowed
@@ -73,7 +70,7 @@ void EvalState::realiseContext(const PathSet & context)
if (outputPaths.count(outputName) == 0)
throw Error("derivation '%s' does not have an output named '%s'",
store->printStorePath(drvPath), outputName);
- allowedPaths->insert(store->printStorePath(outputPaths.at(outputName)));
+ allowPath(outputPaths.at(outputName));
}
}
}
@@ -124,7 +121,7 @@ static void import(EvalState & state, const Pos & pos, Value & vPath, Value * vS
});
} catch (Error & e) {
e.addTrace(pos, "while importing '%s'", path);
- throw e;
+ throw;
}
Path realPath = state.checkSourcePath(state.toRealPath(path, context));
@@ -160,16 +157,15 @@ static void import(EvalState & state, const Pos & pos, Value & vPath, Value * vS
}
w.attrs->sort();
- static RootValue fun;
- if (!fun) {
- fun = allocRootValue(state.allocValue());
+ if (!state.vImportedDrvToDerivation) {
+ state.vImportedDrvToDerivation = allocRootValue(state.allocValue());
state.eval(state.parseExprFromString(
#include "imported-drv-to-derivation.nix.gen.hh"
- , "/"), **fun);
+ , "/"), **state.vImportedDrvToDerivation);
}
- state.forceFunction(**fun, pos);
- mkApp(v, **fun, w);
+ state.forceFunction(**state.vImportedDrvToDerivation, pos);
+ mkApp(v, **state.vImportedDrvToDerivation, w);
state.forceAttrs(v, pos);
}
@@ -188,14 +184,17 @@ static void import(EvalState & state, const Pos & pos, Value & vPath, Value * vS
Env * env = &state.allocEnv(vScope->attrs->size());
env->up = &state.baseEnv;
- StaticEnv staticEnv(false, &state.staticBaseEnv);
+ StaticEnv staticEnv(false, &state.staticBaseEnv, vScope->attrs->size());
unsigned int displ = 0;
for (auto & attr : *vScope->attrs) {
- staticEnv.vars[attr.name] = displ;
+ staticEnv.vars.emplace_back(attr.name, displ);
env->values[displ++] = attr.value;
}
+ // No need to call staticEnv.sort(), because
+ // args[0]->attrs is already sorted.
+
printTalkative("evaluating file '%1%'", realPath);
Expr * e = state.parseExprFromFile(resolveExprPath(realPath), staticEnv);
@@ -336,9 +335,8 @@ void prim_exec(EvalState & state, const Pos & pos, Value * * args, Value & v)
PathSet context;
auto program = state.coerceToString(pos, *elems[0], context, false, false);
Strings commandArgs;
- for (unsigned int i = 1; i < args[0]->listSize(); ++i) {
+ for (unsigned int i = 1; i < args[0]->listSize(); ++i)
commandArgs.emplace_back(state.coerceToString(pos, *elems[i], context, false, false));
- }
try {
state.realiseContext(context);
} catch (InvalidPathError & e) {
@@ -414,7 +412,7 @@ static RegisterPrimOp primop_isNull({
Return `true` if *e* evaluates to `null`, and `false` otherwise.
> **Warning**
- >
+ >
> This function is *deprecated*; just write `e == null` instead.
)",
.fun = prim_isNull,
@@ -518,7 +516,11 @@ static RegisterPrimOp primop_isPath({
struct CompareValues
{
- bool operator () (const Value * v1, const Value * v2) const
+ EvalState & state;
+
+ CompareValues(EvalState & state) : state(state) { };
+
+ bool operator () (Value * v1, Value * v2) const
{
if (v1->type() == nFloat && v2->type() == nInt)
return v1->fpoint < v2->integer;
@@ -535,6 +537,17 @@ struct CompareValues
return strcmp(v1->string.s, v2->string.s) < 0;
case nPath:
return strcmp(v1->path, v2->path) < 0;
+ case nList:
+ // Lexicographic comparison
+ for (size_t i = 0;; i++) {
+ if (i == v2->listSize()) {
+ return false;
+ } else if (i == v1->listSize()) {
+ return true;
+ } else if (!state.eqValues(*v1->listElems()[i], *v2->listElems()[i])) {
+ return (*this)(v1->listElems()[i], v2->listElems()[i]);
+ }
+ }
default:
throw EvalError("cannot compare %1% with %2%", showType(*v1), showType(*v2));
}
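For reference, the new nList case implements ordinary lexicographic ordering: the first differing element decides, and a list that is a strict prefix of the other sorts first. A standalone sketch of the same rule over plain ints:

```cpp
#include <iostream>
#include <vector>

// Walk both lists in lockstep; the first position where the elements differ
// decides, and a strict prefix compares as smaller.
bool lessThan(const std::vector<int> & a, const std::vector<int> & b)
{
    for (size_t i = 0;; i++) {
        if (i == b.size()) return false;        // b exhausted first (or both): not less
        if (i == a.size()) return true;         // a is a strict prefix of b: less
        if (a[i] != b[i]) return a[i] < b[i];   // first differing element decides
    }
}

int main()
{
    std::cout << lessThan({1, 2}, {1, 3}) << '\n';   // 1
    std::cout << lessThan({1}, {1, 2}) << '\n';      // 1
    std::cout << lessThan({1, 2}, {1, 2}) << '\n';   // 0
}
```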
@@ -602,8 +615,8 @@ static void prim_genericClosure(EvalState & state, const Pos & pos, Value * * ar
state.forceList(*startSet->value, pos);
ValueList workSet;
- for (unsigned int n = 0; n < startSet->value->listSize(); ++n)
- workSet.push_back(startSet->value->listElems()[n]);
+ for (auto elem : startSet->value->listItems())
+ workSet.push_back(elem);
/* Get the operator. */
Bindings::iterator op = getAttr(
@@ -622,7 +635,8 @@ static void prim_genericClosure(EvalState & state, const Pos & pos, Value * * ar
ValueList res;
// `doneKeys' doesn't need to be a GC root, because its values are
// reachable from res.
- set<Value *, CompareValues> doneKeys;
+ auto cmp = CompareValues(state);
+ set<Value *, decltype(cmp)> doneKeys(cmp);
while (!workSet.empty()) {
Value * e = *(workSet.begin());
workSet.pop_front();
@@ -647,9 +661,9 @@ static void prim_genericClosure(EvalState & state, const Pos & pos, Value * * ar
state.forceList(call, pos);
/* Add the values returned by the operator to the work set. */
- for (unsigned int n = 0; n < call.listSize(); ++n) {
- state.forceValue(*call.listElems()[n], pos);
- workSet.push_back(call.listElems()[n]);
+ for (auto elem : call.listItems()) {
+ state.forceValue(*elem, pos);
+ workSet.push_back(elem);
}
}
@@ -989,16 +1003,17 @@ static void prim_derivationStrict(EvalState & state, const Pos & pos, Value * *
}
if (i->name == state.sContentAddressed) {
- settings.requireExperimentalFeature("ca-derivations");
contentAddressed = state.forceBool(*i->value, pos);
+ if (contentAddressed)
+ settings.requireExperimentalFeature(Xp::CaDerivations);
}
/* The `args' attribute is special: it supplies the
command-line arguments to the builder. */
else if (i->name == state.sArgs) {
state.forceList(*i->value, pos);
- for (unsigned int n = 0; n < i->value->listSize(); ++n) {
- string s = state.coerceToString(posDrvName, *i->value->listElems()[n], context, true);
+ for (auto elem : i->value->listItems()) {
+ string s = state.coerceToString(posDrvName, *elem, context, true);
drv.args.push_back(s);
}
}
@@ -1012,7 +1027,7 @@ static void prim_derivationStrict(EvalState & state, const Pos & pos, Value * *
if (i->name == state.sStructuredAttrs) continue;
auto placeholder(jsonObject->placeholder(key));
- printValueAsJSON(state, true, *i->value, placeholder, context);
+ printValueAsJSON(state, true, *i->value, pos, placeholder, context);
if (i->name == state.sBuilder)
drv.builder = state.forceString(*i->value, context, posDrvName);
@@ -1028,8 +1043,8 @@ static void prim_derivationStrict(EvalState & state, const Pos & pos, Value * *
/* Require ‘outputs’ to be a list of strings. */
state.forceList(*i->value, posDrvName);
Strings ss;
- for (unsigned int n = 0; n < i->value->listSize(); ++n)
- ss.emplace_back(state.forceStringNoCtx(*i->value->listElems()[n], posDrvName));
+ for (auto elem : i->value->listItems())
+ ss.emplace_back(state.forceStringNoCtx(*elem, posDrvName));
handleOutputs(ss);
}
@@ -1174,7 +1189,7 @@ static void prim_derivationStrict(EvalState & state, const Pos & pos, Value * *
// hash per output.
auto hashModulo = hashDerivationModulo(*state.store, Derivation(drv), true);
std::visit(overloaded {
- [&](Hash h) {
+ [&](Hash & h) {
for (auto & i : outputs) {
auto outPath = state.store->makeOutputPath(i, h, drvName);
drv.env[i] = state.store->printStorePath(outPath);
@@ -1186,11 +1201,11 @@ static void prim_derivationStrict(EvalState & state, const Pos & pos, Value * *
});
}
},
- [&](CaOutputHashes) {
+ [&](CaOutputHashes &) {
// Shouldn't happen as the toplevel derivation is not CA.
assert(false);
},
- [&](DeferredHash _) {
+ [&](DeferredHash &) {
for (auto & i : outputs) {
drv.outputs.insert_or_assign(i,
DerivationOutput {
@@ -1444,20 +1459,19 @@ static void prim_findFile(EvalState & state, const Pos & pos, Value * * args, Va
SearchPath searchPath;
- for (unsigned int n = 0; n < args[0]->listSize(); ++n) {
- Value & v2(*args[0]->listElems()[n]);
- state.forceAttrs(v2, pos);
+ for (auto v2 : args[0]->listItems()) {
+ state.forceAttrs(*v2, pos);
string prefix;
- Bindings::iterator i = v2.attrs->find(state.symbols.create("prefix"));
- if (i != v2.attrs->end())
+ Bindings::iterator i = v2->attrs->find(state.symbols.create("prefix"));
+ if (i != v2->attrs->end())
prefix = state.forceStringNoCtx(*i->value, pos);
i = getAttr(
state,
"findFile",
"path",
- v2.attrs,
+ v2->attrs,
pos
);
@@ -1493,15 +1507,20 @@ static void prim_hashFile(EvalState & state, const Pos & pos, Value * * args, Va
string type = state.forceStringNoCtx(*args[0], pos);
std::optional<HashType> ht = parseHashType(type);
if (!ht)
- throw Error({
- .msg = hintfmt("unknown hash type '%1%'", type),
- .errPos = pos
- });
+ throw Error({
+ .msg = hintfmt("unknown hash type '%1%'", type),
+ .errPos = pos
+ });
- PathSet context; // discarded
- Path p = state.coerceToPath(pos, *args[1], context);
+ PathSet context;
+ Path path = state.coerceToPath(pos, *args[1], context);
+ try {
+ state.realiseContext(context);
+ } catch (InvalidPathError & e) {
+ throw EvalError("cannot read '%s' since path '%s' is not valid, at %s", path, e.path, pos);
+ }
- mkString(v, hashFile(*ht, state.checkSourcePath(p)).to_string(Base16, false), context);
+ mkString(v, hashFile(*ht, state.checkSourcePath(state.toRealPath(path, context))).to_string(Base16, false));
}
static RegisterPrimOp primop_hashFile({
@@ -1578,7 +1597,7 @@ static void prim_toXML(EvalState & state, const Pos & pos, Value * * args, Value
{
std::ostringstream out;
PathSet context;
- printValueAsXML(state, true, false, *args[0], out, context);
+ printValueAsXML(state, true, false, *args[0], out, context, pos);
mkString(v, out.str(), context);
}
@@ -1686,7 +1705,7 @@ static void prim_toJSON(EvalState & state, const Pos & pos, Value * * args, Valu
{
std::ostringstream out;
PathSet context;
- printValueAsJSON(state, true, *args[0], out, context);
+ printValueAsJSON(state, true, *args[0], pos, out, context);
mkString(v, out.str(), context);
}
@@ -1712,7 +1731,7 @@ static void prim_fromJSON(EvalState & state, const Pos & pos, Value * * args, Va
parseJSON(state, s, v);
} catch (JSONParseError &e) {
e.addTrace(pos, "while decoding a JSON string");
- throw e;
+ throw;
}
}
@@ -1842,50 +1861,79 @@ static RegisterPrimOp primop_toFile({
.fun = prim_toFile,
});
-static void addPath(EvalState & state, const Pos & pos, const string & name, const Path & path_,
- Value * filterFun, FileIngestionMethod method, const std::optional<Hash> expectedHash, Value & v)
+static void addPath(
+ EvalState & state,
+ const Pos & pos,
+ const string & name,
+ Path path,
+ Value * filterFun,
+ FileIngestionMethod method,
+ const std::optional<Hash> expectedHash,
+ Value & v,
+ const PathSet & context)
{
- const auto path = evalSettings.pureEval && expectedHash ?
- path_ :
- state.checkSourcePath(path_);
- PathFilter filter = filterFun ? ([&](const Path & path) {
- auto st = lstat(path);
+ try {
+ // FIXME: handle CA derivation outputs (where path needs to
+ // be rewritten to the actual output).
+ state.realiseContext(context);
- /* Call the filter function. The first argument is the path,
- the second is a string indicating the type of the file. */
- Value arg1;
- mkString(arg1, path);
+ StorePathSet refs;
- Value fun2;
- state.callFunction(*filterFun, arg1, fun2, noPos);
+ if (state.store->isInStore(path)) {
+ auto [storePath, subPath] = state.store->toStorePath(path);
+ // FIXME: we should scanForReferences on the path before adding it
+ refs = state.store->queryPathInfo(storePath)->references;
+ path = state.store->toRealPath(storePath) + subPath;
+ }
- Value arg2;
- mkString(arg2,
- S_ISREG(st.st_mode) ? "regular" :
- S_ISDIR(st.st_mode) ? "directory" :
- S_ISLNK(st.st_mode) ? "symlink" :
- "unknown" /* not supported, will fail! */);
+ path = evalSettings.pureEval && expectedHash
+ ? path
+ : state.checkSourcePath(path);
- Value res;
- state.callFunction(fun2, arg2, res, noPos);
+ PathFilter filter = filterFun ? ([&](const Path & path) {
+ auto st = lstat(path);
+
+ /* Call the filter function. The first argument is the path,
+ the second is a string indicating the type of the file. */
+ Value arg1;
+ mkString(arg1, path);
+
+ Value arg2;
+ mkString(arg2,
+ S_ISREG(st.st_mode) ? "regular" :
+ S_ISDIR(st.st_mode) ? "directory" :
+ S_ISLNK(st.st_mode) ? "symlink" :
+ "unknown" /* not supported, will fail! */);
+
+ Value * args []{&arg1, &arg2};
+ Value res;
+ state.callFunction(*filterFun, 2, args, res, pos);
- return state.forceBool(res, pos);
- }) : defaultPathFilter;
+ return state.forceBool(res, pos);
+ }) : defaultPathFilter;
- std::optional<StorePath> expectedStorePath;
- if (expectedHash)
- expectedStorePath = state.store->makeFixedOutputPath(method, *expectedHash, name);
- Path dstPath;
- if (!expectedHash || !state.store->isValidPath(*expectedStorePath)) {
- dstPath = state.store->printStorePath(settings.readOnlyMode
- ? state.store->computeStorePathForPath(name, path, method, htSHA256, filter).first
- : state.store->addToStore(name, path, method, htSHA256, filter, state.repair));
- if (expectedHash && expectedStorePath != state.store->parseStorePath(dstPath))
- throw Error("store path mismatch in (possibly filtered) path added from '%s'", path);
- } else
- dstPath = state.store->printStorePath(*expectedStorePath);
+ std::optional<StorePath> expectedStorePath;
+ if (expectedHash)
+ expectedStorePath = state.store->makeFixedOutputPath(method, *expectedHash, name);
- mkString(v, dstPath, {dstPath});
+ Path dstPath;
+ if (!expectedHash || !state.store->isValidPath(*expectedStorePath)) {
+ dstPath = state.store->printStorePath(settings.readOnlyMode
+ ? state.store->computeStorePathForPath(name, path, method, htSHA256, filter).first
+ : state.store->addToStore(name, path, method, htSHA256, filter, state.repair, refs));
+ if (expectedHash && expectedStorePath != state.store->parseStorePath(dstPath))
+ throw Error("store path mismatch in (possibly filtered) path added from '%s'", path);
+ } else
+ dstPath = state.store->printStorePath(*expectedStorePath);
+
+ mkString(v, dstPath, {dstPath});
+
+ state.allowPath(dstPath);
+
+ } catch (Error & e) {
+ e.addTrace(pos, "while adding path '%s'", path);
+ throw;
+ }
}
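When the coerced path already lies inside the store, the reworked addPath keeps that store path's reference set and rewrites the path to its real location before copying. A very rough sketch of the shape of that branch, with invented string-based stand-ins for the libstore calls (FakeStore and its methods are illustrative, not the real API):

```cpp
#include <iostream>
#include <set>
#include <string>
#include <utility>

struct FakeStore
{
    std::string storeDir = "/nix/store";

    bool isInStore(const std::string & p) const
    {
        return p.rfind(storeDir + "/", 0) == 0;
    }

    // Split "/nix/store/abc-src/sub/file" into the store path and the sub path.
    std::pair<std::string, std::string> toStorePath(const std::string & p) const
    {
        auto slash = p.find('/', storeDir.size() + 1);
        if (slash == std::string::npos) return {p, ""};
        return {p.substr(0, slash), p.substr(slash)};
    }
};

int main()
{
    FakeStore store;
    std::string path = "/nix/store/abc-src/pkgs/default.nix";
    std::set<std::string> refs;

    if (store.isInStore(path)) {
        auto [storePath, subPath] = store.toStorePath(path);
        refs = {storePath};                   // stand-in for queryPathInfo()->references
        path = storePath + subPath;           // stand-in for toRealPath() + subPath
    }

    std::cout << path << " (" << refs.size() << " reference(s) carried over)\n";
}
```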
@@ -1893,11 +1941,6 @@ static void prim_filterSource(EvalState & state, const Pos & pos, Value * * args
{
PathSet context;
Path path = state.coerceToPath(pos, *args[1], context);
- if (!context.empty())
- throw EvalError({
- .msg = hintfmt("string '%1%' cannot refer to other paths", path),
- .errPos = pos
- });
state.forceValue(*args[0], pos);
if (args[0]->type() != nFunction)
@@ -1908,13 +1951,26 @@ static void prim_filterSource(EvalState & state, const Pos & pos, Value * * args
.errPos = pos
});
- addPath(state, pos, std::string(baseNameOf(path)), path, args[0], FileIngestionMethod::Recursive, std::nullopt, v);
+ addPath(state, pos, std::string(baseNameOf(path)), path, args[0], FileIngestionMethod::Recursive, std::nullopt, v, context);
}
static RegisterPrimOp primop_filterSource({
.name = "__filterSource",
.args = {"e1", "e2"},
.doc = R"(
+ > **Warning**
+ >
+ > `filterSource` should not be used to filter store paths. Since
+ > `filterSource` uses the name of the input directory while naming
+ > the output directory, doing so will produce a directory name in
+ > the form of `<hash2>-<hash>-<name>`, where `<hash>-<name>` is
+ > the name of the input directory. Since `<hash>` depends on the
+ > unfiltered directory, the name of the output directory will
+ > indirectly depend on files that are filtered out by the
+ > function. This will trigger a rebuild even when a filtered out
+ > function. This will trigger a rebuild even when a filtered-out
+ > specifying the name of the output directory.
+
This function allows you to copy sources into the Nix store while
filtering certain files. For instance, suppose that you want to use
the directory `source-dir` as an input to a Nix expression, e.g.
@@ -1961,18 +2017,13 @@ static void prim_path(EvalState & state, const Pos & pos, Value * * args, Value
Value * filterFun = nullptr;
auto method = FileIngestionMethod::Recursive;
std::optional<Hash> expectedHash;
+ PathSet context;
for (auto & attr : *args[0]->attrs) {
const string & n(attr.name);
- if (n == "path") {
- PathSet context;
+ if (n == "path")
path = state.coerceToPath(*attr.pos, *attr.value, context);
- if (!context.empty())
- throw EvalError({
- .msg = hintfmt("string '%1%' cannot refer to other paths", path),
- .errPos = *attr.pos
- });
- } else if (attr.name == state.sName)
+ else if (attr.name == state.sName)
name = state.forceStringNoCtx(*attr.value, *attr.pos);
else if (n == "filter") {
state.forceValue(*attr.value, pos);
@@ -1995,7 +2046,7 @@ static void prim_path(EvalState & state, const Pos & pos, Value * * args, Value
if (name.empty())
name = baseNameOf(path);
- addPath(state, pos, name, path, filterFun, method, expectedHash, v);
+ addPath(state, pos, name, path, filterFun, method, expectedHash, v, context);
}
static RegisterPrimOp primop_path({
@@ -2109,7 +2160,7 @@ void prim_getAttr(EvalState & state, const Pos & pos, Value * * args, Value & v)
pos
);
// !!! add to stack trace?
- if (state.countCalls && i->pos) state.attrSelects[*i->pos]++;
+ if (state.countCalls && *i->pos != noPos) state.attrSelects[*i->pos]++;
state.forceValue(*i->value, pos);
v = *i->value;
}
@@ -2186,9 +2237,9 @@ static void prim_removeAttrs(EvalState & state, const Pos & pos, Value * * args,
/* Get the attribute names to be removed. */
std::set<Symbol> names;
- for (unsigned int i = 0; i < args[1]->listSize(); ++i) {
- state.forceStringNoCtx(*args[1]->listElems()[i], pos);
- names.insert(state.symbols.create(args[1]->listElems()[i]->string.s));
+ for (auto elem : args[1]->listItems()) {
+ state.forceStringNoCtx(*elem, pos);
+ names.insert(state.symbols.create(elem->string.s));
}
/* Copy all attributes not in that set. Note that we don't need
@@ -2196,7 +2247,7 @@ static void prim_removeAttrs(EvalState & state, const Pos & pos, Value * * args,
vector. */
state.mkAttrs(v, args[0]->attrs->size());
for (auto & i : *args[0]->attrs) {
- if (names.find(i.name) == names.end())
+ if (!names.count(i.name))
v.attrs->push_back(i);
}
}
@@ -2230,15 +2281,14 @@ static void prim_listToAttrs(EvalState & state, const Pos & pos, Value * * args,
std::set<Symbol> seen;
- for (unsigned int i = 0; i < args[0]->listSize(); ++i) {
- Value & v2(*args[0]->listElems()[i]);
- state.forceAttrs(v2, pos);
+ for (auto v2 : args[0]->listItems()) {
+ state.forceAttrs(*v2, pos);
Bindings::iterator j = getAttr(
state,
"listToAttrs",
state.sName,
- v2.attrs,
+ v2->attrs,
pos
);
@@ -2250,7 +2300,7 @@ static void prim_listToAttrs(EvalState & state, const Pos & pos, Value * * args,
state,
"listToAttrs",
state.sValue,
- v2.attrs,
+ v2->attrs,
pos
);
v.attrs->push_back(Attr(sym, j2->value, j2->pos));
@@ -2317,11 +2367,10 @@ static void prim_catAttrs(EvalState & state, const Pos & pos, Value * * args, Va
Value * res[args[1]->listSize()];
unsigned int found = 0;
- for (unsigned int n = 0; n < args[1]->listSize(); ++n) {
- Value & v2(*args[1]->listElems()[n]);
- state.forceAttrs(v2, pos);
- Bindings::iterator i = v2.attrs->find(attrName);
- if (i != v2.attrs->end())
+ for (auto v2 : args[1]->listItems()) {
+ state.forceAttrs(*v2, pos);
+ Bindings::iterator i = v2->attrs->find(attrName);
+ if (i != v2->attrs->end())
res[found++] = i->value;
}
@@ -2360,7 +2409,7 @@ static void prim_functionArgs(EvalState & state, const Pos & pos, Value * * args
.errPos = pos
});
- if (!args[0]->lambda.fun->matchAttrs) {
+ if (!args[0]->lambda.fun->hasFormals()) {
state.mkAttrs(v, 0);
return;
}
@@ -2369,7 +2418,7 @@ static void prim_functionArgs(EvalState & state, const Pos & pos, Value * * args
for (auto & i : args[0]->lambda.fun->formals->formals) {
// !!! should optimise booleans (allocate only once)
Value * value = state.allocValue();
- v.attrs->push_back(Attr(i.name, value, &i.pos));
+ v.attrs->push_back(Attr(i.name, value, ptr(&i.pos)));
mkBool(*value, i.def);
}
v.attrs->sort();
@@ -2515,7 +2564,7 @@ static RegisterPrimOp primop_tail({
the argument isn’t a list or is an empty list.
> **Warning**
- >
+ >
> This function should generally be avoided since it's inefficient:
> unlike Haskell's `tail`, it takes O(n) time, so recursing over a
> list by repeatedly calling `tail` takes O(n^2) time.
@@ -2596,8 +2645,8 @@ static void prim_elem(EvalState & state, const Pos & pos, Value * * args, Value
{
bool res = false;
state.forceList(*args[1], pos);
- for (unsigned int n = 0; n < args[1]->listSize(); ++n)
- if (state.eqValues(*args[0], *args[1]->listElems()[n])) {
+ for (auto elem : args[1]->listItems())
+ if (state.eqValues(*args[0], *elem)) {
res = true;
break;
}
@@ -2656,11 +2705,10 @@ static void prim_foldlStrict(EvalState & state, const Pos & pos, Value * * args,
if (args[2]->listSize()) {
Value * vCur = args[1];
- for (unsigned int n = 0; n < args[2]->listSize(); ++n) {
- Value vTmp;
- state.callFunction(*args[0], *vCur, vTmp, pos);
+ for (auto [n, elem] : enumerate(args[2]->listItems())) {
+ Value * vs []{vCur, elem};
vCur = n == args[2]->listSize() - 1 ? &v : state.allocValue();
- state.callFunction(vTmp, *args[2]->listElems()[n], *vCur, pos);
+ state.callFunction(*args[0], 2, vs, *vCur, pos);
}
state.forceValue(v, pos);
} else {
@@ -2674,9 +2722,9 @@ static RegisterPrimOp primop_foldlStrict({
.args = {"op", "nul", "list"},
.doc = R"(
Reduce a list by applying a binary operator, from left to right,
- e.g. `foldl’ op nul [x0 x1 x2 ...] = op (op (op nul x0) x1) x2)
+ e.g. `foldl' op nul [x0 x1 x2 ...] = op (op (op nul x0) x1) x2
...`. The operator is applied strictly, i.e., its arguments are
- evaluated first. For example, `foldl’ (x: y: x + y) 0 [1 2 3]`
+ evaluated first. For example, `foldl' (x: y: x + y) 0 [1 2 3]`
evaluates to 6.
)",
.fun = prim_foldlStrict,
@@ -2688,8 +2736,8 @@ static void anyOrAll(bool any, EvalState & state, const Pos & pos, Value * * arg
state.forceList(*args[1], pos);
Value vTmp;
- for (unsigned int n = 0; n < args[1]->listSize(); ++n) {
- state.callFunction(*args[0], *args[1]->listElems()[n], vTmp, pos);
+ for (auto elem : args[1]->listItems()) {
+ state.callFunction(*args[0], *elem, vTmp, pos);
bool res = state.forceBool(vTmp, pos);
if (res == any) {
mkBool(v, any);
@@ -2781,17 +2829,16 @@ static void prim_sort(EvalState & state, const Pos & pos, Value * * args, Value
v.listElems()[n] = args[1]->listElems()[n];
}
-
auto comparator = [&](Value * a, Value * b) {
/* Optimization: if the comparator is lessThan, bypass
callFunction. */
if (args[0]->isPrimOp() && args[0]->primOp->fun == prim_lessThan)
- return CompareValues()(a, b);
+ return CompareValues(state)(a, b);
- Value vTmp1, vTmp2;
- state.callFunction(*args[0], *a, vTmp1, pos);
- state.callFunction(vTmp1, *b, vTmp2, pos);
- return state.forceBool(vTmp2, pos);
+ Value * vs[] = {a, b};
+ Value vBool;
+ state.callFunction(*args[0], 2, vs, vBool, pos);
+ return state.forceBool(vBool, pos);
};
/* FIXME: std::sort can segfault if the comparator is not a strict
@@ -2881,6 +2928,56 @@ static RegisterPrimOp primop_partition({
.fun = prim_partition,
});
+static void prim_groupBy(EvalState & state, const Pos & pos, Value * * args, Value & v)
+{
+ state.forceFunction(*args[0], pos);
+ state.forceList(*args[1], pos);
+
+ ValueVectorMap attrs;
+
+ for (auto vElem : args[1]->listItems()) {
+ Value res;
+ state.callFunction(*args[0], *vElem, res, pos);
+ string name = state.forceStringNoCtx(res, pos);
+ Symbol sym = state.symbols.create(name);
+ auto vector = attrs.try_emplace(sym, ValueVector()).first;
+ vector->second.push_back(vElem);
+ }
+
+ state.mkAttrs(v, attrs.size());
+
+ for (auto & i : attrs) {
+ Value * list = state.allocAttr(v, i.first);
+ auto size = i.second.size();
+ state.mkList(*list, size);
+ memcpy(list->listElems(), i.second.data(), sizeof(Value *) * size);
+ }
+}
+
+static RegisterPrimOp primop_groupBy({
+ .name = "__groupBy",
+ .args = {"f", "list"},
+ .doc = R"(
+ Groups the elements of *list* by the string that the function *f*
+ returns for each element. The result is an attribute set in which
+ each attribute value contains the elements of *list* for which *f*
+ returned that attribute's name.
+
+ For example,
+
+ ```nix
+ builtins.groupBy (builtins.substring 0 1) ["foo" "bar" "baz"]
+ ```
+
+ evaluates to
+
+ ```nix
+ { b = [ "bar" "baz" ]; f = [ "foo" ]; }
+ ```
+ )",
+ .fun = prim_groupBy,
+});
+
static void prim_concatMap(EvalState & state, const Pos & pos, Value * * args, Value & v)
{
state.forceFunction(*args[0], pos);
@@ -2897,7 +2994,7 @@ static void prim_concatMap(EvalState & state, const Pos & pos, Value * * args, V
state.forceList(lists[n], lists[n].determinePos(args[0]->determinePos(pos)));
} catch (TypeError &e) {
e.addTrace(pos, hintfmt("while invoking '%s'", "concatMap"));
- throw e;
+ throw;
}
len += lists[n].listSize();
}
@@ -3068,7 +3165,7 @@ static void prim_lessThan(EvalState & state, const Pos & pos, Value * * args, Va
{
state.forceValue(*args[0], pos);
state.forceValue(*args[1], pos);
- CompareValues comp;
+ CompareValues comp{state};
mkBool(v, comp(args[0], args[1]));
}
@@ -3194,7 +3291,7 @@ static void prim_hashString(EvalState & state, const Pos & pos, Value * * args,
PathSet context; // discarded
string s = state.forceString(*args[1], context, pos);
- mkString(v, hashString(*ht, s).to_string(Base16, false), context);
+ mkString(v, hashString(*ht, s).to_string(Base16, false));
}
static RegisterPrimOp primop_hashString({
@@ -3419,9 +3516,9 @@ static void prim_concatStringsSep(EvalState & state, const Pos & pos, Value * *
res.reserve((args[1]->listSize() + 32) * sep.size());
bool first = true;
- for (unsigned int n = 0; n < args[1]->listSize(); ++n) {
+ for (auto elem : args[1]->listItems()) {
if (first) first = false; else res += sep;
- res += state.coerceToString(pos, *args[1]->listElems()[n], context);
+ res += state.coerceToString(pos, *elem, context);
}
mkString(v, res, context);
@@ -3450,14 +3547,14 @@ static void prim_replaceStrings(EvalState & state, const Pos & pos, Value * * ar
vector<string> from;
from.reserve(args[0]->listSize());
- for (unsigned int n = 0; n < args[0]->listSize(); ++n)
- from.push_back(state.forceString(*args[0]->listElems()[n], pos));
+ for (auto elem : args[0]->listItems())
+ from.push_back(state.forceString(*elem, pos));
vector<std::pair<string, PathSet>> to;
to.reserve(args[1]->listSize());
- for (unsigned int n = 0; n < args[1]->listSize(); ++n) {
+ for (auto elem : args[1]->listItems()) {
PathSet ctx;
- auto s = state.forceString(*args[1]->listElems()[n], ctx, pos);
+ auto s = state.forceString(*elem, ctx, pos);
to.push_back(std::make_pair(std::move(s), std::move(ctx)));
}
@@ -3601,15 +3698,13 @@ static RegisterPrimOp primop_splitVersion({
RegisterPrimOp::PrimOps * RegisterPrimOp::primOps;
-RegisterPrimOp::RegisterPrimOp(std::string name, size_t arity, PrimOpFun fun,
- std::optional<std::string> requiredFeature)
+RegisterPrimOp::RegisterPrimOp(std::string name, size_t arity, PrimOpFun fun)
{
if (!primOps) primOps = new PrimOps;
primOps->push_back({
.name = name,
.args = {},
.arity = arity,
- .requiredFeature = std::move(requiredFeature),
.fun = fun
});
}
@@ -3660,7 +3755,7 @@ void EvalState::createBaseEnv()
language feature gets added. It's not necessary to increase it
when primops get added, because you can just use `builtins ?
primOp' to check. */
- mkInt(v, 5);
+ mkInt(v, 6);
addConstant("__langVersion", v);
// Miscellaneous
@@ -3683,26 +3778,31 @@ void EvalState::createBaseEnv()
if (RegisterPrimOp::primOps)
for (auto & primOp : *RegisterPrimOp::primOps)
- if (!primOp.requiredFeature || settings.isExperimentalFeatureEnabled(*primOp.requiredFeature))
- addPrimOp({
- .fun = primOp.fun,
- .arity = std::max(primOp.args.size(), primOp.arity),
- .name = symbols.create(primOp.name),
- .args = std::move(primOp.args),
- .doc = primOp.doc,
- });
+ addPrimOp({
+ .fun = primOp.fun,
+ .arity = std::max(primOp.args.size(), primOp.arity),
+ .name = symbols.create(primOp.name),
+ .args = primOp.args,
+ .doc = primOp.doc,
+ });
/* Add a wrapper around the derivation primop that computes the
`drvPath' and `outPath' attributes lazily. */
sDerivationNix = symbols.create("//builtin/derivation.nix");
- eval(parse(
- #include "primops/derivation.nix.gen.hh"
- , foFile, sDerivationNix, "/", staticBaseEnv), v);
- addConstant("derivation", v);
+ auto vDerivation = allocValue();
+ addConstant("derivation", vDerivation);
/* Now that we've added all primops, sort the `builtins' set,
because attribute lookups expect it to be sorted. */
baseEnv.values[0]->attrs->sort();
+
+ staticBaseEnv.sort();
+
+ /* Note: we have to initialize the 'derivation' constant *after*
+ building baseEnv/staticBaseEnv because it uses 'builtins'. */
+ eval(parse(
+ #include "primops/derivation.nix.gen.hh"
+ , foFile, sDerivationNix, "/", staticBaseEnv), *vDerivation);
}
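The eval.cc/primops.cc hunks above drop the `requiredFeature` parameter from the three-argument `RegisterPrimOp` constructor and bump `__langVersion`. As a rough, hypothetical sketch (the primop below is not part of this diff, and it assumes the usual primops.cc context), a registration now looks like this, with any experimental-feature gating moved into the primop body:

```
// Hypothetical example, not part of this change: a toy primop registered
// with the simplified three-argument constructor.
static void prim_increment(EvalState & state, const Pos & pos, Value * * args, Value & v)
{
    auto n = state.forceInt(*args[0], pos);   // force the single argument to an integer
    mkInt(v, n + 1);                          // return n + 1
}

// Any experimental-feature check now has to happen inside prim_increment itself
// (e.g. via settings.requireExperimentalFeature), since the constructor no
// longer takes a requiredFeature argument.
static RegisterPrimOp r_increment("__increment", 1, prim_increment);
```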
diff --git a/src/libexpr/primops.hh b/src/libexpr/primops.hh
index 9d42d6539..5b16e075f 100644
--- a/src/libexpr/primops.hh
+++ b/src/libexpr/primops.hh
@@ -15,7 +15,6 @@ struct RegisterPrimOp
std::vector<std::string> args;
size_t arity = 0;
const char * doc;
- std::optional<std::string> requiredFeature;
PrimOpFun fun;
};
@@ -28,8 +27,7 @@ struct RegisterPrimOp
RegisterPrimOp(
std::string name,
size_t arity,
- PrimOpFun fun,
- std::optional<std::string> requiredFeature = {});
+ PrimOpFun fun);
RegisterPrimOp(Info && info);
};
diff --git a/src/libexpr/primops/context.cc b/src/libexpr/primops/context.cc
index 31cf812b4..20545afd0 100644
--- a/src/libexpr/primops/context.cc
+++ b/src/libexpr/primops/context.cc
@@ -118,9 +118,8 @@ static void prim_getContext(EvalState & state, const Pos & pos, Value * * args,
auto & outputsVal = *state.allocAttr(infoVal, state.sOutputs);
state.mkList(outputsVal, info.second.outputs.size());
size_t i = 0;
- for (const auto & output : info.second.outputs) {
+ for (const auto & output : info.second.outputs)
mkString(*(outputsVal.listElems()[i++] = state.allocValue()), output);
- }
}
infoVal.attrs->sort();
}
@@ -181,8 +180,8 @@ static void prim_appendContext(EvalState & state, const Pos & pos, Value * * arg
.errPos = *i.pos
});
}
- for (unsigned int n = 0; n < iter->value->listSize(); ++n) {
- auto name = state.forceStringNoCtx(*iter->value->listElems()[n], *iter->pos);
+ for (auto elem : iter->value->listItems()) {
+ auto name = state.forceStringNoCtx(*elem, *iter->pos);
context.insert("!" + name + "!" + string(i.name));
}
}
diff --git a/src/libexpr/primops/fetchMercurial.cc b/src/libexpr/primops/fetchMercurial.cc
index 3f88ccb91..c23480853 100644
--- a/src/libexpr/primops/fetchMercurial.cc
+++ b/src/libexpr/primops/fetchMercurial.cc
@@ -15,7 +15,7 @@ static void prim_fetchMercurial(EvalState & state, const Pos & pos, Value * * ar
std::string name = "source";
PathSet context;
- state.forceValue(*args[0]);
+ state.forceValue(*args[0], pos);
if (args[0]->type() == nAttrs) {
@@ -84,8 +84,7 @@ static void prim_fetchMercurial(EvalState & state, const Pos & pos, Value * * ar
mkInt(*state.allocAttr(v, state.symbols.create("revCount")), *revCount);
v.attrs->sort();
- if (state.allowedPaths)
- state.allowedPaths->insert(tree.actualPath);
+ state.allowPath(tree.storePath);
}
static RegisterPrimOp r_fetchMercurial("fetchMercurial", 1, prim_fetchMercurial);
diff --git a/src/libexpr/primops/fetchTree.cc b/src/libexpr/primops/fetchTree.cc
index 730db84ed..079513873 100644
--- a/src/libexpr/primops/fetchTree.cc
+++ b/src/libexpr/primops/fetchTree.cc
@@ -16,7 +16,8 @@ void emitTreeAttrs(
const fetchers::Tree & tree,
const fetchers::Input & input,
Value & v,
- bool emptyRevFallback)
+ bool emptyRevFallback,
+ bool forceDirty)
{
assert(input.isImmutable());
@@ -33,24 +34,28 @@ void emitTreeAttrs(
mkString(*state.allocAttr(v, state.symbols.create("narHash")),
narHash->to_string(SRI, true));
- if (auto rev = input.getRev()) {
- mkString(*state.allocAttr(v, state.symbols.create("rev")), rev->gitRev());
- mkString(*state.allocAttr(v, state.symbols.create("shortRev")), rev->gitShortRev());
- } else if (emptyRevFallback) {
- // Backwards compat for `builtins.fetchGit`: dirty repos return an empty sha1 as rev
- auto emptyHash = Hash(htSHA1);
- mkString(*state.allocAttr(v, state.symbols.create("rev")), emptyHash.gitRev());
- mkString(*state.allocAttr(v, state.symbols.create("shortRev")), emptyHash.gitShortRev());
- }
-
if (input.getType() == "git")
mkBool(*state.allocAttr(v, state.symbols.create("submodules")),
fetchers::maybeGetBoolAttr(input.attrs, "submodules").value_or(false));
- if (auto revCount = input.getRevCount())
- mkInt(*state.allocAttr(v, state.symbols.create("revCount")), *revCount);
- else if (emptyRevFallback)
- mkInt(*state.allocAttr(v, state.symbols.create("revCount")), 0);
+ if (!forceDirty) {
+
+ if (auto rev = input.getRev()) {
+ mkString(*state.allocAttr(v, state.symbols.create("rev")), rev->gitRev());
+ mkString(*state.allocAttr(v, state.symbols.create("shortRev")), rev->gitShortRev());
+ } else if (emptyRevFallback) {
+ // Backwards compat for `builtins.fetchGit`: dirty repos return an empty sha1 as rev
+ auto emptyHash = Hash(htSHA1);
+ mkString(*state.allocAttr(v, state.symbols.create("rev")), emptyHash.gitRev());
+ mkString(*state.allocAttr(v, state.symbols.create("shortRev")), emptyHash.gitShortRev());
+ }
+
+ if (auto revCount = input.getRevCount())
+ mkInt(*state.allocAttr(v, state.symbols.create("revCount")), *revCount);
+ else if (emptyRevFallback)
+ mkInt(*state.allocAttr(v, state.symbols.create("revCount")), 0);
+
+ }
if (auto lastModified = input.getLastModified()) {
mkInt(*state.allocAttr(v, state.symbols.create("lastModified")), *lastModified);
@@ -61,7 +66,7 @@ void emitTreeAttrs(
v.attrs->sort();
}
-std::string fixURI(std::string uri, EvalState &state, const std::string & defaultScheme = "file")
+std::string fixURI(std::string uri, EvalState & state, const std::string & defaultScheme = "file")
{
state.checkURI(uri);
return uri.find("://") != std::string::npos ? uri : defaultScheme + "://" + uri;
@@ -69,53 +74,66 @@ std::string fixURI(std::string uri, EvalState &state, const std::string & defaul
std::string fixURIForGit(std::string uri, EvalState & state)
{
- static std::regex scp_uri("([^/].*)@(.*):(.*)");
+    /* Detects scp-style URIs (e.g. git@github.com:NixOS/nix) and fixes
+     * them by replacing the `:` with a `/` and assuming a scheme of
+     * `ssh://`. */
+ static std::regex scp_uri("([^/]*)@(.*):(.*)");
if (uri[0] != '/' && std::regex_match(uri, scp_uri))
return fixURI(std::regex_replace(uri, scp_uri, "$1@$2/$3"), state, "ssh");
else
return fixURI(uri, state);
}
-void addURI(EvalState &state, fetchers::Attrs &attrs, Symbol name, std::string v)
-{
- string n(name);
- attrs.emplace(name, n == "url" ? fixURI(v, state) : v);
-}
-
struct FetchTreeParams {
bool emptyRevFallback = false;
bool allowNameArgument = false;
};
static void fetchTree(
- EvalState &state,
- const Pos &pos,
- Value **args,
- Value &v,
- const std::optional<std::string> type,
+ EvalState & state,
+ const Pos & pos,
+ Value * * args,
+ Value & v,
+ std::optional<std::string> type,
const FetchTreeParams & params = FetchTreeParams{}
) {
fetchers::Input input;
PathSet context;
- state.forceValue(*args[0]);
+ state.forceValue(*args[0], pos);
if (args[0]->type() == nAttrs) {
state.forceAttrs(*args[0], pos);
fetchers::Attrs attrs;
+ if (auto aType = args[0]->attrs->get(state.sType)) {
+ if (type)
+ throw Error({
+ .msg = hintfmt("unexpected attribute 'type'"),
+ .errPos = pos
+ });
+ type = state.forceStringNoCtx(*aType->value, *aType->pos);
+ } else if (!type)
+ throw Error({
+ .msg = hintfmt("attribute 'type' is missing in call to 'fetchTree'"),
+ .errPos = pos
+ });
+
+ attrs.emplace("type", type.value());
+
for (auto & attr : *args[0]->attrs) {
- state.forceValue(*attr.value);
- if (attr.value->type() == nPath || attr.value->type() == nString)
- addURI(
- state,
- attrs,
- attr.name,
- state.coerceToString(*attr.pos, *attr.value, context, false, false)
- );
- else if (attr.value->type() == nString)
- addURI(state, attrs, attr.name, attr.value->string.s);
+ if (attr.name == state.sType) continue;
+ state.forceValue(*attr.value, *attr.pos);
+ if (attr.value->type() == nPath || attr.value->type() == nString) {
+ auto s = state.coerceToString(*attr.pos, *attr.value, context, false, false);
+ attrs.emplace(attr.name,
+ attr.name == "url"
+ ? type == "git"
+ ? fixURIForGit(s, state)
+ : fixURI(s, state)
+ : s);
+ }
else if (attr.value->type() == nBool)
attrs.emplace(attr.name, Explicit<bool>{attr.value->boolean});
else if (attr.value->type() == nInt)
@@ -125,15 +143,6 @@ static void fetchTree(
attr.name, showType(*attr.value));
}
- if (type)
- attrs.emplace("type", type.value());
-
- if (!attrs.count("type"))
- throw Error({
- .msg = hintfmt("attribute 'type' is missing in call to 'fetchTree'"),
- .errPos = pos
- });
-
if (!params.allowNameArgument)
if (auto nameIter = attrs.find("name"); nameIter != attrs.end())
throw Error({
@@ -141,7 +150,6 @@ static void fetchTree(
.errPos = pos
});
-
input = fetchers::Input::fromAttrs(std::move(attrs));
} else {
auto url = state.coerceToString(pos, *args[0], context, false, false);
@@ -164,15 +172,14 @@ static void fetchTree(
auto [tree, input2] = input.fetch(state.store);
- if (state.allowedPaths)
- state.allowedPaths->insert(tree.actualPath);
+ state.allowPath(tree.storePath);
- emitTreeAttrs(state, tree, input2, v, params.emptyRevFallback);
+ emitTreeAttrs(state, tree, input2, v, params.emptyRevFallback, false);
}
static void prim_fetchTree(EvalState & state, const Pos & pos, Value * * args, Value & v)
{
- settings.requireExperimentalFeature("flakes");
+ settings.requireExperimentalFeature(Xp::Flakes);
fetchTree(state, pos, args, v, std::nullopt, FetchTreeParams { .allowNameArgument = false });
}
@@ -185,7 +192,7 @@ static void fetch(EvalState & state, const Pos & pos, Value * * args, Value & v,
std::optional<std::string> url;
std::optional<Hash> expectedHash;
- state.forceValue(*args[0]);
+ state.forceValue(*args[0], pos);
if (args[0]->type() == nAttrs) {
@@ -229,20 +236,18 @@ static void fetch(EvalState & state, const Pos & pos, Value * * args, Value & v,
? fetchers::downloadTarball(state.store, *url, name, (bool) expectedHash).first.storePath
: fetchers::downloadFile(state.store, *url, name, (bool) expectedHash).storePath;
- auto path = state.store->toRealPath(storePath);
-
if (expectedHash) {
auto hash = unpack
? state.store->queryPathInfo(storePath)->narHash
- : hashFile(htSHA256, path);
+ : hashFile(htSHA256, state.store->toRealPath(storePath));
if (hash != *expectedHash)
throw Error((unsigned int) 102, "hash mismatch in file downloaded from '%s':\n specified: %s\n got: %s",
*url, expectedHash->to_string(Base32, true), hash.to_string(Base32, true));
}
- if (state.allowedPaths)
- state.allowedPaths->insert(path);
+ state.allowPath(storePath);
+ auto path = state.store->printStorePath(storePath);
mkString(v, path, PathSet({path}));
}
@@ -285,13 +290,13 @@ static RegisterPrimOp primop_fetchTarball({
stdenv.mkDerivation { … }
```
- The fetched tarball is cached for a certain amount of time (1 hour
- by default) in `~/.cache/nix/tarballs/`. You can change the cache
- timeout either on the command line with `--option tarball-ttl number
- of seconds` or in the Nix configuration file with this option: `
- number of seconds to cache `.
+ The fetched tarball is cached for a certain amount of time (1
+ hour by default) in `~/.cache/nix/tarballs/`. You can change the
+ cache timeout either on the command line with `--tarball-ttl`
+ *number-of-seconds* or in the Nix configuration file by adding
+ the line `tarball-ttl = ` *number-of-seconds*.
- Note that when obtaining the hash with ` nix-prefetch-url ` the
+ Note that when obtaining the hash with `nix-prefetch-url` the
option `--unpack` is required.
This function can also verify the contents against a hash. In that
@@ -391,7 +396,7 @@ static RegisterPrimOp primop_fetchGit({
```
> **Note**
- >
+ >
> It is nice to always specify the branch which a revision
> belongs to. Without the branch being specified, the fetcher
> might fail if the default branch changes. Additionally, it can
@@ -428,12 +433,12 @@ static RegisterPrimOp primop_fetchGit({
```
> **Note**
- >
+ >
> Nix will refetch the branch in accordance with
> the option `tarball-ttl`.
> **Note**
- >
+ >
> This behavior is disabled in *Pure evaluation mode*.
)",
.fun = prim_fetchGit,
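For reference, here is a standalone sketch (not part of the diff) of what the tightened `scp_uri` regex in `fixURIForGit` above does to an scp-style URL; the program and the sample URL are illustrative only:

```
// Standalone illustration of the scp-style URL rewrite performed by fixURIForGit.
#include <iostream>
#include <regex>
#include <string>

int main()
{
    static std::regex scp_uri("([^/]*)@(.*):(.*)");
    std::string uri = "git@github.com:NixOS/nix";
    if (uri[0] != '/' && std::regex_match(uri, scp_uri))
        // "$1@$2/$3" replaces the ':' separator with '/'; fixURI() then prepends "ssh://".
        std::cout << "ssh://" << std::regex_replace(uri, scp_uri, "$1@$2/$3") << "\n";
    // prints: ssh://git@github.com/NixOS/nix
}
```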
diff --git a/src/libexpr/value-to-json.cc b/src/libexpr/value-to-json.cc
index bfea24d40..517da4c01 100644
--- a/src/libexpr/value-to-json.cc
+++ b/src/libexpr/value-to-json.cc
@@ -10,11 +10,11 @@
namespace nix {
void printValueAsJSON(EvalState & state, bool strict,
- Value & v, JSONPlaceholder & out, PathSet & context)
+ Value & v, const Pos & pos, JSONPlaceholder & out, PathSet & context)
{
checkInterrupt();
- if (strict) state.forceValue(v);
+ if (strict) state.forceValue(v, pos);
switch (v.type()) {
@@ -40,7 +40,7 @@ void printValueAsJSON(EvalState & state, bool strict,
break;
case nAttrs: {
- auto maybeString = state.tryAttrsToString(noPos, v, context, false, false);
+ auto maybeString = state.tryAttrsToString(pos, v, context, false, false);
if (maybeString) {
out.write(*maybeString);
break;
@@ -54,18 +54,18 @@ void printValueAsJSON(EvalState & state, bool strict,
for (auto & j : names) {
Attr & a(*v.attrs->find(state.symbols.create(j)));
auto placeholder(obj.placeholder(j));
- printValueAsJSON(state, strict, *a.value, placeholder, context);
+ printValueAsJSON(state, strict, *a.value, *a.pos, placeholder, context);
}
} else
- printValueAsJSON(state, strict, *i->value, out, context);
+ printValueAsJSON(state, strict, *i->value, *i->pos, out, context);
break;
}
case nList: {
auto list(out.list());
- for (unsigned int n = 0; n < v.listSize(); ++n) {
+ for (auto elem : v.listItems()) {
auto placeholder(list.placeholder());
- printValueAsJSON(state, strict, *v.listElems()[n], placeholder, context);
+ printValueAsJSON(state, strict, *elem, pos, placeholder, context);
}
break;
}
@@ -79,18 +79,20 @@ void printValueAsJSON(EvalState & state, bool strict,
break;
case nThunk:
- throw TypeError("cannot convert %1% to JSON", showType(v));
-
case nFunction:
- throw TypeError("cannot convert %1% to JSON", showType(v));
+ auto e = TypeError({
+ .msg = hintfmt("cannot convert %1% to JSON", showType(v)),
+ .errPos = v.determinePos(pos)
+ });
+            throw e.addTrace(pos, hintfmt("while converting the value to JSON"));
}
}
void printValueAsJSON(EvalState & state, bool strict,
- Value & v, std::ostream & str, PathSet & context)
+ Value & v, const Pos & pos, std::ostream & str, PathSet & context)
{
JSONPlaceholder out(str);
- printValueAsJSON(state, strict, v, out, context);
+ printValueAsJSON(state, strict, v, pos, out, context);
}
void ExternalValueBase::printValueAsJSON(EvalState & state, bool strict,
diff --git a/src/libexpr/value-to-json.hh b/src/libexpr/value-to-json.hh
index 67fed6487..c2f797b29 100644
--- a/src/libexpr/value-to-json.hh
+++ b/src/libexpr/value-to-json.hh
@@ -11,9 +11,9 @@ namespace nix {
class JSONPlaceholder;
void printValueAsJSON(EvalState & state, bool strict,
- Value & v, JSONPlaceholder & out, PathSet & context);
+ Value & v, const Pos & pos, JSONPlaceholder & out, PathSet & context);
void printValueAsJSON(EvalState & state, bool strict,
- Value & v, std::ostream & str, PathSet & context);
+ Value & v, const Pos & pos, std::ostream & str, PathSet & context);
}
diff --git a/src/libexpr/value-to-xml.cc b/src/libexpr/value-to-xml.cc
index 7464455d8..a875f82d7 100644
--- a/src/libexpr/value-to-xml.cc
+++ b/src/libexpr/value-to-xml.cc
@@ -18,7 +18,8 @@ static XMLAttrs singletonAttrs(const string & name, const string & value)
static void printValueAsXML(EvalState & state, bool strict, bool location,
- Value & v, XMLWriter & doc, PathSet & context, PathSet & drvsSeen);
+ Value & v, XMLWriter & doc, PathSet & context, PathSet & drvsSeen,
+ const Pos & pos);
static void posToXML(XMLAttrs & xmlAttrs, const Pos & pos)
@@ -42,21 +43,22 @@ static void showAttrs(EvalState & state, bool strict, bool location,
XMLAttrs xmlAttrs;
xmlAttrs["name"] = i;
- if (location && a.pos != &noPos) posToXML(xmlAttrs, *a.pos);
+ if (location && a.pos != ptr(&noPos)) posToXML(xmlAttrs, *a.pos);
XMLOpenElement _(doc, "attr", xmlAttrs);
printValueAsXML(state, strict, location,
- *a.value, doc, context, drvsSeen);
+ *a.value, doc, context, drvsSeen, *a.pos);
}
}
static void printValueAsXML(EvalState & state, bool strict, bool location,
- Value & v, XMLWriter & doc, PathSet & context, PathSet & drvsSeen)
+ Value & v, XMLWriter & doc, PathSet & context, PathSet & drvsSeen,
+ const Pos & pos)
{
checkInterrupt();
- if (strict) state.forceValue(v);
+ if (strict) state.forceValue(v, pos);
switch (v.type()) {
@@ -91,14 +93,14 @@ static void printValueAsXML(EvalState & state, bool strict, bool location,
Path drvPath;
a = v.attrs->find(state.sDrvPath);
if (a != v.attrs->end()) {
- if (strict) state.forceValue(*a->value);
+ if (strict) state.forceValue(*a->value, *a->pos);
if (a->value->type() == nString)
xmlAttrs["drvPath"] = drvPath = a->value->string.s;
}
a = v.attrs->find(state.sOutPath);
if (a != v.attrs->end()) {
- if (strict) state.forceValue(*a->value);
+ if (strict) state.forceValue(*a->value, *a->pos);
if (a->value->type() == nString)
xmlAttrs["outPath"] = a->value->string.s;
}
@@ -120,8 +122,8 @@ static void printValueAsXML(EvalState & state, bool strict, bool location,
case nList: {
XMLOpenElement _(doc, "list");
- for (unsigned int n = 0; n < v.listSize(); ++n)
- printValueAsXML(state, strict, location, *v.listElems()[n], doc, context, drvsSeen);
+ for (auto v2 : v.listItems())
+ printValueAsXML(state, strict, location, *v2, doc, context, drvsSeen, pos);
break;
}
@@ -135,7 +137,7 @@ static void printValueAsXML(EvalState & state, bool strict, bool location,
if (location) posToXML(xmlAttrs, v.lambda.fun->pos);
XMLOpenElement _(doc, "function", xmlAttrs);
- if (v.lambda.fun->matchAttrs) {
+ if (v.lambda.fun->hasFormals()) {
XMLAttrs attrs;
if (!v.lambda.fun->arg.empty()) attrs["name"] = v.lambda.fun->arg;
if (v.lambda.fun->formals->ellipsis) attrs["ellipsis"] = "1";
@@ -149,7 +151,7 @@ static void printValueAsXML(EvalState & state, bool strict, bool location,
}
case nExternal:
- v.external->printValueAsXML(state, strict, location, doc, context, drvsSeen);
+ v.external->printValueAsXML(state, strict, location, doc, context, drvsSeen, pos);
break;
case nFloat:
@@ -163,19 +165,20 @@ static void printValueAsXML(EvalState & state, bool strict, bool location,
void ExternalValueBase::printValueAsXML(EvalState & state, bool strict,
- bool location, XMLWriter & doc, PathSet & context, PathSet & drvsSeen) const
+ bool location, XMLWriter & doc, PathSet & context, PathSet & drvsSeen,
+ const Pos & pos) const
{
doc.writeEmptyElement("unevaluated");
}
void printValueAsXML(EvalState & state, bool strict, bool location,
- Value & v, std::ostream & out, PathSet & context)
+ Value & v, std::ostream & out, PathSet & context, const Pos & pos)
{
XMLWriter doc(true, out);
XMLOpenElement root(doc, "expr");
PathSet drvsSeen;
- printValueAsXML(state, strict, location, v, doc, context, drvsSeen);
+ printValueAsXML(state, strict, location, v, doc, context, drvsSeen, pos);
}
diff --git a/src/libexpr/value-to-xml.hh b/src/libexpr/value-to-xml.hh
index 97657327e..cc778a2cb 100644
--- a/src/libexpr/value-to-xml.hh
+++ b/src/libexpr/value-to-xml.hh
@@ -9,6 +9,6 @@
namespace nix {
void printValueAsXML(EvalState & state, bool strict, bool location,
- Value & v, std::ostream & out, PathSet & context);
-
+ Value & v, std::ostream & out, PathSet & context, const Pos & pos);
+
}
diff --git a/src/libexpr/value.hh b/src/libexpr/value.hh
index a1f131f9e..6b4f3c0ae 100644
--- a/src/libexpr/value.hh
+++ b/src/libexpr/value.hh
@@ -1,5 +1,7 @@
#pragma once
+#include <cassert>
+
#include "symbol-table.hh"
#if HAVE_BOEHMGC
@@ -94,7 +96,8 @@ class ExternalValueBase
/* Print the value as XML. Defaults to unevaluated */
virtual void printValueAsXML(EvalState & state, bool strict, bool location,
- XMLWriter & doc, PathSet & context, PathSet & drvsSeen) const;
+ XMLWriter & doc, PathSet & context, PathSet & drvsSeen,
+ const Pos & pos) const;
virtual ~ExternalValueBase()
{
@@ -349,6 +352,34 @@ public:
bool isTrivial() const;
std::vector<std::pair<Path, std::string>> getContext();
+
+ auto listItems()
+ {
+ struct ListIterable
+ {
+ typedef Value * const * iterator;
+ iterator _begin, _end;
+ iterator begin() const { return _begin; }
+ iterator end() const { return _end; }
+ };
+ assert(isList());
+ auto begin = listElems();
+ return ListIterable { begin, begin + listSize() };
+ }
+
+ auto listItems() const
+ {
+ struct ConstListIterable
+ {
+ typedef const Value * const * iterator;
+ iterator _begin, _end;
+ iterator begin() const { return _begin; }
+ iterator end() const { return _end; }
+ };
+ assert(isList());
+ auto begin = listElems();
+ return ConstListIterable { begin, begin + listSize() };
+ }
};
@@ -394,9 +425,11 @@ void mkPath(Value & v, const char * s);
#if HAVE_BOEHMGC
typedef std::vector<Value *, traceable_allocator<Value *> > ValueVector;
typedef std::map<Symbol, Value *, std::less<Symbol>, traceable_allocator<std::pair<const Symbol, Value *> > > ValueMap;
+typedef std::map<Symbol, ValueVector, std::less<Symbol>, traceable_allocator<std::pair<const Symbol, ValueVector> > > ValueVectorMap;
#else
typedef std::vector<Value *> ValueVector;
typedef std::map<Symbol, Value *> ValueMap;
+typedef std::map<Symbol, ValueVector> ValueVectorMap;
#endif
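The new `Value::listItems()` helper above is what lets the index-based loops elsewhere in this diff become range-`for` loops. Below is a minimal standalone sketch of the same iterable-wrapper idiom, using hypothetical stand-in types rather than the real `Value`:

```
// Minimal sketch of the iterable-wrapper idiom behind Value::listItems().
#include <iostream>

struct Item { int n; };

struct ListIterable
{
    typedef Item * const * iterator;
    iterator _begin, _end;
    iterator begin() const { return _begin; }
    iterator end() const { return _end; }
};

int main()
{
    Item a{1}, b{2}, c{3};
    Item * elems[] = { &a, &b, &c };          // stands in for Value::listElems()
    ListIterable items { elems, elems + 3 };  // stands in for Value::listItems()
    for (auto elem : items)                   // elem is an Item *
        std::cout << elem->n << "\n";
}
```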
diff --git a/src/libfetchers/fetchers.hh b/src/libfetchers/fetchers.hh
index c839cf23b..c43b047a7 100644
--- a/src/libfetchers/fetchers.hh
+++ b/src/libfetchers/fetchers.hh
@@ -38,6 +38,9 @@ struct Input
bool immutable = false;
bool direct = true;
+ /* path of the parent of this input, used for relative path resolution */
+ std::optional<Path> parent;
+
public:
static Input fromURL(const std::string & url);
diff --git a/src/libfetchers/git.cc b/src/libfetchers/git.cc
index d89763ccd..544d2ffbf 100644
--- a/src/libfetchers/git.cc
+++ b/src/libfetchers/git.cc
@@ -13,6 +13,12 @@ using namespace std::string_literals;
namespace nix::fetchers {
+// Explicit initial branch of our bare repo to suppress warnings from newer versions of git.
+// The value itself does not matter, since we always fetch a specific revision or branch.
+// It is set with `-c init.defaultBranch=` instead of `--initial-branch=` to stay compatible with
+// older versions of git, which ignore unrecognized `-c` options.
+const std::string gitInitialBranch = "__nix_dummy_branch";
+
static std::string readHead(const Path & path)
{
return chomp(runProgram("git", true, { "-C", path, "rev-parse", "--abbrev-ref", "HEAD" }));
@@ -45,7 +51,7 @@ struct GitInputScheme : InputScheme
for (auto &[name, value] : url.query) {
if (name == "rev" || name == "ref")
attrs.emplace(name, value);
- else if (name == "shallow")
+ else if (name == "shallow" || name == "submodules")
attrs.emplace(name, Explicit<bool> { value == "1" });
else
url2.query.emplace(name, value);
@@ -318,17 +324,13 @@ struct GitInputScheme : InputScheme
Path cacheDir = getCacheDir() + "/nix/gitv3/" + hashString(htSHA256, actualUrl).to_string(Base32, false);
repoDir = cacheDir;
- Path cacheDirLock = cacheDir + ".lock";
createDirs(dirOf(cacheDir));
- AutoCloseFD lock = openLockFile(cacheDirLock, true);
- lockFile(lock.get(), ltWrite, true);
+ PathLocks cacheDirLock({cacheDir + ".lock"});
if (!pathExists(cacheDir)) {
- runProgram("git", true, { "init", "--bare", repoDir });
+ runProgram("git", true, { "-c", "init.defaultBranch=" + gitInitialBranch, "init", "--bare", repoDir });
}
- deleteLockFile(cacheDirLock, lock.get());
-
Path localRefFile =
input.getRef()->compare(0, 5, "refs/") == 0
? cacheDir + "/" + *input.getRef()
@@ -393,6 +395,8 @@ struct GitInputScheme : InputScheme
if (!input.getRev())
input.attrs.insert_or_assign("rev", Hash::parseAny(chomp(readFile(localRefFile)), htSHA1).gitRev());
+
+ // cache dir lock is removed at scope end; we will only use read-only operations on specific revisions in the remainder
}
bool isShallow = chomp(runProgram("git", true, { "-C", repoDir, "rev-parse", "--is-shallow-repository" })) == "true";
@@ -413,17 +417,14 @@ struct GitInputScheme : InputScheme
AutoDelete delTmpDir(tmpDir, true);
PathFilter filter = defaultPathFilter;
- RunOptions checkCommitOpts(
- "git",
- { "-C", repoDir, "cat-file", "commit", input.getRev()->gitRev() }
- );
- checkCommitOpts.searchPath = true;
- checkCommitOpts.mergeStderrToStdout = true;
-
- auto result = runProgram(checkCommitOpts);
+ auto result = runProgram(RunOptions {
+ .program = "git",
+ .args = { "-C", repoDir, "cat-file", "commit", input.getRev()->gitRev() },
+ .mergeStderrToStdout = true
+ });
if (WEXITSTATUS(result.first) == 128
- && result.second.find("bad file") != std::string::npos
- ) {
+ && result.second.find("bad file") != std::string::npos)
+ {
throw Error(
"Cannot find Git revision '%s' in ref '%s' of repository '%s'! "
"Please make sure that the " ANSI_BOLD "rev" ANSI_NORMAL " exists on the "
@@ -439,7 +440,7 @@ struct GitInputScheme : InputScheme
Path tmpGitDir = createTempDir();
AutoDelete delTmpGitDir(tmpGitDir, true);
- runProgram("git", true, { "init", tmpDir, "--separate-git-dir", tmpGitDir });
+ runProgram("git", true, { "-c", "init.defaultBranch=" + gitInitialBranch, "init", tmpDir, "--separate-git-dir", tmpGitDir });
// TODO: repoDir might lack the ref (it only checks if rev
// exists, see FIXME above) so use a big hammer and fetch
// everything to ensure we get the rev.
@@ -455,9 +456,11 @@ struct GitInputScheme : InputScheme
// FIXME: should pipe this, or find some better way to extract a
// revision.
auto source = sinkToSource([&](Sink & sink) {
- RunOptions gitOptions("git", { "-C", repoDir, "archive", input.getRev()->gitRev() });
- gitOptions.standardOut = &sink;
- runProgram2(gitOptions);
+ runProgram2({
+ .program = "git",
+ .args = { "-C", repoDir, "archive", input.getRev()->gitRev() },
+ .standardOut = &sink
+ });
});
unpackTarfile(*source, tmpDir);
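The `RunOptions` changes above (and the mercurial.cc ones below) move callers from mutating an options object to aggregate initialization with designated initializers. A self-contained mock, with a stand-in `MockRunOptions` struct rather than the real libutil type, to show the call-site style:

```
// Standalone sketch of the designated-initializer call style used above.
// MockRunOptions and mockRunProgram are stand-ins, not the real libutil API.
#include <iostream>
#include <string>
#include <vector>

struct MockRunOptions
{
    std::string program;
    bool searchPath = true;
    std::vector<std::string> args;
    bool mergeStderrToStdout = false;
};

static void mockRunProgram(const MockRunOptions & opts)
{
    std::cout << opts.program;
    for (auto & a : opts.args) std::cout << ' ' << a;
    std::cout << '\n';
}

int main()
{
    // All options are named at the call site; unset fields keep their defaults.
    mockRunProgram({
        .program = "git",
        .args = { "-C", "/tmp/repo", "cat-file", "commit", "HEAD" },
        .mergeStderrToStdout = true
    });
}
```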
diff --git a/src/libfetchers/github.cc b/src/libfetchers/github.cc
index 298c05f9a..1c539b80e 100644
--- a/src/libfetchers/github.cc
+++ b/src/libfetchers/github.cc
@@ -273,9 +273,9 @@ struct GitHubInputScheme : GitArchiveInputScheme
void clone(const Input & input, const Path & destDir) override
{
auto host = maybeGetStrAttr(input.attrs, "host").value_or("github.com");
- Input::fromURL(fmt("git+ssh://git@%s/%s/%s.git",
+ Input::fromURL(fmt("git+https://%s/%s/%s.git",
host, getStrAttr(input.attrs, "owner"), getStrAttr(input.attrs, "repo")))
- .applyOverrides(input.getRef().value_or("HEAD"), input.getRev())
+ .applyOverrides(input.getRef(), input.getRev())
.clone(destDir);
}
};
@@ -300,7 +300,7 @@ struct GitLabInputScheme : GitArchiveInputScheme
if ("PAT" == token.substr(0, fldsplit))
return std::make_pair("Private-token", token.substr(fldsplit+1));
warn("Unrecognized GitLab token type %s", token.substr(0, fldsplit));
- return std::nullopt;
+ return std::make_pair(token.substr(0,fldsplit), token.substr(fldsplit+1));
}
Hash getRevFromRef(nix::ref<Store> store, const Input & input) const override
@@ -341,9 +341,9 @@ struct GitLabInputScheme : GitArchiveInputScheme
{
auto host = maybeGetStrAttr(input.attrs, "host").value_or("gitlab.com");
// FIXME: get username somewhere
- Input::fromURL(fmt("git+ssh://git@%s/%s/%s.git",
+ Input::fromURL(fmt("git+https://%s/%s/%s.git",
host, getStrAttr(input.attrs, "owner"), getStrAttr(input.attrs, "repo")))
- .applyOverrides(input.getRef().value_or("HEAD"), input.getRev())
+ .applyOverrides(input.getRef(), input.getRev())
.clone(destDir);
}
};
diff --git a/src/libfetchers/local.mk b/src/libfetchers/local.mk
index bbef03afe..2e8869d83 100644
--- a/src/libfetchers/local.mk
+++ b/src/libfetchers/local.mk
@@ -8,6 +8,6 @@ libfetchers_SOURCES := $(wildcard $(d)/*.cc)
libfetchers_CXXFLAGS += -I src/libutil -I src/libstore
-libfetchers_LDFLAGS = -pthread
+libfetchers_LDFLAGS += -pthread
libfetchers_LIBS = libutil libstore
diff --git a/src/libfetchers/mercurial.cc b/src/libfetchers/mercurial.cc
index efb4ee8db..d52d4641b 100644
--- a/src/libfetchers/mercurial.cc
+++ b/src/libfetchers/mercurial.cc
@@ -11,34 +11,32 @@ using namespace std::string_literals;
namespace nix::fetchers {
-namespace {
-
-RunOptions hgOptions(const Strings & args) {
- RunOptions opts("hg", args);
- opts.searchPath = true;
-
- auto env = getEnv();
- // Set HGPLAIN: this means we get consistent output from hg and avoids leakage from a user or system .hgrc.
- env["HGPLAIN"] = "";
- opts.environment = env;
-
- return opts;
+static RunOptions hgOptions(const Strings & args)
+{
+ auto env = getEnv();
+    // Set HGPLAIN: this gives us consistent output from hg and avoids leakage from a user or system .hgrc.
+ env["HGPLAIN"] = "";
+
+ return {
+ .program = "hg",
+ .searchPath = true,
+ .args = args,
+ .environment = env
+ };
}
// runProgram wrapper that uses hgOptions instead of stock RunOptions.
-string runHg(const Strings & args, const std::optional<std::string> & input = {})
+static string runHg(const Strings & args, const std::optional<std::string> & input = {})
{
- RunOptions opts = hgOptions(args);
- opts.input = input;
-
- auto res = runProgram(opts);
+ RunOptions opts = hgOptions(args);
+ opts.input = input;
- if (!statusOk(res.first))
- throw ExecError(res.first, fmt("hg %1%", statusToString(res.first)));
+ auto res = runProgram(std::move(opts));
- return res.second;
-}
+ if (!statusOk(res.first))
+ throw ExecError(res.first, fmt("hg %1%", statusToString(res.first)));
+ return res.second;
}
struct MercurialInputScheme : InputScheme
@@ -253,9 +251,7 @@ struct MercurialInputScheme : InputScheme
have to pull again. */
if (!(input.getRev()
&& pathExists(cacheDir)
- && runProgram(
- hgOptions({ "log", "-R", cacheDir, "-r", input.getRev()->gitRev(), "--template", "1" })
- .killStderr(true)).second == "1"))
+ && runProgram(hgOptions({ "log", "-R", cacheDir, "-r", input.getRev()->gitRev(), "--template", "1" })).second == "1"))
{
Activity act(*logger, lvlTalkative, actUnknown, fmt("fetching Mercurial repository '%s'", actualUrl));
diff --git a/src/libfetchers/path.cc b/src/libfetchers/path.cc
index d1003de57..07e543c53 100644
--- a/src/libfetchers/path.cc
+++ b/src/libfetchers/path.cc
@@ -82,18 +82,38 @@ struct PathInputScheme : InputScheme
std::pair<Tree, Input> fetch(ref<Store> store, const Input & input) override
{
+ std::string absPath;
auto path = getStrAttr(input.attrs, "path");
- // FIXME: check whether access to 'path' is allowed.
+ if (path[0] != '/') {
+ if (!input.parent)
+ throw Error("cannot fetch input '%s' because it uses a relative path", input.to_string());
+
+ auto parent = canonPath(*input.parent);
- auto storePath = store->maybeParseStorePath(path);
+            // the path is relative; make it absolute by prefixing the parent
+ absPath = nix::absPath(path, parent);
+
+            // for security, if the parent is a store path, ensure that the resolved path stays inside it
+ if (store->isInStore(parent)) {
+ auto storePath = store->printStorePath(store->toStorePath(parent).first);
+ if (!isDirOrInDir(absPath, storePath))
+ throw BadStorePath("relative path '%s' points outside of its parent's store path '%s'", path, storePath);
+ }
+ } else
+ absPath = path;
+
+ Activity act(*logger, lvlTalkative, actUnknown, fmt("copying '%s'", absPath));
+
+ // FIXME: check whether access to 'path' is allowed.
+ auto storePath = store->maybeParseStorePath(absPath);
if (storePath)
store->addTempRoot(*storePath);
if (!storePath || storePath->name() != "source" || !store->isValidPath(*storePath))
// FIXME: try to substitute storePath.
- storePath = store->addToStore("source", path);
+ storePath = store->addToStore("source", absPath);
return {
Tree(store->toRealPath(*storePath), std::move(*storePath)),
diff --git a/src/libmain/local.mk b/src/libmain/local.mk
index a8eed6c65..99da95e27 100644
--- a/src/libmain/local.mk
+++ b/src/libmain/local.mk
@@ -8,10 +8,10 @@ libmain_SOURCES := $(wildcard $(d)/*.cc)
libmain_CXXFLAGS += -I src/libutil -I src/libstore
-libmain_LDFLAGS = $(OPENSSL_LIBS)
+libmain_LDFLAGS += $(OPENSSL_LIBS)
libmain_LIBS = libstore libutil
libmain_ALLOW_UNDEFINED = 1
-$(eval $(call install-file-in, $(d)/nix-main.pc, $(prefix)/lib/pkgconfig, 0644))
+$(eval $(call install-file-in, $(d)/nix-main.pc, $(libdir)/pkgconfig, 0644))
diff --git a/src/libmain/progress-bar.cc b/src/libmain/progress-bar.cc
index b2a6e2a82..f4306ab91 100644
--- a/src/libmain/progress-bar.cc
+++ b/src/libmain/progress-bar.cc
@@ -11,7 +11,7 @@
namespace nix {
-static std::string getS(const std::vector<Logger::Field> & fields, size_t n)
+static std::string_view getS(const std::vector<Logger::Field> & fields, size_t n)
{
assert(n < fields.size());
assert(fields[n].type == Logger::Field::tString);
@@ -103,17 +103,19 @@ public:
~ProgressBar()
{
stop();
- updateThread.join();
}
void stop() override
{
- auto state(state_.lock());
- if (!state->active) return;
- state->active = false;
- writeToStderr("\r\e[K");
- updateCV.notify_one();
- quitCV.notify_one();
+ {
+ auto state(state_.lock());
+ if (!state->active) return;
+ state->active = false;
+ writeToStderr("\r\e[K");
+ updateCV.notify_one();
+ quitCV.notify_one();
+ }
+ updateThread.join();
}
bool isVerbose() override {
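The progress-bar change above moves `updateThread.join()` from the destructor into `stop()`, after the scope that holds the state lock. A standalone sketch of that shutdown pattern (the `MockBar` type is illustrative, not the real `ProgressBar`):

```
// Sketch: signal the worker while holding the lock, join only after releasing it,
// so the worker can acquire the lock and exit before we join.
#include <condition_variable>
#include <mutex>
#include <thread>

struct MockBar
{
    std::mutex m;
    std::condition_variable quitCV;
    bool active = true;
    std::thread updateThread;

    MockBar() : updateThread([this] {
        std::unique_lock<std::mutex> lk(m);
        quitCV.wait(lk, [this] { return !active; });  // worker needs the lock to observe 'active'
    }) {}

    void stop()
    {
        {
            std::lock_guard<std::mutex> lk(m);
            if (!active) return;
            active = false;
            quitCV.notify_one();
        }                      // lock released here...
        updateThread.join();   // ...so joining cannot deadlock against the worker
    }

    ~MockBar() { stop(); }
};

int main() { MockBar bar; bar.stop(); }
```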
diff --git a/src/libmain/shared.cc b/src/libmain/shared.cc
index 3a9529c4d..4404e0195 100644
--- a/src/libmain/shared.cc
+++ b/src/libmain/shared.cc
@@ -15,6 +15,14 @@
#include <sys/stat.h>
#include <unistd.h>
#include <signal.h>
+#ifdef __linux__
+#include <features.h>
+#endif
+#ifdef __GLIBC__
+#include <gnu/lib-names.h>
+#include <nss.h>
+#include <dlfcn.h>
+#endif
#include <openssl/crypto.h>
@@ -110,6 +118,40 @@ static void opensslLockCallback(int mode, int type, const char * file, int line)
}
#endif
+static std::once_flag dns_resolve_flag;
+
+static void preloadNSS() {
+    /* builtin:fetchurl can trigger a DNS lookup, which with glibc can trigger a dynamic library load of
+       one of the glibc NSS libraries in a sandboxed child, which will fail unless the library's already
+       been loaded in the parent. So we preload the NSS machinery in the parent before any child gets a
+       chance to. */
+ std::call_once(dns_resolve_flag, []() {
+#ifdef __GLIBC__
+        /* On Linux, glibc runs every lookup through the NSS layer.
+         * That means every lookup goes, by default, through nscd, which acts as a local
+         * cache.
+         * Because we run builds in a sandbox, we also remove access to nscd; otherwise
+         * lookups would leak into the sandbox.
+         *
+         * But now we have a new problem: we need to make sure that the nss_dns backend,
+         * which does the DNS lookups when nscd is not available, is loaded.
+         *
+         * We can't make it available without leaking Nix's environment, so instead we
+         * load the backend here and configure NSS so that it does not try to run DNS
+         * lookups through nscd.
+         *
+         * This is technically only needed for the builtin:fetch* functions, so we only
+         * care about DNS.
+         *
+         * All other platforms are unaffected.
+         */
+ if (dlopen (LIBNSS_DNS_SO, RTLD_NOW) == NULL) {
+ printMsg(Verbosity::lvlWarn, fmt("Unable to load nss_dns backend"));
+ }
+ __nss_configure_lookup ("hosts", "dns");
+#endif
+ });
+}
static void sigHandler(int signo) { }
@@ -176,6 +218,8 @@ void initNix()
if (hasPrefix(getEnv("TMPDIR").value_or("/tmp"), "/var/folders/"))
unsetenv("TMPDIR");
#endif
+
+ preloadNSS();
}
@@ -383,7 +427,7 @@ RunPager::RunPager()
});
pid.setKillSignal(SIGINT);
-
+ stdout = fcntl(STDOUT_FILENO, F_DUPFD_CLOEXEC, 0);
if (dup2(toPager.writeSide.get(), STDOUT_FILENO) == -1)
throw SysError("dupping stdout");
}
@@ -394,7 +438,7 @@ RunPager::~RunPager()
try {
if (pid != -1) {
std::cout.flush();
- close(STDOUT_FILENO);
+ dup2(stdout, STDOUT_FILENO);
pid.wait();
}
} catch (...) {
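The `RunPager` hunks above save the original stdout with `F_DUPFD_CLOEXEC` and restore it with `dup2` instead of closing it. A small standalone illustration of the same save/redirect/restore sequence, using `/dev/null` as a stand-in for the pager pipe:

```
// Standalone sketch of the save/redirect/restore-stdout pattern used by RunPager.
#include <fcntl.h>
#include <unistd.h>
#include <cstdio>

int main()
{
    int savedStdout = fcntl(STDOUT_FILENO, F_DUPFD_CLOEXEC, 0);  // keep the original stream
    int devNull = open("/dev/null", O_WRONLY);                   // stands in for the pager's pipe
    dup2(devNull, STDOUT_FILENO);
    close(devNull);

    printf("this goes to the stand-in pager\n");
    fflush(stdout);

    dup2(savedStdout, STDOUT_FILENO);   // restore rather than close(STDOUT_FILENO)
    close(savedStdout);
    printf("stdout works again after the pager is gone\n");
    return 0;
}
```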
diff --git a/src/libmain/shared.hh b/src/libmain/shared.hh
index 05277d90a..ed012959b 100644
--- a/src/libmain/shared.hh
+++ b/src/libmain/shared.hh
@@ -88,6 +88,7 @@ public:
private:
Pid pid;
+ int stdout;
};
extern volatile ::sig_atomic_t blockInt;
diff --git a/src/libstore/binary-cache-store.cc b/src/libstore/binary-cache-store.cc
index 74eb0a9ab..13c086a46 100644
--- a/src/libstore/binary-cache-store.cc
+++ b/src/libstore/binary-cache-store.cc
@@ -52,9 +52,9 @@ void BinaryCacheStore::init()
throw Error("binary cache '%s' is for Nix stores with prefix '%s', not '%s'",
getUri(), value, storeDir);
} else if (name == "WantMassQuery") {
- wantMassQuery.setDefault(value == "1" ? "true" : "false");
+ wantMassQuery.setDefault(value == "1");
} else if (name == "Priority") {
- priority.setDefault(fmt("%d", std::stoi(value)));
+ priority.setDefault(std::stoi(value));
}
}
}
@@ -111,15 +111,15 @@ void BinaryCacheStore::writeNarInfo(ref<NarInfo> narInfo)
upsertFile(narInfoFile, narInfo->to_string(*this), "text/x-nix-narinfo");
- std::string hashPart(narInfo->path.hashPart());
-
{
auto state_(state.lock());
- state_->pathInfoCache.upsert(hashPart, PathInfoCacheValue { .value = std::shared_ptr<NarInfo>(narInfo) });
+ state_->pathInfoCache.upsert(
+ std::string(narInfo->path.to_string()),
+ PathInfoCacheValue { .value = std::shared_ptr<NarInfo>(narInfo) });
}
if (diskCache)
- diskCache->upsertNarInfo(getUri(), hashPart, std::shared_ptr<NarInfo>(narInfo));
+ diskCache->upsertNarInfo(getUri(), std::string(narInfo->path.hashPart()), std::shared_ptr<NarInfo>(narInfo));
}
AutoCloseFD openFile(const Path & path)
@@ -149,7 +149,7 @@ ref<const ValidPathInfo> BinaryCacheStore::addToStoreCommon(
{
FdSink fileSink(fdTemp.get());
TeeSink teeSinkCompressed { fileSink, fileHashSink };
- auto compressionSink = makeCompressionSink(compression, teeSinkCompressed);
+ auto compressionSink = makeCompressionSink(compression, teeSinkCompressed, parallelCompression, compressionLevel);
TeeSink teeSinkUncompressed { *compressionSink, narHashSink };
TeeSource teeSource { narSource, teeSinkUncompressed };
narAccessor = makeNarAccessor(teeSource);
@@ -308,16 +308,17 @@ void BinaryCacheStore::addToStore(const ValidPathInfo & info, Source & narSource
}
StorePath BinaryCacheStore::addToStoreFromDump(Source & dump, const string & name,
- FileIngestionMethod method, HashType hashAlgo, RepairFlag repair)
+ FileIngestionMethod method, HashType hashAlgo, RepairFlag repair, const StorePathSet & references)
{
if (method != FileIngestionMethod::Recursive || hashAlgo != htSHA256)
unsupported("addToStoreFromDump");
return addToStoreCommon(dump, repair, CheckSigs, [&](HashResult nar) {
ValidPathInfo info {
- makeFixedOutputPath(method, nar.first, name),
+ makeFixedOutputPath(method, nar.first, name, references),
nar.first,
};
info.narSize = nar.second;
+ info.references = references;
return info;
})->path;
}
@@ -385,7 +386,7 @@ void BinaryCacheStore::queryPathInfoUncached(const StorePath & storePath,
}
StorePath BinaryCacheStore::addToStore(const string & name, const Path & srcPath,
- FileIngestionMethod method, HashType hashAlgo, PathFilter & filter, RepairFlag repair)
+ FileIngestionMethod method, HashType hashAlgo, PathFilter & filter, RepairFlag repair, const StorePathSet & references)
{
/* FIXME: Make BinaryCacheStore::addToStoreCommon support
non-recursive+sha256 so we can just use the default
@@ -404,10 +405,11 @@ StorePath BinaryCacheStore::addToStore(const string & name, const Path & srcPath
});
return addToStoreCommon(*source, repair, CheckSigs, [&](HashResult nar) {
ValidPathInfo info {
- makeFixedOutputPath(method, h, name),
+ makeFixedOutputPath(method, h, name, references),
nar.first,
};
info.narSize = nar.second;
+ info.references = references;
info.ca = FixedOutputHash {
.method = method,
.hash = h,
@@ -437,40 +439,29 @@ StorePath BinaryCacheStore::addTextToStore(const string & name, const string & s
})->path;
}
-std::optional<const Realisation> BinaryCacheStore::queryRealisation(const DrvOutput & id)
+void BinaryCacheStore::queryRealisationUncached(const DrvOutput & id,
+ Callback<std::shared_ptr<const Realisation>> callback) noexcept
{
- if (diskCache) {
- auto [cacheOutcome, maybeCachedRealisation] =
- diskCache->lookupRealisation(getUri(), id);
- switch (cacheOutcome) {
- case NarInfoDiskCache::oValid:
- debug("Returning a cached realisation for %s", id.to_string());
- return *maybeCachedRealisation;
- case NarInfoDiskCache::oInvalid:
- debug("Returning a cached missing realisation for %s", id.to_string());
- return {};
- case NarInfoDiskCache::oUnknown:
- break;
- }
- }
-
auto outputInfoFilePath = realisationsPrefix + "/" + id.to_string() + ".doi";
- auto rawOutputInfo = getFile(outputInfoFilePath);
- if (rawOutputInfo) {
- auto realisation = Realisation::fromJSON(
- nlohmann::json::parse(*rawOutputInfo), outputInfoFilePath);
+ auto callbackPtr = std::make_shared<decltype(callback)>(std::move(callback));
- if (diskCache)
- diskCache->upsertRealisation(
- getUri(), realisation);
+ Callback<std::shared_ptr<std::string>> newCallback = {
+ [=](std::future<std::shared_ptr<std::string>> fut) {
+ try {
+ auto data = fut.get();
+ if (!data) return (*callbackPtr)(nullptr);
- return {realisation};
- } else {
- if (diskCache)
- diskCache->upsertAbsentRealisation(getUri(), id);
- return std::nullopt;
- }
+ auto realisation = Realisation::fromJSON(
+ nlohmann::json::parse(*data), outputInfoFilePath);
+ return (*callbackPtr)(std::make_shared<const Realisation>(realisation));
+ } catch (...) {
+ callbackPtr->rethrow();
+ }
+ }
+ };
+
+ getFile(outputInfoFilePath, std::move(newCallback));
}
void BinaryCacheStore::registerDrvOutput(const Realisation& info) {
diff --git a/src/libstore/binary-cache-store.hh b/src/libstore/binary-cache-store.hh
index 657be2fcf..9815af591 100644
--- a/src/libstore/binary-cache-store.hh
+++ b/src/libstore/binary-cache-store.hh
@@ -15,13 +15,17 @@ struct BinaryCacheStoreConfig : virtual StoreConfig
{
using StoreConfig::StoreConfig;
- const Setting<std::string> compression{(StoreConfig*) this, "xz", "compression", "NAR compression method ('xz', 'bzip2', or 'none')"};
+ const Setting<std::string> compression{(StoreConfig*) this, "xz", "compression", "NAR compression method ('xz', 'bzip2', 'gzip', 'zstd', or 'none')"};
const Setting<bool> writeNARListing{(StoreConfig*) this, false, "write-nar-listing", "whether to write a JSON file listing the files in each NAR"};
const Setting<bool> writeDebugInfo{(StoreConfig*) this, false, "index-debug-info", "whether to index DWARF debug info files by build ID"};
const Setting<Path> secretKeyFile{(StoreConfig*) this, "", "secret-key", "path to secret key used to sign the binary cache"};
const Setting<Path> localNarCache{(StoreConfig*) this, "", "local-nar-cache", "path to a local cache of NARs"};
const Setting<bool> parallelCompression{(StoreConfig*) this, false, "parallel-compression",
- "enable multi-threading compression, available for xz only currently"};
+ "enable multi-threading compression for NARs, available for xz and zstd only currently"};
+ const Setting<int> compressionLevel{(StoreConfig*) this, -1, "compression-level",
+ "specify 'preset level' of compression to be used with NARs: "
+ "meaning and accepted range of values depends on compression method selected, "
+ "other than -1 which we reserve to indicate Nix defaults should be used"};
};
class BinaryCacheStore : public virtual BinaryCacheStoreConfig, public virtual Store
@@ -93,18 +97,19 @@ public:
RepairFlag repair, CheckSigsFlag checkSigs) override;
StorePath addToStoreFromDump(Source & dump, const string & name,
- FileIngestionMethod method, HashType hashAlgo, RepairFlag repair) override;
+ FileIngestionMethod method, HashType hashAlgo, RepairFlag repair, const StorePathSet & references ) override;
StorePath addToStore(const string & name, const Path & srcPath,
FileIngestionMethod method, HashType hashAlgo,
- PathFilter & filter, RepairFlag repair) override;
+ PathFilter & filter, RepairFlag repair, const StorePathSet & references) override;
StorePath addTextToStore(const string & name, const string & s,
const StorePathSet & references, RepairFlag repair) override;
void registerDrvOutput(const Realisation & info) override;
- std::optional<const Realisation> queryRealisation(const DrvOutput &) override;
+ void queryRealisationUncached(const DrvOutput &,
+ Callback<std::shared_ptr<const Realisation>> callback) noexcept override;
void narFromPath(const StorePath & path, Sink & sink) override;
diff --git a/src/libstore/build/derivation-goal.cc b/src/libstore/build/derivation-goal.cc
index 876b8def0..60945403e 100644
--- a/src/libstore/build/derivation-goal.cc
+++ b/src/libstore/build/derivation-goal.cc
@@ -204,7 +204,7 @@ void DerivationGoal::haveDerivation()
trace("have derivation");
if (drv->type() == DerivationType::CAFloating)
- settings.requireExperimentalFeature("ca-derivations");
+ settings.requireExperimentalFeature(Xp::CaDerivations);
retrySubstitution = false;
@@ -453,7 +453,7 @@ void DerivationGoal::inputsRealised()
if (useDerivation) {
auto & fullDrv = *dynamic_cast<Derivation *>(drv.get());
- if (settings.isExperimentalFeatureEnabled("ca-derivations") &&
+ if (settings.isExperimentalFeatureEnabled(Xp::CaDerivations) &&
((!fullDrv.inputDrvs.empty() && derivationIsCA(fullDrv.type()))
|| fullDrv.type() == DerivationType::DeferredInputAddressed)) {
/* We are be able to resolve this derivation based on the
@@ -566,7 +566,7 @@ void DerivationGoal::tryToBuild()
lockFiles.insert(worker.store.Store::toRealPath(*i.second.second));
else
lockFiles.insert(
- worker.store.Store::toRealPath(drvPath) + "!" + i.first
+ worker.store.Store::toRealPath(drvPath) + "." + i.first
);
}
}
@@ -616,7 +616,9 @@ void DerivationGoal::tryToBuild()
/* Don't do a remote build if the derivation has the attribute
`preferLocalBuild' set. Also, check and repair modes are only
supported for local builds. */
- bool buildLocally = buildMode != bmNormal || parsedDrv->willBuildLocally(worker.store);
+ bool buildLocally =
+ (buildMode != bmNormal || parsedDrv->willBuildLocally(worker.store))
+ && settings.maxBuildJobs.get() != 0;
if (!buildLocally) {
switch (tryBuildHook()) {
@@ -653,7 +655,7 @@ void DerivationGoal::tryLocalBuild() {
throw Error(
"unable to build with a primary store that isn't a local store; "
"either pass a different '--store' or enable remote builds."
- "\nhttps://nixos.org/nix/manual/#chap-distributed-builds");
+ "\nhttps://nixos.org/manual/nix/stable/advanced-topics/distributed-builds.html");
}
@@ -774,9 +776,6 @@ void runPostBuildHook(
hookEnvironment.emplace("OUT_PATHS", chomp(concatStringsSep(" ", store.printStorePathSet(outputPaths))));
hookEnvironment.emplace("NIX_CONFIG", globalConfig.toKeyValue());
- RunOptions opts(settings.postBuildHook, {});
- opts.environment = hookEnvironment;
-
struct LogSink : Sink {
Activity & act;
std::string currentLine;
@@ -807,9 +806,12 @@ void runPostBuildHook(
};
LogSink sink(act);
- opts.standardOut = &sink;
- opts.mergeStderrToStdout = true;
- runProgram2(opts);
+ runProgram2({
+ .program = settings.postBuildHook,
+ .environment = hookEnvironment,
+ .standardOut = &sink,
+ .mergeStderrToStdout = true,
+ });
}
void DerivationGoal::buildDone()
@@ -1009,7 +1011,7 @@ HookReply DerivationGoal::tryBuildHook()
return readLine(worker.hook->fromHook.readSide.get());
} catch (Error & e) {
e.addTrace({}, "while reading the response from the build hook");
- throw e;
+ throw;
}
}();
if (handleJSONLogMessage(s, worker.act, worker.hook->activities, true))
@@ -1055,7 +1057,7 @@ HookReply DerivationGoal::tryBuildHook()
machineName = readLine(hook->fromHook.readSide.get());
} catch (Error & e) {
e.addTrace({}, "while reading the machine name from the build hook");
- throw e;
+ throw;
}
/* Tell the hook all the inputs that have to be copied to the
@@ -1273,7 +1275,7 @@ void DerivationGoal::checkPathValidity()
: PathStatus::Corrupt,
};
}
- if (settings.isExperimentalFeatureEnabled("ca-derivations")) {
+ if (settings.isExperimentalFeatureEnabled(Xp::CaDerivations)) {
auto drvOutput = DrvOutput{initialOutputs.at(i.first).outputHash, i.first};
if (auto real = worker.store.queryRealisation(drvOutput)) {
info.known = {
diff --git a/src/libstore/build/drv-output-substitution-goal.cc b/src/libstore/build/drv-output-substitution-goal.cc
index be270d079..b9602e696 100644
--- a/src/libstore/build/drv-output-substitution-goal.cc
+++ b/src/libstore/build/drv-output-substitution-goal.cc
@@ -1,6 +1,8 @@
#include "drv-output-substitution-goal.hh"
+#include "finally.hh"
#include "worker.hh"
#include "substitution-goal.hh"
+#include "callback.hh"
namespace nix {
@@ -50,14 +52,42 @@ void DrvOutputSubstitutionGoal::tryNext()
return;
}
- auto sub = subs.front();
+ sub = subs.front();
subs.pop_front();
// FIXME: Make async
- outputInfo = sub->queryRealisation(id);
+ // outputInfo = sub->queryRealisation(id);
+ outPipe.create();
+ promise = decltype(promise)();
+
+ sub->queryRealisation(
+ id, { [&](std::future<std::shared_ptr<const Realisation>> res) {
+ try {
+ Finally updateStats([this]() { outPipe.writeSide.close(); });
+ promise.set_value(res.get());
+ } catch (...) {
+ promise.set_exception(std::current_exception());
+ }
+ } });
+
+ worker.childStarted(shared_from_this(), {outPipe.readSide.get()}, true, false);
+
+ state = &DrvOutputSubstitutionGoal::realisationFetched;
+}
+
+void DrvOutputSubstitutionGoal::realisationFetched()
+{
+ worker.childTerminated(this);
+
+ try {
+ outputInfo = promise.get_future().get();
+ } catch (std::exception & e) {
+ printError(e.what());
+ substituterFailed = true;
+ }
+
if (!outputInfo) {
- tryNext();
- return;
+ return tryNext();
}
for (const auto & [depId, depPath] : outputInfo->dependentRealisations) {
@@ -119,4 +149,10 @@ void DrvOutputSubstitutionGoal::work()
(this->*state)();
}
+void DrvOutputSubstitutionGoal::handleEOF(int fd)
+{
+ if (fd == outPipe.readSide.get()) worker.wakeUp(shared_from_this());
+}
+
+
}
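The goal above now delivers the asynchronously fetched realisation through a `std::promise` and signals completion by closing the write side of a pipe that the worker loop watches. A stripped-down, standalone sketch of that handshake (plain POSIX pipe and `std::thread`, not the real worker machinery):

```
// Sketch of the pipe + promise handshake: the async callback stores its result
// in a promise and closes the pipe's write side; the event loop only watches
// the read side, and the goal reads the future once it is woken up.
#include <future>
#include <iostream>
#include <string>
#include <thread>
#include <unistd.h>

int main()
{
    int fds[2];
    if (pipe(fds) != 0) return 1;

    std::promise<std::string> promise;
    auto fut = promise.get_future();

    std::thread worker([&] {
        promise.set_value("realisation data");  // result of the async query
        close(fds[1]);                          // EOF on the read side wakes the event loop
    });

    char buf;
    while (read(fds[0], &buf, 1) > 0) ;         // stands in for the worker's poll loop
    std::cout << "got: " << fut.get() << "\n";

    worker.join();
    close(fds[0]);
    return 0;
}
```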
diff --git a/src/libstore/build/drv-output-substitution-goal.hh b/src/libstore/build/drv-output-substitution-goal.hh
index 63ab53d89..67ae2624a 100644
--- a/src/libstore/build/drv-output-substitution-goal.hh
+++ b/src/libstore/build/drv-output-substitution-goal.hh
@@ -3,6 +3,8 @@
#include "store-api.hh"
#include "goal.hh"
#include "realisation.hh"
+#include <thread>
+#include <future>
namespace nix {
@@ -20,11 +22,18 @@ private:
// The realisation corresponding to the given output id.
// Will be filled once we can get it.
- std::optional<Realisation> outputInfo;
+ std::shared_ptr<const Realisation> outputInfo;
/* The remaining substituters. */
std::list<ref<Store>> subs;
+ /* The current substituter. */
+ std::shared_ptr<Store> sub;
+
+ Pipe outPipe;
+ std::thread thr;
+ std::promise<std::shared_ptr<const Realisation>> promise;
+
/* Whether a substituter failed. */
bool substituterFailed = false;
@@ -36,6 +45,7 @@ public:
void init();
void tryNext();
+ void realisationFetched();
void outPathValid();
void finished();
@@ -44,7 +54,7 @@ public:
string key() override;
void work() override;
-
+ void handleEOF(int fd) override;
};
}
diff --git a/src/libstore/build/entry-points.cc b/src/libstore/build/entry-points.cc
index 96deb81d1..9b4cfd835 100644
--- a/src/libstore/build/entry-points.cc
+++ b/src/libstore/build/entry-points.cc
@@ -1,4 +1,3 @@
-#include "machines.hh"
#include "worker.hh"
#include "substitution-goal.hh"
#include "derivation-goal.hh"
@@ -11,12 +10,12 @@ void Store::buildPaths(const std::vector<DerivedPath> & reqs, BuildMode buildMod
Worker worker(*this, evalStore ? *evalStore : *this);
Goals goals;
- for (auto & br : reqs) {
+ for (const auto & br : reqs) {
std::visit(overloaded {
- [&](DerivedPath::Built bfd) {
+ [&](const DerivedPath::Built & bfd) {
goals.insert(worker.makeDerivationGoal(bfd.drvPath, bfd.outputs, buildMode));
},
- [&](DerivedPath::Opaque bo) {
+ [&](const DerivedPath::Opaque & bo) {
goals.insert(worker.makePathSubstitutionGoal(bo.path, buildMode == bmRepair ? Repair : NoRepair));
},
}, br.raw());
@@ -74,7 +73,7 @@ BuildResult Store::buildDerivation(const StorePath & drvPath, const BasicDerivat
outputId,
Realisation{ outputId, *staticOutput.second}
);
- if (settings.isExperimentalFeatureEnabled("ca-derivations") && !derivationHasKnownOutputPaths(drv.type())) {
+ if (settings.isExperimentalFeatureEnabled(Xp::CaDerivations) && !derivationHasKnownOutputPaths(drv.type())) {
auto realisation = this->queryRealisation(outputId);
if (realisation)
result.builtOutputs.insert_or_assign(
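The entry-points.cc hunk above changes the `std::visit`/`overloaded` lambdas to take their alternatives by const reference rather than by value. A standalone sketch of the pattern, with hypothetical `Built`/`Opaque` stand-ins for `DerivedPath`:

```
// Standalone sketch of the std::visit + overloaded pattern, with the lambdas
// taking their variant alternatives by const reference.
#include <iostream>
#include <string>
#include <variant>

template<class... Ts> struct overloaded : Ts... { using Ts::operator()...; };
template<class... Ts> overloaded(Ts...) -> overloaded<Ts...>;

struct Built  { std::string drvPath; };
struct Opaque { std::string path; };
using DerivedPathLike = std::variant<Built, Opaque>;

int main()
{
    DerivedPathLike req = Built{"example.drv"};  // hypothetical value
    std::visit(overloaded {
        [](const Built & bfd)  { std::cout << "build "      << bfd.drvPath << "\n"; },
        [](const Opaque & bo)  { std::cout << "substitute " << bo.path     << "\n"; },
    }, req);
}
```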
diff --git a/src/libstore/build/local-derivation-goal.cc b/src/libstore/build/local-derivation-goal.cc
index 990ff60b7..c9a4a31e7 100644
--- a/src/libstore/build/local-derivation-goal.cc
+++ b/src/libstore/build/local-derivation-goal.cc
@@ -17,16 +17,14 @@
#include <regex>
#include <queue>
-#include <sys/types.h>
-#include <sys/socket.h>
#include <sys/un.h>
-#include <netdb.h>
#include <fcntl.h>
#include <termios.h>
#include <unistd.h>
#include <sys/mman.h>
#include <sys/utsname.h>
#include <sys/resource.h>
+#include <sys/socket.h>
#if HAVE_STATVFS
#include <sys/statvfs.h>
@@ -34,7 +32,6 @@
/* Includes required for chroot support. */
#if __linux__
-#include <sys/socket.h>
#include <sys/ioctl.h>
#include <net/if.h>
#include <netinet/ip.h>
@@ -70,12 +67,14 @@ void handleDiffHook(
auto diffHook = settings.diffHook;
if (diffHook != "" && settings.runDiffHook) {
try {
- RunOptions diffHookOptions(diffHook,{tryA, tryB, drvPath, tmpDir});
- diffHookOptions.searchPath = true;
- diffHookOptions.uid = uid;
- diffHookOptions.gid = gid;
- diffHookOptions.chdir = "/";
- auto diffRes = runProgram(diffHookOptions);
+ auto diffRes = runProgram(RunOptions {
+ .program = diffHook,
+ .searchPath = true,
+ .args = {tryA, tryB, drvPath, tmpDir},
+ .uid = uid,
+ .gid = gid,
+ .chdir = "/"
+ });
if (!statusOk(diffRes.first))
throw ExecError(diffRes.first,
"diff-hook program '%1%' %2%",
@@ -343,24 +342,7 @@ int childEntry(void * arg)
return 1;
}
-
-static std::once_flag dns_resolve_flag;
-
-static void preloadNSS() {
- /* builtin:fetchurl can trigger a DNS lookup, which with glibc can trigger a dynamic library load of
- one of the glibc NSS libraries in a sandboxed child, which will fail unless the library's already
- been loaded in the parent. So we force a lookup of an invalid domain to force the NSS machinery to
- load its lookup libraries in the parent before any child gets a chance to. */
- std::call_once(dns_resolve_flag, []() {
- struct addrinfo *res = NULL;
-
- if (getaddrinfo("this.pre-initializes.the.dns.resolvers.invalid.", "http", NULL, &res) != 0) {
- if (res) freeaddrinfo(res);
- }
- });
-}
-
-
+#if __linux__
static void linkOrCopy(const Path & from, const Path & to)
{
if (link(from.c_str(), to.c_str()) == -1) {
@@ -376,6 +358,7 @@ static void linkOrCopy(const Path & from, const Path & to)
copyPath(from, to);
}
}
+#endif
void LocalDerivationGoal::startBuilder()
@@ -389,9 +372,6 @@ void LocalDerivationGoal::startBuilder()
settings.thisSystem,
concatStringsSep<StringSet>(", ", worker.store.systemFeatures));
- if (drv->isBuiltin())
- preloadNSS();
-
#if __APPLE__
additionalSandboxProfile = parsedDrv->getStringAttr("__sandboxProfile").value_or("");
#endif
@@ -732,6 +712,7 @@ void LocalDerivationGoal::startBuilder()
if (!builderOut.readSide)
throw SysError("opening pseudoterminal master");
+ // FIXME: not thread-safe, use ptsname_r
std::string slaveName(ptsname(builderOut.readSide.get()));
if (buildUser) {
@@ -775,7 +756,6 @@ void LocalDerivationGoal::startBuilder()
result.startTime = time(0);
/* Fork a child to build the package. */
- ProcessOptions options;
#if __linux__
if (useChroot) {
@@ -818,8 +798,6 @@ void LocalDerivationGoal::startBuilder()
userNamespaceSync.create();
- options.allowVfork = false;
-
Path maxUserNamespaces = "/proc/sys/user/max_user_namespaces";
static bool userNamespacesEnabled =
pathExists(maxUserNamespaces)
@@ -877,7 +855,7 @@ void LocalDerivationGoal::startBuilder()
writeFull(builderOut.writeSide.get(),
fmt("%d %d\n", usingUserNamespace, child));
_exit(0);
- }, options);
+ });
int res = helper.wait();
if (res != 0 && settings.sandboxFallback) {
@@ -940,11 +918,12 @@ void LocalDerivationGoal::startBuilder()
} else
#endif
{
+#if __linux__
fallback:
- options.allowVfork = !buildUser && !drv->isBuiltin();
+#endif
pid = startProcess([&]() {
runChild();
- }, options);
+ });
}
/* parent */
@@ -959,9 +938,12 @@ void LocalDerivationGoal::startBuilder()
try {
return readLine(builderOut.readSide.get());
} catch (Error & e) {
- e.addTrace({}, "while waiting for the build environment to initialize (previous messages: %s)",
+ auto status = pid.wait();
+ e.addTrace({}, "while waiting for the build environment for '%s' to initialize (%s, previous messages: %s)",
+ worker.store.printStorePath(drvPath),
+ statusToString(status),
concatStringsSep("|", msgs));
- throw e;
+ throw;
}
}();
if (string(msg, 0, 1) == "\2") break;
@@ -1112,10 +1094,10 @@ void LocalDerivationGoal::writeStructuredAttrs()
static StorePath pathPartOfReq(const DerivedPath & req)
{
return std::visit(overloaded {
- [&](DerivedPath::Opaque bo) {
+ [&](const DerivedPath::Opaque & bo) {
return bo.path;
},
- [&](DerivedPath::Built bfd) {
+ [&](const DerivedPath::Built & bfd) {
return bfd.drvPath;
},
}, req.raw());
@@ -1200,7 +1182,8 @@ struct RestrictedStore : public virtual RestrictedStoreConfig, public virtual Lo
StorePath addToStore(const string & name, const Path & srcPath,
FileIngestionMethod method = FileIngestionMethod::Recursive, HashType hashAlgo = htSHA256,
- PathFilter & filter = defaultPathFilter, RepairFlag repair = NoRepair) override
+ PathFilter & filter = defaultPathFilter, RepairFlag repair = NoRepair,
+ const StorePathSet & references = StorePathSet()) override
{ throw Error("addToStore"); }
void addToStore(const ValidPathInfo & info, Source & narSource,
@@ -1219,9 +1202,10 @@ struct RestrictedStore : public virtual RestrictedStoreConfig, public virtual Lo
}
StorePath addToStoreFromDump(Source & dump, const string & name,
- FileIngestionMethod method = FileIngestionMethod::Recursive, HashType hashAlgo = htSHA256, RepairFlag repair = NoRepair) override
+ FileIngestionMethod method = FileIngestionMethod::Recursive, HashType hashAlgo = htSHA256, RepairFlag repair = NoRepair,
+ const StorePathSet & references = StorePathSet()) override
{
- auto path = next->addToStoreFromDump(dump, name, method, hashAlgo, repair);
+ auto path = next->addToStoreFromDump(dump, name, method, hashAlgo, repair, references);
goal.addDependency(path);
return path;
}
@@ -1245,13 +1229,14 @@ struct RestrictedStore : public virtual RestrictedStoreConfig, public virtual Lo
// corresponds to an allowed derivation
{ throw Error("registerDrvOutput"); }
- std::optional<const Realisation> queryRealisation(const DrvOutput & id) override
+ void queryRealisationUncached(const DrvOutput & id,
+ Callback<std::shared_ptr<const Realisation>> callback) noexcept override
// XXX: This should probably be allowed if the realisation corresponds to
// an allowed derivation
{
if (!goal.isAllowed(id))
- throw InvalidPath("cannot query an unknown output id '%s' in recursive Nix", id.to_string());
- return next->queryRealisation(id);
+ callback(nullptr);
+ next->queryRealisation(id, std::move(callback));
}
void buildPaths(const std::vector<DerivedPath> & paths, BuildMode buildMode, std::shared_ptr<Store> evalStore) override
@@ -1280,7 +1265,7 @@ struct RestrictedStore : public virtual RestrictedStoreConfig, public virtual Lo
for (auto & [outputName, outputPath] : outputs)
if (wantOutput(outputName, bfd.outputs)) {
newPaths.insert(outputPath);
- if (settings.isExperimentalFeatureEnabled("ca-derivations")) {
+ if (settings.isExperimentalFeatureEnabled(Xp::CaDerivations)) {
auto thisRealisation = next->queryRealisation(
DrvOutput{drvHashes.at(outputName), outputName}
);
@@ -1341,7 +1326,7 @@ struct RestrictedStore : public virtual RestrictedStoreConfig, public virtual Lo
void LocalDerivationGoal::startDaemon()
{
- settings.requireExperimentalFeature("recursive-nix");
+ settings.requireExperimentalFeature(Xp::RecursiveNix);
Store::Params params;
params["path-info-cache-size"] = "0";
@@ -1374,7 +1359,7 @@ void LocalDerivationGoal::startDaemon()
AutoCloseFD remote = accept(daemonSocket.get(),
(struct sockaddr *) &remoteAddr, &remoteAddrLen);
if (!remote) {
- if (errno == EINTR) continue;
+ if (errno == EINTR || errno == EAGAIN) continue;
if (errno == EINVAL) break;
throw SysError("accepting connection");
}
@@ -1853,7 +1838,7 @@ void LocalDerivationGoal::runChild()
/* Fill in the arguments. */
Strings args;
- const char *builder = "invalid";
+ std::string builder = "invalid";
if (drv->isBuiltin()) {
;
@@ -1979,13 +1964,13 @@ void LocalDerivationGoal::runChild()
}
args.push_back(drv->builder);
} else {
- builder = drv->builder.c_str();
+ builder = drv->builder;
args.push_back(std::string(baseNameOf(drv->builder)));
}
}
#else
else {
- builder = drv->builder.c_str();
+ builder = drv->builder;
args.push_back(std::string(baseNameOf(drv->builder)));
}
#endif
@@ -2012,7 +1997,7 @@ void LocalDerivationGoal::runChild()
else if (drv->builder == "builtin:unpack-channel")
builtinUnpackChannel(drv2);
else
- throw Error("unsupported builtin function '%1%'", string(drv->builder, 8));
+ throw Error("unsupported builtin builder '%1%'", string(drv->builder, 8));
_exit(0);
} catch (std::exception & e) {
writeFull(STDERR_FILENO, e.what() + std::string("\n"));
@@ -2041,9 +2026,9 @@ void LocalDerivationGoal::runChild()
posix_spawnattr_setbinpref_np(&attrp, 1, &cpu, NULL);
}
- posix_spawn(NULL, builder, NULL, &attrp, stringsToCharPtrs(args).data(), stringsToCharPtrs(envStrs).data());
+ posix_spawn(NULL, builder.c_str(), NULL, &attrp, stringsToCharPtrs(args).data(), stringsToCharPtrs(envStrs).data());
#else
- execve(builder, stringsToCharPtrs(args).data(), stringsToCharPtrs(envStrs).data());
+ execve(builder.c_str(), stringsToCharPtrs(args).data(), stringsToCharPtrs(envStrs).data());
#endif
throw SysError("executing '%1%'", drv->builder);
@@ -2158,8 +2143,7 @@ void LocalDerivationGoal::registerOutputs()
/* Pass blank Sink as we are not ready to hash data at this stage. */
NullSink blank;
- auto references = worker.store.parseStorePathSet(
- scanForReferences(blank, actualPath, worker.store.printStorePathSet(referenceablePaths)));
+ auto references = scanForReferences(blank, actualPath, referenceablePaths);
outputReferencesIfUnregistered.insert_or_assign(
outputName,
@@ -2173,8 +2157,8 @@ void LocalDerivationGoal::registerOutputs()
/* Since we'll use the already installed versions of these, we
can treat them as leaves and ignore any references they
have. */
- [&](AlreadyRegistered _) { return StringSet {}; },
- [&](PerhapsNeedToRegister refs) {
+ [&](const AlreadyRegistered &) { return StringSet {}; },
+ [&](const PerhapsNeedToRegister & refs) {
StringSet referencedOutputs;
/* FIXME build inverted map up front so no quadratic waste here */
for (auto & r : refs.refs)
@@ -2210,11 +2194,11 @@ void LocalDerivationGoal::registerOutputs()
};
std::optional<StorePathSet> referencesOpt = std::visit(overloaded {
- [&](AlreadyRegistered skippedFinalPath) -> std::optional<StorePathSet> {
+ [&](const AlreadyRegistered & skippedFinalPath) -> std::optional<StorePathSet> {
finish(skippedFinalPath.path);
return std::nullopt;
},
- [&](PerhapsNeedToRegister r) -> std::optional<StorePathSet> {
+ [&](const PerhapsNeedToRegister & r) -> std::optional<StorePathSet> {
return r.refs;
},
}, outputReferencesIfUnregistered.at(outputName));
@@ -2226,7 +2210,7 @@ void LocalDerivationGoal::registerOutputs()
auto rewriteOutput = [&]() {
/* Apply hash rewriting if necessary. */
if (!outputRewrites.empty()) {
- warn("rewriting hashes in '%1%'; cross fingers", actualPath);
+ debug("rewriting hashes in '%1%'; cross fingers", actualPath);
/* FIXME: this is in-memory. */
StringSink sink;
@@ -2330,7 +2314,7 @@ void LocalDerivationGoal::registerOutputs()
};
ValidPathInfo newInfo = std::visit(overloaded {
- [&](DerivationOutputInputAddressed output) {
+ [&](const DerivationOutputInputAddressed & output) {
/* input-addressed case */
auto requiredFinalPath = output.path;
/* Preemptively add rewrite rule for final hash, as that is
@@ -2349,14 +2333,14 @@ void LocalDerivationGoal::registerOutputs()
newInfo0.references.insert(newInfo0.path);
return newInfo0;
},
- [&](DerivationOutputCAFixed dof) {
+ [&](const DerivationOutputCAFixed & dof) {
auto newInfo0 = newInfoFromCA(DerivationOutputCAFloating {
.method = dof.hash.method,
.hashType = dof.hash.hash.type,
});
/* Check wanted hash */
- Hash & wanted = dof.hash.hash;
+ const Hash & wanted = dof.hash.hash;
assert(newInfo0.ca);
auto got = getContentAddressHash(*newInfo0.ca);
if (wanted != got) {
@@ -2492,7 +2476,13 @@ void LocalDerivationGoal::registerOutputs()
infos.emplace(outputName, std::move(newInfo));
}
- if (buildMode == bmCheck) return;
+ if (buildMode == bmCheck) {
+ // In case of FOD mismatches on `--check` an error must be thrown,
+ // as this is also a source of non-determinism.
+ if (delayedException)
+ std::rethrow_exception(delayedException);
+ return;
+ }
/* Apply output checks. */
checkOutputs(infos);
@@ -2577,7 +2567,7 @@ void LocalDerivationGoal::registerOutputs()
that for floating CA derivations, which otherwise couldn't be cached,
but it's fine to do in all cases. */
- if (settings.isExperimentalFeatureEnabled("ca-derivations")) {
+ if (settings.isExperimentalFeatureEnabled(Xp::CaDerivations)) {
for (auto& [outputName, newInfo] : infos) {
auto thisRealisation = Realisation{
.id = DrvOutput{initialOutputs.at(outputName).outputHash,
diff --git a/src/libstore/build/worker.cc b/src/libstore/build/worker.cc
index a7a6b92a6..f11c5ce68 100644
--- a/src/libstore/build/worker.cc
+++ b/src/libstore/build/worker.cc
@@ -239,7 +239,7 @@ void Worker::run(const Goals & _topGoals)
}
}
- /* Call queryMissing() efficiently query substitutes. */
+ /* Call queryMissing() to efficiently query substitutes. */
StorePathSet willBuild, willSubstitute, unknown;
uint64_t downloadSize, narSize;
store.queryMissing(topPaths, willBuild, willSubstitute, unknown, downloadSize, narSize);
@@ -281,11 +281,11 @@ void Worker::run(const Goals & _topGoals)
if (getMachines().empty())
throw Error("unable to start any build; either increase '--max-jobs' "
"or enable remote builds."
- "\nhttps://nixos.org/nix/manual/#chap-distributed-builds");
+ "\nhttps://nixos.org/manual/nix/stable/advanced-topics/distributed-builds.html");
else
throw Error("unable to start any build; remote machines may not have "
"all required system features."
- "\nhttps://nixos.org/nix/manual/#chap-distributed-builds");
+ "\nhttps://nixos.org/manual/nix/stable/advanced-topics/distributed-builds.html");
}
assert(!awake.empty());
diff --git a/src/libstore/content-address.cc b/src/libstore/content-address.cc
index 90a3ad1f5..cf32ccdc4 100644
--- a/src/libstore/content-address.cc
+++ b/src/libstore/content-address.cc
@@ -31,10 +31,10 @@ std::string makeFixedOutputCA(FileIngestionMethod method, const Hash & hash)
std::string renderContentAddress(ContentAddress ca)
{
return std::visit(overloaded {
- [](TextHash th) {
+ [](TextHash & th) {
return "text:" + th.hash.to_string(Base32, true);
},
- [](FixedOutputHash fsh) {
+ [](FixedOutputHash & fsh) {
return makeFixedOutputCA(fsh.method, fsh.hash);
}
}, ca);
@@ -43,10 +43,10 @@ std::string renderContentAddress(ContentAddress ca)
std::string renderContentAddressMethod(ContentAddressMethod cam)
{
return std::visit(overloaded {
- [](TextHashMethod &th) {
+ [](TextHashMethod & th) {
return std::string{"text:"} + printHashType(htSHA256);
},
- [](FixedOutputHashMethod &fshm) {
+ [](FixedOutputHashMethod & fshm) {
return "fixed:" + makeFileIngestionPrefix(fshm.fileIngestionMethod) + printHashType(fshm.hashType);
}
}, cam);
@@ -104,12 +104,12 @@ ContentAddress parseContentAddress(std::string_view rawCa) {
return std::visit(
overloaded {
- [&](TextHashMethod thm) {
+ [&](TextHashMethod & thm) {
return ContentAddress(TextHash {
.hash = Hash::parseNonSRIUnprefixed(rest, htSHA256)
});
},
- [&](FixedOutputHashMethod fohMethod) {
+ [&](FixedOutputHashMethod & fohMethod) {
return ContentAddress(FixedOutputHash {
.method = fohMethod.fileIngestionMethod,
.hash = Hash::parseNonSRIUnprefixed(rest, std::move(fohMethod.hashType)),
@@ -120,8 +120,10 @@ ContentAddress parseContentAddress(std::string_view rawCa) {
ContentAddressMethod parseContentAddressMethod(std::string_view caMethod)
{
- std::string_view asPrefix {std::string{caMethod} + ":"};
- return parseContentAddressMethodPrefix(asPrefix);
+ std::string asPrefix = std::string{caMethod} + ":";
+ // parseContentAddressMethodPrefix takes its argument by reference
+ std::string_view asPrefixView = asPrefix;
+ return parseContentAddressMethodPrefix(asPrefixView);
}
std::optional<ContentAddress> parseContentAddressOpt(std::string_view rawCaOpt)
@@ -137,10 +139,10 @@ std::string renderContentAddress(std::optional<ContentAddress> ca)
Hash getContentAddressHash(const ContentAddress & ca)
{
return std::visit(overloaded {
- [](TextHash th) {
+ [](const TextHash & th) {
return th.hash;
},
- [](FixedOutputHash fsh) {
+ [](const FixedOutputHash & fsh) {
return fsh.hash;
}
}, ca);
diff --git a/src/libstore/daemon.cc b/src/libstore/daemon.cc
index d68ff64d7..bafab6fd5 100644
--- a/src/libstore/daemon.cc
+++ b/src/libstore/daemon.cc
@@ -227,8 +227,15 @@ struct ClientSettings
try {
if (name == "ssh-auth-sock") // obsolete
;
+ else if (name == settings.experimentalFeatures.name) {
+ // We don’t want to forward the experimental features to the
+ // daemon, as that could cause it to behave in unexpected ways.
+ if (parseFeatures(tokenizeString<StringSet>(value)) != settings.experimentalFeatures.get())
+ debug("Ignoring the client-specified experimental features");
+ }
else if (trusted
|| name == settings.buildTimeout.name
+ || name == settings.buildRepeat.name
|| name == "connect-timeout"
|| (name == "builders" && value == ""))
settings.set(name, value);
@@ -389,16 +396,14 @@ static void performOp(TunnelLogger * logger, ref<Store> store,
FramedSource source(from);
// TODO this is essentially RemoteStore::addCAToStore. Move it up to Store.
return std::visit(overloaded {
- [&](TextHashMethod &_) {
+ [&](TextHashMethod &) {
// We could stream this by changing Store
std::string contents = source.drain();
auto path = store->addTextToStore(name, contents, refs, repair);
return store->queryPathInfo(path);
},
- [&](FixedOutputHashMethod &fohm) {
- if (!refs.empty())
- throw UnimplementedError("cannot yet have refs with flat or nar-hashed data");
- auto path = store->addToStoreFromDump(source, name, fohm.fileIngestionMethod, fohm.hashType, repair);
+ [&](FixedOutputHashMethod & fohm) {
+ auto path = store->addToStoreFromDump(source, name, fohm.fileIngestionMethod, fohm.hashType, repair, refs);
return store->queryPathInfo(path);
},
}, contentAddressMethod);
@@ -426,25 +431,30 @@ static void performOp(TunnelLogger * logger, ref<Store> store,
hashAlgo = parseHashType(hashAlgoRaw);
}
- StringSink saved;
- TeeSource savedNARSource(from, saved);
- RetrieveRegularNARSink savedRegular { saved };
-
- if (method == FileIngestionMethod::Recursive) {
- /* Get the entire NAR dump from the client and save it to
- a string so that we can pass it to
- addToStoreFromDump(). */
- ParseSink sink; /* null sink; just parse the NAR */
- parseDump(sink, savedNARSource);
- } else
- parseDump(savedRegular, from);
-
+ auto dumpSource = sinkToSource([&](Sink & saved) {
+ if (method == FileIngestionMethod::Recursive) {
+ /* We pass the NAR dump through into `saved` unmodified, so
+ why all this extra work? We still parse the NAR so that we
+ aren't unwittingly sending arbitrary data to `saved`, and so
+ that we know when the NAR ends and don't consume the rest of
+ `from`, which would leave us unable to parse the next
+ command. (We don't trust `addToStoreFromDump` not to eagerly
+ consume the entire stream it's given, past the length of the
+ NAR.) */
+ TeeSource savedNARSource(from, saved);
+ ParseSink sink; /* null sink; just parse the NAR */
+ parseDump(sink, savedNARSource);
+ } else {
+ /* Incrementally parse the NAR file, stripping the
+ metadata, and streaming the sole file we expect into
+ `saved`. */
+ RetrieveRegularNARSink savedRegular { saved };
+ parseDump(savedRegular, from);
+ if (!savedRegular.regular) throw Error("regular file expected");
+ }
+ });
logger->startWork();
- if (!savedRegular.regular) throw Error("regular file expected");
-
- // FIXME: try to stream directly from `from`.
- StringSource dumpSource { *saved.s };
- auto path = store->addToStoreFromDump(dumpSource, baseName, method, hashAlgo);
+ auto path = store->addToStoreFromDump(*dumpSource, baseName, method, hashAlgo);
logger->stopWork();
to << store->printStorePath(path);
@@ -618,9 +628,9 @@ static void performOp(TunnelLogger * logger, ref<Store> store,
break;
}
+ // Obsolete.
case wopSyncWithGC: {
logger->startWork();
- store->syncWithGC();
logger->stopWork();
to << 1;
break;
@@ -946,7 +956,7 @@ void processConnection(
Finally finally([&]() {
_isInterrupted = false;
- prevLogger->log(lvlDebug, fmt("%d operations", opCount));
+ printMsgUsing(prevLogger, lvlDebug, "%d operations", opCount);
});
if (GET_PROTOCOL_MINOR(clientVersion) >= 14 && readInt(from)) {
@@ -979,6 +989,8 @@ void processConnection(
break;
}
+ printMsgUsing(prevLogger, lvlDebug, "received daemon op %d", op);
+
opCount++;
try {
diff --git a/src/libstore/derivations.cc b/src/libstore/derivations.cc
index 899475860..b926bb711 100644
--- a/src/libstore/derivations.cc
+++ b/src/libstore/derivations.cc
@@ -10,18 +10,18 @@ namespace nix {
std::optional<StorePath> DerivationOutput::path(const Store & store, std::string_view drvName, std::string_view outputName) const
{
return std::visit(overloaded {
- [](DerivationOutputInputAddressed doi) -> std::optional<StorePath> {
+ [](const DerivationOutputInputAddressed & doi) -> std::optional<StorePath> {
return { doi.path };
},
- [&](DerivationOutputCAFixed dof) -> std::optional<StorePath> {
+ [&](const DerivationOutputCAFixed & dof) -> std::optional<StorePath> {
return {
dof.path(store, drvName, outputName)
};
},
- [](DerivationOutputCAFloating dof) -> std::optional<StorePath> {
+ [](const DerivationOutputCAFloating & dof) -> std::optional<StorePath> {
return std::nullopt;
},
- [](DerivationOutputDeferred) -> std::optional<StorePath> {
+ [](const DerivationOutputDeferred &) -> std::optional<StorePath> {
return std::nullopt;
},
}, output);
@@ -187,7 +187,7 @@ static DerivationOutput parseDerivationOutput(const Store & store,
},
};
} else {
- settings.requireExperimentalFeature("ca-derivations");
+ settings.requireExperimentalFeature(Xp::CaDerivations);
assert(pathS == "");
return DerivationOutput {
.output = DerivationOutputCAFloating {
@@ -332,22 +332,22 @@ string Derivation::unparse(const Store & store, bool maskOutputs,
if (first) first = false; else s += ',';
s += '('; printUnquotedString(s, i.first);
std::visit(overloaded {
- [&](DerivationOutputInputAddressed doi) {
+ [&](const DerivationOutputInputAddressed & doi) {
s += ','; printUnquotedString(s, maskOutputs ? "" : store.printStorePath(doi.path));
s += ','; printUnquotedString(s, "");
s += ','; printUnquotedString(s, "");
},
- [&](DerivationOutputCAFixed dof) {
+ [&](const DerivationOutputCAFixed & dof) {
s += ','; printUnquotedString(s, maskOutputs ? "" : store.printStorePath(dof.path(store, name, i.first)));
s += ','; printUnquotedString(s, dof.hash.printMethodAlgo());
s += ','; printUnquotedString(s, dof.hash.hash.to_string(Base16, false));
},
- [&](DerivationOutputCAFloating dof) {
+ [&](const DerivationOutputCAFloating & dof) {
s += ','; printUnquotedString(s, "");
s += ','; printUnquotedString(s, makeFileIngestionPrefix(dof.method) + printHashType(dof.hashType));
s += ','; printUnquotedString(s, "");
},
- [&](DerivationOutputDeferred) {
+ [&](const DerivationOutputDeferred &) {
s += ','; printUnquotedString(s, "");
s += ','; printUnquotedString(s, "");
s += ','; printUnquotedString(s, "");
@@ -420,13 +420,13 @@ DerivationType BasicDerivation::type() const
std::optional<HashType> floatingHashType;
for (auto & i : outputs) {
std::visit(overloaded {
- [&](DerivationOutputInputAddressed _) {
+ [&](const DerivationOutputInputAddressed &) {
inputAddressedOutputs.insert(i.first);
},
- [&](DerivationOutputCAFixed _) {
+ [&](const DerivationOutputCAFixed &) {
fixedCAOutputs.insert(i.first);
},
- [&](DerivationOutputCAFloating dof) {
+ [&](const DerivationOutputCAFloating & dof) {
floatingCAOutputs.insert(i.first);
if (!floatingHashType) {
floatingHashType = dof.hashType;
@@ -435,7 +435,7 @@ DerivationType BasicDerivation::type() const
throw Error("All floating outputs must use the same hash type");
}
},
- [&](DerivationOutputDeferred _) {
+ [&](const DerivationOutputDeferred &) {
deferredIAOutputs.insert(i.first);
},
}, i.second.output);
@@ -538,15 +538,15 @@ DrvHashModulo hashDerivationModulo(Store & store, const Derivation & drv, bool m
const auto & res = pathDerivationModulo(store, i.first);
std::visit(overloaded {
// Regular non-CA derivation, replace derivation
- [&](Hash drvHash) {
+ [&](const Hash & drvHash) {
inputs2.insert_or_assign(drvHash.to_string(Base16, false), i.second);
},
- [&](DeferredHash deferredHash) {
+ [&](const DeferredHash & deferredHash) {
isDeferred = true;
inputs2.insert_or_assign(deferredHash.hash.to_string(Base16, false), i.second);
},
// CA derivation's output hashes
- [&](CaOutputHashes outputHashes) {
+ [&](const CaOutputHashes & outputHashes) {
std::set<std::string> justOut = { "out" };
for (auto & output : i.second) {
/* Put each one in with a single "out" output.. */
@@ -572,17 +572,17 @@ std::map<std::string, Hash> staticOutputHashes(Store & store, const Derivation &
{
std::map<std::string, Hash> res;
std::visit(overloaded {
- [&](Hash drvHash) {
+ [&](const Hash & drvHash) {
for (auto & outputName : drv.outputNames()) {
res.insert({outputName, drvHash});
}
},
- [&](DeferredHash deferredHash) {
+ [&](const DeferredHash & deferredHash) {
for (auto & outputName : drv.outputNames()) {
res.insert({outputName, deferredHash.hash});
}
},
- [&](CaOutputHashes outputHashes) {
+ [&](const CaOutputHashes & outputHashes) {
res = outputHashes;
},
}, hashDerivationModulo(store, drv, true));
@@ -666,22 +666,22 @@ void writeDerivation(Sink & out, const Store & store, const BasicDerivation & dr
for (auto & i : drv.outputs) {
out << i.first;
std::visit(overloaded {
- [&](DerivationOutputInputAddressed doi) {
+ [&](const DerivationOutputInputAddressed & doi) {
out << store.printStorePath(doi.path)
<< ""
<< "";
},
- [&](DerivationOutputCAFixed dof) {
+ [&](const DerivationOutputCAFixed & dof) {
out << store.printStorePath(dof.path(store, drv.name, i.first))
<< dof.hash.printMethodAlgo()
<< dof.hash.hash.to_string(Base16, false);
},
- [&](DerivationOutputCAFloating dof) {
+ [&](const DerivationOutputCAFloating & dof) {
out << ""
<< (makeFileIngestionPrefix(dof.method) + printHashType(dof.hashType))
<< "";
},
- [&](DerivationOutputDeferred) {
+ [&](const DerivationOutputDeferred &) {
out << ""
<< ""
<< "";
diff --git a/src/libstore/derivations.hh b/src/libstore/derivations.hh
index 2df440536..b1cb68194 100644
--- a/src/libstore/derivations.hh
+++ b/src/libstore/derivations.hh
@@ -138,8 +138,8 @@ struct Derivation : BasicDerivation
/* Return the underlying basic derivation but with these changes:
- 1. Input drvs are emptied, but the outputs of them that were used are
- added directly to input sources.
+ 1. Input drvs are emptied, but the outputs of them that were used are
+ added directly to input sources.
2. Input placeholders are replaced with realized input store paths. */
std::optional<BasicDerivation> tryResolve(Store & store);
diff --git a/src/libstore/derived-path.cc b/src/libstore/derived-path.cc
index 8da81d0ac..3d188e981 100644
--- a/src/libstore/derived-path.cc
+++ b/src/libstore/derived-path.cc
@@ -24,8 +24,8 @@ StorePathSet BuiltPath::outPaths() const
{
return std::visit(
overloaded{
- [](BuiltPath::Opaque p) { return StorePathSet{p.path}; },
- [](BuiltPath::Built b) {
+ [](const BuiltPath::Opaque & p) { return StorePathSet{p.path}; },
+ [](const BuiltPath::Built & b) {
StorePathSet res;
for (auto & [_, path] : b.outputs)
res.insert(path);
@@ -94,13 +94,13 @@ RealisedPath::Set BuiltPath::toRealisedPaths(Store & store) const
RealisedPath::Set res;
std::visit(
overloaded{
- [&](BuiltPath::Opaque p) { res.insert(p.path); },
- [&](BuiltPath::Built p) {
+ [&](const BuiltPath::Opaque & p) { res.insert(p.path); },
+ [&](const BuiltPath::Built & p) {
auto drvHashes =
staticOutputHashes(store, store.readDerivation(p.drvPath));
for (auto& [outputName, outputPath] : p.outputs) {
if (settings.isExperimentalFeatureEnabled(
- "ca-derivations")) {
+ Xp::CaDerivations)) {
auto thisRealisation = store.queryRealisation(
DrvOutput{drvHashes.at(outputName), outputName});
assert(thisRealisation); // We’ve built it, so we must have it
diff --git a/src/libstore/dummy-store.cc b/src/libstore/dummy-store.cc
index 36c6e725c..62dc21c59 100644
--- a/src/libstore/dummy-store.cc
+++ b/src/libstore/dummy-store.cc
@@ -50,8 +50,9 @@ struct DummyStore : public virtual DummyStoreConfig, public virtual Store
void narFromPath(const StorePath & path, Sink & sink) override
{ unsupported("narFromPath"); }
- std::optional<const Realisation> queryRealisation(const DrvOutput&) override
- { unsupported("queryRealisation"); }
+ void queryRealisationUncached(const DrvOutput &,
+ Callback<std::shared_ptr<const Realisation>> callback) noexcept override
+ { callback(nullptr); }
};
static RegisterStoreImplementation<DummyStore, DummyStoreConfig> regDummyStore;
diff --git a/src/libstore/filetransfer.cc b/src/libstore/filetransfer.cc
index 2cf35ec83..4621a8217 100644
--- a/src/libstore/filetransfer.cc
+++ b/src/libstore/filetransfer.cc
@@ -544,6 +544,14 @@ struct curlFileTransfer : public FileTransfer
stopWorkerThread();
});
+#ifdef __linux__
+ /* Prevent this thread from sharing FS attributes with the main thread,
+ because sharing them causes setns() in restoreMountNamespace() to fail.
+ Ideally, this would happen in the std::thread() constructor. */
+ if (unshare(CLONE_FS) != 0)
+ throw SysError("unsharing filesystem state in download thread");
+#endif
+
std::map<CURL *, std::shared_ptr<TransferItem>> items;
bool quit = false;
@@ -716,15 +724,24 @@ struct curlFileTransfer : public FileTransfer
}
};
+ref<curlFileTransfer> makeCurlFileTransfer()
+{
+ return make_ref<curlFileTransfer>();
+}
+
ref<FileTransfer> getFileTransfer()
{
- static ref<FileTransfer> fileTransfer = makeFileTransfer();
+ static ref<curlFileTransfer> fileTransfer = makeCurlFileTransfer();
+
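+ /* If the worker thread of the cached instance has quit, that
+ instance can no longer process transfers, so replace it with
+ a fresh one. */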
+ if (fileTransfer->state_.lock()->quit)
+ fileTransfer = makeCurlFileTransfer();
+
return fileTransfer;
}
ref<FileTransfer> makeFileTransfer()
{
- return make_ref<curlFileTransfer>();
+ return makeCurlFileTransfer();
}
std::future<FileTransferResult> FileTransfer::enqueueFileTransfer(const FileTransferRequest & request)
diff --git a/src/libstore/gc.cc b/src/libstore/gc.cc
index 5a62c6529..7a414da6b 100644
--- a/src/libstore/gc.cc
+++ b/src/libstore/gc.cc
@@ -10,48 +10,22 @@
#include <regex>
#include <random>
-#include <sys/types.h>
-#include <sys/stat.h>
-#include <sys/statvfs.h>
+#include <climits>
#include <errno.h>
#include <fcntl.h>
+#include <poll.h>
+#include <sys/socket.h>
+#include <sys/stat.h>
+#include <sys/statvfs.h>
+#include <sys/types.h>
+#include <sys/un.h>
#include <unistd.h>
-#include <climits>
namespace nix {
-static string gcLockName = "gc.lock";
-static string gcRootsDir = "gcroots";
-
-
-/* Acquire the global GC lock. This is used to prevent new Nix
- processes from starting after the temporary root files have been
- read. To be precise: when they try to create a new temporary root
- file, they will block until the garbage collector has finished /
- yielded the GC lock. */
-AutoCloseFD LocalStore::openGCLock(LockType lockType)
-{
- Path fnGCLock = (format("%1%/%2%")
- % stateDir % gcLockName).str();
-
- debug(format("acquiring global GC lock '%1%'") % fnGCLock);
-
- AutoCloseFD fdGCLock = open(fnGCLock.c_str(), O_RDWR | O_CREAT | O_CLOEXEC, 0600);
- if (!fdGCLock)
- throw SysError("opening global GC lock '%1%'", fnGCLock);
-
- if (!lockFile(fdGCLock.get(), lockType, false)) {
- printInfo("waiting for the big garbage collector lock...");
- lockFile(fdGCLock.get(), lockType, true);
- }
-
- /* !!! Restrict read permission on the GC root. Otherwise any
- process that can open the file for reading can DoS the
- collector. */
-
- return fdGCLock;
-}
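+// Socket (relative to the state directory) on which a running
+// garbage collector accepts new roots.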
+static std::string gcSocketPath = "/gc-socket/socket";
+static std::string gcRootsDir = "gcroots";
static void makeSymlink(const Path & link, const Path & target)
@@ -71,12 +45,6 @@ static void makeSymlink(const Path & link, const Path & target)
}
-void LocalStore::syncWithGC()
-{
- AutoCloseFD fdGCLock = openGCLock(ltRead);
-}
-
-
void LocalStore::addIndirectRoot(const Path & path)
{
string hash = hashString(htSHA1, path).to_string(Base32, false);
@@ -95,6 +63,12 @@ Path LocalFSStore::addPermRoot(const StorePath & storePath, const Path & _gcRoot
"creating a garbage collector root (%1%) in the Nix store is forbidden "
"(are you running nix-build inside the store?)", gcRoot);
+ /* Register this root with the garbage collector, if it's
+ running. This should be superfluous, since the caller should
+ already have registered this root, but let's be on the safe
+ side. */
+ addTempRoot(storePath);
+
/* Don't clobber the link if it already exists and doesn't
point to the Nix store. */
if (pathExists(gcRoot) && (!isLink(gcRoot) || !isInStore(readLink(gcRoot))))
@@ -102,11 +76,6 @@ Path LocalFSStore::addPermRoot(const StorePath & storePath, const Path & _gcRoot
makeSymlink(gcRoot, printStorePath(storePath));
addIndirectRoot(gcRoot);
- /* Grab the global GC root, causing us to block while a GC is in
- progress. This prevents the set of permanent roots from
- increasing while a GC is in progress. */
- syncWithGC();
-
return gcRoot;
}
@@ -119,8 +88,6 @@ void LocalStore::addTempRoot(const StorePath & path)
if (!state->fdTempRoots) {
while (1) {
- AutoCloseFD fdGCLock = openGCLock(ltRead);
-
if (pathExists(fnTempRoots))
/* It *must* be stale, since there can be no two
processes with the same pid. */
@@ -128,10 +95,8 @@ void LocalStore::addTempRoot(const StorePath & path)
state->fdTempRoots = openLockFile(fnTempRoots, true);
- fdGCLock = -1;
-
- debug(format("acquiring read lock on '%1%'") % fnTempRoots);
- lockFile(state->fdTempRoots.get(), ltRead, true);
+ debug("acquiring write lock on '%s'", fnTempRoots);
+ lockFile(state->fdTempRoots.get(), ltWrite, true);
/* Check whether the garbage collector didn't get in our
way. */
@@ -147,24 +112,55 @@ void LocalStore::addTempRoot(const StorePath & path)
}
- /* Upgrade the lock to a write lock. This will cause us to block
- if the garbage collector is holding our lock. */
- debug(format("acquiring write lock on '%1%'") % fnTempRoots);
- lockFile(state->fdTempRoots.get(), ltWrite, true);
+ if (!state->fdGCLock)
+ state->fdGCLock = openGCLock();
+
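+ /* Try to take a shared (read) lock on the global GC lock without
+ blocking; we jump back here if the connection to a running
+ collector is lost. */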
+ restart:
+ FdLock gcLock(state->fdGCLock.get(), ltRead, false, "");
+
+ if (!gcLock.acquired) {
+ /* We couldn't get a shared global GC lock, so the garbage
+ collector is running. Therefore we have to connect to the
+ garbage collector and inform it about our root. */
+ if (!state->fdRootsSocket) {
+ auto socketPath = stateDir.get() + gcSocketPath;
+ debug("connecting to '%s'", socketPath);
+ state->fdRootsSocket = createUnixDomainSocket();
+ nix::connect(state->fdRootsSocket.get(), socketPath);
+ }
+
+ try {
+ debug("sending GC root '%s'", printStorePath(path));
+ writeFull(state->fdRootsSocket.get(), printStorePath(path) + "\n", false);
+ char c;
+ readFull(state->fdRootsSocket.get(), &c, 1);
+ assert(c == '1');
+ debug("got ack for GC root '%s'", printStorePath(path));
+ } catch (SysError & e) {
+ /* The garbage collector may have exited, so we need to
+ restart. */
+ if (e.errNo == EPIPE) {
+ debug("GC socket disconnected");
+ state->fdRootsSocket.close();
+ goto restart;
+ }
+ } catch (EndOfFile & e) {
+ debug("GC socket disconnected");
+ state->fdRootsSocket.close();
+ goto restart;
+ }
+ }
+ /* Append the store path to the temporary roots file. */
string s = printStorePath(path) + '\0';
writeFull(state->fdTempRoots.get(), s);
-
- /* Downgrade to a read lock. */
- debug(format("downgrading to read lock on '%1%'") % fnTempRoots);
- lockFile(state->fdTempRoots.get(), ltRead, true);
}
static std::string censored = "{censored}";
-void LocalStore::findTempRoots(FDs & fds, Roots & tempRoots, bool censor)
+void LocalStore::findTempRoots(Roots & tempRoots, bool censor)
{
/* Read the `temproots' directory for per-process temporary root
files. */
@@ -179,35 +175,25 @@ void LocalStore::findTempRoots(FDs & fds, Roots & tempRoots, bool censor)
pid_t pid = std::stoi(i.name);
debug(format("reading temporary root file '%1%'") % path);
- FDPtr fd(new AutoCloseFD(open(path.c_str(), O_CLOEXEC | O_RDWR, 0666)));
- if (!*fd) {
+ AutoCloseFD fd(open(path.c_str(), O_CLOEXEC | O_RDWR, 0666));
+ if (!fd) {
/* It's okay if the file has disappeared. */
if (errno == ENOENT) continue;
throw SysError("opening temporary roots file '%1%'", path);
}
- /* This should work, but doesn't, for some reason. */
- //FDPtr fd(new AutoCloseFD(openLockFile(path, false)));
- //if (*fd == -1) continue;
-
/* Try to acquire a write lock without blocking. This can
only succeed if the owning process has died. In that case
we don't care about its temporary roots. */
- if (lockFile(fd->get(), ltWrite, false)) {
+ if (lockFile(fd.get(), ltWrite, false)) {
printInfo("removing stale temporary roots file '%1%'", path);
unlink(path.c_str());
- writeFull(fd->get(), "d");
+ writeFull(fd.get(), "d");
continue;
}
- /* Acquire a read lock. This will prevent the owning process
- from upgrading to a write lock, therefore it will block in
- addTempRoot(). */
- debug(format("waiting for read lock on '%1%'") % path);
- lockFile(fd->get(), ltRead, true);
-
/* Read the entire file. */
- string contents = readFile(fd->get());
+ string contents = readFile(fd.get());
/* Extract the roots. */
string::size_type pos = 0, end;
@@ -218,8 +204,6 @@ void LocalStore::findTempRoots(FDs & fds, Roots & tempRoots, bool censor)
tempRoots[parseStorePath(root)].emplace(censor ? censored : fmt("{temp:%d}", pid));
pos = end + 1;
}
-
- fds.push_back(fd); /* keep open */
}
}
@@ -304,8 +288,7 @@ Roots LocalStore::findRoots(bool censor)
Roots roots;
findRootsNoTemp(roots, censor);
- FDs fds;
- findTempRoots(fds, roots, censor);
+ findTempRoots(roots, censor);
return roots;
}
@@ -341,6 +324,7 @@ static string quoteRegexChars(const string & raw)
return std::regex_replace(raw, specialRegex, R"(\$&)");
}
+#if __linux__
static void readFileRoots(const char * path, UncheckedRoots & roots)
{
try {
@@ -350,6 +334,7 @@ static void readFileRoots(const char * path, UncheckedRoots & roots)
throw;
}
}
+#endif
void LocalStore::findRuntimeRoots(Roots & roots, bool censor)
{
@@ -431,7 +416,7 @@ void LocalStore::findRuntimeRoots(Roots & roots, bool censor)
}
#endif
-#if defined(__linux__)
+#if __linux__
readFileRoots("/proc/sys/kernel/modprobe", unchecked);
readFileRoots("/proc/sys/kernel/fbsplash", unchecked);
readFileRoots("/proc/sys/kernel/poweroff_cmd", unchecked);
@@ -455,391 +440,397 @@ void LocalStore::findRuntimeRoots(Roots & roots, bool censor)
struct GCLimitReached { };
-struct LocalStore::GCState
-{
- const GCOptions & options;
- GCResults & results;
- StorePathSet roots;
- StorePathSet tempRoots;
- StorePathSet dead;
- StorePathSet alive;
- bool gcKeepOutputs;
- bool gcKeepDerivations;
- uint64_t bytesInvalidated;
- bool moveToTrash = true;
- bool shouldDelete;
- GCState(const GCOptions & options, GCResults & results)
- : options(options), results(results), bytesInvalidated(0) { }
-};
-
-
-bool LocalStore::isActiveTempFile(const GCState & state,
- const Path & path, const string & suffix)
+void LocalStore::collectGarbage(const GCOptions & options, GCResults & results)
{
- return hasSuffix(path, suffix)
- && state.tempRoots.count(parseStorePath(string(path, 0, path.size() - suffix.size())));
-}
+ bool shouldDelete = options.action == GCOptions::gcDeleteDead || options.action == GCOptions::gcDeleteSpecific;
+ bool gcKeepOutputs = settings.gcKeepOutputs;
+ bool gcKeepDerivations = settings.gcKeepDerivations;
+ StorePathSet roots, dead, alive;
-void LocalStore::deleteGarbage(GCState & state, const Path & path)
-{
- uint64_t bytesFreed;
- deletePath(path, bytesFreed);
- state.results.bytesFreed += bytesFreed;
-}
+ struct Shared
+ {
+ // The temp roots only store the hash part to make it easier to
+ // ignore suffixes like '.lock', '.chroot' and '.check'.
+ std::unordered_set<std::string> tempRoots;
+ // Hash part of the store path currently being deleted, if
+ // any.
+ std::optional<std::string> pending;
+ };
-void LocalStore::deletePathRecursive(GCState & state, const Path & path)
-{
- checkInterrupt();
-
- uint64_t size = 0;
-
- auto storePath = maybeParseStorePath(path);
- if (storePath && isValidPath(*storePath)) {
- StorePathSet referrers;
- queryReferrers(*storePath, referrers);
- for (auto & i : referrers)
- if (printStorePath(i) != path) deletePathRecursive(state, printStorePath(i));
- size = queryPathInfo(*storePath)->narSize;
- invalidatePathChecked(*storePath);
- }
+ Sync<Shared> _shared;
- Path realPath = realStoreDir + "/" + std::string(baseNameOf(path));
+ std::condition_variable wakeup;
- struct stat st;
- if (lstat(realPath.c_str(), &st)) {
- if (errno == ENOENT) return;
- throw SysError("getting status of %1%", realPath);
+ /* Using `--ignore-liveness' with `--delete' can have unintended
+ consequences if `keep-outputs' or `keep-derivations' are true
+ (the garbage collector will recurse into deleting the outputs
+ or derivers, respectively). So disable them. */
+ if (options.action == GCOptions::gcDeleteSpecific && options.ignoreLiveness) {
+ gcKeepOutputs = false;
+ gcKeepDerivations = false;
}
- printInfo(format("deleting '%1%'") % path);
-
- state.results.paths.insert(path);
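+ /* Delete the reserved-space file first, so that the collector has
+ some free disk space to work with even when the store disk is
+ full. */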
+ if (shouldDelete)
+ deletePath(reservedPath);
- /* If the path is not a regular file or symlink, move it to the
- trash directory. The move is to ensure that later (when we're
- not holding the global GC lock) we can delete the path without
- being afraid that the path has become alive again. Otherwise
- delete it right away. */
- if (state.moveToTrash && S_ISDIR(st.st_mode)) {
- // Estimate the amount freed using the narSize field. FIXME:
- // if the path was not valid, need to determine the actual
- // size.
- try {
- if (chmod(realPath.c_str(), st.st_mode | S_IWUSR) == -1)
- throw SysError("making '%1%' writable", realPath);
- Path tmp = trashDir + "/" + std::string(baseNameOf(path));
- if (rename(realPath.c_str(), tmp.c_str()))
- throw SysError("unable to rename '%1%' to '%2%'", realPath, tmp);
- state.bytesInvalidated += size;
- } catch (SysError & e) {
- if (e.errNo == ENOSPC) {
- printInfo(format("note: can't create move '%1%': %2%") % realPath % e.msg());
- deleteGarbage(state, realPath);
+ /* Acquire the global GC lock. Note: we don't use the cached
+ state->fdGCLock here, because then in auto-gc mode another
+ thread could downgrade our exclusive lock. */
+ auto fdGCLock = openGCLock();
+ FdLock gcLock(fdGCLock.get(), ltWrite, true, "waiting for the big garbage collector lock...");
+
+ /* Start the server for receiving new roots. */
+ auto socketPath = stateDir.get() + gcSocketPath;
+ createDirs(dirOf(socketPath));
+ auto fdServer = createUnixDomainSocket(socketPath, 0666);
+
+ if (fcntl(fdServer.get(), F_SETFL, fcntl(fdServer.get(), F_GETFL) | O_NONBLOCK) == -1)
+ throw SysError("making socket '%1%' non-blocking", socketPath);
+
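+ /* Used by the main GC thread to tell the roots server thread to
+ shut down. */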
+ Pipe shutdownPipe;
+ shutdownPipe.create();
+
+ std::thread serverThread([&]() {
+ Sync<std::map<int, std::thread>> connections;
+
+ Finally cleanup([&]() {
+ debug("GC roots server shutting down");
+ while (true) {
+ auto item = remove_begin(*connections.lock());
+ if (!item) break;
+ auto & [fd, thread] = *item;
+ shutdown(fd, SHUT_RDWR);
+ thread.join();
}
- }
- } else
- deleteGarbage(state, realPath);
-
- if (state.results.bytesFreed + state.bytesInvalidated > state.options.maxFreed) {
- printInfo(format("deleted or invalidated more than %1% bytes; stopping") % state.options.maxFreed);
- throw GCLimitReached();
- }
-}
-
-
-bool LocalStore::canReachRoot(GCState & state, StorePathSet & visited, const StorePath & path)
-{
- if (visited.count(path)) return false;
-
- if (state.alive.count(path)) return true;
-
- if (state.dead.count(path)) return false;
-
- if (state.roots.count(path)) {
- debug("cannot delete '%1%' because it's a root", printStorePath(path));
- state.alive.insert(path);
- return true;
- }
-
- visited.insert(path);
-
- if (!isValidPath(path)) return false;
-
- StorePathSet incoming;
-
- /* Don't delete this path if any of its referrers are alive. */
- queryReferrers(path, incoming);
-
- /* If keep-derivations is set and this is a derivation, then
- don't delete the derivation if any of the outputs are alive. */
- if (state.gcKeepDerivations && path.isDerivation()) {
- for (auto & [name, maybeOutPath] : queryPartialDerivationOutputMap(path))
- if (maybeOutPath &&
- isValidPath(*maybeOutPath) &&
- queryPathInfo(*maybeOutPath)->deriver == path
- )
- incoming.insert(*maybeOutPath);
- }
-
- /* If keep-outputs is set, then don't delete this path if there
- are derivers of this path that are not garbage. */
- if (state.gcKeepOutputs) {
- auto derivers = queryValidDerivers(path);
- for (auto & i : derivers)
- incoming.insert(i);
- }
+ });
+
+ while (true) {
+ std::vector<struct pollfd> fds;
+ fds.push_back({.fd = shutdownPipe.readSide.get(), .events = POLLIN});
+ fds.push_back({.fd = fdServer.get(), .events = POLLIN});
+ auto count = poll(fds.data(), fds.size(), -1);
+ assert(count != -1);
+
+ if (fds[0].revents)
+ /* Parent is asking us to quit. */
+ break;
+
+ if (fds[1].revents) {
+ /* Accept a new connection. */
+ assert(fds[1].revents & POLLIN);
+ AutoCloseFD fdClient = accept(fdServer.get(), nullptr, nullptr);
+ if (!fdClient) continue;
+
+ /* Process the connection in a separate thread. */
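+ // Remember the FD number before 'fdClient' is moved into the
+ // thread; it is used as the key in 'connections' below.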
+ auto fdClient_ = fdClient.get();
+ std::thread clientThread([&, fdClient = std::move(fdClient)]() {
+ Finally cleanup([&]() {
+ auto conn(connections.lock());
+ auto i = conn->find(fdClient.get());
+ if (i != conn->end()) {
+ i->second.detach();
+ conn->erase(i);
+ }
+ });
+
+ while (true) {
+ try {
+ auto path = readLine(fdClient.get());
+ auto storePath = maybeParseStorePath(path);
+ if (storePath) {
+ debug("got new GC root '%s'", path);
+ auto hashPart = std::string(storePath->hashPart());
+ auto shared(_shared.lock());
+ shared->tempRoots.insert(hashPart);
+ /* If this path is currently being
+ deleted, then we have to wait until
+ deletion is finished to ensure that
+ the client doesn't start
+ re-creating it before we're
+ done. FIXME: ideally we would use a
+ FD for this so we don't block the
+ poll loop. */
+ while (shared->pending == hashPart) {
+ debug("synchronising with deletion of path '%s'", path);
+ shared.wait(wakeup);
+ }
+ } else
+ printError("received garbage instead of a root from client");
+ writeFull(fdClient.get(), "1", false);
+ } catch (Error &) { break; }
+ }
+ });
- for (auto & i : incoming)
- if (i != path)
- if (canReachRoot(state, visited, i)) {
- state.alive.insert(path);
- return true;
+ connections.lock()->insert({fdClient_, std::move(clientThread)});
}
+ }
+ });
- return false;
-}
-
-
-void LocalStore::tryToDelete(GCState & state, const Path & path)
-{
- checkInterrupt();
-
- auto realPath = realStoreDir + "/" + std::string(baseNameOf(path));
- if (realPath == linksDir || realPath == trashDir) return;
-
- //Activity act(*logger, lvlDebug, format("considering whether to delete '%1%'") % path);
-
- auto storePath = maybeParseStorePath(path);
-
- if (!storePath || !isValidPath(*storePath)) {
- /* A lock file belonging to a path that we're building right
- now isn't garbage. */
- if (isActiveTempFile(state, path, ".lock")) return;
+ Finally stopServer([&]() {
+ writeFull(shutdownPipe.writeSide.get(), "x", false);
+ wakeup.notify_all();
+ if (serverThread.joinable()) serverThread.join();
+ });
- /* Don't delete .chroot directories for derivations that are
- currently being built. */
- if (isActiveTempFile(state, path, ".chroot")) return;
+ /* Find the roots. Since we've grabbed the GC lock, the set of
+ permanent roots cannot increase now. */
+ printInfo("finding garbage collector roots...");
+ Roots rootMap;
+ if (!options.ignoreLiveness)
+ findRootsNoTemp(rootMap, true);
- /* Don't delete .check directories for derivations that are
- currently being built, because we may need to run
- diff-hook. */
- if (isActiveTempFile(state, path, ".check")) return;
- }
+ for (auto & i : rootMap) roots.insert(i.first);
- StorePathSet visited;
-
- if (storePath && canReachRoot(state, visited, *storePath)) {
- debug("cannot delete '%s' because it's still reachable", path);
- } else {
- /* No path we visited was a root, so everything is garbage.
- But we only delete ‘path’ and its referrers here so that
- ‘nix-store --delete’ doesn't have the unexpected effect of
- recursing into derivations and outputs. */
- for (auto & i : visited)
- state.dead.insert(i);
- if (state.shouldDelete)
- deletePathRecursive(state, path);
+ /* Read the temporary roots created before we acquired the global
+ GC lock. Any new roots will be sent to our socket. */
+ Roots tempRoots;
+ findTempRoots(tempRoots, true);
+ for (auto & root : tempRoots) {
+ _shared.lock()->tempRoots.insert(std::string(root.first.hashPart()));
+ roots.insert(root.first);
}
-}
+ /* Helper function that deletes a path from the store and throws
+ GCLimitReached if we've deleted enough garbage. */
+ auto deleteFromStore = [&](std::string_view baseName)
+ {
+ Path path = storeDir + "/" + std::string(baseName);
+ Path realPath = realStoreDir + "/" + std::string(baseName);
-/* Unlink all files in /nix/store/.links that have a link count of 1,
- which indicates that there are no other links and so they can be
- safely deleted. FIXME: race condition with optimisePath(): we
- might see a link count of 1 just before optimisePath() increases
- the link count. */
-void LocalStore::removeUnusedLinks(const GCState & state)
-{
- AutoCloseDir dir(opendir(linksDir.c_str()));
- if (!dir) throw SysError("opening directory '%1%'", linksDir);
-
- int64_t actualSize = 0, unsharedSize = 0;
+ printInfo("deleting '%1%'", path);
- struct dirent * dirent;
- while (errno = 0, dirent = readdir(dir.get())) {
- checkInterrupt();
- string name = dirent->d_name;
- if (name == "." || name == "..") continue;
- Path path = linksDir + "/" + name;
+ results.paths.insert(path);
- auto st = lstat(path);
+ uint64_t bytesFreed;
+ deletePath(realPath, bytesFreed);
+ results.bytesFreed += bytesFreed;
- if (st.st_nlink != 1) {
- actualSize += st.st_size;
- unsharedSize += (st.st_nlink - 1) * st.st_size;
- continue;
+ if (results.bytesFreed > options.maxFreed) {
+ printInfo("deleted more than %d bytes; stopping", options.maxFreed);
+ throw GCLimitReached();
}
+ };
- printMsg(lvlTalkative, format("deleting unused link '%1%'") % path);
-
- if (unlink(path.c_str()) == -1)
- throw SysError("deleting '%1%'", path);
+ std::map<StorePath, StorePathSet> referrersCache;
- state.results.bytesFreed += st.st_size;
- }
+ /* Helper function that visits all paths reachable from `start`
+ via referrer edges and, optionally, deriver and derivation
+ output edges. If none of those paths are roots, then all
+ visited paths are garbage and are deleted. */
+ auto deleteReferrersClosure = [&](const StorePath & start) {
+ StorePathSet visited;
+ std::queue<StorePath> todo;
- struct stat st;
- if (stat(linksDir.c_str(), &st) == -1)
- throw SysError("statting '%1%'", linksDir);
- int64_t overhead = st.st_blocks * 512ULL;
+ /* Wake up any GC client waiting for deletion of the paths in
+ 'visited' to finish. */
+ Finally releasePending([&]() {
+ auto shared(_shared.lock());
+ shared->pending.reset();
+ wakeup.notify_all();
+ });
- printInfo("note: currently hard linking saves %.2f MiB",
- ((unsharedSize - actualSize - overhead) / (1024.0 * 1024.0)));
-}
+ auto enqueue = [&](const StorePath & path) {
+ if (visited.insert(path).second)
+ todo.push(path);
+ };
+ enqueue(start);
-void LocalStore::collectGarbage(const GCOptions & options, GCResults & results)
-{
- GCState state(options, results);
- state.gcKeepOutputs = settings.gcKeepOutputs;
- state.gcKeepDerivations = settings.gcKeepDerivations;
+ while (auto path = pop(todo)) {
+ checkInterrupt();
- /* Using `--ignore-liveness' with `--delete' can have unintended
- consequences if `keep-outputs' or `keep-derivations' are true
- (the garbage collector will recurse into deleting the outputs
- or derivers, respectively). So disable them. */
- if (options.action == GCOptions::gcDeleteSpecific && options.ignoreLiveness) {
- state.gcKeepOutputs = false;
- state.gcKeepDerivations = false;
- }
+ /* Bail out if we've previously discovered that this path
+ is alive. */
+ if (alive.count(*path)) {
+ alive.insert(start);
+ return;
+ }
- state.shouldDelete = options.action == GCOptions::gcDeleteDead || options.action == GCOptions::gcDeleteSpecific;
+ /* If we've previously deleted this path, we don't have to
+ handle it again. */
+ if (dead.count(*path)) continue;
- if (state.shouldDelete)
- deletePath(reservedPath);
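+ /* Helper that marks the current path, the start path and (on a
+ best-effort basis) the closure of the current path as alive. */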
+ auto markAlive = [&]()
+ {
+ alive.insert(*path);
+ alive.insert(start);
+ try {
+ StorePathSet closure;
+ computeFSClosure(*path, closure);
+ for (auto & p : closure)
+ alive.insert(p);
+ } catch (InvalidPath &) { }
+ };
+
+ /* If this is a root, bail out. */
+ if (roots.count(*path)) {
+ debug("cannot delete '%s' because it's a root", printStorePath(*path));
+ return markAlive();
+ }
- /* Acquire the global GC root. This prevents
- a) New roots from being added.
- b) Processes from creating new temporary root files. */
- AutoCloseFD fdGCLock = openGCLock(ltWrite);
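+ /* When deleting specific paths, stop as soon as we reach a path
+ that wasn't requested for deletion; the start path then cannot
+ be deleted. */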
+ if (options.action == GCOptions::gcDeleteSpecific
+ && !options.pathsToDelete.count(*path))
+ return;
- /* Find the roots. Since we've grabbed the GC lock, the set of
- permanent roots cannot increase now. */
- printInfo("finding garbage collector roots...");
- Roots rootMap;
- if (!options.ignoreLiveness)
- findRootsNoTemp(rootMap, true);
+ {
+ auto hashPart = std::string(path->hashPart());
+ auto shared(_shared.lock());
+ if (shared->tempRoots.count(hashPart)) {
+ debug("cannot delete '%s' because it's a temporary root", printStorePath(*path));
+ return markAlive();
+ }
+ shared->pending = hashPart;
+ }
- for (auto & i : rootMap) state.roots.insert(i.first);
+ if (isValidPath(*path)) {
- /* Read the temporary roots. This acquires read locks on all
- per-process temporary root files. So after this point no paths
- can be added to the set of temporary roots. */
- FDs fds;
- Roots tempRoots;
- findTempRoots(fds, tempRoots, true);
- for (auto & root : tempRoots) {
- state.tempRoots.insert(root.first);
- state.roots.insert(root.first);
- }
+ /* Visit the referrers of this path. */
+ auto i = referrersCache.find(*path);
+ if (i == referrersCache.end()) {
+ StorePathSet referrers;
+ queryReferrers(*path, referrers);
+ referrersCache.emplace(*path, std::move(referrers));
+ i = referrersCache.find(*path);
+ }
+ for (auto & p : i->second)
+ enqueue(p);
+
+ /* If keep-derivations is set and this is a
+ derivation, then visit the derivation outputs. */
+ if (gcKeepDerivations && path->isDerivation()) {
+ for (auto & [name, maybeOutPath] : queryPartialDerivationOutputMap(*path))
+ if (maybeOutPath &&
+ isValidPath(*maybeOutPath) &&
+ queryPathInfo(*maybeOutPath)->deriver == *path)
+ enqueue(*maybeOutPath);
+ }
- /* After this point the set of roots or temporary roots cannot
- increase, since we hold locks on everything. So everything
- that is not reachable from `roots' is garbage. */
+ /* If keep-outputs is set, then visit the derivers. */
+ if (gcKeepOutputs) {
+ auto derivers = queryValidDerivers(*path);
+ for (auto & i : derivers)
+ enqueue(i);
+ }
+ }
+ }
- if (state.shouldDelete) {
- if (pathExists(trashDir)) deleteGarbage(state, trashDir);
- try {
- createDirs(trashDir);
- } catch (SysError & e) {
- if (e.errNo == ENOSPC) {
- printInfo("note: can't create trash directory: %s", e.msg());
- state.moveToTrash = false;
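+ /* Nothing reachable from 'start' turned out to be a root, so
+ everything we visited is garbage: mark it dead and, when
+ deleting, remove it in an order that respects the references
+ between the paths. */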
+ for (auto & path : topoSortPaths(visited)) {
+ if (!dead.insert(path).second) continue;
+ if (shouldDelete) {
+ invalidatePathChecked(path);
+ deleteFromStore(path.to_string());
+ referrersCache.erase(path);
}
}
- }
+ };
- /* Now either delete all garbage paths, or just the specified
- paths (for gcDeleteSpecific). */
+ /* Synchronisation point for testing, see tests/gc-concurrent.sh. */
+ if (auto p = getEnv("_NIX_TEST_GC_SYNC"))
+ readFile(*p);
+ /* Either delete all garbage paths, or just the specified
+ paths (for gcDeleteSpecific). */
if (options.action == GCOptions::gcDeleteSpecific) {
for (auto & i : options.pathsToDelete) {
- tryToDelete(state, printStorePath(i));
- if (state.dead.find(i) == state.dead.end())
+ deleteReferrersClosure(i);
+ if (!dead.count(i))
throw Error(
- "cannot delete path '%1%' since it is still alive. "
- "To find out why use: "
+ "Cannot delete path '%1%' since it is still alive. "
+ "To find out why, use: "
"nix-store --query --roots",
printStorePath(i));
}
} else if (options.maxFreed > 0) {
- if (state.shouldDelete)
+ if (shouldDelete)
printInfo("deleting garbage...");
else
printInfo("determining live/dead paths...");
try {
-
AutoCloseDir dir(opendir(realStoreDir.get().c_str()));
if (!dir) throw SysError("opening directory '%1%'", realStoreDir);
- /* Read the store and immediately delete all paths that
- aren't valid. When using --max-freed etc., deleting
- invalid paths is preferred over deleting unreachable
- paths, since unreachable paths could become reachable
- again. We don't use readDirectory() here so that GCing
- can start faster. */
+ /* Read the store and delete all paths that are invalid or
+ unreachable. We don't use readDirectory() here so that
+ GCing can start faster. */
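+ // The .links directory is skipped here; unused links are cleaned
+ // up separately below.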
+ auto linksName = baseNameOf(linksDir);
Paths entries;
struct dirent * dirent;
while (errno = 0, dirent = readdir(dir.get())) {
checkInterrupt();
string name = dirent->d_name;
- if (name == "." || name == "..") continue;
- Path path = storeDir + "/" + name;
- auto storePath = maybeParseStorePath(path);
- if (storePath && isValidPath(*storePath))
- entries.push_back(path);
- else
- tryToDelete(state, path);
- }
-
- dir.reset();
-
- /* Now delete the unreachable valid paths. Randomise the
- order in which we delete entries to make the collector
- less biased towards deleting paths that come
- alphabetically first (e.g. /nix/store/000...). This
- matters when using --max-freed etc. */
- vector<Path> entries_(entries.begin(), entries.end());
- std::mt19937 gen(1);
- std::shuffle(entries_.begin(), entries_.end(), gen);
+ if (name == "." || name == ".." || name == linksName) continue;
- for (auto & i : entries_)
- tryToDelete(state, i);
+ if (auto storePath = maybeParseStorePath(storeDir + "/" + name))
+ deleteReferrersClosure(*storePath);
+ else
+ deleteFromStore(name);
+ }
} catch (GCLimitReached & e) {
}
}
- if (state.options.action == GCOptions::gcReturnLive) {
- for (auto & i : state.alive)
- state.results.paths.insert(printStorePath(i));
+ if (options.action == GCOptions::gcReturnLive) {
+ for (auto & i : alive)
+ results.paths.insert(printStorePath(i));
return;
}
- if (state.options.action == GCOptions::gcReturnDead) {
- for (auto & i : state.dead)
- state.results.paths.insert(printStorePath(i));
+ if (options.action == GCOptions::gcReturnDead) {
+ for (auto & i : dead)
+ results.paths.insert(printStorePath(i));
return;
}
- /* Allow other processes to add to the store from here on. */
- fdGCLock = -1;
- fds.clear();
-
- /* Delete the trash directory. */
- printInfo(format("deleting '%1%'") % trashDir);
- deleteGarbage(state, trashDir);
-
- /* Clean up the links directory. */
+ /* Unlink all files in /nix/store/.links that have a link count of 1,
+ which indicates that there are no other links and so they can be
+ safely deleted. FIXME: race condition with optimisePath(): we
+ might see a link count of 1 just before optimisePath() increases
+ the link count. */
if (options.action == GCOptions::gcDeleteDead || options.action == GCOptions::gcDeleteSpecific) {
printInfo("deleting unused links...");
- removeUnusedLinks(state);
+
+ AutoCloseDir dir(opendir(linksDir.c_str()));
+ if (!dir) throw SysError("opening directory '%1%'", linksDir);
+
+ int64_t actualSize = 0, unsharedSize = 0;
+
+ struct dirent * dirent;
+ while (errno = 0, dirent = readdir(dir.get())) {
+ checkInterrupt();
+ string name = dirent->d_name;
+ if (name == "." || name == "..") continue;
+ Path path = linksDir + "/" + name;
+
+ auto st = lstat(path);
+
+ if (st.st_nlink != 1) {
+ actualSize += st.st_size;
+ unsharedSize += (st.st_nlink - 1) * st.st_size;
+ continue;
+ }
+
+ printMsg(lvlTalkative, format("deleting unused link '%1%'") % path);
+
+ if (unlink(path.c_str()) == -1)
+ throw SysError("deleting '%1%'", path);
+
+ results.bytesFreed += st.st_size;
+ }
+
+ struct stat st;
+ if (stat(linksDir.c_str(), &st) == -1)
+ throw SysError("statting '%1%'", linksDir);
+ int64_t overhead = st.st_blocks * 512ULL;
+
+ printInfo("note: currently hard linking saves %.2f MiB",
+ ((unsharedSize - actualSize - overhead) / (1024.0 * 1024.0)));
}
/* While we're at it, vacuum the database. */
diff --git a/src/libstore/globals.cc b/src/libstore/globals.cc
index d3b27d7be..81ca9cc0f 100644
--- a/src/libstore/globals.cc
+++ b/src/libstore/globals.cc
@@ -122,7 +122,7 @@ StringSet Settings::getDefaultSystemFeatures()
/* For backwards compatibility, accept some "features" that are
used in Nixpkgs to route builds to certain machines but don't
actually require anything special on the machines. */
- StringSet features{"nixos-test", "benchmark", "big-parallel", "recursive-nix"};
+ StringSet features{"nixos-test", "benchmark", "big-parallel"};
#if __linux__
if (access("/dev/kvm", R_OK | W_OK) == 0)
@@ -148,7 +148,8 @@ StringSet Settings::getDefaultExtraPlatforms()
// machines. Note that we can’t force processes from executing
// x86_64 in aarch64 environments or vice versa since they can
// always exec with their own binary preferences.
- if (pathExists("/Library/Apple/System/Library/LaunchDaemons/com.apple.oahd.plist")) {
+ if (pathExists("/Library/Apple/System/Library/LaunchDaemons/com.apple.oahd.plist") ||
+ pathExists("/System/Library/LaunchDaemons/com.apple.oahd.plist")) {
if (std::string{SYSTEM} == "x86_64-darwin")
extraPlatforms.insert("aarch64-darwin");
else if (std::string{SYSTEM} == "aarch64-darwin")
@@ -159,21 +160,16 @@ StringSet Settings::getDefaultExtraPlatforms()
return extraPlatforms;
}
-bool Settings::isExperimentalFeatureEnabled(const std::string & name)
+bool Settings::isExperimentalFeatureEnabled(const ExperimentalFeature & feature)
{
auto & f = experimentalFeatures.get();
- return std::find(f.begin(), f.end(), name) != f.end();
+ return std::find(f.begin(), f.end(), feature) != f.end();
}
-MissingExperimentalFeature::MissingExperimentalFeature(std::string feature)
- : Error("experimental Nix feature '%1%' is disabled; use '--experimental-features %1%' to override", feature)
- , missingFeature(feature)
- {}
-
-void Settings::requireExperimentalFeature(const std::string & name)
+void Settings::requireExperimentalFeature(const ExperimentalFeature & feature)
{
- if (!isExperimentalFeatureEnabled(name))
- throw MissingExperimentalFeature(name);
+ if (!isExperimentalFeatureEnabled(feature))
+ throw MissingExperimentalFeature(feature);
}
bool Settings::isWSL1()
diff --git a/src/libstore/globals.hh b/src/libstore/globals.hh
index 7e01b4960..2f9e8c6e8 100644
--- a/src/libstore/globals.hh
+++ b/src/libstore/globals.hh
@@ -3,6 +3,7 @@
#include "types.hh"
#include "config.hh"
#include "util.hh"
+#include "experimental-features.hh"
#include <map>
#include <limits>
@@ -45,15 +46,6 @@ struct PluginFilesSetting : public BaseSetting<Paths>
void set(const std::string & str, bool append = false) override;
};
-class MissingExperimentalFeature: public Error
-{
-public:
- std::string missingFeature;
-
- MissingExperimentalFeature(std::string feature);
- virtual const char* sname() const override { return "MissingExperimentalFeature"; }
-};
-
class Settings : public Config {
unsigned int getDefaultCores();
@@ -934,12 +926,12 @@ public:
value.
)"};
- Setting<Strings> experimentalFeatures{this, {}, "experimental-features",
+ Setting<std::set<ExperimentalFeature>> experimentalFeatures{this, {}, "experimental-features",
"Experimental Nix features to enable."};
- bool isExperimentalFeatureEnabled(const std::string & name);
+ bool isExperimentalFeatureEnabled(const ExperimentalFeature &);
- void requireExperimentalFeature(const std::string & name);
+ void requireExperimentalFeature(const ExperimentalFeature &);
Setting<bool> allowDirty{this, true, "allow-dirty",
"Whether to allow dirty Git/Mercurial trees."};
@@ -968,6 +960,9 @@ public:
Setting<bool> useRegistries{this, true, "use-registries",
"Whether to use flake registries to resolve flake references."};
+
+ Setting<bool> acceptFlakeConfig{this, false, "accept-flake-config",
+ "Whether to accept nix configuration from a flake without prompting."};
};
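Call sites elsewhere in this patch switch from string names to the typed ExperimentalFeature enum. As a hedged sketch, using only enumerators that appear in this diff, a guarded code path now looks like:

    // Typed experimental-feature checks replace the old string comparisons.
    if (settings.isExperimentalFeatureEnabled(Xp::CaDerivations)) {
        // ... content-addressed derivation handling ...
    }

    // Throws MissingExperimentalFeature when the feature is not enabled.
    settings.requireExperimentalFeature(Xp::RecursiveNix);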
diff --git a/src/libstore/http-binary-cache-store.cc b/src/libstore/http-binary-cache-store.cc
index 0a3afcd51..605ec4b28 100644
--- a/src/libstore/http-binary-cache-store.cc
+++ b/src/libstore/http-binary-cache-store.cc
@@ -57,8 +57,8 @@ public:
{
// FIXME: do this lazily?
if (auto cacheInfo = diskCache->cacheExists(cacheUri)) {
- wantMassQuery.setDefault(cacheInfo->wantMassQuery ? "true" : "false");
- priority.setDefault(fmt("%d", cacheInfo->priority));
+ wantMassQuery.setDefault(cacheInfo->wantMassQuery);
+ priority.setDefault(cacheInfo->priority);
} else {
try {
BinaryCacheStore::init();
diff --git a/src/libstore/legacy-ssh-store.cc b/src/libstore/legacy-ssh-store.cc
index 45eed5707..4861d185e 100644
--- a/src/libstore/legacy-ssh-store.cc
+++ b/src/libstore/legacy-ssh-store.cc
@@ -82,9 +82,20 @@ struct LegacySSHStore : public virtual LegacySSHStoreConfig, public virtual Stor
conn->to << SERVE_MAGIC_1 << SERVE_PROTOCOL_VERSION;
conn->to.flush();
- unsigned int magic = readInt(conn->from);
- if (magic != SERVE_MAGIC_2)
- throw Error("protocol mismatch with 'nix-store --serve' on '%s'", host);
+ StringSink saved;
+ try {
+ TeeSource tee(conn->from, saved);
+ unsigned int magic = readInt(tee);
+ if (magic != SERVE_MAGIC_2)
+ throw Error("'nix-store --serve' protocol mismatch from '%s'", host);
+ } catch (SerialisationError & e) {
+ /* In case the other side is waiting for our input,
+ close it. */
+ conn->sshConn->in.close();
+ auto msg = conn->from.drain();
+ throw Error("'nix-store --serve' protocol mismatch from '%s', got '%s'",
+ host, chomp(*saved.s + msg));
+ }
conn->remoteVersion = readInt(conn->from);
if (GET_PROTOCOL_MAJOR(conn->remoteVersion) != 0x200)
throw Error("unsupported 'nix-store --serve' protocol version on '%s'", host);
@@ -216,7 +227,7 @@ struct LegacySSHStore : public virtual LegacySSHStoreConfig, public virtual Stor
StorePath addToStore(const string & name, const Path & srcPath,
FileIngestionMethod method, HashType hashAlgo,
- PathFilter & filter, RepairFlag repair) override
+ PathFilter & filter, RepairFlag repair, const StorePathSet & references) override
{ unsupported("addToStore"); }
StorePath addTextToStore(const string & name, const string & s,
@@ -237,6 +248,10 @@ private:
conn.to
<< settings.buildRepeat
<< settings.enforceDeterminism;
+
+ if (GET_PROTOCOL_MINOR(conn.remoteVersion) >= 7) {
+ conn.to << ((int) settings.keepFailed);
+ }
}
public:
@@ -279,10 +294,10 @@ public:
for (auto & p : drvPaths) {
auto sOrDrvPath = StorePathWithOutputs::tryFromDerivedPath(p);
std::visit(overloaded {
- [&](StorePathWithOutputs s) {
+ [&](const StorePathWithOutputs & s) {
ss.push_back(s.to_string(*this));
},
- [&](StorePath drvPath) {
+ [&](const StorePath & drvPath) {
throw Error("wanted to fetch '%s' but the legacy ssh protocol doesn't support merely substituting drv files via the build paths command. It would build them instead. Try using ssh-ng://", printStorePath(drvPath));
},
}, sOrDrvPath);
@@ -352,7 +367,8 @@ public:
return conn->remoteVersion;
}
- std::optional<const Realisation> queryRealisation(const DrvOutput&) override
+ void queryRealisationUncached(const DrvOutput &,
+ Callback<std::shared_ptr<const Realisation>> callback) noexcept override
// TODO: Implement
{ unsupported("queryRealisation"); }
};
diff --git a/src/libstore/local-store.cc b/src/libstore/local-store.cc
index c6f774bc2..79011b522 100644
--- a/src/libstore/local-store.cc
+++ b/src/libstore/local-store.cc
@@ -8,6 +8,7 @@
#include "references.hh"
#include "callback.hh"
#include "topo-sort.hh"
+#include "finally.hh"
#include <iostream>
#include <algorithm>
@@ -145,7 +146,6 @@ LocalStore::LocalStore(const Params & params)
, linksDir(realStoreDir + "/.links")
, reservedPath(dbDir + "/reserved")
, schemaPath(dbDir + "/schema")
- , trashDir(realStoreDir + "/trash")
, tempRootsDir(stateDir + "/temproots")
, fnTempRoots(fmt("%s/%d", tempRootsDir, getpid()))
, locksHeld(tokenizeString<PathSet>(getEnv("NIX_HELD_LOCKS").value_or("")))
@@ -309,7 +309,7 @@ LocalStore::LocalStore(const Params & params)
else openDB(*state, false);
- if (settings.isExperimentalFeatureEnabled("ca-derivations")) {
+ if (settings.isExperimentalFeatureEnabled(Xp::CaDerivations)) {
migrateCASchema(state->db, dbDir + "/ca-schema", globalLock);
}
@@ -339,7 +339,7 @@ LocalStore::LocalStore(const Params & params)
state->stmts->QueryPathFromHashPart.create(state->db,
"select path from ValidPaths where path >= ? limit 1;");
state->stmts->QueryValidPaths.create(state->db, "select path from ValidPaths");
- if (settings.isExperimentalFeatureEnabled("ca-derivations")) {
+ if (settings.isExperimentalFeatureEnabled(Xp::CaDerivations)) {
state->stmts->RegisterRealisedOutput.create(state->db,
R"(
insert into Realisations (drvPath, outputName, outputPath, signatures)
@@ -386,6 +386,16 @@ LocalStore::LocalStore(const Params & params)
}
+AutoCloseFD LocalStore::openGCLock()
+{
+ Path fnGCLock = stateDir + "/gc.lock";
+ auto fdGCLock = open(fnGCLock.c_str(), O_RDWR | O_CREAT | O_CLOEXEC, 0600);
+ if (!fdGCLock)
+ throw SysError("opening global GC lock '%1%'", fnGCLock);
+ return fdGCLock;
+}
+
+
LocalStore::~LocalStore()
{
std::shared_future<void> future;
@@ -495,9 +505,6 @@ void LocalStore::makeStoreWritable()
throw SysError("getting info about the Nix store mount point");
if (stat.f_flag & ST_RDONLY) {
- if (unshare(CLONE_NEWNS) == -1)
- throw SysError("setting up a private mount namespace");
-
if (mount(0, realStoreDir.get().c_str(), "none", MS_REMOUNT | MS_BIND, 0) == -1)
throw SysError("remounting %1% writable", realStoreDir);
}
@@ -679,7 +686,7 @@ void LocalStore::checkDerivationOutputs(const StorePath & drvPath, const Derivat
std::optional<Hash> h;
for (auto & i : drv.outputs) {
std::visit(overloaded {
- [&](DerivationOutputInputAddressed doia) {
+ [&](const DerivationOutputInputAddressed & doia) {
if (!h) {
// somewhat expensive so we do lazily
auto temp = hashDerivationModulo(*this, drv, true);
@@ -691,14 +698,14 @@ void LocalStore::checkDerivationOutputs(const StorePath & drvPath, const Derivat
printStorePath(drvPath), printStorePath(doia.path), printStorePath(recomputed));
envHasRightPath(doia.path, i.first);
},
- [&](DerivationOutputCAFixed dof) {
+ [&](const DerivationOutputCAFixed & dof) {
StorePath path = makeFixedOutputPath(dof.hash.method, dof.hash.hash, drvName);
envHasRightPath(path, i.first);
},
- [&](DerivationOutputCAFloating _) {
+ [&](const DerivationOutputCAFloating &) {
/* Nothing to check */
},
- [&](DerivationOutputDeferred) {
+ [&](const DerivationOutputDeferred &) {
},
}, i.second.output);
}
@@ -706,7 +713,7 @@ void LocalStore::checkDerivationOutputs(const StorePath & drvPath, const Derivat
void LocalStore::registerDrvOutput(const Realisation & info, CheckSigsFlag checkSigs)
{
- settings.requireExperimentalFeature("ca-derivations");
+ settings.requireExperimentalFeature(Xp::CaDerivations);
if (checkSigs == NoCheckSigs || !realisationIsUntrusted(info))
registerDrvOutput(info);
else
@@ -715,7 +722,7 @@ void LocalStore::registerDrvOutput(const Realisation & info, CheckSigsFlag check
void LocalStore::registerDrvOutput(const Realisation & info)
{
- settings.requireExperimentalFeature("ca-derivations");
+ settings.requireExperimentalFeature(Xp::CaDerivations);
retrySQLite<void>([&]() {
auto state(_state.lock());
if (auto oldR = queryRealisation_(*state, info.id)) {
@@ -823,7 +830,7 @@ uint64_t LocalStore::addValidPath(State & state,
{
auto state_(Store::state.lock());
- state_->pathInfoCache.upsert(std::string(info.path.hashPart()),
+ state_->pathInfoCache.upsert(std::string(info.path.to_string()),
PathInfoCacheValue{ .value = std::make_shared<const ValidPathInfo>(info) });
}
@@ -1001,7 +1008,7 @@ LocalStore::queryPartialDerivationOutputMap(const StorePath & path_)
return outputs;
});
- if (!settings.isExperimentalFeatureEnabled("ca-derivations"))
+ if (!settings.isExperimentalFeatureEnabled(Xp::CaDerivations))
return outputs;
auto drv = readInvalidDerivation(path);
@@ -1069,14 +1076,19 @@ StorePathSet LocalStore::querySubstitutablePaths(const StorePathSet & paths)
}
+// FIXME: move this, it's not specific to LocalStore.
void LocalStore::querySubstitutablePathInfos(const StorePathCAMap & paths, SubstitutablePathInfos & infos)
{
if (!settings.useSubstitutes) return;
for (auto & sub : getDefaultSubstituters()) {
for (auto & path : paths) {
+ if (infos.count(path.first))
+ // Choose first succeeding substituter.
+ continue;
+
auto subPath(path.first);
- // recompute store path so that we can use a different store root
+ // Recompute store path so that we can use a different store root.
if (path.second) {
subPath = makeFixedOutputPathFromCA(path.first.name(), *path.second);
if (sub->storeDir == storeDir)
@@ -1191,7 +1203,7 @@ void LocalStore::invalidatePath(State & state, const StorePath & path)
{
auto state_(Store::state.lock());
- state_->pathInfoCache.erase(std::string(path.hashPart()));
+ state_->pathInfoCache.erase(std::string(path.to_string()));
}
}
@@ -1237,11 +1249,6 @@ void LocalStore::addToStore(const ValidPathInfo & info, Source & source,
deletePath(realPath);
- // text hashing has long been allowed to have non-self-references because it is used for drv files.
- bool refersToSelf = info.references.count(info.path) > 0;
- if (info.ca.has_value() && !info.references.empty() && !(std::holds_alternative<TextHash>(*info.ca) && !refersToSelf))
- settings.requireExperimentalFeature("ca-references");
-
/* While restoring the path from the NAR, compute the hash
of the NAR. */
HashSink hashSink(htSHA256);
@@ -1300,7 +1307,7 @@ void LocalStore::addToStore(const ValidPathInfo & info, Source & source,
StorePath LocalStore::addToStoreFromDump(Source & source0, const string & name,
- FileIngestionMethod method, HashType hashAlgo, RepairFlag repair)
+ FileIngestionMethod method, HashType hashAlgo, RepairFlag repair, const StorePathSet & references)
{
/* For computing the store path. */
auto hashSink = std::make_unique<HashSink>(hashAlgo);
@@ -1325,13 +1332,15 @@ StorePath LocalStore::addToStoreFromDump(Source & source0, const string & name,
auto want = std::min(chunkSize, settings.narBufferSize - oldSize);
dump.resize(oldSize + want);
auto got = 0;
+ Finally cleanup([&]() {
+ dump.resize(oldSize + got);
+ });
try {
got = source.read(dump.data() + oldSize, want);
} catch (EndOfFile &) {
inMemory = true;
break;
}
- dump.resize(oldSize + got);
}
std::unique_ptr<AutoDelete> delTempDir;
@@ -1356,7 +1365,7 @@ StorePath LocalStore::addToStoreFromDump(Source & source0, const string & name,
auto [hash, size] = hashSink->finish();
- auto dstPath = makeFixedOutputPath(method, hash, name);
+ auto dstPath = makeFixedOutputPath(method, hash, name, references);
addTempRoot(dstPath);
@@ -1403,6 +1412,7 @@ StorePath LocalStore::addToStoreFromDump(Source & source0, const string & name,
ValidPathInfo info { dstPath, narHash.first };
info.narSize = narHash.second;
+ info.references = references;
info.ca = FixedOutputHash { .method = method, .hash = hash };
registerValidPath(info);
}
@@ -1503,7 +1513,8 @@ bool LocalStore::verifyStore(bool checkContents, RepairFlag repair)
/* Acquire the global GC lock to get a consistent snapshot of
existing and valid paths. */
- AutoCloseFD fdGCLock = openGCLock(ltWrite);
+ auto fdGCLock = openGCLock();
+ FdLock gcLock(fdGCLock.get(), ltRead, true, "waiting for the big garbage collector lock...");
StringSet store;
for (auto & i : readDirectory(realStoreDir)) store.insert(i.name);
@@ -1514,8 +1525,6 @@ bool LocalStore::verifyStore(bool checkContents, RepairFlag repair)
StorePathSet validPaths;
PathSet done;
- fdGCLock = -1;
-
for (auto & i : queryAllValidPaths())
verifyPath(printStorePath(i), store, done, validPaths, repair, errors);
@@ -1828,13 +1837,24 @@ std::optional<const Realisation> LocalStore::queryRealisation_(
return { res };
}
-std::optional<const Realisation>
-LocalStore::queryRealisation(const DrvOutput & id)
+void LocalStore::queryRealisationUncached(const DrvOutput & id,
+ Callback<std::shared_ptr<const Realisation>> callback) noexcept
{
- return retrySQLite<std::optional<const Realisation>>([&]() {
- auto state(_state.lock());
- return queryRealisation_(*state, id);
- });
+ try {
+ auto maybeRealisation
+ = retrySQLite<std::optional<const Realisation>>([&]() {
+ auto state(_state.lock());
+ return queryRealisation_(*state, id);
+ });
+ if (maybeRealisation)
+ callback(
+ std::make_shared<const Realisation>(maybeRealisation.value()));
+ else
+ callback(nullptr);
+
+ } catch (...) {
+ callback.rethrow();
+ }
}
FixedOutputHash LocalStore::hashCAPath(
diff --git a/src/libstore/local-store.hh b/src/libstore/local-store.hh
index a01d48c4b..115ea046a 100644
--- a/src/libstore/local-store.hh
+++ b/src/libstore/local-store.hh
@@ -58,9 +58,15 @@ private:
struct Stmts;
std::unique_ptr<Stmts> stmts;
+ /* The global GC lock */
+ AutoCloseFD fdGCLock;
+
/* The file to which we write our temporary roots. */
AutoCloseFD fdTempRoots;
+ /* Connection to the garbage collector. */
+ AutoCloseFD fdRootsSocket;
+
/* The last time we checked whether to do an auto-GC, or an
auto-GC finished. */
std::chrono::time_point<std::chrono::steady_clock> lastGCCheck;
@@ -87,7 +93,6 @@ public:
const Path linksDir;
const Path reservedPath;
const Path schemaPath;
- const Path trashDir;
const Path tempRootsDir;
const Path fnTempRoots;
@@ -140,7 +145,7 @@ public:
RepairFlag repair, CheckSigsFlag checkSigs) override;
StorePath addToStoreFromDump(Source & dump, const string & name,
- FileIngestionMethod method, HashType hashAlgo, RepairFlag repair) override;
+ FileIngestionMethod method, HashType hashAlgo, RepairFlag repair, const StorePathSet & references) override;
StorePath addTextToStore(const string & name, const string & s,
const StorePathSet & references, RepairFlag repair) override;
@@ -149,14 +154,11 @@ public:
void addIndirectRoot(const Path & path) override;
- void syncWithGC() override;
-
private:
- typedef std::shared_ptr<AutoCloseFD> FDPtr;
- typedef list<FDPtr> FDs;
+ void findTempRoots(Roots & roots, bool censor);
- void findTempRoots(FDs & fds, Roots & roots, bool censor);
+ AutoCloseFD openGCLock();
public:
@@ -205,7 +207,8 @@ public:
std::optional<const Realisation> queryRealisation_(State & state, const DrvOutput & id);
std::optional<std::pair<int64_t, Realisation>> queryRealisationCore_(State & state, const DrvOutput & id);
- std::optional<const Realisation> queryRealisation(const DrvOutput&) override;
+ void queryRealisationUncached(const DrvOutput&,
+ Callback<std::shared_ptr<const Realisation>> callback) noexcept override;
private:
@@ -236,29 +239,12 @@ private:
PathSet queryValidPathsOld();
ValidPathInfo queryPathInfoOld(const Path & path);
- struct GCState;
-
- void deleteGarbage(GCState & state, const Path & path);
-
- void tryToDelete(GCState & state, const Path & path);
-
- bool canReachRoot(GCState & state, StorePathSet & visited, const StorePath & path);
-
- void deletePathRecursive(GCState & state, const Path & path);
-
- bool isActiveTempFile(const GCState & state,
- const Path & path, const string & suffix);
-
- AutoCloseFD openGCLock(LockType lockType);
-
void findRoots(const Path & path, unsigned char type, Roots & roots);
void findRootsNoTemp(Roots & roots, bool censor);
void findRuntimeRoots(Roots & roots, bool censor);
- void removeUnusedLinks(const GCState & state);
-
Path createTempDirInStore();
void checkDerivationOutputs(const StorePath & drvPath, const Derivation & drv);
diff --git a/src/libstore/local.mk b/src/libstore/local.mk
index 44316f542..b992bcbc0 100644
--- a/src/libstore/local.mk
+++ b/src/libstore/local.mk
@@ -8,7 +8,7 @@ libstore_SOURCES := $(wildcard $(d)/*.cc $(d)/builtins/*.cc $(d)/build/*.cc)
libstore_LIBS = libutil
-libstore_LDFLAGS = $(SQLITE3_LIBS) $(LIBCURL_LIBS) $(SODIUM_LIBS) -pthread
+libstore_LDFLAGS += $(SQLITE3_LIBS) $(LIBCURL_LIBS) $(SODIUM_LIBS) -pthread
ifdef HOST_LINUX
libstore_LDFLAGS += -ldl
endif
@@ -60,7 +60,7 @@ $(d)/build.cc:
clean-files += $(d)/schema.sql.gen.hh $(d)/ca-specific-schema.sql.gen.hh
-$(eval $(call install-file-in, $(d)/nix-store.pc, $(prefix)/lib/pkgconfig, 0644))
+$(eval $(call install-file-in, $(d)/nix-store.pc, $(libdir)/pkgconfig, 0644))
$(foreach i, $(wildcard src/libstore/builtins/*.hh), \
$(eval $(call install-file-in, $(i), $(includedir)/nix/builtins, 0644)))
diff --git a/src/libstore/machines.cc b/src/libstore/machines.cc
index 9843ccf04..b6270a81b 100644
--- a/src/libstore/machines.cc
+++ b/src/libstore/machines.cc
@@ -39,7 +39,8 @@ Machine::Machine(decltype(storeUri) storeUri,
sshPublicHostKey(sshPublicHostKey)
{}
-bool Machine::allSupported(const std::set<string> & features) const {
+bool Machine::allSupported(const std::set<string> & features) const
+{
return std::all_of(features.begin(), features.end(),
[&](const string & feature) {
return supportedFeatures.count(feature) ||
@@ -47,14 +48,16 @@ bool Machine::allSupported(const std::set<string> & features) const {
});
}
-bool Machine::mandatoryMet(const std::set<string> & features) const {
+bool Machine::mandatoryMet(const std::set<string> & features) const
+{
return std::all_of(mandatoryFeatures.begin(), mandatoryFeatures.end(),
[&](const string & feature) {
return features.count(feature);
});
}
-ref<Store> Machine::openStore() const {
+ref<Store> Machine::openStore() const
+{
Store::Params storeParams;
if (hasPrefix(storeUri, "ssh://")) {
storeParams["max-connections"] = "1";
@@ -83,53 +86,86 @@ ref<Store> Machine::openStore() const {
return nix::openStore(storeUri, storeParams);
}
-void parseMachines(const std::string & s, Machines & machines)
+static std::vector<std::string> expandBuilderLines(const std::string & builders)
{
- for (auto line : tokenizeString<std::vector<string>>(s, "\n;")) {
+ std::vector<std::string> result;
+ for (auto line : tokenizeString<std::vector<string>>(builders, "\n;")) {
trim(line);
line.erase(std::find(line.begin(), line.end(), '#'), line.end());
if (line.empty()) continue;
if (line[0] == '@') {
- auto file = trim(std::string(line, 1));
+ const std::string path = trim(std::string(line, 1));
+ std::string text;
try {
- parseMachines(readFile(file), machines);
+ text = readFile(path);
} catch (const SysError & e) {
if (e.errNo != ENOENT)
throw;
- debug("cannot find machines file '%s'", file);
+ debug("cannot find machines file '%s'", path);
}
+
+ const auto lines = expandBuilderLines(text);
+ result.insert(end(result), begin(lines), end(lines));
continue;
}
- auto tokens = tokenizeString<std::vector<string>>(line);
- auto sz = tokens.size();
- if (sz < 1)
- throw FormatError("bad machine specification '%s'", line);
+ result.emplace_back(line);
+ }
+ return result;
+}
- auto isSet = [&](size_t n) {
- return tokens.size() > n && tokens[n] != "" && tokens[n] != "-";
- };
+static Machine parseBuilderLine(const std::string & line)
+{
+ const auto tokens = tokenizeString<std::vector<string>>(line);
- machines.emplace_back(tokens[0],
- isSet(1) ? tokenizeString<std::vector<string>>(tokens[1], ",") : std::vector<string>{settings.thisSystem},
- isSet(2) ? tokens[2] : "",
- isSet(3) ? std::stoull(tokens[3]) : 1LL,
- isSet(4) ? std::stoull(tokens[4]) : 1LL,
- isSet(5) ? tokenizeString<std::set<string>>(tokens[5], ",") : std::set<string>{},
- isSet(6) ? tokenizeString<std::set<string>>(tokens[6], ",") : std::set<string>{},
- isSet(7) ? tokens[7] : "");
- }
+ auto isSet = [&](size_t fieldIndex) {
+ return tokens.size() > fieldIndex && tokens[fieldIndex] != "" && tokens[fieldIndex] != "-";
+ };
+
+ auto parseUnsignedIntField = [&](size_t fieldIndex) {
+ const auto result = string2Int<unsigned int>(tokens[fieldIndex]);
+ if (!result) {
+ throw FormatError("bad machine specification: failed to convert column #%lu in a row: '%s' to 'unsigned int'", fieldIndex, line);
+ }
+ return result.value();
+ };
+
+ auto ensureBase64 = [&](size_t fieldIndex) {
+ const auto & str = tokens[fieldIndex];
+ try {
+ base64Decode(str);
+ } catch (const Error & e) {
+ throw FormatError("bad machine specification: a column #%lu in a row: '%s' is not valid base64 string: %s", fieldIndex, line, e.what());
+ }
+ return str;
+ };
+
+ if (!isSet(0))
+ throw FormatError("bad machine specification: store URL was not found at the first column of a row: '%s'", line);
+
+ return {
+ tokens[0],
+ isSet(1) ? tokenizeString<std::vector<string>>(tokens[1], ",") : std::vector<string>{settings.thisSystem},
+ isSet(2) ? tokens[2] : "",
+ isSet(3) ? parseUnsignedIntField(3) : 1U,
+ isSet(4) ? parseUnsignedIntField(4) : 1U,
+ isSet(5) ? tokenizeString<std::set<string>>(tokens[5], ",") : std::set<string>{},
+ isSet(6) ? tokenizeString<std::set<string>>(tokens[6], ",") : std::set<string>{},
+ isSet(7) ? ensureBase64(7) : ""
+ };
+}
+
+static Machines parseBuilderLines(const std::vector<std::string>& builders) {
+ Machines result;
+ std::transform(builders.begin(), builders.end(), std::back_inserter(result), parseBuilderLine);
+ return result;
}
Machines getMachines()
{
- static auto machines = [&]() {
- Machines machines;
- parseMachines(settings.builders, machines);
- return machines;
- }();
- return machines;
+ const auto builderLines = expandBuilderLines(settings.builders);
+ return parseBuilderLines(builderLines);
}
}
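A small, hedged sketch of the builders format that the new parsing functions consume: columns are store URI, systems, SSH key, max jobs, speed factor, supported features, mandatory features, and a base64 SSH host key, with '-' (or an empty column) selecting the default. The host name and feature below are made up; the field names follow the Machine struct used in this file:

    #include <iostream>
    #include "machines.hh"
    #include "globals.hh"

    int main()
    {
        using namespace nix;
        // One machine per line (or per ';'); '@file' lines are expanded recursively.
        settings.builders = "ssh://builder.example.org x86_64-linux - 8 2 big-parallel - -";
        for (auto & m : getMachines())
            std::cout << m.storeUri << " (" << m.systemTypes.size()
                      << " system types, " << m.maxJobs << " max jobs)\n";
        return 0;
    }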
diff --git a/src/libstore/misc.cc b/src/libstore/misc.cc
index b4929b445..32786e963 100644
--- a/src/libstore/misc.cc
+++ b/src/libstore/misc.cc
@@ -166,7 +166,7 @@ void Store::queryMissing(const std::vector<DerivedPath> & targets,
}
std::visit(overloaded {
- [&](DerivedPath::Built bfd) {
+ [&](const DerivedPath::Built & bfd) {
if (!isValidPath(bfd.drvPath)) {
// FIXME: we could try to substitute the derivation.
auto state(state_.lock());
@@ -199,7 +199,7 @@ void Store::queryMissing(const std::vector<DerivedPath> & targets,
mustBuildDrv(bfd.drvPath, *drv);
},
- [&](DerivedPath::Opaque bo) {
+ [&](const DerivedPath::Opaque & bo) {
if (isValidPath(bo.path)) return;
@@ -239,12 +239,11 @@ StorePaths Store::topoSortPaths(const StorePathSet & paths)
{
return topoSort(paths,
{[&](const StorePath & path) {
- StorePathSet references;
try {
- references = queryPathInfo(path)->references;
+ return queryPathInfo(path)->references;
} catch (InvalidPath &) {
+ return StorePathSet();
}
- return references;
}},
{[&](const StorePath & path, const StorePath & parent) {
return BuildError(
diff --git a/src/libstore/names.cc b/src/libstore/names.cc
index ce808accc..54c95055d 100644
--- a/src/libstore/names.cc
+++ b/src/libstore/names.cc
@@ -42,7 +42,7 @@ DrvName::~DrvName()
{ }
-bool DrvName::matches(DrvName & n)
+bool DrvName::matches(const DrvName & n)
{
if (name != "*") {
if (!regex) {
diff --git a/src/libstore/names.hh b/src/libstore/names.hh
index bc62aac93..3f861bc44 100644
--- a/src/libstore/names.hh
+++ b/src/libstore/names.hh
@@ -19,7 +19,7 @@ struct DrvName
DrvName(std::string_view s);
~DrvName();
- bool matches(DrvName & n);
+ bool matches(const DrvName & n);
private:
std::unique_ptr<Regex> regex;
diff --git a/src/libstore/path-with-outputs.cc b/src/libstore/path-with-outputs.cc
index 865d64cf2..e5a121e00 100644
--- a/src/libstore/path-with-outputs.cc
+++ b/src/libstore/path-with-outputs.cc
@@ -31,14 +31,14 @@ std::vector<DerivedPath> toDerivedPaths(const std::vector<StorePathWithOutputs>
std::variant<StorePathWithOutputs, StorePath> StorePathWithOutputs::tryFromDerivedPath(const DerivedPath & p)
{
return std::visit(overloaded {
- [&](DerivedPath::Opaque bo) -> std::variant<StorePathWithOutputs, StorePath> {
+ [&](const DerivedPath::Opaque & bo) -> std::variant<StorePathWithOutputs, StorePath> {
if (bo.path.isDerivation()) {
// drv path gets interpreted as "build", not "get drv file itself"
return bo.path;
}
return StorePathWithOutputs { bo.path };
},
- [&](DerivedPath::Built bfd) -> std::variant<StorePathWithOutputs, StorePath> {
+ [&](const DerivedPath::Built & bfd) -> std::variant<StorePathWithOutputs, StorePath> {
return StorePathWithOutputs { bfd.drvPath, bfd.outputs };
},
}, p.raw());
diff --git a/src/libstore/pathlocks.cc b/src/libstore/pathlocks.cc
index 926f4ea1e..2da74e262 100644
--- a/src/libstore/pathlocks.cc
+++ b/src/libstore/pathlocks.cc
@@ -176,4 +176,17 @@ void PathLocks::setDeletion(bool deletePaths)
}
+FdLock::FdLock(int fd, LockType lockType, bool wait, std::string_view waitMsg)
+ : fd(fd)
+{
+ if (wait) {
+ if (!lockFile(fd, lockType, false)) {
+ printInfo("%s", waitMsg);
+ acquired = lockFile(fd, lockType, true);
+ }
+ } else
+ acquired = lockFile(fd, lockType, false);
+}
+
+
}
diff --git a/src/libstore/pathlocks.hh b/src/libstore/pathlocks.hh
index 411da0222..919c8904c 100644
--- a/src/libstore/pathlocks.hh
+++ b/src/libstore/pathlocks.hh
@@ -35,4 +35,18 @@ public:
void setDeletion(bool deletePaths);
};
+struct FdLock
+{
+ int fd;
+ bool acquired = false;
+
+ FdLock(int fd, LockType lockType, bool wait, std::string_view waitMsg);
+
+ ~FdLock()
+ {
+ if (acquired)
+ lockFile(fd, ltNone, false);
+ }
+};
+
}
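A minimal sketch of how the new FdLock guard is meant to be used (mirroring the verifyStore() change above); the lock file path is illustrative, not part of the patch:

    // Hold a read lock on an already-open file descriptor for the current scope.
    AutoCloseFD fd(open("/tmp/example.lock", O_RDWR | O_CREAT | O_CLOEXEC, 0600));
    if (!fd) throw SysError("opening '%s'", "/tmp/example.lock");
    {
        FdLock lock(fd.get(), ltRead, true, "waiting for the example lock...");
        // ... critical section; the flock is dropped when 'lock' is destroyed ...
    }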
diff --git a/src/libstore/profiles.cc b/src/libstore/profiles.cc
index 84a21c0ba..73163424c 100644
--- a/src/libstore/profiles.cc
+++ b/src/libstore/profiles.cc
@@ -126,9 +126,9 @@ void deleteGeneration(const Path & profile, GenerationNumber gen)
static void deleteGeneration2(const Path & profile, GenerationNumber gen, bool dryRun)
{
if (dryRun)
- printInfo(format("would remove generation %1%") % gen);
+ notice("would remove profile version %1%", gen);
else {
- printInfo(format("removing generation %1%") % gen);
+ notice("removing profile version %1%", gen);
deleteGeneration(profile, gen);
}
}
@@ -142,7 +142,7 @@ void deleteGenerations(const Path & profile, const std::set<GenerationNumber> &
auto [gens, curGen] = findGenerations(profile);
if (gensToDelete.count(*curGen))
- throw Error("cannot delete current generation of profile %1%'", profile);
+ throw Error("cannot delete current version of profile %1%'", profile);
for (auto & i : gens) {
if (!gensToDelete.count(i.number)) continue;
@@ -236,6 +236,37 @@ void switchLink(Path link, Path target)
}
+void switchGeneration(
+ const Path & profile,
+ std::optional<GenerationNumber> dstGen,
+ bool dryRun)
+{
+ PathLocks lock;
+ lockProfile(lock, profile);
+
+ auto [gens, curGen] = findGenerations(profile);
+
+ std::optional<Generation> dst;
+ for (auto & i : gens)
+ if ((!dstGen && i.number < curGen) ||
+ (dstGen && i.number == *dstGen))
+ dst = i;
+
+ if (!dst) {
+ if (dstGen)
+ throw Error("profile version %1% does not exist", *dstGen);
+ else
+ throw Error("no profile version older than the current (%1%) exists", curGen.value_or(0));
+ }
+
+ notice("switching profile from version %d to %d", curGen.value_or(0), dst->number);
+
+ if (dryRun) return;
+
+ switchLink(profile, dst->path);
+}
+
+
void lockProfile(PathLocks & lock, const Path & profile)
{
lock.lockPaths({profile}, (format("waiting for lock on profile '%1%'") % profile).str());
diff --git a/src/libstore/profiles.hh b/src/libstore/profiles.hh
index be55a65d4..d100c970c 100644
--- a/src/libstore/profiles.hh
+++ b/src/libstore/profiles.hh
@@ -11,7 +11,7 @@ namespace nix {
class StorePath;
-typedef unsigned int GenerationNumber;
+typedef uint64_t GenerationNumber;
struct Generation
{
@@ -46,6 +46,13 @@ void deleteGenerationsOlderThan(const Path & profile, const string & timeSpec, b
void switchLink(Path link, Path target);
+/* Roll back a profile to the specified generation, or to the most
+ recent one older than the current. */
+void switchGeneration(
+ const Path & profile,
+ std::optional<GenerationNumber> dstGen,
+ bool dryRun);
+
/* Ensure exclusive access to a profile. Any command that modifies
the profile first acquires this lock. */
void lockProfile(PathLocks & lock, const Path & profile);
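For context, a minimal sketch of calling the new helper (the profile path below is illustrative):

    // Roll back to the most recent version older than the current one.
    switchGeneration("/nix/var/nix/profiles/default", std::nullopt, false);

    // Or switch to a specific version, here 42, as a dry run.
    switchGeneration("/nix/var/nix/profiles/default", 42, true);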
diff --git a/src/libstore/references.cc b/src/libstore/references.cc
index 3a07c1411..91b3fc142 100644
--- a/src/libstore/references.cc
+++ b/src/libstore/references.cc
@@ -11,11 +11,13 @@
namespace nix {
-static unsigned int refLength = 32; /* characters */
+static size_t refLength = 32; /* characters */
-static void search(const unsigned char * s, size_t len,
- StringSet & hashes, StringSet & seen)
+static void search(
+ std::string_view s,
+ StringSet & hashes,
+ StringSet & seen)
{
static std::once_flag initialised;
static bool isBase32[256];
@@ -25,7 +27,7 @@ static void search(const unsigned char * s, size_t len,
isBase32[(unsigned char) base32Chars[i]] = true;
});
- for (size_t i = 0; i + refLength <= len; ) {
+ for (size_t i = 0; i + refLength <= s.size(); ) {
int j;
bool match = true;
for (j = refLength - 1; j >= 0; --j)
@@ -35,7 +37,7 @@ static void search(const unsigned char * s, size_t len,
break;
}
if (!match) continue;
- string ref((const char *) s + i, refLength);
+ std::string ref(s.substr(i, refLength));
if (hashes.erase(ref)) {
debug(format("found reference to '%1%' at offset '%2%'")
% ref % i);
@@ -46,69 +48,60 @@ static void search(const unsigned char * s, size_t len,
}
-struct RefScanSink : Sink
+void RefScanSink::operator () (std::string_view data)
{
- StringSet hashes;
- StringSet seen;
-
- string tail;
-
- RefScanSink() { }
-
- void operator () (std::string_view data) override
- {
- /* It's possible that a reference spans the previous and current
- fragment, so search in the concatenation of the tail of the
- previous fragment and the start of the current fragment. */
- string s = tail + std::string(data, 0, refLength);
- search((const unsigned char *) s.data(), s.size(), hashes, seen);
-
- search((const unsigned char *) data.data(), data.size(), hashes, seen);
-
- size_t tailLen = data.size() <= refLength ? data.size() : refLength;
- tail = std::string(tail, tail.size() < refLength - tailLen ? 0 : tail.size() - (refLength - tailLen));
- tail.append({data.data() + data.size() - tailLen, tailLen});
- }
-};
+ /* It's possible that a reference spans the previous and current
+ fragment, so search in the concatenation of the tail of the
+ previous fragment and the start of the current fragment. */
+ auto s = tail;
+ auto tailLen = std::min(data.size(), refLength);
+ s.append(data.data(), tailLen);
+ search(s, hashes, seen);
+
+ search(data, hashes, seen);
+
+ auto rest = refLength - tailLen;
+ if (rest < tail.size())
+ tail = tail.substr(tail.size() - rest);
+ tail.append(data.data() + data.size() - tailLen, tailLen);
+}
-std::pair<PathSet, HashResult> scanForReferences(const string & path,
- const PathSet & refs)
+std::pair<StorePathSet, HashResult> scanForReferences(
+ const string & path,
+ const StorePathSet & refs)
{
HashSink hashSink { htSHA256 };
auto found = scanForReferences(hashSink, path, refs);
auto hash = hashSink.finish();
- return std::pair<PathSet, HashResult>(found, hash);
+ return std::pair<StorePathSet, HashResult>(found, hash);
}
-PathSet scanForReferences(Sink & toTee,
- const string & path, const PathSet & refs)
+StorePathSet scanForReferences(
+ Sink & toTee,
+ const Path & path,
+ const StorePathSet & refs)
{
- RefScanSink refsSink;
- TeeSink sink { refsSink, toTee };
- std::map<string, Path> backMap;
+ StringSet hashes;
+ std::map<std::string, StorePath> backMap;
for (auto & i : refs) {
- auto baseName = std::string(baseNameOf(i));
- string::size_type pos = baseName.find('-');
- if (pos == string::npos)
- throw Error("bad reference '%1%'", i);
- string s = string(baseName, 0, pos);
- assert(s.size() == refLength);
- assert(backMap.find(s) == backMap.end());
- // parseHash(htSHA256, s);
- refsSink.hashes.insert(s);
- backMap[s] = i;
+ std::string hashPart(i.hashPart());
+ auto inserted = backMap.emplace(hashPart, i).second;
+ assert(inserted);
+ hashes.insert(hashPart);
}
/* Look for the hashes in the NAR dump of the path. */
+ RefScanSink refsSink(std::move(hashes));
+ TeeSink sink { refsSink, toTee };
dumpPath(path, sink);
/* Map the hashes found back to their store paths. */
- PathSet found;
- for (auto & i : refsSink.seen) {
- std::map<string, Path>::iterator j;
- if ((j = backMap.find(i)) == backMap.end()) abort();
+ StorePathSet found;
+ for (auto & i : refsSink.getResult()) {
+ auto j = backMap.find(i);
+ assert(j != backMap.end());
found.insert(j->second);
}
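A hedged sketch of driving the now-reusable RefScanSink directly; the 32-character hash part is an invented base-32 string, and the split across two fragments exercises the tail handling above:

    #include <iostream>
    #include "references.hh"

    void example()
    {
        using namespace nix;
        std::string hashPart = "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q";  // made-up 32-char hash part
        RefScanSink refs(StringSet{hashPart});
        // Feed data in fragments; a reference may straddle a fragment boundary.
        refs("example.drv /nix/store/" + hashPart.substr(0, 8));
        refs(hashPart.substr(8) + "-example-1.0/bin/example");
        for (auto & h : refs.getResult())
            std::cout << "found " << h << "\n";  // prints the hash part once
    }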
diff --git a/src/libstore/references.hh b/src/libstore/references.hh
index 4f12e6b21..a6119c861 100644
--- a/src/libstore/references.hh
+++ b/src/libstore/references.hh
@@ -1,13 +1,31 @@
#pragma once
-#include "types.hh"
#include "hash.hh"
+#include "path.hh"
namespace nix {
-std::pair<PathSet, HashResult> scanForReferences(const Path & path, const PathSet & refs);
+std::pair<StorePathSet, HashResult> scanForReferences(const Path & path, const StorePathSet & refs);
-PathSet scanForReferences(Sink & toTee, const Path & path, const PathSet & refs);
+StorePathSet scanForReferences(Sink & toTee, const Path & path, const StorePathSet & refs);
+
+class RefScanSink : public Sink
+{
+ StringSet hashes;
+ StringSet seen;
+
+ std::string tail;
+
+public:
+
+ RefScanSink(StringSet && hashes) : hashes(hashes)
+ { }
+
+ StringSet & getResult()
+ { return seen; }
+
+ void operator () (std::string_view data) override;
+};
struct RewritingSink : Sink
{
diff --git a/src/libstore/remote-store.cc b/src/libstore/remote-store.cc
index 140f39120..7f7e973e9 100644
--- a/src/libstore/remote-store.cc
+++ b/src/libstore/remote-store.cc
@@ -162,8 +162,19 @@ void RemoteStore::initConnection(Connection & conn)
try {
conn.to << WORKER_MAGIC_1;
conn.to.flush();
- unsigned int magic = readInt(conn.from);
- if (magic != WORKER_MAGIC_2) throw Error("protocol mismatch");
+ StringSink saved;
+ try {
+ TeeSource tee(conn.from, saved);
+ unsigned int magic = readInt(tee);
+ if (magic != WORKER_MAGIC_2)
+ throw Error("protocol mismatch");
+ } catch (SerialisationError & e) {
+ /* In case the other side is waiting for our input, close
+ it. */
+ conn.closeWrite();
+ auto msg = conn.from.drain();
+ throw Error("protocol mismatch, got '%s'", chomp(*saved.s + msg));
+ }
conn.from >> conn.daemonVersion;
if (GET_PROTOCOL_MAJOR(conn.daemonVersion) != GET_PROTOCOL_MAJOR(PROTOCOL_VERSION))
@@ -222,6 +233,7 @@ void RemoteStore::setOptions(Connection & conn)
overrides.erase(settings.buildCores.name);
overrides.erase(settings.useSubstitutes.name);
overrides.erase(loggerSettings.showTrace.name);
+ overrides.erase(settings.experimentalFeatures.name);
conn.to << overrides.size();
for (auto & i : overrides)
conn.to << i.first << i.second.value;
@@ -278,6 +290,10 @@ ConnectionHandle RemoteStore::getConnection()
return ConnectionHandle(connections->get());
}
+void RemoteStore::setOptions()
+{
+ setOptions(*(getConnection().handle));
+}
bool RemoteStore::isValidPathUncached(const StorePath & path)
{
@@ -516,13 +532,13 @@ ref<const ValidPathInfo> RemoteStore::addCAToStore(
if (repair) throw Error("repairing is not supported when building through the Nix daemon protocol < 1.25");
std::visit(overloaded {
- [&](TextHashMethod thm) -> void {
+ [&](const TextHashMethod & thm) -> void {
std::string s = dump.drain();
conn->to << wopAddTextToStore << name << s;
worker_proto::write(*this, conn->to, references);
conn.processStderr();
},
- [&](FixedOutputHashMethod fohm) -> void {
+ [&](const FixedOutputHashMethod & fohm) -> void {
conn->to
<< wopAddToStore
<< name
@@ -566,9 +582,8 @@ ref<const ValidPathInfo> RemoteStore::addCAToStore(
StorePath RemoteStore::addToStoreFromDump(Source & dump, const string & name,
- FileIngestionMethod method, HashType hashType, RepairFlag repair)
+ FileIngestionMethod method, HashType hashType, RepairFlag repair, const StorePathSet & references)
{
- StorePathSet references;
return addCAToStore(dump, name, FixedOutputHashMethod{ .fileIngestionMethod = method, .hashType = hashType }, references, repair)->path;
}
@@ -665,23 +680,41 @@ void RemoteStore::registerDrvOutput(const Realisation & info)
conn.processStderr();
}
-std::optional<const Realisation> RemoteStore::queryRealisation(const DrvOutput & id)
+void RemoteStore::queryRealisationUncached(const DrvOutput & id,
+ Callback<std::shared_ptr<const Realisation>> callback) noexcept
{
auto conn(getConnection());
+
+ if (GET_PROTOCOL_MINOR(conn->daemonVersion) < 27) {
+ warn("the daemon is too old to support content-addressed derivations, please upgrade it to 2.4");
+ try {
+ callback(nullptr);
+ } catch (...) { return callback.rethrow(); }
+ }
+
conn->to << wopQueryRealisation;
conn->to << id.to_string();
conn.processStderr();
- if (GET_PROTOCOL_MINOR(conn->daemonVersion) < 31) {
- auto outPaths = worker_proto::read(*this, conn->from, Phantom<std::set<StorePath>>{});
- if (outPaths.empty())
- return std::nullopt;
- return {Realisation{.id = id, .outPath = *outPaths.begin()}};
- } else {
- auto realisations = worker_proto::read(*this, conn->from, Phantom<std::set<Realisation>>{});
- if (realisations.empty())
- return std::nullopt;
- return *realisations.begin();
- }
+
+ auto real = [&]() -> std::shared_ptr<const Realisation> {
+ if (GET_PROTOCOL_MINOR(conn->daemonVersion) < 31) {
+ auto outPaths = worker_proto::read(
+ *this, conn->from, Phantom<std::set<StorePath>> {});
+ if (outPaths.empty())
+ return nullptr;
+ return std::make_shared<const Realisation>(Realisation { .id = id, .outPath = *outPaths.begin() });
+ } else {
+ auto realisations = worker_proto::read(
+ *this, conn->from, Phantom<std::set<Realisation>> {});
+ if (realisations.empty())
+ return nullptr;
+ return std::make_shared<const Realisation>(*realisations.begin());
+ }
+ }();
+
+ try {
+ callback(std::shared_ptr<const Realisation>(real));
+ } catch (...) { return callback.rethrow(); }
}
static void writeDerivedPaths(RemoteStore & store, ConnectionHandle & conn, const std::vector<DerivedPath> & reqs)
@@ -693,10 +726,10 @@ static void writeDerivedPaths(RemoteStore & store, ConnectionHandle & conn, cons
for (auto & p : reqs) {
auto sOrDrvPath = StorePathWithOutputs::tryFromDerivedPath(p);
std::visit(overloaded {
- [&](StorePathWithOutputs s) {
+ [&](const StorePathWithOutputs & s) {
ss.push_back(s.to_string(store));
},
- [&](StorePath drvPath) {
+ [&](const StorePath & drvPath) {
throw Error("trying to request '%s', but daemon protocol %d.%d is too old (< 1.29) to request a derivation file",
store.printStorePath(drvPath),
GET_PROTOCOL_MAJOR(conn->daemonVersion),
@@ -785,15 +818,6 @@ void RemoteStore::addIndirectRoot(const Path & path)
}
-void RemoteStore::syncWithGC()
-{
- auto conn(getConnection());
- conn->to << wopSyncWithGC;
- conn.processStderr();
- readInt(conn->from);
-}
-
-
Roots RemoteStore::findRoots(bool censor)
{
auto conn(getConnection());
diff --git a/src/libstore/remote-store.hh b/src/libstore/remote-store.hh
index 8901c79fc..0fd67f371 100644
--- a/src/libstore/remote-store.hh
+++ b/src/libstore/remote-store.hh
@@ -73,7 +73,7 @@ public:
/* Add a content-addressable store path. Does not support references. `dump` will be drained. */
StorePath addToStoreFromDump(Source & dump, const string & name,
- FileIngestionMethod method = FileIngestionMethod::Recursive, HashType hashAlgo = htSHA256, RepairFlag repair = NoRepair) override;
+ FileIngestionMethod method = FileIngestionMethod::Recursive, HashType hashAlgo = htSHA256, RepairFlag repair = NoRepair, const StorePathSet & references = StorePathSet()) override;
void addToStore(const ValidPathInfo & info, Source & nar,
RepairFlag repair, CheckSigsFlag checkSigs) override;
@@ -88,7 +88,8 @@ public:
void registerDrvOutput(const Realisation & info) override;
- std::optional<const Realisation> queryRealisation(const DrvOutput &) override;
+ void queryRealisationUncached(const DrvOutput &,
+ Callback<std::shared_ptr<const Realisation>> callback) noexcept override;
void buildPaths(const std::vector<DerivedPath> & paths, BuildMode buildMode, std::shared_ptr<Store> evalStore) override;
@@ -101,8 +102,6 @@ public:
void addIndirectRoot(const Path & path) override;
- void syncWithGC() override;
-
Roots findRoots(bool censor) override;
void collectGarbage(const GCOptions & options, GCResults & results) override;
@@ -125,7 +124,6 @@ public:
struct Connection
{
- AutoCloseFD fd;
FdSink to;
FdSource from;
unsigned int daemonVersion;
@@ -133,6 +131,8 @@ public:
virtual ~Connection();
+ virtual void closeWrite() = 0;
+
std::exception_ptr processStderr(Sink * sink = 0, Source * source = 0, bool flush = true);
};
@@ -148,6 +148,8 @@ protected:
virtual void setOptions(Connection & conn);
+ void setOptions() override;
+
ConnectionHandle getConnection();
friend struct ConnectionHandle;
diff --git a/src/libstore/s3-binary-cache-store.cc b/src/libstore/s3-binary-cache-store.cc
index 6bfbee044..7accad7f4 100644
--- a/src/libstore/s3-binary-cache-store.cc
+++ b/src/libstore/s3-binary-cache-store.cc
@@ -209,7 +209,7 @@ struct S3BinaryCacheStoreImpl : virtual S3BinaryCacheStoreConfig, public virtual
S3Helper s3Helper;
S3BinaryCacheStoreImpl(
- const std::string & scheme,
+ const std::string & uriScheme,
const std::string & bucketName,
const Params & params)
: StoreConfig(params)
@@ -232,8 +232,8 @@ struct S3BinaryCacheStoreImpl : virtual S3BinaryCacheStoreConfig, public virtual
void init() override
{
if (auto cacheInfo = diskCache->cacheExists(getUri())) {
- wantMassQuery.setDefault(cacheInfo->wantMassQuery ? "true" : "false");
- priority.setDefault(fmt("%d", cacheInfo->priority));
+ wantMassQuery.setDefault(cacheInfo->wantMassQuery);
+ priority.setDefault(cacheInfo->priority);
} else {
BinaryCacheStore::init();
diskCache->createCache(getUri(), storeDir, wantMassQuery, priority);
diff --git a/src/libstore/sandbox-defaults.sb b/src/libstore/sandbox-defaults.sb
index 2bb1ea130..56b35c3fe 100644
--- a/src/libstore/sandbox-defaults.sb
+++ b/src/libstore/sandbox-defaults.sb
@@ -97,3 +97,8 @@
; This is used by /bin/sh on macOS 10.15 and later.
(allow file*
(literal "/private/var/select/sh"))
+
+; Allow Rosetta 2 to run x86_64 binaries on aarch64-darwin.
+(allow file-read*
+ (subpath "/Library/Apple/usr/libexec/oah")
+ (subpath "/System/Library/Apple/usr/libexec/oah"))
diff --git a/src/libstore/serve-protocol.hh b/src/libstore/serve-protocol.hh
index 02d0810cc..3f76baa82 100644
--- a/src/libstore/serve-protocol.hh
+++ b/src/libstore/serve-protocol.hh
@@ -5,7 +5,7 @@ namespace nix {
#define SERVE_MAGIC_1 0x390c9deb
#define SERVE_MAGIC_2 0x5452eecb
-#define SERVE_PROTOCOL_VERSION (2 << 8 | 6)
+#define SERVE_PROTOCOL_VERSION (2 << 8 | 7)
#define GET_PROTOCOL_MAJOR(x) ((x) & 0xff00)
#define GET_PROTOCOL_MINOR(x) ((x) & 0x00ff)
diff --git a/src/libstore/sqlite.cc b/src/libstore/sqlite.cc
index 447b4179b..1d6baf02d 100644
--- a/src/libstore/sqlite.cc
+++ b/src/libstore/sqlite.cc
@@ -1,4 +1,5 @@
#include "sqlite.hh"
+#include "globals.hh"
#include "util.hh"
#include <sqlite3.h>
@@ -27,8 +28,12 @@ namespace nix {
SQLite::SQLite(const Path & path, bool create)
{
+ // useSQLiteWAL also determines which SQLite virtual file system we need.
+ // The `unix-dotfile` VFS is needed on NFS file systems and on the Windows
+ // Subsystem for Linux (WSL), where useSQLiteWAL should be false by default.
+ const char *vfs = settings.useSQLiteWAL ? 0 : "unix-dotfile";
if (sqlite3_open_v2(path.c_str(), &db,
- SQLITE_OPEN_READWRITE | (create ? SQLITE_OPEN_CREATE : 0), 0) != SQLITE_OK)
+ SQLITE_OPEN_READWRITE | (create ? SQLITE_OPEN_CREATE : 0), vfs) != SQLITE_OK)
throw Error("cannot open SQLite database '%s'", path);
if (sqlite3_busy_timeout(db, 60 * 60 * 1000) != SQLITE_OK)
diff --git a/src/libstore/ssh-store.cc b/src/libstore/ssh-store.cc
index f2caf2aeb..bb03daef4 100644
--- a/src/libstore/ssh-store.cc
+++ b/src/libstore/ssh-store.cc
@@ -57,6 +57,11 @@ private:
struct Connection : RemoteStore::Connection
{
std::unique_ptr<SSHMaster::Connection> sshConn;
+
+ void closeWrite() override
+ {
+ sshConn->in.close();
+ }
};
ref<RemoteStore::Connection> openConnection() override;
diff --git a/src/libstore/store-api.cc b/src/libstore/store-api.cc
index 970bafd88..aab4ce94c 100644
--- a/src/libstore/store-api.cc
+++ b/src/libstore/store-api.cc
@@ -199,10 +199,10 @@ StorePath Store::makeFixedOutputPathFromCA(std::string_view name, ContentAddress
{
// New template
return std::visit(overloaded {
- [&](TextHash th) {
+ [&](const TextHash & th) {
return makeTextPath(name, th.hash, references);
},
- [&](FixedOutputHash fsh) {
+ [&](const FixedOutputHash & fsh) {
return makeFixedOutputPath(fsh.method, fsh.hash, name, references, hasSelfReference);
}
}, ca);
@@ -237,7 +237,7 @@ StorePath Store::computeStorePathForText(const string & name, const string & s,
StorePath Store::addToStore(const string & name, const Path & _srcPath,
- FileIngestionMethod method, HashType hashAlgo, PathFilter & filter, RepairFlag repair)
+ FileIngestionMethod method, HashType hashAlgo, PathFilter & filter, RepairFlag repair, const StorePathSet & references)
{
Path srcPath(absPath(_srcPath));
auto source = sinkToSource([&](Sink & sink) {
@@ -246,7 +246,7 @@ StorePath Store::addToStore(const string & name, const Path & _srcPath,
else
readFile(srcPath, sink);
});
- return addToStoreFromDump(*source, name, method, hashAlgo, repair);
+ return addToStoreFromDump(*source, name, method, hashAlgo, repair, references);
}
@@ -355,8 +355,13 @@ ValidPathInfo Store::addToStoreSlow(std::string_view name, const Path & srcPath,
StringSet StoreConfig::getDefaultSystemFeatures()
{
auto res = settings.systemFeatures.get();
- if (settings.isExperimentalFeatureEnabled("ca-derivations"))
+
+ if (settings.isExperimentalFeatureEnabled(Xp::CaDerivations))
res.insert("ca-derivations");
+
+ if (settings.isExperimentalFeatureEnabled(Xp::RecursiveNix))
+ res.insert("recursive-nix");
+
return res;
}
@@ -414,11 +419,9 @@ StorePathSet Store::queryDerivationOutputs(const StorePath & path)
bool Store::isValidPath(const StorePath & storePath)
{
- std::string hashPart(storePath.hashPart());
-
{
auto state_(state.lock());
- auto res = state_->pathInfoCache.get(hashPart);
+ auto res = state_->pathInfoCache.get(std::string(storePath.to_string()));
if (res && res->isKnownNow()) {
stats.narInfoReadAverted++;
return res->didExist();
@@ -426,11 +429,11 @@ bool Store::isValidPath(const StorePath & storePath)
}
if (diskCache) {
- auto res = diskCache->lookupNarInfo(getUri(), hashPart);
+ auto res = diskCache->lookupNarInfo(getUri(), std::string(storePath.hashPart()));
if (res.first != NarInfoDiskCache::oUnknown) {
stats.narInfoReadAverted++;
auto state_(state.lock());
- state_->pathInfoCache.upsert(hashPart,
+ state_->pathInfoCache.upsert(std::string(storePath.to_string()),
res.first == NarInfoDiskCache::oInvalid ? PathInfoCacheValue{} : PathInfoCacheValue { .value = res.second });
return res.first == NarInfoDiskCache::oValid;
}
@@ -440,7 +443,7 @@ bool Store::isValidPath(const StorePath & storePath)
if (diskCache && !valid)
// FIXME: handle valid = true case.
- diskCache->upsertNarInfo(getUri(), hashPart, 0);
+ diskCache->upsertNarInfo(getUri(), std::string(storePath.hashPart()), 0);
return valid;
}
@@ -487,13 +490,11 @@ static bool goodStorePath(const StorePath & expected, const StorePath & actual)
void Store::queryPathInfo(const StorePath & storePath,
Callback<ref<const ValidPathInfo>> callback) noexcept
{
- std::string hashPart;
+ auto hashPart = std::string(storePath.hashPart());
try {
- hashPart = storePath.hashPart();
-
{
- auto res = state.lock()->pathInfoCache.get(hashPart);
+ auto res = state.lock()->pathInfoCache.get(std::string(storePath.to_string()));
if (res && res->isKnownNow()) {
stats.narInfoReadAverted++;
if (!res->didExist())
@@ -508,7 +509,7 @@ void Store::queryPathInfo(const StorePath & storePath,
stats.narInfoReadAverted++;
{
auto state_(state.lock());
- state_->pathInfoCache.upsert(hashPart,
+ state_->pathInfoCache.upsert(std::string(storePath.to_string()),
res.first == NarInfoDiskCache::oInvalid ? PathInfoCacheValue{} : PathInfoCacheValue{ .value = res.second });
if (res.first == NarInfoDiskCache::oInvalid ||
!goodStorePath(storePath, res.second->path))
@@ -523,7 +524,7 @@ void Store::queryPathInfo(const StorePath & storePath,
auto callbackPtr = std::make_shared<decltype(callback)>(std::move(callback));
queryPathInfoUncached(storePath,
- {[this, storePathS{printStorePath(storePath)}, hashPart, callbackPtr](std::future<std::shared_ptr<const ValidPathInfo>> fut) {
+ {[this, storePath, hashPart, callbackPtr](std::future<std::shared_ptr<const ValidPathInfo>> fut) {
try {
auto info = fut.get();
@@ -533,14 +534,12 @@ void Store::queryPathInfo(const StorePath & storePath,
{
auto state_(state.lock());
- state_->pathInfoCache.upsert(hashPart, PathInfoCacheValue { .value = info });
+ state_->pathInfoCache.upsert(std::string(storePath.to_string()), PathInfoCacheValue { .value = info });
}
- auto storePath = parseStorePath(storePathS);
-
if (!info || !goodStorePath(storePath, info->path)) {
stats.narInfoMissing++;
- throw InvalidPath("path '%s' is not valid", storePathS);
+ throw InvalidPath("path '%s' is not valid", printStorePath(storePath));
}
(*callbackPtr)(ref<const ValidPathInfo>(info));
@@ -548,6 +547,74 @@ void Store::queryPathInfo(const StorePath & storePath,
}});
}
+void Store::queryRealisation(const DrvOutput & id,
+ Callback<std::shared_ptr<const Realisation>> callback) noexcept
+{
+
+ try {
+ if (diskCache) {
+ auto [cacheOutcome, maybeCachedRealisation]
+ = diskCache->lookupRealisation(getUri(), id);
+ switch (cacheOutcome) {
+ case NarInfoDiskCache::oValid:
+ debug("Returning a cached realisation for %s", id.to_string());
+ callback(maybeCachedRealisation);
+ return;
+ case NarInfoDiskCache::oInvalid:
+ debug(
+ "Returning a cached missing realisation for %s",
+ id.to_string());
+ callback(nullptr);
+ return;
+ case NarInfoDiskCache::oUnknown:
+ break;
+ }
+ }
+ } catch (...) {
+ return callback.rethrow();
+ }
+
+ auto callbackPtr
+ = std::make_shared<decltype(callback)>(std::move(callback));
+
+ queryRealisationUncached(
+ id,
+ { [this, id, callbackPtr](
+ std::future<std::shared_ptr<const Realisation>> fut) {
+ try {
+ auto info = fut.get();
+
+ if (diskCache) {
+ if (info)
+ diskCache->upsertRealisation(getUri(), *info);
+ else
+ diskCache->upsertAbsentRealisation(getUri(), id);
+ }
+
+ (*callbackPtr)(std::shared_ptr<const Realisation>(info));
+
+ } catch (...) {
+ callbackPtr->rethrow();
+ }
+ } });
+}
+
+std::shared_ptr<const Realisation> Store::queryRealisation(const DrvOutput & id)
+{
+ using RealPtr = std::shared_ptr<const Realisation>;
+ std::promise<RealPtr> promise;
+
+ queryRealisation(id,
+ {[&](std::future<RealPtr> result) {
+ try {
+ promise.set_value(result.get());
+ } catch (...) {
+ promise.set_exception(std::current_exception());
+ }
+ }});
+
+ return promise.get_future().get();
+}
void Store::substitutePaths(const StorePathSet & paths)
{
@@ -860,7 +927,7 @@ std::map<StorePath, StorePath> copyPaths(
for (auto & path : paths) {
storePaths.insert(path.path());
if (auto realisation = std::get_if<Realisation>(&path.raw)) {
- settings.requireExperimentalFeature("ca-derivations");
+ settings.requireExperimentalFeature(Xp::CaDerivations);
toplevelRealisations.insert(*realisation);
}
}
@@ -892,7 +959,7 @@ std::map<StorePath, StorePath> copyPaths(
// Don't fail if the remote doesn't support CA derivations, as it might
// not be within our control to change that, and we might still want
// to at least copy the output paths.
- if (e.missingFeature == "ca-derivations")
+ if (e.missingFeature == Xp::CaDerivations)
ignoreException();
else
throw;
@@ -1012,7 +1079,7 @@ std::map<StorePath, StorePath> copyPaths(
nrFailed++;
if (!settings.keepGoing)
throw e;
- logger->log(lvlError, fmt("could not copy %s: %s", dstStore.printStorePath(storePath), e.what()));
+ printMsg(lvlError, "could not copy %s: %s", dstStore.printStorePath(storePath), e.what());
showProgress();
return;
}
@@ -1114,10 +1181,10 @@ bool ValidPathInfo::isContentAddressed(const Store & store) const
if (! ca) return false;
auto caPath = std::visit(overloaded {
- [&](TextHash th) {
+ [&](const TextHash & th) {
return store.makeTextPath(path.name(), th.hash, references);
},
- [&](FixedOutputHash fsh) {
+ [&](const FixedOutputHash & fsh) {
auto refs = references;
bool hasSelfReference = false;
if (refs.count(path)) {
diff --git a/src/libstore/store-api.hh b/src/libstore/store-api.hh
index 4fb6c40c7..aa44651d4 100644
--- a/src/libstore/store-api.hh
+++ b/src/libstore/store-api.hh
@@ -232,7 +232,6 @@ protected:
struct State
{
- // FIXME: fix key
LRUCache<std::string, PathInfoCacheValue> pathInfoCache;
};
@@ -370,6 +369,14 @@ public:
void queryPathInfo(const StorePath & path,
Callback<ref<const ValidPathInfo>> callback) noexcept;
+ /* Query the information about a realisation. */
+ std::shared_ptr<const Realisation> queryRealisation(const DrvOutput &);
+
+ /* Asynchronous version of queryRealisation(). */
+ void queryRealisation(const DrvOutput &,
+ Callback<std::shared_ptr<const Realisation>> callback) noexcept;
+
+
/* Check whether the given valid path info is sufficiently attested, by
either being signed by a trusted public key or content-addressed, in
order to be included in the given store.
@@ -394,11 +401,11 @@ protected:
virtual void queryPathInfoUncached(const StorePath & path,
Callback<std::shared_ptr<const ValidPathInfo>> callback) noexcept = 0;
+ virtual void queryRealisationUncached(const DrvOutput &,
+ Callback<std::shared_ptr<const Realisation>> callback) noexcept = 0;
public:
- virtual std::optional<const Realisation> queryRealisation(const DrvOutput &) = 0;
-
/* Queries the set of incoming FS references for a store path.
The result is not cleared. */
virtual void queryReferrers(const StorePath & path, StorePathSet & referrers)
@@ -430,9 +437,10 @@ public:
virtual StorePathSet querySubstitutablePaths(const StorePathSet & paths) { return {}; };
/* Query substitute info (i.e. references, derivers and download
- sizes) of a map of paths to their optional ca values. If a path
- does not have substitute info, it's omitted from the resulting
- ‘infos’ map. */
+ sizes) of a map of paths to their optional ca values. The info
+ of the first succeeding substituter for each path will be
+ returned. If a path does not have substitute info, it's omitted
+ from the resulting ‘infos’ map. */
virtual void querySubstitutablePathInfos(const StorePathCAMap & paths,
SubstitutablePathInfos & infos) { return; };
@@ -452,7 +460,7 @@ public:
libutil/archive.hh). */
virtual StorePath addToStore(const string & name, const Path & srcPath,
FileIngestionMethod method = FileIngestionMethod::Recursive, HashType hashAlgo = htSHA256,
- PathFilter & filter = defaultPathFilter, RepairFlag repair = NoRepair);
+ PathFilter & filter = defaultPathFilter, RepairFlag repair = NoRepair, const StorePathSet & references = StorePathSet());
/* Copy the contents of a path to the store and register the
       validity of the resulting path, using a constant amount of
@@ -468,7 +476,8 @@ public:
`dump` may be drained */
// FIXME: remove?
virtual StorePath addToStoreFromDump(Source & dump, const string & name,
- FileIngestionMethod method = FileIngestionMethod::Recursive, HashType hashAlgo = htSHA256, RepairFlag repair = NoRepair)
+ FileIngestionMethod method = FileIngestionMethod::Recursive, HashType hashAlgo = htSHA256, RepairFlag repair = NoRepair,
+ const StorePathSet & references = StorePathSet())
{ unsupported("addToStoreFromDump"); }
/* Like addToStore, but the contents written to the output path is
@@ -560,26 +569,6 @@ public:
virtual void addIndirectRoot(const Path & path)
{ unsupported("addIndirectRoot"); }
- /* Acquire the global GC lock, then immediately release it. This
- function must be called after registering a new permanent root,
- but before exiting. Otherwise, it is possible that a running
- garbage collector doesn't see the new root and deletes the
- stuff we've just built. By acquiring the lock briefly, we
- ensure that either:
-
- - The collector is already running, and so we block until the
- collector is finished. The collector will know about our
- *temporary* locks, which should include whatever it is we
- want to register as a permanent lock.
-
- - The collector isn't running, or it's just started but hasn't
- acquired the GC lock yet. In that case we get and release
- the lock right away, then exit. The collector scans the
- permanent root and sees ours.
-
- In either case the permanent root is seen by the collector. */
- virtual void syncWithGC() { };
-
/* Find the roots of the garbage collector. Each root is a pair
(link, storepath) where `link' is the path of the symlink
       outside of the Nix store that points to `storePath'. If
@@ -744,6 +733,11 @@ public:
virtual void createUser(const std::string & userName, uid_t userId)
{ }
+ /*
+ * Synchronises the options of the client with those of the daemon
+ * (a no-op when there’s no daemon)
+ */
+ virtual void setOptions() { }
protected:
Stats stats;
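// Editor's sketch, not part of the patch: the new Store::queryRealisation()
// API added above comes in a cached synchronous form and an asynchronous
// callback form, with the per-store implementation moved to
// queryRealisationUncached(). A minimal caller fragment, assuming a
// ref<Store> named `store` and a DrvOutput named `id` already exist:
if (auto realisation = store->queryRealisation(id))
    std::cout << store->printStorePath(realisation->outPath) << std::endl;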
diff --git a/src/libstore/tests/local.mk b/src/libstore/tests/local.mk
new file mode 100644
index 000000000..f74295d97
--- /dev/null
+++ b/src/libstore/tests/local.mk
@@ -0,0 +1,15 @@
+check: libstore-tests_RUN
+
+programs += libstore-tests
+
+libstore-tests_DIR := $(d)
+
+libstore-tests_INSTALL_DIR :=
+
+libstore-tests_SOURCES := $(wildcard $(d)/*.cc)
+
+libstore-tests_CXXFLAGS += -I src/libstore -I src/libutil
+
+libstore-tests_LIBS = libstore libutil
+
+libstore-tests_LDFLAGS := $(GTEST_LIBS)
diff --git a/src/libstore/tests/machines.cc b/src/libstore/tests/machines.cc
new file mode 100644
index 000000000..f51052b14
--- /dev/null
+++ b/src/libstore/tests/machines.cc
@@ -0,0 +1,169 @@
+#include "machines.hh"
+#include "globals.hh"
+
+#include <gmock/gmock-matchers.h>
+
+using testing::Contains;
+using testing::ElementsAre;
+using testing::EndsWith;
+using testing::Eq;
+using testing::Field;
+using testing::SizeIs;
+
+using nix::absPath;
+using nix::FormatError;
+using nix::getMachines;
+using nix::Machine;
+using nix::Machines;
+using nix::pathExists;
+using nix::Settings;
+using nix::settings;
+
+class Environment : public ::testing::Environment {
+ public:
+ void SetUp() override { settings.thisSystem = "TEST_ARCH-TEST_OS"; }
+};
+
+testing::Environment* const foo_env =
+ testing::AddGlobalTestEnvironment(new Environment);
+
+TEST(machines, getMachinesWithEmptyBuilders) {
+ settings.builders = "";
+ Machines actual = getMachines();
+ ASSERT_THAT(actual, SizeIs(0));
+}
+
+TEST(machines, getMachinesUriOnly) {
+ settings.builders = "nix@scratchy.labs.cs.uu.nl";
+ Machines actual = getMachines();
+ ASSERT_THAT(actual, SizeIs(1));
+ EXPECT_THAT(actual[0], Field(&Machine::storeUri, Eq("ssh://nix@scratchy.labs.cs.uu.nl")));
+ EXPECT_THAT(actual[0], Field(&Machine::systemTypes, ElementsAre("TEST_ARCH-TEST_OS")));
+ EXPECT_THAT(actual[0], Field(&Machine::sshKey, SizeIs(0)));
+ EXPECT_THAT(actual[0], Field(&Machine::maxJobs, Eq(1)));
+ EXPECT_THAT(actual[0], Field(&Machine::speedFactor, Eq(1)));
+ EXPECT_THAT(actual[0], Field(&Machine::supportedFeatures, SizeIs(0)));
+ EXPECT_THAT(actual[0], Field(&Machine::mandatoryFeatures, SizeIs(0)));
+ EXPECT_THAT(actual[0], Field(&Machine::sshPublicHostKey, SizeIs(0)));
+}
+
+TEST(machines, getMachinesDefaults) {
+ settings.builders = "nix@scratchy.labs.cs.uu.nl - - - - - - -";
+ Machines actual = getMachines();
+ ASSERT_THAT(actual, SizeIs(1));
+ EXPECT_THAT(actual[0], Field(&Machine::storeUri, Eq("ssh://nix@scratchy.labs.cs.uu.nl")));
+ EXPECT_THAT(actual[0], Field(&Machine::systemTypes, ElementsAre("TEST_ARCH-TEST_OS")));
+ EXPECT_THAT(actual[0], Field(&Machine::sshKey, SizeIs(0)));
+ EXPECT_THAT(actual[0], Field(&Machine::maxJobs, Eq(1)));
+ EXPECT_THAT(actual[0], Field(&Machine::speedFactor, Eq(1)));
+ EXPECT_THAT(actual[0], Field(&Machine::supportedFeatures, SizeIs(0)));
+ EXPECT_THAT(actual[0], Field(&Machine::mandatoryFeatures, SizeIs(0)));
+ EXPECT_THAT(actual[0], Field(&Machine::sshPublicHostKey, SizeIs(0)));
+}
+
+TEST(machines, getMachinesWithNewLineSeparator) {
+ settings.builders = "nix@scratchy.labs.cs.uu.nl\nnix@itchy.labs.cs.uu.nl";
+ Machines actual = getMachines();
+ ASSERT_THAT(actual, SizeIs(2));
+ EXPECT_THAT(actual, Contains(Field(&Machine::storeUri, EndsWith("nix@scratchy.labs.cs.uu.nl"))));
+ EXPECT_THAT(actual, Contains(Field(&Machine::storeUri, EndsWith("nix@itchy.labs.cs.uu.nl"))));
+}
+
+TEST(machines, getMachinesWithSemicolonSeparator) {
+ settings.builders = "nix@scratchy.labs.cs.uu.nl ; nix@itchy.labs.cs.uu.nl";
+ Machines actual = getMachines();
+ EXPECT_THAT(actual, SizeIs(2));
+ EXPECT_THAT(actual, Contains(Field(&Machine::storeUri, EndsWith("nix@scratchy.labs.cs.uu.nl"))));
+ EXPECT_THAT(actual, Contains(Field(&Machine::storeUri, EndsWith("nix@itchy.labs.cs.uu.nl"))));
+}
+
+TEST(machines, getMachinesWithCorrectCompleteSingleBuilder) {
+ settings.builders = "nix@scratchy.labs.cs.uu.nl i686-linux "
+ "/home/nix/.ssh/id_scratchy_auto 8 3 kvm "
+ "benchmark SSH+HOST+PUBLIC+KEY+BASE64+ENCODED==";
+ Machines actual = getMachines();
+ ASSERT_THAT(actual, SizeIs(1));
+ EXPECT_THAT(actual[0], Field(&Machine::storeUri, EndsWith("nix@scratchy.labs.cs.uu.nl")));
+ EXPECT_THAT(actual[0], Field(&Machine::systemTypes, ElementsAre("i686-linux")));
+ EXPECT_THAT(actual[0], Field(&Machine::sshKey, Eq("/home/nix/.ssh/id_scratchy_auto")));
+ EXPECT_THAT(actual[0], Field(&Machine::maxJobs, Eq(8)));
+ EXPECT_THAT(actual[0], Field(&Machine::speedFactor, Eq(3)));
+ EXPECT_THAT(actual[0], Field(&Machine::supportedFeatures, ElementsAre("kvm")));
+ EXPECT_THAT(actual[0], Field(&Machine::mandatoryFeatures, ElementsAre("benchmark")));
+ EXPECT_THAT(actual[0], Field(&Machine::sshPublicHostKey, Eq("SSH+HOST+PUBLIC+KEY+BASE64+ENCODED==")));
+}
+
+TEST(machines,
+ getMachinesWithCorrectCompleteSingleBuilderWithTabColumnDelimiter) {
+ settings.builders =
+ "nix@scratchy.labs.cs.uu.nl\ti686-linux\t/home/nix/.ssh/"
+ "id_scratchy_auto\t8\t3\tkvm\tbenchmark\tSSH+HOST+PUBLIC+"
+ "KEY+BASE64+ENCODED==";
+ Machines actual = getMachines();
+ ASSERT_THAT(actual, SizeIs(1));
+ EXPECT_THAT(actual[0], Field(&Machine::storeUri, EndsWith("nix@scratchy.labs.cs.uu.nl")));
+ EXPECT_THAT(actual[0], Field(&Machine::systemTypes, ElementsAre("i686-linux")));
+ EXPECT_THAT(actual[0], Field(&Machine::sshKey, Eq("/home/nix/.ssh/id_scratchy_auto")));
+ EXPECT_THAT(actual[0], Field(&Machine::maxJobs, Eq(8)));
+ EXPECT_THAT(actual[0], Field(&Machine::speedFactor, Eq(3)));
+ EXPECT_THAT(actual[0], Field(&Machine::supportedFeatures, ElementsAre("kvm")));
+ EXPECT_THAT(actual[0], Field(&Machine::mandatoryFeatures, ElementsAre("benchmark")));
+ EXPECT_THAT(actual[0], Field(&Machine::sshPublicHostKey, Eq("SSH+HOST+PUBLIC+KEY+BASE64+ENCODED==")));
+}
+
+TEST(machines, getMachinesWithMultiOptions) {
+ settings.builders = "nix@scratchy.labs.cs.uu.nl Arch1,Arch2 - - - "
+ "SupportedFeature1,SupportedFeature2 "
+ "MandatoryFeature1,MandatoryFeature2";
+ Machines actual = getMachines();
+ ASSERT_THAT(actual, SizeIs(1));
+ EXPECT_THAT(actual[0], Field(&Machine::storeUri, EndsWith("nix@scratchy.labs.cs.uu.nl")));
+ EXPECT_THAT(actual[0], Field(&Machine::systemTypes, ElementsAre("Arch1", "Arch2")));
+ EXPECT_THAT(actual[0], Field(&Machine::supportedFeatures, ElementsAre("SupportedFeature1", "SupportedFeature2")));
+ EXPECT_THAT(actual[0], Field(&Machine::mandatoryFeatures, ElementsAre("MandatoryFeature1", "MandatoryFeature2")));
+}
+
+TEST(machines, getMachinesWithIncorrectFormat) {
+ settings.builders = "nix@scratchy.labs.cs.uu.nl - - eight";
+ EXPECT_THROW(getMachines(), FormatError);
+ settings.builders = "nix@scratchy.labs.cs.uu.nl - - -1";
+ EXPECT_THROW(getMachines(), FormatError);
+ settings.builders = "nix@scratchy.labs.cs.uu.nl - - 8 three";
+ EXPECT_THROW(getMachines(), FormatError);
+ settings.builders = "nix@scratchy.labs.cs.uu.nl - - 8 -3";
+ EXPECT_THROW(getMachines(), FormatError);
+ settings.builders = "nix@scratchy.labs.cs.uu.nl - - 8 3 - - BAD_BASE64";
+ EXPECT_THROW(getMachines(), FormatError);
+}
+
+TEST(machines, getMachinesWithCorrectFileReference) {
+ auto path = absPath("src/libstore/tests/test-data/machines.valid");
+ ASSERT_TRUE(pathExists(path));
+
+ settings.builders = std::string("@") + path;
+ Machines actual = getMachines();
+ ASSERT_THAT(actual, SizeIs(3));
+ EXPECT_THAT(actual, Contains(Field(&Machine::storeUri, EndsWith("nix@scratchy.labs.cs.uu.nl"))));
+ EXPECT_THAT(actual, Contains(Field(&Machine::storeUri, EndsWith("nix@itchy.labs.cs.uu.nl"))));
+ EXPECT_THAT(actual, Contains(Field(&Machine::storeUri, EndsWith("nix@poochie.labs.cs.uu.nl"))));
+}
+
+TEST(machines, getMachinesWithCorrectFileReferenceToEmptyFile) {
+ auto path = "/dev/null";
+ ASSERT_TRUE(pathExists(path));
+
+ settings.builders = std::string("@") + path;
+ Machines actual = getMachines();
+ ASSERT_THAT(actual, SizeIs(0));
+}
+
+TEST(machines, getMachinesWithIncorrectFileReference) {
+ settings.builders = std::string("@") + absPath("/not/a/file");
+ Machines actual = getMachines();
+ ASSERT_THAT(actual, SizeIs(0));
+}
+
+TEST(machines, getMachinesWithCorrectFileReferenceToIncorrectFile) {
+ settings.builders = std::string("@") + absPath("src/libstore/tests/test-data/machines.bad_format");
+ EXPECT_THROW(getMachines(), FormatError);
+}
diff --git a/src/libstore/tests/references.cc b/src/libstore/tests/references.cc
new file mode 100644
index 000000000..d91d1cedd
--- /dev/null
+++ b/src/libstore/tests/references.cc
@@ -0,0 +1,45 @@
+#include "references.hh"
+
+#include <gtest/gtest.h>
+
+namespace nix {
+
+TEST(references, scan)
+{
+ std::string hash1 = "dc04vv14dak1c1r48qa0m23vr9jy8sm0";
+ std::string hash2 = "zc842j0rz61mjsp3h3wp5ly71ak6qgdn";
+
+ {
+ RefScanSink scanner(StringSet{hash1});
+ auto s = "foobar";
+ scanner(s);
+ ASSERT_EQ(scanner.getResult(), StringSet{});
+ }
+
+ {
+ RefScanSink scanner(StringSet{hash1});
+ auto s = "foobar" + hash1 + "xyzzy";
+ scanner(s);
+ ASSERT_EQ(scanner.getResult(), StringSet{hash1});
+ }
+
+ {
+ RefScanSink scanner(StringSet{hash1, hash2});
+ auto s = "foobar" + hash1 + "xyzzy" + hash2;
+ scanner(((std::string_view) s).substr(0, 10));
+ scanner(((std::string_view) s).substr(10, 5));
+ scanner(((std::string_view) s).substr(15, 5));
+ scanner(((std::string_view) s).substr(20));
+ ASSERT_EQ(scanner.getResult(), StringSet({hash1, hash2}));
+ }
+
+ {
+ RefScanSink scanner(StringSet{hash1, hash2});
+ auto s = "foobar" + hash1 + "xyzzy" + hash2;
+ for (auto & i : s)
+ scanner(std::string(1, i));
+ ASSERT_EQ(scanner.getResult(), StringSet({hash1, hash2}));
+ }
+}
+
+}
diff --git a/src/libstore/tests/test-data/machines.bad_format b/src/libstore/tests/test-data/machines.bad_format
new file mode 100644
index 000000000..7255a1216
--- /dev/null
+++ b/src/libstore/tests/test-data/machines.bad_format
@@ -0,0 +1 @@
+nix@scratchy.labs.cs.uu.nl - - eight
diff --git a/src/libstore/tests/test-data/machines.valid b/src/libstore/tests/test-data/machines.valid
new file mode 100644
index 000000000..1a6c8017c
--- /dev/null
+++ b/src/libstore/tests/test-data/machines.valid
@@ -0,0 +1,3 @@
+nix@scratchy.labs.cs.uu.nl i686-linux /home/nix/.ssh/id_scratchy_auto 8 1 kvm
+nix@itchy.labs.cs.uu.nl i686-linux /home/nix/.ssh/id_scratchy_auto 8 2
+nix@poochie.labs.cs.uu.nl i686-linux /home/nix/.ssh/id_scratchy_auto 1 2 kvm benchmark c3NoLXJzYSBBQUFBQjNOemFDMXljMkVBQUFBREFRQUJBQUFDQVFDWWV5R1laNTNzd1VjMUZNSHBWL1BCcXlKaFR5S1JoRkpWWVRpRHlQN2h5c1JGa0w4VDlLOGdhL2Y2L3c3QjN2SjNHSFRIUFkybENiUEdZbGNLd2h6M2ZRbFNNOEViNi95b3ZLajdvM1FsMEx5Y0dzdGJvRmcwWkZKNldncUxsR0ltS0NobUlxOGZ3TW5ZTWUxbnRQeTBUZFZjSU1tOTV3YzF3SjBMd2c3cEVMRmtHazdkeTVvYnM4a3lGZ0pORDVRSmFwQWJjeWp4Z1QzdzdMcktNZ2xzeWhhd01JNVpkMGZsQTVudW5OZ3pid3plYVhLaUsyTW0vdGJXYTU1YTd4QmNYdHpIZGlPSWdSajJlRWxaMGh5bk10YjBmcklsdmxIcEtLaVFaZ3pQdCtIVXQ2bXpRMkRVME52MGYyYnNSU0krOGpJU2pQcmdlcVVHRldMUzVIUTg2N2xSMlpiaWtyclhZNTdqbVFEZk5DRHY1VFBHZU9UekFEd2pjMDc2aFZ3VFJCd3VTZFhtaWNxTS95b3lrWitkV1dnZ25MenE5QU1tdlNZcDhmZkZDcS9CSDBZNUFXWTFHay9vS3hMVTNaOWt3ZDd2UWNFQWFCQ2dxdnVZRGdTaHE1RlhndDM3OVZESWtEL05ZSTg2QXVvajVDRmVNTzlRM2pJSlRadlh6c1VldjVoSnA2djcxSVh5ODVtbTY5R20zcXdicVE1SjVQZDU1Um56SitpaW5BNjZxTEFSc0Y4amNsSnd5ekFXclBoYU9DRVY2bjVMeVhVazhzMW9EVVR4V1pWN25rVkFTbHJ0MllGcjN5dzdjRTRXQVhsemhHcDhocmdLMVVkMUlyeDVnZWRaSnBWcy9uNWVybmJFMUxmb2x5UHUvRUFIWlh6VGd4dHVDUFNobXc9PQo=
diff --git a/src/libstore/uds-remote-store.cc b/src/libstore/uds-remote-store.cc
index cac4fa036..5c38323cd 100644
--- a/src/libstore/uds-remote-store.cc
+++ b/src/libstore/uds-remote-store.cc
@@ -45,30 +45,20 @@ std::string UDSRemoteStore::getUri()
}
+void UDSRemoteStore::Connection::closeWrite()
+{
+ shutdown(fd.get(), SHUT_WR);
+}
+
+
ref<RemoteStore::Connection> UDSRemoteStore::openConnection()
{
auto conn = make_ref<Connection>();
/* Connect to a daemon that does the privileged work for us. */
- conn->fd = socket(PF_UNIX, SOCK_STREAM
- #ifdef SOCK_CLOEXEC
- | SOCK_CLOEXEC
- #endif
- , 0);
- if (!conn->fd)
- throw SysError("cannot create Unix domain socket");
- closeOnExec(conn->fd.get());
-
- string socketPath = path ? *path : settings.nixDaemonSocketFile;
-
- struct sockaddr_un addr;
- addr.sun_family = AF_UNIX;
- if (socketPath.size() + 1 >= sizeof(addr.sun_path))
- throw Error("socket path '%1%' is too long", socketPath);
- strcpy(addr.sun_path, socketPath.c_str());
-
- if (::connect(conn->fd.get(), (struct sockaddr *) &addr, sizeof(addr)) == -1)
- throw SysError("cannot connect to daemon at '%1%'", socketPath);
+ conn->fd = createUnixDomainSocket();
+
+ nix::connect(conn->fd.get(), path ? *path : settings.nixDaemonSocketFile);
conn->from.fd = conn->fd.get();
conn->to.fd = conn->fd.get();
diff --git a/src/libstore/uds-remote-store.hh b/src/libstore/uds-remote-store.hh
index ddc7716cd..f8dfcca70 100644
--- a/src/libstore/uds-remote-store.hh
+++ b/src/libstore/uds-remote-store.hh
@@ -40,6 +40,12 @@ public:
private:
+ struct Connection : RemoteStore::Connection
+ {
+ AutoCloseFD fd;
+ void closeWrite() override;
+ };
+
ref<RemoteStore::Connection> openConnection() override;
std::optional<std::string> path;
};
diff --git a/src/libutil/ansicolor.hh b/src/libutil/ansicolor.hh
index ae741f867..38305e71c 100644
--- a/src/libutil/ansicolor.hh
+++ b/src/libutil/ansicolor.hh
@@ -9,7 +9,7 @@ namespace nix {
#define ANSI_ITALIC "\e[3m"
#define ANSI_RED "\e[31;1m"
#define ANSI_GREEN "\e[32;1m"
-#define ANSI_YELLOW "\e[33;1m"
+#define ANSI_WARNING "\e[35;1m"
#define ANSI_BLUE "\e[34;1m"
#define ANSI_MAGENTA "\e[35;1m"
#define ANSI_CYAN "\e[36;1m"
diff --git a/src/libutil/archive.cc b/src/libutil/archive.cc
index ed0eb2fb5..d78ec2b93 100644
--- a/src/libutil/archive.cc
+++ b/src/libutil/archive.cc
@@ -42,7 +42,7 @@ static string caseHackSuffix = "~nix~case~hack~";
PathFilter defaultPathFilter = [](const Path &) { return true; };
-static void dumpContents(const Path & path, size_t size,
+static void dumpContents(const Path & path, off_t size,
Sink & sink)
{
sink << "contents" << size;
@@ -76,7 +76,7 @@ static void dump(const Path & path, Sink & sink, PathFilter & filter)
sink << "type" << "regular";
if (st.st_mode & S_IXUSR)
sink << "executable" << "";
- dumpContents(path, (size_t) st.st_size, sink);
+ dumpContents(path, st.st_size, sink);
}
else if (S_ISDIR(st.st_mode)) {
diff --git a/src/libutil/args.cc b/src/libutil/args.cc
index afed0670f..9df279faf 100644
--- a/src/libutil/args.cc
+++ b/src/libutil/args.cc
@@ -331,6 +331,7 @@ MultiCommand::MultiCommand(const Commands & commands_)
if (i == commands.end())
throw UsageError("'%s' is not a recognised command", s);
command = {s, i->second()};
+ command->second->parent = this;
}}
});
diff --git a/src/libutil/args.hh b/src/libutil/args.hh
index c08ba8abd..7521b3065 100644
--- a/src/libutil/args.hh
+++ b/src/libutil/args.hh
@@ -12,6 +12,8 @@ namespace nix {
enum HashType : char;
+class MultiCommand;
+
class Args
{
public:
@@ -89,6 +91,14 @@ protected:
})
, arity(1)
{ }
+
+ template<class I>
+ Handler(std::optional<I> * dest)
+ : fun([=](std::vector<std::string> ss) {
+ *dest = string2IntWithUnitPrefix<I>(ss[0]);
+ })
+ , arity(1)
+ { }
};
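// Editor's sketch, not part of the patch: the new Handler overload above lets
// a flag fill a std::optional integer, parsed via string2IntWithUnitPrefix so
// suffixes such as "1K" or "2M" are accepted. The flag name and field below
// are illustrative only; this assumes it runs inside an Args subclass:
std::optional<uint64_t> maxSize;
addFlag({
    .longName = "max-size",
    .description = "Maximum size, with an optional unit suffix.",
    .labels = {"size"},
    .handler = {&maxSize},
});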
/* Options. */
@@ -169,11 +179,13 @@ public:
virtual nlohmann::json toJSON();
friend class MultiCommand;
+
+ MultiCommand * parent = nullptr;
};
/* A command is an argument parser that can be executed by calling its
run() method. */
-struct Command : virtual Args
+struct Command : virtual public Args
{
friend class MultiCommand;
@@ -193,7 +205,7 @@ typedef std::map<std::string, std::function<ref<Command>()>> Commands;
/* An argument parser that supports multiple subcommands,
i.e. ‘<command> <subcommand>’. */
-class MultiCommand : virtual Args
+class MultiCommand : virtual public Args
{
public:
Commands commands;
diff --git a/src/libutil/compression.cc b/src/libutil/compression.cc
index d26f68fde..f80ca664c 100644
--- a/src/libutil/compression.cc
+++ b/src/libutil/compression.cc
@@ -16,6 +16,8 @@
namespace nix {
+static const int COMPRESSION_LEVEL_DEFAULT = -1;
+
// Don't feed brotli too much at once.
struct ChunkedCompressionSink : CompressionSink
{
@@ -65,14 +67,16 @@ struct ArchiveCompressionSink : CompressionSink
Sink & nextSink;
struct archive * archive;
- ArchiveCompressionSink(Sink & nextSink, std::string format, bool parallel) : nextSink(nextSink) {
+ ArchiveCompressionSink(Sink & nextSink, std::string format, bool parallel, int level = COMPRESSION_LEVEL_DEFAULT) : nextSink(nextSink)
+ {
archive = archive_write_new();
if (!archive) throw Error("failed to initialize libarchive");
check(archive_write_add_filter_by_name(archive, format.c_str()), "couldn't initialize compression (%s)");
check(archive_write_set_format_raw(archive));
- if (format == "xz" && parallel) {
+ if (parallel)
check(archive_write_set_filter_option(archive, format.c_str(), "threads", "0"));
- }
+ if (level != COMPRESSION_LEVEL_DEFAULT)
+ check(archive_write_set_filter_option(archive, format.c_str(), "compression-level", std::to_string(level).c_str()));
// disable internal buffering
check(archive_write_set_bytes_per_block(archive, 0));
// disable output padding
@@ -126,7 +130,11 @@ private:
struct NoneSink : CompressionSink
{
Sink & nextSink;
- NoneSink(Sink & nextSink) : nextSink(nextSink) { }
+ NoneSink(Sink & nextSink, int level = COMPRESSION_LEVEL_DEFAULT) : nextSink(nextSink)
+ {
+ if (level != COMPRESSION_LEVEL_DEFAULT)
+ warn("requested compression level '%d' not supported by compression method 'none'", level);
+ }
void finish() override { flush(); }
void write(std::string_view data) override { nextSink(data); }
};
@@ -257,13 +265,13 @@ struct BrotliCompressionSink : ChunkedCompressionSink
}
};
-ref<CompressionSink> makeCompressionSink(const std::string & method, Sink & nextSink, const bool parallel)
+ref<CompressionSink> makeCompressionSink(const std::string & method, Sink & nextSink, const bool parallel, int level)
{
std::vector<std::string> la_supports = {
"bzip2", "compress", "grzip", "gzip", "lrzip", "lz4", "lzip", "lzma", "lzop", "xz", "zstd"
};
if (std::find(la_supports.begin(), la_supports.end(), method) != la_supports.end()) {
- return make_ref<ArchiveCompressionSink>(nextSink, method, parallel);
+ return make_ref<ArchiveCompressionSink>(nextSink, method, parallel, level);
}
if (method == "none")
return make_ref<NoneSink>(nextSink);
@@ -273,10 +281,10 @@ ref<CompressionSink> makeCompressionSink(const std::string & method, Sink & next
throw UnknownCompressionMethod("unknown compression method '%s'", method);
}
-ref<std::string> compress(const std::string & method, const std::string & in, const bool parallel)
+ref<std::string> compress(const std::string & method, const std::string & in, const bool parallel, int level)
{
StringSink ssink;
- auto sink = makeCompressionSink(method, ssink, parallel);
+ auto sink = makeCompressionSink(method, ssink, parallel, level);
(*sink)(in);
sink->finish();
return ssink.s;
diff --git a/src/libutil/compression.hh b/src/libutil/compression.hh
index 338a0d9f2..9b1e4a9d4 100644
--- a/src/libutil/compression.hh
+++ b/src/libutil/compression.hh
@@ -19,9 +19,9 @@ ref<std::string> decompress(const std::string & method, const std::string & in);
std::unique_ptr<FinishSink> makeDecompressionSink(const std::string & method, Sink & nextSink);
-ref<std::string> compress(const std::string & method, const std::string & in, const bool parallel = false);
+ref<std::string> compress(const std::string & method, const std::string & in, const bool parallel = false, int level = -1);
-ref<CompressionSink> makeCompressionSink(const std::string & method, Sink & nextSink, const bool parallel = false);
+ref<CompressionSink> makeCompressionSink(const std::string & method, Sink & nextSink, const bool parallel = false, int level = -1);
MakeError(UnknownCompressionMethod, Error);
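// Editor's sketch, not part of the patch: the new `level` argument is passed
// to libarchive's "compression-level" filter option, and -1 keeps the
// method's default. `data` is a placeholder std::string:
auto compressed = compress("xz", data, /* parallel */ false, /* level */ 6);
auto roundTrip  = decompress("xz", *compressed);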
diff --git a/src/libutil/config.cc b/src/libutil/config.cc
index 2a5f913e6..92ab265d3 100644
--- a/src/libutil/config.cc
+++ b/src/libutil/config.cc
@@ -1,6 +1,7 @@
#include "config.hh"
#include "args.hh"
#include "abstract-setting-to-json.hh"
+#include "experimental-features.hh"
#include <nlohmann/json.hpp>
@@ -177,11 +178,6 @@ AbstractSetting::AbstractSetting(
{
}
-void AbstractSetting::setDefault(const std::string & str)
-{
- if (!overridden) set(str);
-}
-
nlohmann::json AbstractSetting::toJSON()
{
return nlohmann::json(toJSONObject());
@@ -318,6 +314,31 @@ template<> std::string BaseSetting<StringSet>::to_string() const
return concatStringsSep(" ", value);
}
+template<> void BaseSetting<std::set<ExperimentalFeature>>::set(const std::string & str, bool append)
+{
+ if (!append) value.clear();
+ for (auto & s : tokenizeString<StringSet>(str)) {
+ auto thisXpFeature = parseExperimentalFeature(s);
+ if (thisXpFeature)
+ value.insert(thisXpFeature.value());
+ else
+ warn("unknown experimental feature '%s'", s);
+ }
+}
+
+template<> bool BaseSetting<std::set<ExperimentalFeature>>::isAppendable()
+{
+ return true;
+}
+
+template<> std::string BaseSetting<std::set<ExperimentalFeature>>::to_string() const
+{
+ StringSet stringifiedXpFeatures;
+ for (auto & feature : value)
+ stringifiedXpFeatures.insert(std::string(showExperimentalFeature(feature)));
+ return concatStringsSep(" ", stringifiedXpFeatures);
+}
+
template<> void BaseSetting<StringMap>::set(const std::string & str, bool append)
{
if (!append) value.clear();
@@ -353,6 +374,7 @@ template class BaseSetting<std::string>;
template class BaseSetting<Strings>;
template class BaseSetting<StringSet>;
template class BaseSetting<StringMap>;
+template class BaseSetting<std::set<ExperimentalFeature>>;
void PathSetting::set(const std::string & str, bool append)
{
diff --git a/src/libutil/config.hh b/src/libutil/config.hh
index df5c2226f..736810bf3 100644
--- a/src/libutil/config.hh
+++ b/src/libutil/config.hh
@@ -194,8 +194,6 @@ public:
bool overridden = false;
- void setDefault(const std::string & str);
-
protected:
AbstractSetting(
@@ -253,6 +251,7 @@ public:
bool operator !=(const T & v2) const { return value != v2; }
void operator =(const T & v) { assign(v); }
virtual void assign(const T & v) { value = v; }
+ void setDefault(const T & v) { if (!overridden) value = v; }
void set(const std::string & str, bool append = false) override;
diff --git a/src/libutil/error.cc b/src/libutil/error.cc
index 0eea3455d..203d79087 100644
--- a/src/libutil/error.cc
+++ b/src/libutil/error.cc
@@ -185,15 +185,15 @@ void printAtPos(const ErrPos & pos, std::ostream & out)
if (pos) {
switch (pos.origin) {
case foFile: {
- out << fmt(ANSI_BLUE "at " ANSI_YELLOW "%s:%s" ANSI_NORMAL ":", pos.file, showErrPos(pos));
+ out << fmt(ANSI_BLUE "at " ANSI_WARNING "%s:%s" ANSI_NORMAL ":", pos.file, showErrPos(pos));
break;
}
case foString: {
- out << fmt(ANSI_BLUE "at " ANSI_YELLOW "«string»:%s" ANSI_NORMAL ":", showErrPos(pos));
+ out << fmt(ANSI_BLUE "at " ANSI_WARNING "«string»:%s" ANSI_NORMAL ":", showErrPos(pos));
break;
}
case foStdin: {
- out << fmt(ANSI_BLUE "at " ANSI_YELLOW "«stdin»:%s" ANSI_NORMAL ":", showErrPos(pos));
+ out << fmt(ANSI_BLUE "at " ANSI_WARNING "«stdin»:%s" ANSI_NORMAL ":", showErrPos(pos));
break;
}
default:
@@ -232,7 +232,7 @@ std::ostream & showErrorInfo(std::ostream & out, const ErrorInfo & einfo, bool s
break;
}
case Verbosity::lvlWarn: {
- prefix = ANSI_YELLOW "warning";
+ prefix = ANSI_WARNING "warning";
break;
}
case Verbosity::lvlInfo: {
@@ -252,7 +252,7 @@ std::ostream & showErrorInfo(std::ostream & out, const ErrorInfo & einfo, bool s
break;
}
case Verbosity::lvlDebug: {
- prefix = ANSI_YELLOW "debug";
+ prefix = ANSI_WARNING "debug";
break;
}
default:
diff --git a/src/libutil/experimental-features.cc b/src/libutil/experimental-features.cc
new file mode 100644
index 000000000..b49f47e1d
--- /dev/null
+++ b/src/libutil/experimental-features.cc
@@ -0,0 +1,59 @@
+#include "experimental-features.hh"
+#include "util.hh"
+
+#include "nlohmann/json.hpp"
+
+namespace nix {
+
+std::map<ExperimentalFeature, std::string> stringifiedXpFeatures = {
+ { Xp::CaDerivations, "ca-derivations" },
+ { Xp::Flakes, "flakes" },
+ { Xp::NixCommand, "nix-command" },
+ { Xp::RecursiveNix, "recursive-nix" },
+ { Xp::NoUrlLiterals, "no-url-literals" },
+};
+
+const std::optional<ExperimentalFeature> parseExperimentalFeature(const std::string_view & name)
+{
+ using ReverseXpMap = std::map<std::string_view, ExperimentalFeature>;
+
+ static auto reverseXpMap = []()
+ {
+ auto reverseXpMap = std::make_unique<ReverseXpMap>();
+ for (auto & [feature, name] : stringifiedXpFeatures)
+ (*reverseXpMap)[name] = feature;
+ return reverseXpMap;
+ }();
+
+ if (auto feature = get(*reverseXpMap, name))
+ return *feature;
+ else
+ return std::nullopt;
+}
+
+std::string_view showExperimentalFeature(const ExperimentalFeature feature)
+{
+ return stringifiedXpFeatures.at(feature);
+}
+
+std::set<ExperimentalFeature> parseFeatures(const std::set<std::string> & rawFeatures)
+{
+ std::set<ExperimentalFeature> res;
+ for (auto & rawFeature : rawFeatures) {
+ if (auto feature = parseExperimentalFeature(rawFeature))
+ res.insert(*feature);
+ }
+ return res;
+}
+
+MissingExperimentalFeature::MissingExperimentalFeature(ExperimentalFeature feature)
+ : Error("experimental Nix feature '%1%' is disabled; use '--extra-experimental-features %1%' to override", showExperimentalFeature(feature))
+ , missingFeature(feature)
+{}
+
+std::ostream & operator <<(std::ostream & str, const ExperimentalFeature & feature)
+{
+ return str << showExperimentalFeature(feature);
+}
+
+}
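// Editor's sketch, not part of the patch: round-tripping a feature name
// through the helpers defined above; unknown names come back as std::nullopt.
// (Fragment; assumes <cassert> is included.)
assert(parseExperimentalFeature("ca-derivations") == Xp::CaDerivations);
assert(showExperimentalFeature(Xp::Flakes) == "flakes");
assert(!parseExperimentalFeature("not-a-feature").has_value());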
diff --git a/src/libutil/experimental-features.hh b/src/libutil/experimental-features.hh
new file mode 100644
index 000000000..291a58e32
--- /dev/null
+++ b/src/libutil/experimental-features.hh
@@ -0,0 +1,56 @@
+#pragma once
+
+#include "comparator.hh"
+#include "error.hh"
+#include "nlohmann/json_fwd.hpp"
+#include "types.hh"
+
+namespace nix {
+
+/**
+ * The list of available experimental features.
+ *
+ * If you update this, don’t forget to also change the map defining their
+ * string representation in the corresponding `.cc` file.
+ **/
+enum struct ExperimentalFeature
+{
+ CaDerivations,
+ Flakes,
+ NixCommand,
+ RecursiveNix,
+ NoUrlLiterals
+};
+
+/**
+ * Just because writing `ExperimentalFeature::CaDerivations` is way too long
+ */
+using Xp = ExperimentalFeature;
+
+const std::optional<ExperimentalFeature> parseExperimentalFeature(
+ const std::string_view & name);
+std::string_view showExperimentalFeature(const ExperimentalFeature);
+
+std::ostream & operator<<(
+ std::ostream & str,
+ const ExperimentalFeature & feature);
+
+/**
+ * Parse a set of strings to the corresponding set of experimental features,
+ * ignoring (but warning for) any unknown feature.
+ */
+std::set<ExperimentalFeature> parseFeatures(const std::set<std::string> &);
+
+class MissingExperimentalFeature : public Error
+{
+public:
+ ExperimentalFeature missingFeature;
+
+ MissingExperimentalFeature(ExperimentalFeature);
+ virtual const char * sname() const override
+ {
+ return "MissingExperimentalFeature";
+ }
+};
+
+}
diff --git a/src/libutil/fmt.hh b/src/libutil/fmt.hh
index 85c0e9429..fd335b811 100644
--- a/src/libutil/fmt.hh
+++ b/src/libutil/fmt.hh
@@ -82,7 +82,7 @@ struct yellowtxt
template <class T>
std::ostream & operator<<(std::ostream & out, const yellowtxt<T> & y)
{
- return out << ANSI_YELLOW << y.value << ANSI_NORMAL;
+ return out << ANSI_WARNING << y.value << ANSI_NORMAL;
}
template <class T>
diff --git a/src/libutil/local.mk b/src/libutil/local.mk
index 3a6415ee3..f880c0fc5 100644
--- a/src/libutil/local.mk
+++ b/src/libutil/local.mk
@@ -6,7 +6,7 @@ libutil_DIR := $(d)
libutil_SOURCES := $(wildcard $(d)/*.cc)
-libutil_LDFLAGS = -pthread $(OPENSSL_LIBS) $(LIBBROTLI_LIBS) $(LIBARCHIVE_LIBS) $(BOOST_LDFLAGS) -lboost_context
+libutil_LDFLAGS += -pthread $(OPENSSL_LIBS) $(LIBBROTLI_LIBS) $(LIBARCHIVE_LIBS) $(BOOST_LDFLAGS) -lboost_context
ifeq ($(HAVE_LIBCPUID), 1)
libutil_LDFLAGS += -lcpuid
diff --git a/src/libutil/logging.cc b/src/libutil/logging.cc
index 6b9b850ca..f8a121ed1 100644
--- a/src/libutil/logging.cc
+++ b/src/libutil/logging.cc
@@ -27,7 +27,7 @@ Logger * logger = makeSimpleLogger(true);
void Logger::warn(const std::string & msg)
{
- log(lvlWarn, ANSI_YELLOW "warning:" ANSI_NORMAL " " + msg);
+ log(lvlWarn, ANSI_WARNING "warning:" ANSI_NORMAL " " + msg);
}
void Logger::writeToStdout(std::string_view s)
@@ -163,7 +163,7 @@ struct JSONLogger : Logger {
void write(const nlohmann::json & json)
{
- prevLogger.log(lvlError, "@nix " + json.dump());
+ prevLogger.log(lvlError, "@nix " + json.dump(-1, ' ', false, nlohmann::json::error_handler_t::replace));
}
void log(Verbosity lvl, const FormatOrString & fs) override
diff --git a/src/libutil/logging.hh b/src/libutil/logging.hh
index 96ad69790..ce9c3dfed 100644
--- a/src/libutil/logging.hh
+++ b/src/libutil/logging.hh
@@ -189,13 +189,14 @@ extern Verbosity verbosity; /* suppress msgs > this */
/* Print a string message if the current log level is at least the specified
level. Note that this has to be implemented as a macro to ensure that the
arguments are evaluated lazily. */
-#define printMsg(level, args...) \
+#define printMsgUsing(loggerParam, level, args...) \
do { \
auto __lvl = level; \
if (__lvl <= nix::verbosity) { \
- logger->log(__lvl, fmt(args)); \
+ loggerParam->log(__lvl, fmt(args)); \
} \
} while (0)
+#define printMsg(level, args...) printMsgUsing(logger, level, args)
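// Editor's sketch, not part of the patch: printMsgUsing routes a lazily
// formatted message through an explicit logger instead of the global one,
// while printMsg stays a thin wrapper around it. `altLogger` is a placeholder
// Logger * and `nrCopied` a placeholder int:
printMsgUsing(altLogger, lvlInfo, "copied %d paths", nrCopied);
printMsg(lvlInfo, "copied %d paths", nrCopied);   // same, via the global logger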
#define printError(args...) printMsg(lvlError, args)
#define notice(args...) printMsg(lvlNotice, args)
diff --git a/src/libutil/ref.hh b/src/libutil/ref.hh
index 2549ef496..347b81f73 100644
--- a/src/libutil/ref.hh
+++ b/src/libutil/ref.hh
@@ -17,7 +17,7 @@ private:
public:
- ref<T>(const ref<T> & r)
+ ref(const ref<T> & r)
: p(r.p)
{ }
@@ -99,4 +99,47 @@ make_ref(Args&&... args)
return ref<T>(p);
}
+
+/* A non-nullable pointer.
+ This is similar to a C++ "& reference", but mutable.
+ This is similar to ref<T> but backed by a regular pointer instead of a smart pointer.
+ */
+template<typename T>
+class ptr {
+private:
+ T * p;
+
+public:
+ ptr<T>(const ptr<T> & r)
+ : p(r.p)
+ { }
+
+ explicit ptr<T>(T * p)
+ : p(p)
+ {
+ if (!p)
+ throw std::invalid_argument("null pointer cast to ptr");
+ }
+
+ T* operator ->() const
+ {
+ return &*p;
+ }
+
+ T& operator *() const
+ {
+ return *p;
+ }
+
+ bool operator == (const ptr<T> & other) const
+ {
+ return p == other.p;
+ }
+
+ bool operator != (const ptr<T> & other) const
+ {
+ return p != other.p;
+ }
+};
+
}
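// Editor's sketch, not part of the patch: ptr<T> acts like a mutable,
// non-nullable reference over a raw pointer; constructing it from a null
// pointer throws.
int x = 42;
ptr<int> p(&x);
*p += 1;                      // dereferences like a raw pointer
// ptr<int> bad(nullptr);     // would throw std::invalid_argument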
diff --git a/src/libutil/serialise.cc b/src/libutil/serialise.cc
index 374b48d79..16f3476c2 100644
--- a/src/libutil/serialise.cc
+++ b/src/libutil/serialise.cc
@@ -244,7 +244,8 @@ std::unique_ptr<FinishSink> sourceToSink(std::function<void(Source &)> fun)
if (!cur.empty()) (*coro)(false);
}
- void finish() {
+ void finish() override
+ {
if (!coro) return;
if (!*coro) abort();
(*coro)(true);
diff --git a/src/libutil/tarfile.cc b/src/libutil/tarfile.cc
index 24905130d..50e691a3d 100644
--- a/src/libutil/tarfile.cc
+++ b/src/libutil/tarfile.cc
@@ -39,32 +39,30 @@ void TarArchive::check(int err, const std::string & reason)
throw Error(reason, archive_error_string(this->archive));
}
-TarArchive::TarArchive(Source & source, bool raw) : buffer(4096)
+TarArchive::TarArchive(Source & source, bool raw)
+ : source(&source), buffer(4096)
{
- this->archive = archive_read_new();
- this->source = &source;
-
- if (!raw) {
- archive_read_support_filter_all(archive);
+ init();
+ if (!raw)
archive_read_support_format_all(archive);
- } else {
- archive_read_support_filter_all(archive);
+ else
archive_read_support_format_raw(archive);
- archive_read_support_format_empty(archive);
- }
check(archive_read_open(archive, (void *)this, callback_open, callback_read, callback_close), "Failed to open archive (%s)");
}
-
TarArchive::TarArchive(const Path & path)
{
- this->archive = archive_read_new();
-
- archive_read_support_filter_all(archive);
+ init();
archive_read_support_format_all(archive);
check(archive_read_open_filename(archive, path.c_str(), 16384), "failed to open archive: %s");
}
+void TarArchive::init()
+{
+ archive = archive_read_new();
+ archive_read_support_filter_all(archive);
+}
+
void TarArchive::close()
{
check(archive_read_close(this->archive), "Failed to close archive (%s)");
@@ -87,13 +85,16 @@ static void extract_archive(TarArchive & archive, const Path & destDir)
struct archive_entry * entry;
int r = archive_read_next_header(archive.archive, &entry);
if (r == ARCHIVE_EOF) break;
- else if (r == ARCHIVE_WARN)
+ auto name = archive_entry_pathname(entry);
+ if (!name)
+ throw Error("cannot get archive member name: %s", archive_error_string(archive.archive));
+ if (r == ARCHIVE_WARN)
warn(archive_error_string(archive.archive));
else
archive.check(r);
archive_entry_set_pathname(entry,
- (destDir + "/" + archive_entry_pathname(entry)).c_str());
+ (destDir + "/" + name).c_str());
archive.check(archive_read_extract(archive.archive, entry, flags));
}
diff --git a/src/libutil/tarfile.hh b/src/libutil/tarfile.hh
index 4d9141fd4..f107a7e2e 100644
--- a/src/libutil/tarfile.hh
+++ b/src/libutil/tarfile.hh
@@ -17,10 +17,13 @@ struct TarArchive {
// disable copy constructor
TarArchive(const TarArchive &) = delete;
+ void init();
+
void close();
~TarArchive();
};
+
void unpackTarfile(Source & source, const Path & destDir);
void unpackTarfile(const Path & tarFile, const Path & destDir);
diff --git a/src/libutil/tests/logging.cc b/src/libutil/tests/logging.cc
index d990e5499..cef3bd481 100644
--- a/src/libutil/tests/logging.cc
+++ b/src/libutil/tests/logging.cc
@@ -336,7 +336,7 @@ namespace nix {
ASSERT_STREQ(
hintfmt("only one arg %1% %2%", "fulfilled").str().c_str(),
- "only one arg " ANSI_YELLOW "fulfilled" ANSI_NORMAL " ");
+ "only one arg " ANSI_WARNING "fulfilled" ANSI_NORMAL " ");
}
@@ -344,7 +344,7 @@ namespace nix {
ASSERT_STREQ(
hintfmt("what about this %1% %2%", "%3%", "one", "two").str().c_str(),
- "what about this " ANSI_YELLOW "%3%" ANSI_NORMAL " " ANSI_YELLOW "one" ANSI_NORMAL);
+ "what about this " ANSI_WARNING "%3%" ANSI_NORMAL " " ANSI_YELLOW "one" ANSI_NORMAL);
}
diff --git a/src/libutil/tests/tests.cc b/src/libutil/tests/tests.cc
index 58df9c5ac..92972ed14 100644
--- a/src/libutil/tests/tests.cc
+++ b/src/libutil/tests/tests.cc
@@ -4,6 +4,8 @@
#include <limits.h>
#include <gtest/gtest.h>
+#include <numeric>
+
namespace nix {
/* ----------- tests for util.hh ------------------------------------------------*/
@@ -282,6 +284,17 @@ namespace nix {
ASSERT_EQ(decoded, s);
}
+ TEST(base64Encode, encodeAndDecodeNonPrintable) {
+ char s[256];
+ std::iota(std::rbegin(s), std::rend(s), 0);
+
+ auto encoded = base64Encode(s);
+ auto decoded = base64Decode(encoded);
+
+ EXPECT_EQ(decoded.length(), 255);
+ ASSERT_EQ(decoded, s);
+ }
+
/* ----------------------------------------------------------------------------
* base64Decode
* --------------------------------------------------------------------------*/
@@ -294,6 +307,10 @@ namespace nix {
ASSERT_EQ(base64Decode("cXVvZCBlcmF0IGRlbW9uc3RyYW5kdW0="), "quod erat demonstrandum");
}
+ TEST(base64Decode, decodeThrowsOnInvalidChar) {
+ ASSERT_THROW(base64Decode("cXVvZCBlcm_0IGRlbW9uc3RyYW5kdW0="), Error);
+ }
+
/* ----------------------------------------------------------------------------
* toLower
* --------------------------------------------------------------------------*/
diff --git a/src/libutil/util.cc b/src/libutil/util.cc
index d1270cd31..1b6467eb2 100644
--- a/src/libutil/util.cc
+++ b/src/libutil/util.cc
@@ -4,6 +4,7 @@
#include "finally.hh"
#include "serialise.hh"
+#include <array>
#include <cctype>
#include <cerrno>
#include <climits>
@@ -511,6 +512,7 @@ std::pair<AutoCloseFD, Path> createTempFile(const Path & prefix)
AutoCloseFD fd(mkstemp((char *) tmpl.c_str()));
if (!fd)
throw SysError("creating temporary file '%s'", tmpl);
+ closeOnExec(fd.get());
return {std::move(fd), tmpl};
}
@@ -561,7 +563,7 @@ Path getConfigDir()
std::vector<Path> getConfigDirs()
{
Path configHome = getConfigDir();
- string configDirs = getEnv("XDG_CONFIG_DIRS").value_or("");
+ string configDirs = getEnv("XDG_CONFIG_DIRS").value_or("/etc/xdg");
std::vector<Path> result = tokenizeString<std::vector<string>>(configDirs, ":");
result.insert(result.begin(), configHome);
return result;
@@ -902,7 +904,7 @@ int Pid::wait()
return status;
}
if (errno != EINTR)
- throw SysError("cannot get child exit status");
+ throw SysError("cannot get exit status of PID %d", pid);
checkInterrupt();
}
}
@@ -938,9 +940,6 @@ void killUser(uid_t uid)
users to which the current process can send signals. So we
fork a process, switch to uid, and send a mass kill. */
- ProcessOptions options;
- options.allowVfork = false;
-
Pid pid = startProcess([&]() {
if (setuid(uid) == -1)
@@ -963,7 +962,7 @@ void killUser(uid_t uid)
}
_exit(0);
- }, options);
+ });
int status = pid.wait();
if (status != 0)
@@ -1033,17 +1032,10 @@ std::vector<char *> stringsToCharPtrs(const Strings & ss)
return res;
}
-// Output = "standard out" output stream
string runProgram(Path program, bool searchPath, const Strings & args,
const std::optional<std::string> & input)
{
- RunOptions opts(program, args);
- opts.searchPath = searchPath;
- // This allows you to refer to a program with a pathname relative to the
- // PATH variable.
- opts.input = input;
-
- auto res = runProgram(opts);
+ auto res = runProgram(RunOptions {.program = program, .searchPath = searchPath, .args = args, .input = input});
if (!statusOk(res.first))
throw ExecError(res.first, fmt("program '%1%' %2%", program, statusToString(res.first)));
@@ -1052,9 +1044,8 @@ string runProgram(Path program, bool searchPath, const Strings & args,
}
// Output = error code + "standard out" output stream
-std::pair<int, std::string> runProgram(const RunOptions & options_)
+std::pair<int, std::string> runProgram(RunOptions && options)
{
- RunOptions options(options_);
StringSink sink;
options.standardOut = &sink;
@@ -1092,8 +1083,7 @@ void runProgram2(const RunOptions & options)
// vfork implies that the environment of the main process and the fork will
// be shared (technically this is undefined, but in practice that's the
// case), so we can't use it if we alter the environment
- if (options.environment)
- processOptions.allowVfork = false;
+ processOptions.allowVfork = !options.environment;
/* Fork. */
Pid pid = startProcess([&]() {
@@ -1216,7 +1206,7 @@ void closeOnExec(int fd)
//////////////////////////////////////////////////////////////////////
-bool _isInterrupted = false;
+std::atomic<bool> _isInterrupted = false;
static thread_local bool interruptThrown = false;
thread_local std::function<bool()> interruptCheck;
@@ -1447,8 +1437,7 @@ std::string filterANSIEscapes(const std::string & s, bool filterAll, unsigned in
}
-static char base64Chars[] = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/";
-static std::array<char, 256> base64DecodeChars;
+constexpr char base64Chars[] = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/";
string base64Encode(std::string_view s)
{
@@ -1473,12 +1462,15 @@ string base64Encode(std::string_view s)
string base64Decode(std::string_view s)
{
- static std::once_flag flag;
- std::call_once(flag, [](){
- base64DecodeChars = { (char)-1 };
+ constexpr char npos = -1;
+ constexpr std::array<char, 256> base64DecodeChars = [&]() {
+ std::array<char, 256> result{};
+ for (auto& c : result)
+ c = npos;
for (int i = 0; i < 64; i++)
- base64DecodeChars[(int) base64Chars[i]] = i;
- });
+ result[base64Chars[i]] = i;
+ return result;
+ }();
string res;
unsigned int d = 0, bits = 0;
@@ -1488,7 +1480,7 @@ string base64Decode(std::string_view s)
if (c == '\n') continue;
char digit = base64DecodeChars[(unsigned char) c];
- if (digit == -1)
+ if (digit == npos)
throw Error("invalid character in Base64 string: '%c'", c);
bits += 6;
@@ -1641,9 +1633,39 @@ void setStackSize(size_t stackSize)
#endif
}
-void restoreProcessContext()
+static AutoCloseFD fdSavedMountNamespace;
+
+void saveMountNamespace()
+{
+#if __linux__
+ static std::once_flag done;
+ std::call_once(done, []() {
+ AutoCloseFD fd = open("/proc/self/ns/mnt", O_RDONLY);
+ if (!fd)
+ throw SysError("saving parent mount namespace");
+ fdSavedMountNamespace = std::move(fd);
+ });
+#endif
+}
+
+void restoreMountNamespace()
+{
+#if __linux__
+ try {
+ if (fdSavedMountNamespace && setns(fdSavedMountNamespace.get(), CLONE_NEWNS) == -1)
+ throw SysError("restoring parent mount namespace");
+ } catch (Error & e) {
+ debug(e.msg());
+ }
+#endif
+}
+
+void restoreProcessContext(bool restoreMounts)
{
restoreSignals();
+ if (restoreMounts) {
+ restoreMountNamespace();
+ }
restoreAffinity();
@@ -1681,7 +1703,7 @@ std::unique_ptr<InterruptCallback> createInterruptCallback(std::function<void()>
}
-AutoCloseFD createUnixDomainSocket(const Path & path, mode_t mode)
+AutoCloseFD createUnixDomainSocket()
{
AutoCloseFD fdSocket = socket(PF_UNIX, SOCK_STREAM
#ifdef SOCK_CLOEXEC
@@ -1690,19 +1712,16 @@ AutoCloseFD createUnixDomainSocket(const Path & path, mode_t mode)
, 0);
if (!fdSocket)
throw SysError("cannot create Unix domain socket");
-
closeOnExec(fdSocket.get());
+ return fdSocket;
+}
- struct sockaddr_un addr;
- addr.sun_family = AF_UNIX;
- if (path.size() + 1 >= sizeof(addr.sun_path))
- throw Error("socket path '%1%' is too long", path);
- strcpy(addr.sun_path, path.c_str());
- unlink(path.c_str());
+AutoCloseFD createUnixDomainSocket(const Path & path, mode_t mode)
+{
+ auto fdSocket = nix::createUnixDomainSocket();
- if (bind(fdSocket.get(), (struct sockaddr *) &addr, sizeof(addr)) == -1)
- throw SysError("cannot bind to socket '%1%'", path);
+ bind(fdSocket.get(), path);
if (chmod(path.c_str(), mode) == -1)
throw SysError("changing permissions on '%1%'", path);
@@ -1714,6 +1733,66 @@ AutoCloseFD createUnixDomainSocket(const Path & path, mode_t mode)
}
+void bind(int fd, const std::string & path)
+{
+ unlink(path.c_str());
+
+ struct sockaddr_un addr;
+ addr.sun_family = AF_UNIX;
+
+ if (path.size() + 1 >= sizeof(addr.sun_path)) {
+ Pid pid = startProcess([&]() {
+ auto dir = dirOf(path);
+ if (chdir(dir.c_str()) == -1)
+ throw SysError("chdir to '%s' failed", dir);
+ std::string base(baseNameOf(path));
+ if (base.size() + 1 >= sizeof(addr.sun_path))
+ throw Error("socket path '%s' is too long", base);
+ memcpy(addr.sun_path, base.c_str(), base.size() + 1);
+ if (bind(fd, (struct sockaddr *) &addr, sizeof(addr)) == -1)
+ throw SysError("cannot bind to socket '%s'", path);
+ _exit(0);
+ });
+ int status = pid.wait();
+ if (status != 0)
+ throw Error("cannot bind to socket '%s'", path);
+ } else {
+ memcpy(addr.sun_path, path.c_str(), path.size() + 1);
+ if (bind(fd, (struct sockaddr *) &addr, sizeof(addr)) == -1)
+ throw SysError("cannot bind to socket '%s'", path);
+ }
+}
+
+
+void connect(int fd, const std::string & path)
+{
+ struct sockaddr_un addr;
+ addr.sun_family = AF_UNIX;
+
+ if (path.size() + 1 >= sizeof(addr.sun_path)) {
+ Pid pid = startProcess([&]() {
+ auto dir = dirOf(path);
+ if (chdir(dir.c_str()) == -1)
+ throw SysError("chdir to '%s' failed", dir);
+ std::string base(baseNameOf(path));
+ if (base.size() + 1 >= sizeof(addr.sun_path))
+ throw Error("socket path '%s' is too long", base);
+ memcpy(addr.sun_path, base.c_str(), base.size() + 1);
+ if (connect(fd, (struct sockaddr *) &addr, sizeof(addr)) == -1)
+ throw SysError("cannot connect to socket at '%s'", path);
+ _exit(0);
+ });
+ int status = pid.wait();
+ if (status != 0)
+ throw Error("cannot connect to socket at '%s'", path);
+ } else {
+ memcpy(addr.sun_path, path.c_str(), path.size() + 1);
+ if (connect(fd, (struct sockaddr *) &addr, sizeof(addr)) == -1)
+ throw SysError("cannot connect to socket at '%s'", path);
+ }
+}
+
+
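// Editor's sketch, not part of the patch: the refactored helpers on the
// connecting side, mirroring the UDSRemoteStore change earlier in this diff.
// The fork-and-chdir branch above exists to cope with paths longer than
// sockaddr_un::sun_path; "/run/example.sock" is a placeholder path.
AutoCloseFD fd = createUnixDomainSocket();
nix::connect(fd.get(), "/run/example.sock");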
string showBytes(uint64_t bytes)
{
return fmt("%.2f MiB", bytes / (1024.0 * 1024.0));
@@ -1723,8 +1802,10 @@ string showBytes(uint64_t bytes)
// FIXME: move to libstore/build
void commonChildInit(Pipe & logPipe)
{
+ logger = makeSimpleLogger();
+
const static string pathNullDevice = "/dev/null";
- restoreProcessContext();
+ restoreProcessContext(false);
/* Put the child in a separate session (and thus a separate
process group) so that it has no controlling terminal (meaning
diff --git a/src/libutil/util.hh b/src/libutil/util.hh
index a8dd4bd47..bc96bfed1 100644
--- a/src/libutil/util.hh
+++ b/src/libutil/util.hh
@@ -11,6 +11,7 @@
#include <unistd.h>
#include <signal.h>
+#include <atomic>
#include <functional>
#include <map>
#include <sstream>
@@ -259,10 +260,10 @@ void killUser(uid_t uid);
pid to the caller. */
struct ProcessOptions
{
- string errorPrefix = "error: ";
+ string errorPrefix = "";
bool dieWithParent = true;
bool runExitHandlers = false;
- bool allowVfork = true;
+ bool allowVfork = false;
};
pid_t startProcess(std::function<void()> fun, const ProcessOptions & options = ProcessOptions());
@@ -276,26 +277,20 @@ string runProgram(Path program, bool searchPath = false,
struct RunOptions
{
+ Path program;
+ bool searchPath = true;
+ Strings args;
std::optional<uid_t> uid;
std::optional<uid_t> gid;
std::optional<Path> chdir;
std::optional<std::map<std::string, std::string>> environment;
- Path program;
- bool searchPath = true;
- Strings args;
std::optional<std::string> input;
Source * standardIn = nullptr;
Sink * standardOut = nullptr;
bool mergeStderrToStdout = false;
- bool _killStderr = false;
-
- RunOptions(const Path & program, const Strings & args)
- : program(program), args(args) { };
-
- RunOptions & killStderr(bool v) { _killStderr = true; return *this; }
};
-std::pair<int, std::string> runProgram(const RunOptions & options);
+std::pair<int, std::string> runProgram(RunOptions && options);
void runProgram2(const RunOptions & options);
@@ -306,7 +301,15 @@ void setStackSize(size_t stackSize);
/* Restore the original inherited Unix process context (such as signal
masks, stack size, CPU affinity). */
-void restoreProcessContext();
+void restoreProcessContext(bool restoreMounts = true);
+
+/* Save the current mount namespace. Ignored if called more than
+ once. */
+void saveMountNamespace();
+
+/* Restore the mount namespace saved by saveMountNamespace(). Ignored
+ if saveMountNamespace() was never called. */
+void restoreMountNamespace();
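// Editor's sketch, not part of the patch: intended call order for the new
// mount-namespace helpers. setUpSandboxNamespace() is a hypothetical
// placeholder for whatever code changes the namespace:
saveMountNamespace();         // captures /proc/self/ns/mnt once
setUpSandboxNamespace();      // hypothetical: unshare()/setns() into a sandbox
restoreProcessContext();      // restoreMounts defaults to true, re-entering the saved namespace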
class ExecError : public Error
@@ -335,7 +338,7 @@ void closeOnExec(int fd);
/* User interruption. */
-extern bool _isInterrupted;
+extern std::atomic<bool> _isInterrupted;
extern thread_local std::function<bool()> interruptCheck;
@@ -517,6 +520,29 @@ std::optional<typename T::mapped_type> get(const T & map, const typename T::key_
}
+/* Remove and return the first item from a container. */
+template <class T>
+std::optional<typename T::value_type> remove_begin(T & c)
+{
+ auto i = c.begin();
+ if (i == c.end()) return {};
+ auto v = std::move(*i);
+ c.erase(i);
+ return v;
+}
+
+
+/* Remove and return the first item from a container. */
+template <class T>
+std::optional<typename T::value_type> pop(T & c)
+{
+ if (c.empty()) return {};
+ auto v = std::move(c.front());
+ c.pop();
+ return v;
+}
+
+
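// Editor's sketch, not part of the patch: draining a std::queue with the new
// pop() helper; handle() is a hypothetical consumer and <queue> is assumed:
std::queue<std::string> work;
work.push("a"); work.push("b");
while (auto item = pop(work))
    handle(*item);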
template<typename T>
class Callback;
@@ -577,9 +603,18 @@ extern PathFilter defaultPathFilter;
/* Common initialisation performed in child processes. */
void commonChildInit(Pipe & logPipe);
+/* Create a Unix domain socket. */
+AutoCloseFD createUnixDomainSocket();
+
/* Create a Unix domain socket in listen mode. */
AutoCloseFD createUnixDomainSocket(const Path & path, mode_t mode);
+/* Bind a Unix domain socket to a path. */
+void bind(int fd, const std::string & path);
+
+/* Connect to a Unix domain socket. */
+void connect(int fd, const std::string & path);
+
// A Rust/Python-like enumerate() iterator adapter.
// Borrowed from http://reedbeta.com/blog/python-like-enumerate-in-cpp17.
diff --git a/src/nix-build/nix-build.cc b/src/nix-build/nix-build.cc
index 77594f046..e2325c91f 100755
--- a/src/nix-build/nix-build.cc
+++ b/src/nix-build/nix-build.cc
@@ -105,7 +105,8 @@ static void main_nix_build(int argc, char * * argv)
// List of environment variables kept for --pure
std::set<string> keepVars{
- "HOME", "USER", "LOGNAME", "DISPLAY", "PATH", "TERM", "IN_NIX_SHELL",
+ "HOME", "XDG_RUNTIME_DIR", "USER", "LOGNAME", "DISPLAY",
+ "WAYLAND_DISPLAY", "WAYLAND_SOCKET", "PATH", "TERM", "IN_NIX_SHELL",
"NIX_SHELL_PRESERVE_PROMPT", "TZ", "PAGER", "NIX_BUILD_SHELL", "SHLVL",
"http_proxy", "https_proxy", "ftp_proxy", "all_proxy", "no_proxy"
};
@@ -358,6 +359,7 @@ static void main_nix_build(int argc, char * * argv)
is not set, then build bashInteractive from
<nixpkgs>. */
auto shell = getEnv("NIX_BUILD_SHELL");
+ std::optional<StorePath> shellDrv;
if (!shell) {
@@ -374,8 +376,7 @@ static void main_nix_build(int argc, char * * argv)
auto bashDrv = store->parseStorePath(drv->queryDrvPath());
pathsToBuild.push_back({bashDrv});
pathsToCopy.insert(bashDrv);
-
- shell = drv->queryOutPath() + "/bin/bash";
+ shellDrv = bashDrv;
} catch (Error & e) {
logError(e.info());
@@ -401,7 +402,12 @@ static void main_nix_build(int argc, char * * argv)
if (dryRun) return;
- if (settings.isExperimentalFeatureEnabled("ca-derivations")) {
+ if (shellDrv) {
+ auto shellDrvOutputs = store->queryPartialDerivationOutputMap(shellDrv.value());
+ shell = store->printStorePath(shellDrvOutputs.at("out").value()) + "/bin/bash";
+ }
+
+ if (settings.isExperimentalFeatureEnabled(Xp::CaDerivations)) {
auto resolvedDrv = drv.tryResolve(*store);
assert(resolvedDrv && "Successfully resolved the derivation");
drv = *resolvedDrv;
diff --git a/src/nix-env/nix-env.cc b/src/nix-env/nix-env.cc
index e04954d45..b9e7be1c6 100644
--- a/src/nix-env/nix-env.cc
+++ b/src/nix-env/nix-env.cc
@@ -224,6 +224,91 @@ static void checkSelectorUse(DrvNames & selectors)
}
+namespace {
+
+std::set<std::string> searchByPrefix(const DrvInfos & allElems, std::string_view prefix) {
+ constexpr std::size_t maxResults = 3;
+ std::set<std::string> result;
+ for (const auto & drvInfo : allElems) {
+ const auto drvName = DrvName { drvInfo.queryName() };
+ if (hasPrefix(drvName.name, prefix)) {
+ result.emplace(drvName.name);
+
+ if (result.size() >= maxResults) {
+ break;
+ }
+ }
+ }
+ return result;
+}
+
+struct Match
+{
+ DrvInfo drvInfo;
+ std::size_t index;
+
+ Match(DrvInfo drvInfo_, std::size_t index_)
+ : drvInfo{std::move(drvInfo_)}
+ , index{index_}
+ {}
+};
+
+/* If a selector matches multiple derivations
+ with the same name, pick the one matching the current
+ system. If there are still multiple derivations, pick the
+ one with the highest priority. If there are still multiple
+ derivations, pick the one with the highest version.
+ Finally, if there are still multiple derivations,
+ arbitrarily pick the first one. */
+std::vector<Match> pickNewestOnly(EvalState & state, std::vector<Match> matches) {
+ /* Map from package names to derivations. */
+ std::map<std::string, Match> newest;
+ StringSet multiple;
+
+ for (auto & match : matches) {
+ auto & oneDrv = match.drvInfo;
+
+ const auto drvName = DrvName { oneDrv.queryName() };
+ long comparison = 1;
+
+ const auto itOther = newest.find(drvName.name);
+
+ if (itOther != newest.end()) {
+ auto & newestDrv = itOther->second.drvInfo;
+
+ comparison =
+ oneDrv.querySystem() == newestDrv.querySystem() ? 0 :
+ oneDrv.querySystem() == settings.thisSystem ? 1 :
+ newestDrv.querySystem() == settings.thisSystem ? -1 : 0;
+ if (comparison == 0)
+ comparison = comparePriorities(state, oneDrv, newestDrv);
+ if (comparison == 0)
+ comparison = compareVersions(drvName.version, DrvName { newestDrv.queryName() }.version);
+ }
+
+ if (comparison > 0) {
+ newest.erase(drvName.name);
+ newest.emplace(drvName.name, match);
+ multiple.erase(drvName.fullName);
+ } else if (comparison == 0) {
+ multiple.insert(drvName.fullName);
+ }
+ }
+
+ matches.clear();
+ for (auto & [name, match] : newest) {
+ if (multiple.find(name) != multiple.end())
+ warn(
+ "there are multiple derivations named '%1%'; using the first one",
+ name);
+ matches.push_back(match);
+ }
+
+ return matches;
+}
+
+} // end namespace
+
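// Editor's sketch, not part of the patch: the prefix-based suggestion path.
// `allElems` is a placeholder DrvInfos; with packages "firefox" and
// "firefox-beta" present, an unmatched selector yields at most three
// suggestions, e.g.:
//   error: selector 'firefo' matches no derivations, maybe you meant:
//   firefox
//   firefox-beta
auto suggestions = searchByPrefix(allElems, "firefo");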
static DrvInfos filterBySelector(EvalState & state, const DrvInfos & allElems,
const Strings & args, bool newestOnly)
{
@@ -232,80 +317,43 @@ static DrvInfos filterBySelector(EvalState & state, const DrvInfos & allElems,
selectors.emplace_back("*");
DrvInfos elems;
- set<unsigned int> done;
-
- for (auto & i : selectors) {
- typedef list<std::pair<DrvInfo, unsigned int> > Matches;
- Matches matches;
- unsigned int n = 0;
- for (DrvInfos::const_iterator j = allElems.begin();
- j != allElems.end(); ++j, ++n)
- {
- DrvName drvName(j->queryName());
- if (i.matches(drvName)) {
- i.hits++;
- matches.push_back(std::pair<DrvInfo, unsigned int>(*j, n));
+ std::set<std::size_t> done;
+
+ for (auto & selector : selectors) {
+ std::vector<Match> matches;
+ for (const auto & [index, drvInfo] : enumerate(allElems)) {
+ const auto drvName = DrvName { drvInfo.queryName() };
+ if (selector.matches(drvName)) {
+ ++selector.hits;
+ matches.emplace_back(drvInfo, index);
}
}
- /* If `newestOnly', if a selector matches multiple derivations
- with the same name, pick the one matching the current
- system. If there are still multiple derivations, pick the
- one with the highest priority. If there are still multiple
- derivations, pick the one with the highest version.
- Finally, if there are still multiple derivations,
- arbitrarily pick the first one. */
if (newestOnly) {
-
- /* Map from package names to derivations. */
- typedef map<string, std::pair<DrvInfo, unsigned int> > Newest;
- Newest newest;
- StringSet multiple;
-
- for (auto & j : matches) {
- DrvName drvName(j.first.queryName());
- long d = 1;
-
- Newest::iterator k = newest.find(drvName.name);
-
- if (k != newest.end()) {
- d = j.first.querySystem() == k->second.first.querySystem() ? 0 :
- j.first.querySystem() == settings.thisSystem ? 1 :
- k->second.first.querySystem() == settings.thisSystem ? -1 : 0;
- if (d == 0)
- d = comparePriorities(state, j.first, k->second.first);
- if (d == 0)
- d = compareVersions(drvName.version, DrvName(k->second.first.queryName()).version);
- }
-
- if (d > 0) {
- newest.erase(drvName.name);
- newest.insert(Newest::value_type(drvName.name, j));
- multiple.erase(j.first.queryName());
- } else if (d == 0) {
- multiple.insert(j.first.queryName());
- }
- }
-
- matches.clear();
- for (auto & j : newest) {
- if (multiple.find(j.second.first.queryName()) != multiple.end())
- printInfo(
- "warning: there are multiple derivations named '%1%'; using the first one",
- j.second.first.queryName());
- matches.push_back(j.second);
- }
+ matches = pickNewestOnly(state, std::move(matches));
}
/* Insert only those elements in the final list that we
haven't inserted before. */
- for (auto & j : matches)
- if (done.insert(j.second).second)
- elems.push_back(j.first);
+ for (auto & match : matches)
+ if (done.insert(match.index).second)
+ elems.push_back(match.drvInfo);
+
+ if (selector.hits == 0 && selector.fullName != "*") {
+ const auto prefixHits = searchByPrefix(allElems, selector.name);
+
+ if (prefixHits.empty()) {
+ throw Error("selector '%1%' matches no derivations", selector.fullName);
+ } else {
+ std::string suggestionMessage = ", maybe you meant:";
+ for (const auto & drvName : prefixHits) {
+ suggestionMessage += fmt("\n%s", drvName);
+ }
+ throw Error("selector '%1%' matches no derivations" + suggestionMessage, selector.fullName);
+ }
+ }
}
- checkSelectorUse(selectors);
-
return elems;
}
@@ -879,7 +927,7 @@ static void queryJSON(Globals & globals, vector<DrvInfo> & elems)
placeholder.write(nullptr);
} else {
PathSet context;
- printValueAsJSON(*globals.state, true, *v, placeholder, context);
+ printValueAsJSON(*globals.state, true, *v, noPos, placeholder, context);
}
}
}
@@ -1149,10 +1197,10 @@ static void opQuery(Globals & globals, Strings opFlags, Strings opArgs)
} else if (v->type() == nList) {
attrs2["type"] = "strings";
XMLOpenElement m(xml, "meta", attrs2);
- for (unsigned int j = 0; j < v->listSize(); ++j) {
- if (v->listElems()[j]->type() != nString) continue;
+ for (auto elem : v->listItems()) {
+ if (elem->type() != nString) continue;
XMLAttrs attrs3;
- attrs3["value"] = v->listElems()[j]->string.s;
+ attrs3["value"] = elem->string.s;
xml.writeEmptyElement("string", attrs3);
}
} else if (v->type() == nAttrs) {
@@ -1204,37 +1252,6 @@ static void opSwitchProfile(Globals & globals, Strings opFlags, Strings opArgs)
}
-static constexpr GenerationNumber prevGen = std::numeric_limits<GenerationNumber>::max();
-
-
-static void switchGeneration(Globals & globals, GenerationNumber dstGen)
-{
- PathLocks lock;
- lockProfile(lock, globals.profile);
-
- auto [gens, curGen] = findGenerations(globals.profile);
-
- std::optional<Generation> dst;
- for (auto & i : gens)
- if ((dstGen == prevGen && i.number < curGen) ||
- (dstGen >= 0 && i.number == dstGen))
- dst = i;
-
- if (!dst) {
- if (dstGen == prevGen)
- throw Error("no generation older than the current (%1%) exists", curGen.value_or(0));
- else
- throw Error("generation %1% does not exist", dstGen);
- }
-
- printInfo("switching from generation %1% to %2%", curGen.value_or(0), dst->number);
-
- if (globals.dryRun) return;
-
- switchLink(globals.profile, dst->path);
-}
-
-
static void opSwitchGeneration(Globals & globals, Strings opFlags, Strings opArgs)
{
if (opFlags.size() > 0)
@@ -1243,7 +1260,7 @@ static void opSwitchGeneration(Globals & globals, Strings opFlags, Strings opArg
throw UsageError("exactly one argument expected");
if (auto dstGen = string2Int<GenerationNumber>(opArgs.front()))
- switchGeneration(globals, *dstGen);
+ switchGeneration(globals.profile, *dstGen, globals.dryRun);
else
throw UsageError("expected a generation number");
}
@@ -1256,7 +1273,7 @@ static void opRollback(Globals & globals, Strings opFlags, Strings opArgs)
if (opArgs.size() != 0)
throw UsageError("no arguments expected");
- switchGeneration(globals, prevGen);
+ switchGeneration(globals.profile, {}, globals.dryRun);
}
@@ -1296,12 +1313,12 @@ static void opDeleteGenerations(Globals & globals, Strings opFlags, Strings opAr
} else if (opArgs.size() == 1 && opArgs.front().find('d') != string::npos) {
deleteGenerationsOlderThan(globals.profile, opArgs.front(), globals.dryRun);
} else if (opArgs.size() == 1 && opArgs.front().find('+') != string::npos) {
- if(opArgs.front().size() < 2)
- throw Error("invalid number of generations ‘%1%’", opArgs.front());
+ if (opArgs.front().size() < 2)
+ throw Error("invalid number of generations '%1%'", opArgs.front());
string str_max = string(opArgs.front(), 1, opArgs.front().size());
auto max = string2Int<GenerationNumber>(str_max);
if (!max || *max == 0)
- throw Error("invalid number of generations to keep ‘%1%’", opArgs.front());
+ throw Error("invalid number of generations to keep '%1%'", opArgs.front());
deleteGenerationsGreaterThan(globals.profile, *max, globals.dryRun);
} else {
std::set<GenerationNumber> gens;
diff --git a/src/nix-env/user-env.cc b/src/nix-env/user-env.cc
index 5ceb2ae67..1fd4bcbd3 100644
--- a/src/nix-env/user-env.cc
+++ b/src/nix-env/user-env.cc
@@ -131,9 +131,9 @@ bool createUserEnv(EvalState & state, DrvInfos & elems,
state.forceValue(topLevel);
PathSet context;
Attr & aDrvPath(*topLevel.attrs->find(state.sDrvPath));
- auto topLevelDrv = state.store->parseStorePath(state.coerceToPath(aDrvPath.pos ? *(aDrvPath.pos) : noPos, *(aDrvPath.value), context));
+ auto topLevelDrv = state.store->parseStorePath(state.coerceToPath(*aDrvPath.pos, *aDrvPath.value, context));
Attr & aOutPath(*topLevel.attrs->find(state.sOutPath));
- Path topLevelOut = state.coerceToPath(aOutPath.pos ? *(aOutPath.pos) : noPos, *(aOutPath.value), context);
+ Path topLevelOut = state.coerceToPath(*aOutPath.pos, *aOutPath.value, context);
/* Realise the resulting store expression. */
debug("building user environment");
diff --git a/src/nix-instantiate/nix-instantiate.cc b/src/nix-instantiate/nix-instantiate.cc
index 25d0fa3ba..19a954ddd 100644
--- a/src/nix-instantiate/nix-instantiate.cc
+++ b/src/nix-instantiate/nix-instantiate.cc
@@ -50,9 +50,9 @@ void processExpr(EvalState & state, const Strings & attrPaths,
else
state.autoCallFunction(autoArgs, v, vRes);
if (output == okXML)
- printValueAsXML(state, strict, location, vRes, std::cout, context);
+ printValueAsXML(state, strict, location, vRes, std::cout, context, noPos);
else if (output == okJSON)
- printValueAsJSON(state, strict, vRes, std::cout, context);
+ printValueAsJSON(state, strict, vRes, v.determinePos(noPos), std::cout, context);
else {
if (strict) state.forceValueDeep(vRes);
std::cout << vRes << std::endl;
diff --git a/src/nix-store/nix-store.cc b/src/nix-store/nix-store.cc
index b327793e7..f0ce0368a 100644
--- a/src/nix-store/nix-store.cc
+++ b/src/nix-store/nix-store.cc
@@ -801,6 +801,9 @@ static void opServe(Strings opFlags, Strings opArgs)
settings.enforceDeterminism = readInt(in);
settings.runDiffHook = true;
}
+ if (GET_PROTOCOL_MINOR(clientVersion) >= 7) {
+ settings.keepFailed = (bool) readInt(in);
+ }
settings.printRepeatedBuilds = false;
};
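The `keepFailed` field read above is gated on the serve protocol's minor version, so older clients that never send it keep working; presumably the sending side mirrors the same check. A small standalone sketch of this version-gating pattern follows; the stream-based helpers and names are illustrative, not the patch's actual serialisation code.

```cpp
#include <iostream>

// Same idea as GET_PROTOCOL_MINOR in serve-protocol.hh: the low byte is the minor version.
constexpr unsigned protocolMinor(unsigned version) { return version & 0x00ff; }

struct BuildSettings { bool keepFailed = false; };

// Only emit the field when the peer is new enough to expect it.
void writeSettings(std::ostream & out, unsigned peerVersion, const BuildSettings & s)
{
    if (protocolMinor(peerVersion) >= 7)
        out << (s.keepFailed ? 1 : 0) << "\n";
}

// Only consume the field when the client claimed a new enough version.
void readSettings(std::istream & in, unsigned clientVersion, BuildSettings & s)
{
    if (protocolMinor(clientVersion) >= 7) {
        int v = 0;
        in >> v;
        s.keepFailed = (bool) v;
    }
}

int main()
{
    std::cout << protocolMinor(0x207) << "\n"; // a 2.7 peer: prints 7
}
```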
diff --git a/src/nix/build.cc b/src/nix/build.cc
index 13eb66ac6..6e31757a2 100644
--- a/src/nix/build.cc
+++ b/src/nix/build.cc
@@ -54,7 +54,7 @@ struct CmdBuild : InstallablesCommand, MixDryRun, MixJSON, MixProfile
{
auto buildables = build(
getEvalStore(), store,
- dryRun ? Realise::Nothing : Realise::Outputs,
+ dryRun ? Realise::Derivation : Realise::Outputs,
installables, buildMode);
if (json) logger->cout("%s", derivedPathsWithHintsToJSON(buildables, store).dump());
@@ -66,12 +66,12 @@ struct CmdBuild : InstallablesCommand, MixDryRun, MixJSON, MixProfile
for (const auto & [_i, buildable] : enumerate(buildables)) {
auto i = _i;
std::visit(overloaded {
- [&](BuiltPath::Opaque bo) {
+ [&](const BuiltPath::Opaque & bo) {
std::string symlink = outLink;
if (i) symlink += fmt("-%d", i);
store2->addPermRoot(bo.path, absPath(symlink));
},
- [&](BuiltPath::Built bfd) {
+ [&](const BuiltPath::Built & bfd) {
for (auto & output : bfd.outputs) {
std::string symlink = outLink;
if (i) symlink += fmt("-%d", i);
diff --git a/src/nix/bundle.cc b/src/nix/bundle.cc
index cedb5704c..aca024bca 100644
--- a/src/nix/bundle.cc
+++ b/src/nix/bundle.cc
@@ -59,7 +59,7 @@ struct CmdBundle : InstallableCommand
Strings getDefaultFlakeAttrPathPrefixes() override
{
- Strings res{"apps." + settings.thisSystem.get() + ".", "packages"};
+ Strings res{"apps." + settings.thisSystem.get() + "."};
for (auto & s : SourceExprCommand::getDefaultFlakeAttrPathPrefixes())
res.push_back(s);
return res;
diff --git a/src/nix/copy.cc b/src/nix/copy.cc
index 0489dfe06..197c85316 100644
--- a/src/nix/copy.cc
+++ b/src/nix/copy.cc
@@ -78,7 +78,7 @@ struct CmdCopy : BuiltPathsCommand
BuiltPathsCommand::run(store);
}
- void run(ref<Store> srcStore, BuiltPaths paths) override
+ void run(ref<Store> srcStore, BuiltPaths && paths) override
{
ref<Store> dstStore = dstUri.empty() ? openStore() : openStore(dstUri);
diff --git a/src/nix/daemon.cc b/src/nix/daemon.cc
index 2cf2a04c9..6a40a0bd3 100644
--- a/src/nix/daemon.cc
+++ b/src/nix/daemon.cc
@@ -156,9 +156,6 @@ static void daemonLoop()
if (chdir("/") == -1)
throw SysError("cannot change current directory");
- // Get rid of children automatically; don't let them become zombies.
- setSigChldAction(true);
-
AutoCloseFD fdSocket;
// Handle socket-based activation by systemd.
@@ -176,6 +173,9 @@ static void daemonLoop()
fdSocket = createUnixDomainSocket(settings.nixDaemonSocketFile, 0666);
}
+ // Get rid of children automatically; don't let them become zombies.
+ setSigChldAction(true);
+
// Loop accepting connections.
while (1) {
diff --git a/src/nix/develop.cc b/src/nix/develop.cc
index 55023545d..5aad53919 100644
--- a/src/nix/develop.cc
+++ b/src/nix/develop.cc
@@ -9,6 +9,7 @@
#include "progress-bar.hh"
#include "run.hh"
+#include <memory>
#include <nlohmann/json.hpp>
using namespace nix;
@@ -194,7 +195,7 @@ static StorePath getDerivationEnvironment(ref<Store> store, ref<Store> evalStore
'buildDerivation', but that's privileged. */
drv.name += "-env";
drv.inputSrcs.insert(std::move(getEnvShPath));
- if (settings.isExperimentalFeatureEnabled("ca-derivations")) {
+ if (settings.isExperimentalFeatureEnabled(Xp::CaDerivations)) {
for (auto & output : drv.outputs) {
output.second = {
.output = DerivationOutputDeferred{},
@@ -330,7 +331,7 @@ struct Common : InstallableCommand, MixProfile
Strings getDefaultFlakeAttrPathPrefixes() override
{
auto res = SourceExprCommand::getDefaultFlakeAttrPathPrefixes();
- res.emplace_front("devShells." + settings.thisSystem.get());
+ res.emplace_front("devShells." + settings.thisSystem.get() + ".");
return res;
}
@@ -392,6 +393,12 @@ struct CmdDevelop : Common, MixEnvironment
});
addFlag({
+ .longName = "unpack",
+ .description = "Run the `unpack` phase.",
+ .handler = {&phase, {"unpack"}},
+ });
+
+ addFlag({
.longName = "configure",
.description = "Run the `configure` phase.",
.handler = {&phase, {"configure"}},
@@ -505,6 +512,20 @@ struct CmdDevelop : Common, MixEnvironment
auto args = phase || !command.empty() ? Strings{std::string(baseNameOf(shell)), rcFilePath}
: Strings{std::string(baseNameOf(shell)), "--rcfile", rcFilePath};
+ // Need to chdir since phases assume in flake directory
+ if (phase) {
+ // chdir if installable is a flake of type git+file or path
+ auto installableFlake = std::dynamic_pointer_cast<InstallableFlake>(installable);
+ if (installableFlake) {
+ auto sourcePath = installableFlake->getLockedFlake()->flake.resolvedRef.input.getSourcePath();
+ if (sourcePath) {
+ if (chdir(sourcePath->c_str()) == -1) {
+ throw SysError("chdir to '%s' failed", *sourcePath);
+ }
+ }
+ }
+ }
+
runProgramInStore(store, shell, args);
}
};
diff --git a/src/nix/develop.md b/src/nix/develop.md
index c86c4872b..1f214966a 100644
--- a/src/nix/develop.md
+++ b/src/nix/develop.md
@@ -29,6 +29,7 @@ R""(
* Run a particular build phase directly:
```console
+ # nix develop --unpack
# nix develop --configure
# nix develop --build
# nix develop --check
diff --git a/src/nix/eval.cc b/src/nix/eval.cc
index 65d61e005..c7517cf79 100644
--- a/src/nix/eval.cc
+++ b/src/nix/eval.cc
@@ -112,7 +112,7 @@ struct CmdEval : MixJSON, InstallableCommand
else if (json) {
JSONPlaceholder jsonOut(std::cout);
- printValueAsJSON(*state, true, *v, jsonOut, context);
+ printValueAsJSON(*state, true, *v, pos, jsonOut, context);
}
else {
diff --git a/src/nix/flake-check.md b/src/nix/flake-check.md
index d995d6274..07031c909 100644
--- a/src/nix/flake-check.md
+++ b/src/nix/flake-check.md
@@ -31,38 +31,38 @@ at the first error.
The following flake output attributes must be derivations:
* `checks.`*system*`.`*name*
-* `defaultPackage.`*system*`
-* `devShell.`*system*`
-* `devShells.`*system*`.`*name*`
-* `nixosConfigurations.`*name*`.config.system.build.toplevel
+* `defaultPackage.`*system*
+* `devShell.`*system*
+* `devShells.`*system*`.`*name*
+* `nixosConfigurations.`*name*`.config.system.build.toplevel`
* `packages.`*system*`.`*name*
The following flake output attributes must be [app
definitions](./nix3-run.md):
* `apps.`*system*`.`*name*
-* `defaultApp.`*system*`
+* `defaultApp.`*system*
The following flake output attributes must be [template
definitions](./nix3-flake-init.md):
* `defaultTemplate`
-* `templates`.`*name*
+* `templates.`*name*
The following flake output attributes must be *Nixpkgs overlays*:
* `overlay`
-* `overlays`.`*name*
+* `overlays.`*name*
The following flake output attributes must be *NixOS modules*:
* `nixosModule`
-* `nixosModules`.`*name*
+* `nixosModules.`*name*
The following flake output attributes must be
[bundlers](./nix3-bundle.md):
-* `bundlers`.`*name*
+* `bundlers.`*name*
* `defaultBundler`
In addition, the `hydraJobs` output is evaluated in the same way as
diff --git a/src/nix/flake-show.md b/src/nix/flake-show.md
index 1a42c44a0..e484cf47e 100644
--- a/src/nix/flake-show.md
+++ b/src/nix/flake-show.md
@@ -35,4 +35,7 @@ specified by flake reference *flake-url*. These are the top-level
attributes in the `outputs` of the flake, as well as lower-level
attributes for some standard outputs (e.g. `packages` or `checks`).
+With `--json`, the output is in a JSON representation suitable for automatic
+processing by other tools.
+
)""
diff --git a/src/nix/flake.cc b/src/nix/flake.cc
index abb0fd3b4..97f4d911c 100644
--- a/src/nix/flake.cc
+++ b/src/nix/flake.cc
@@ -131,8 +131,18 @@ static void enumerateOutputs(EvalState & state, Value & vFlake,
state.forceAttrs(*aOutputs->value);
- for (auto & attr : *aOutputs->value->attrs)
- callback(attr.name, *attr.value, *attr.pos);
+ auto sHydraJobs = state.symbols.create("hydraJobs");
+
+ /* Hack: ensure that hydraJobs is evaluated before anything
+ else. This way we can disable IFD for hydraJobs and then enable
+ it for other outputs. */
+ if (auto attr = aOutputs->value->attrs->get(sHydraJobs))
+ callback(attr->name, *attr->value, *attr->pos);
+
+ for (auto & attr : *aOutputs->value->attrs) {
+ if (attr.name != sHydraJobs)
+ callback(attr.name, *attr.value, *attr.pos);
+ }
}
struct CmdFlakeMetadata : FlakeCommand, MixJSON
@@ -242,6 +252,14 @@ struct CmdFlakeInfo : CmdFlakeMetadata
}
};
+static bool argHasName(std::string_view arg, std::string_view expected)
+{
+ return
+ arg == expected
+ || arg == "_"
+ || (hasPrefix(arg, "_") && arg.substr(1) == expected);
+}
+
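To make the accepted spellings concrete: `argHasName` lets flake authors write `_`-prefixed lambda arguments (a common way to silence "unused argument" warnings) while still satisfying the overlay/bundler checks. A self-contained sketch below re-declares the helper locally so it compiles on its own; only the test values are invented.

```cpp
#include <cassert>
#include <string_view>

static bool hasPrefix(std::string_view s, std::string_view prefix)
{
    return s.substr(0, prefix.size()) == prefix;
}

static bool argHasName(std::string_view arg, std::string_view expected)
{
    return arg == expected
        || arg == "_"
        || (hasPrefix(arg, "_") && arg.substr(1) == expected);
}

int main()
{
    assert(argHasName("final", "final"));   // exact match
    assert(argHasName("_", "final"));       // a bare underscore always matches
    assert(argHasName("_final", "final"));  // underscore-prefixed variant
    assert(!argHasName("prev", "final"));   // anything else is rejected
}
```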
struct CmdFlakeCheck : FlakeCommand
{
bool build = true;
@@ -269,7 +287,10 @@ struct CmdFlakeCheck : FlakeCommand
void run(nix::ref<nix::Store> store) override
{
- settings.readOnlyMode = !build;
+ if (!build) {
+ settings.readOnlyMode = true;
+ evalSettings.enableImportFromDerivation.setDefault(false);
+ }
auto state = getEvalState();
@@ -333,10 +354,14 @@ struct CmdFlakeCheck : FlakeCommand
auto checkOverlay = [&](const std::string & attrPath, Value & v, const Pos & pos) {
try {
state->forceValue(v, pos);
- if (!v.isLambda() || v.lambda.fun->matchAttrs || std::string(v.lambda.fun->arg) != "final")
+ if (!v.isLambda()
+ || v.lambda.fun->hasFormals()
+ || !argHasName(v.lambda.fun->arg, "final"))
throw Error("overlay does not take an argument named 'final'");
auto body = dynamic_cast<ExprLambda *>(v.lambda.fun->body);
- if (!body || body->matchAttrs || std::string(body->arg) != "prev")
+ if (!body
+ || body->hasFormals()
+ || !argHasName(body->arg, "prev"))
throw Error("overlay does not take an argument named 'prev'");
// FIXME: if we have a 'nixpkgs' input, use it to
// evaluate the overlay.
@@ -350,7 +375,7 @@ struct CmdFlakeCheck : FlakeCommand
try {
state->forceValue(v, pos);
if (v.isLambda()) {
- if (!v.lambda.fun->matchAttrs || !v.lambda.fun->formals->ellipsis)
+ if (!v.lambda.fun->hasFormals() || !v.lambda.fun->formals->ellipsis)
throw Error("module must match an open attribute set ('{ config, ... }')");
} else if (v.type() == nAttrs) {
for (auto & attr : *v.attrs)
@@ -381,9 +406,13 @@ struct CmdFlakeCheck : FlakeCommand
for (auto & attr : *v.attrs) {
state->forceAttrs(*attr.value, *attr.pos);
- if (!state->isDerivation(*attr.value))
- checkHydraJobs(attrPath + "." + (std::string) attr.name,
- *attr.value, *attr.pos);
+ auto attrPath2 = attrPath + "." + (std::string) attr.name;
+ if (state->isDerivation(*attr.value)) {
+ Activity act(*logger, lvlChatty, actUnknown,
+ fmt("checking Hydra job '%s'", attrPath2));
+ checkDerivation(attrPath2, *attr.value, *attr.pos);
+ } else
+ checkHydraJobs(attrPath2, *attr.value, *attr.pos);
}
} catch (Error & e) {
@@ -447,8 +476,8 @@ struct CmdFlakeCheck : FlakeCommand
if (!v.isLambda())
throw Error("bundler must be a function");
if (!v.lambda.fun->formals ||
- v.lambda.fun->formals->argNames.find(state->symbols.create("program")) == v.lambda.fun->formals->argNames.end() ||
- v.lambda.fun->formals->argNames.find(state->symbols.create("system")) == v.lambda.fun->formals->argNames.end())
+ !v.lambda.fun->formals->argNames.count(state->symbols.create("program")) ||
+ !v.lambda.fun->formals->argNames.count(state->symbols.create("system")))
throw Error("bundler must take formal arguments 'program' and 'system'");
} catch (Error & e) {
e.addTrace(pos, hintfmt("while checking the template '%s'", attrPath));
@@ -469,6 +498,8 @@ struct CmdFlakeCheck : FlakeCommand
fmt("checking flake output '%s'", name));
try {
+ evalSettings.enableImportFromDerivation.setDefault(name != "hydraJobs");
+
state->forceValue(vOutput, pos);
if (name == "checks") {
@@ -603,7 +634,7 @@ struct CmdFlakeCheck : FlakeCommand
store->buildPaths(drvPaths);
}
if (hasErrors)
- throw Error("Some errors were encountered during the evaluation");
+ throw Error("some errors were encountered during the evaluation");
}
};
@@ -846,7 +877,7 @@ struct CmdFlakeArchive : FlakeCommand, MixJSON, MixDryRun
}
};
-struct CmdFlakeShow : FlakeCommand
+struct CmdFlakeShow : FlakeCommand, MixJSON
{
bool showLegacy = false;
@@ -873,52 +904,69 @@ struct CmdFlakeShow : FlakeCommand
void run(nix::ref<nix::Store> store) override
{
+ evalSettings.enableImportFromDerivation.setDefault(false);
+
auto state = getEvalState();
auto flake = std::make_shared<LockedFlake>(lockFlake());
- std::function<void(eval_cache::AttrCursor & visitor, const std::vector<Symbol> & attrPath, const std::string & headerPrefix, const std::string & nextPrefix)> visit;
-
- visit = [&](eval_cache::AttrCursor & visitor, const std::vector<Symbol> & attrPath, const std::string & headerPrefix, const std::string & nextPrefix)
+ std::function<nlohmann::json(
+ eval_cache::AttrCursor & visitor,
+ const std::vector<Symbol> & attrPath,
+ const std::string & headerPrefix,
+ const std::string & nextPrefix)> visit;
+
+ visit = [&](
+ eval_cache::AttrCursor & visitor,
+ const std::vector<Symbol> & attrPath,
+ const std::string & headerPrefix,
+ const std::string & nextPrefix)
+ -> nlohmann::json
{
+ auto j = nlohmann::json::object();
+
Activity act(*logger, lvlInfo, actUnknown,
fmt("evaluating '%s'", concatStringsSep(".", attrPath)));
try {
auto recurse = [&]()
{
- logger->cout("%s", headerPrefix);
+ if (!json)
+ logger->cout("%s", headerPrefix);
auto attrs = visitor.getAttrs();
for (const auto & [i, attr] : enumerate(attrs)) {
bool last = i + 1 == attrs.size();
auto visitor2 = visitor.getAttr(attr);
auto attrPath2(attrPath);
attrPath2.push_back(attr);
- visit(*visitor2, attrPath2,
+ auto j2 = visit(*visitor2, attrPath2,
fmt(ANSI_GREEN "%s%s" ANSI_NORMAL ANSI_BOLD "%s" ANSI_NORMAL, nextPrefix, last ? treeLast : treeConn, attr),
nextPrefix + (last ? treeNull : treeLine));
+ if (json) j.emplace(attr, std::move(j2));
}
};
auto showDerivation = [&]()
{
auto name = visitor.getAttr(state->sName)->getString();
-
- /*
- std::string description;
-
- if (auto aMeta = visitor.maybeGetAttr("meta")) {
- if (auto aDescription = aMeta->maybeGetAttr("description"))
- description = aDescription->getString();
+ if (json) {
+ std::optional<std::string> description;
+ if (auto aMeta = visitor.maybeGetAttr("meta")) {
+ if (auto aDescription = aMeta->maybeGetAttr("description"))
+ description = aDescription->getString();
+ }
+ j.emplace("type", "derivation");
+ j.emplace("name", name);
+ if (description)
+ j.emplace("description", *description);
+ } else {
+ logger->cout("%s: %s '%s'",
+ headerPrefix,
+ attrPath.size() == 2 && attrPath[0] == "devShell" ? "development environment" :
+ attrPath.size() >= 2 && attrPath[0] == "devShells" ? "development environment" :
+ attrPath.size() == 3 && attrPath[0] == "checks" ? "derivation" :
+ attrPath.size() >= 1 && attrPath[0] == "hydraJobs" ? "derivation" :
+ "package",
+ name);
}
- */
-
- logger->cout("%s: %s '%s'",
- headerPrefix,
- attrPath.size() == 2 && attrPath[0] == "devShell" ? "development environment" :
- attrPath.size() >= 2 && attrPath[0] == "devShells" ? "development environment" :
- attrPath.size() == 3 && attrPath[0] == "checks" ? "derivation" :
- attrPath.size() >= 1 && attrPath[0] == "hydraJobs" ? "derivation" :
- "package",
- name);
};
if (attrPath.size() == 0
@@ -962,7 +1010,7 @@ struct CmdFlakeShow : FlakeCommand
if (attrPath.size() == 1)
recurse();
else if (!showLegacy)
- logger->cout("%s: " ANSI_YELLOW "omitted" ANSI_NORMAL " (use '--legacy' to show)", headerPrefix);
+ logger->warn(fmt("%s: " ANSI_WARNING "omitted" ANSI_NORMAL " (use '--legacy' to show)", headerPrefix));
else {
if (visitor.isDerivation())
showDerivation();
@@ -979,7 +1027,11 @@ struct CmdFlakeShow : FlakeCommand
auto aType = visitor.maybeGetAttr("type");
if (!aType || aType->getString() != "app")
throw EvalError("not an app definition");
- logger->cout("%s: app", headerPrefix);
+ if (json) {
+ j.emplace("type", "app");
+ } else {
+ logger->cout("%s: app", headerPrefix);
+ }
}
else if (
@@ -987,27 +1039,41 @@ struct CmdFlakeShow : FlakeCommand
(attrPath.size() == 2 && attrPath[0] == "templates"))
{
auto description = visitor.getAttr("description")->getString();
- logger->cout("%s: template: " ANSI_BOLD "%s" ANSI_NORMAL, headerPrefix, description);
+ if (json) {
+ j.emplace("type", "template");
+ j.emplace("description", description);
+ } else {
+ logger->cout("%s: template: " ANSI_BOLD "%s" ANSI_NORMAL, headerPrefix, description);
+ }
}
else {
- logger->cout("%s: %s",
- headerPrefix,
+ auto [type, description] =
(attrPath.size() == 1 && attrPath[0] == "overlay")
- || (attrPath.size() == 2 && attrPath[0] == "overlays") ? "Nixpkgs overlay" :
- attrPath.size() == 2 && attrPath[0] == "nixosConfigurations" ? "NixOS configuration" :
- attrPath.size() == 2 && attrPath[0] == "nixosModules" ? "NixOS module" :
- ANSI_YELLOW "unknown" ANSI_NORMAL);
+ || (attrPath.size() == 2 && attrPath[0] == "overlays") ? std::make_pair("nixpkgs-overlay", "Nixpkgs overlay") :
+ attrPath.size() == 2 && attrPath[0] == "nixosConfigurations" ? std::make_pair("nixos-configuration", "NixOS configuration") :
+ (attrPath.size() == 1 && attrPath[0] == "nixosModule")
+ || (attrPath.size() == 2 && attrPath[0] == "nixosModules") ? std::make_pair("nixos-module", "NixOS module") :
+ std::make_pair("unknown", "unknown");
+ if (json) {
+ j.emplace("type", type);
+ } else {
+ logger->cout("%s: " ANSI_WARNING "%s" ANSI_NORMAL, headerPrefix, description);
+ }
}
} catch (EvalError & e) {
if (!(attrPath.size() > 0 && attrPath[0] == "legacyPackages"))
throw;
}
+
+ return j;
};
auto cache = openEvalCache(*state, flake);
- visit(*cache->getRoot(), {}, fmt(ANSI_BOLD "%s" ANSI_NORMAL, flake->flake.lockedRef), "");
+ auto j = visit(*cache->getRoot(), {}, fmt(ANSI_BOLD "%s" ANSI_NORMAL, flake->flake.lockedRef), "");
+ if (json)
+ logger->cout("%s", j.dump());
}
};
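The JSON-producing visitor above builds a nested tree bottom-up: each recursion returns an `nlohmann::json` object and the parent emplaces it under the attribute name, which is what `--json` ultimately dumps. A minimal standalone sketch of that accumulation pattern follows; the attribute names are illustrative, not taken from a real flake.

```cpp
#include <nlohmann/json.hpp>
#include <iostream>

// Builds { "packages": { "x86_64-linux": { "hello": { "type": "derivation" } } } }
// the same way the visitor does: each level emplaces its children's sub-objects.
nlohmann::json visitLevel(int depth)
{
    auto j = nlohmann::json::object();
    if (depth == 0) {
        j.emplace("type", "derivation");
        return j;
    }
    static const char * names[] = { "packages", "x86_64-linux", "hello" };
    j.emplace(names[3 - depth], visitLevel(depth - 1));
    return j;
}

int main()
{
    std::cout << visitLevel(3).dump(2) << "\n";
}
```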
@@ -1085,7 +1151,7 @@ struct CmdFlake : NixMultiCommand
{
if (!command)
throw UsageError("'nix flake' requires a sub-command.");
- settings.requireExperimentalFeature("flakes");
+ settings.requireExperimentalFeature(Xp::Flakes);
command->second->prepare();
command->second->run();
}
diff --git a/src/nix/flake.md b/src/nix/flake.md
index 3d273100b..3b5812a0a 100644
--- a/src/nix/flake.md
+++ b/src/nix/flake.md
@@ -225,7 +225,7 @@ Currently the `type` attribute can be one of the following:
[flake:]<flake-id>(/<rev-or-ref>(/rev)?)?
```
- These perform a lookup of `<flake-id>` in the flake registry. or
+ These perform a lookup of `<flake-id>` in the flake registry. For
example, `nixpkgs` and `nixpkgs/release-20.09` are indirect flake
references. The specified `rev` and/or `ref` are merged with the
entry in the registry; see [nix registry](./nix3-registry.md) for
diff --git a/src/nix/local.mk b/src/nix/local.mk
index 83b6dd08b..e4ec7634d 100644
--- a/src/nix/local.mk
+++ b/src/nix/local.mk
@@ -14,7 +14,7 @@ nix_SOURCES := \
$(wildcard src/nix-instantiate/*.cc) \
$(wildcard src/nix-store/*.cc) \
-nix_CXXFLAGS += -I src/libutil -I src/libstore -I src/libfetchers -I src/libexpr -I src/libmain -I src/libcmd
+nix_CXXFLAGS += -I src/libutil -I src/libstore -I src/libfetchers -I src/libexpr -I src/libmain -I src/libcmd -I doc/manual
nix_LIBS = libexpr libmain libfetchers libstore libutil libcmd
@@ -30,3 +30,5 @@ src/nix-env/user-env.cc: src/nix-env/buildenv.nix.gen.hh
src/nix/develop.cc: src/nix/get-env.sh.gen.hh
src/nix-channel/nix-channel.cc: src/nix-channel/unpack-channel.nix.gen.hh
+
+src/nix/main.cc: doc/manual/generate-manpage.nix.gen.hh doc/manual/utils.nix.gen.hh
diff --git a/src/nix/log.cc b/src/nix/log.cc
index 962c47525..fd3c1d787 100644
--- a/src/nix/log.cc
+++ b/src/nix/log.cc
@@ -35,10 +35,10 @@ struct CmdLog : InstallableCommand
RunPager pager;
for (auto & sub : subs) {
auto log = std::visit(overloaded {
- [&](DerivedPath::Opaque bo) {
+ [&](const DerivedPath::Opaque & bo) {
return sub->getBuildLog(bo.path);
},
- [&](DerivedPath::Built bfd) {
+ [&](const DerivedPath::Built & bfd) {
return sub->getBuildLog(bfd.drvPath);
},
}, b.raw());
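Several hunks in this patch change `std::visit` callbacks to take their alternative by const reference instead of by value, avoiding a copy per visited path. A minimal self-contained sketch of the `std::visit` + `overloaded` idiom follows; the helper is re-declared here for self-containment and the variant alternatives are illustrative stand-ins, not the real `DerivedPath` types.

```cpp
#include <iostream>
#include <string>
#include <variant>

template<class... Ts> struct overloaded : Ts... { using Ts::operator()...; };
template<class... Ts> overloaded(Ts...) -> overloaded<Ts...>;

using PathOrDrv = std::variant<std::string, int>;  // stand-in for Opaque / Built

int main()
{
    PathOrDrv p = std::string("/nix/store/example");
    std::visit(overloaded {
        // Taking const references avoids copying the alternative on every visit.
        [](const std::string & path) { std::cout << "opaque path: " << path << "\n"; },
        [](const int & drv) { std::cout << "derivation #" << drv << "\n"; },
    }, p);
}
```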
diff --git a/src/nix/main.cc b/src/nix/main.cc
index 008482be3..60b0aa410 100644
--- a/src/nix/main.cc
+++ b/src/nix/main.cc
@@ -10,6 +10,7 @@
#include "filetransfer.hh"
#include "finally.hh"
#include "loggers.hh"
+#include "markdown.hh"
#include <sys/types.h>
#include <sys/socket.h>
@@ -163,9 +164,46 @@ struct NixArgs : virtual MultiCommand, virtual MixCommonArgs
}
};
-static void showHelp(std::vector<std::string> subcommand)
+/* Render the help for the specified subcommand to stdout using
+ lowdown. */
+static void showHelp(std::vector<std::string> subcommand, MultiCommand & toplevel)
{
- showManPage(subcommand.empty() ? "nix" : fmt("nix3-%s", concatStringsSep("-", subcommand)));
+ auto mdName = subcommand.empty() ? "nix" : fmt("nix3-%s", concatStringsSep("-", subcommand));
+
+ evalSettings.restrictEval = false;
+ evalSettings.pureEval = false;
+ EvalState state({}, openStore("dummy://"));
+
+ auto vGenerateManpage = state.allocValue();
+ state.eval(state.parseExprFromString(
+ #include "generate-manpage.nix.gen.hh"
+ , "/"), *vGenerateManpage);
+
+ auto vUtils = state.allocValue();
+ state.cacheFile(
+ "/utils.nix", "/utils.nix",
+ state.parseExprFromString(
+ #include "utils.nix.gen.hh"
+ , "/"),
+ *vUtils);
+
+ auto vArgs = state.allocValue();
+ state.mkAttrs(*vArgs, 16);
+ auto vJson = state.allocAttr(*vArgs, state.symbols.create("command"));
+ mkString(*vJson, toplevel.toJSON().dump());
+ vArgs->attrs->sort();
+
+ auto vRes = state.allocValue();
+ state.callFunction(*vGenerateManpage, *vArgs, *vRes, noPos);
+
+ auto attr = vRes->attrs->get(state.symbols.create(mdName + ".md"));
+ if (!attr)
+ throw UsageError("Nix has no subcommand '%s'", concatStringsSep("", subcommand));
+
+ auto markdown = state.forceString(*attr->value);
+
+ RunPager pager;
+ std::cout << renderMarkdownToTerminal(markdown) << "\n";
}
struct CmdHelp : Command
@@ -194,7 +232,10 @@ struct CmdHelp : Command
void run() override
{
- showHelp(subcommand);
+ assert(parent);
+ MultiCommand * toplevel = parent;
+ while (toplevel->parent) toplevel = toplevel->parent;
+ showHelp(subcommand, *toplevel);
}
};
@@ -214,6 +255,16 @@ void mainWrapped(int argc, char * * argv)
initNix();
initGC();
+ #if __linux__
+ if (getuid() == 0) {
+ try {
+ saveMountNamespace();
+ if (unshare(CLONE_NEWNS) == -1)
+ throw SysError("setting up a private mount namespace");
+ } catch (Error & e) { }
+ }
+ #endif
+
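The block above gives a root-run `nix` its own mount namespace so that mounts made later (e.g. for chroot stores) do not leak into the rest of the system. A minimal Linux-only sketch of that call follows, outside of any Nix context; it needs root or CAP_SYS_ADMIN to succeed.

```cpp
#include <sched.h>
#include <cstdio>

int main()
{
    // After this call, mount/umount operations in this process (and its
    // children) are invisible to other processes on the system.
    if (unshare(CLONE_NEWNS) == -1) {
        std::perror("unshare(CLONE_NEWNS)");
        return 1;
    }
    std::puts("now running in a private mount namespace");
}
```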
programPath = argv[0];
auto programName = std::string(baseNameOf(programPath));
@@ -277,7 +328,7 @@ void mainWrapped(int argc, char * * argv)
} else
break;
}
- showHelp(subcommand);
+ showHelp(subcommand, args);
return;
} catch (UsageError &) {
if (!completions) throw;
@@ -296,7 +347,7 @@ void mainWrapped(int argc, char * * argv)
if (args.command->first != "repl"
&& args.command->first != "doctor"
&& args.command->first != "upgrade-nix")
- settings.requireExperimentalFeature("nix-command");
+ settings.requireExperimentalFeature(Xp::NixCommand);
if (args.useNet && !haveInternet()) {
warn("you don't have Internet access; disabling some network-dependent features");
diff --git a/src/nix/make-content-addressable.cc b/src/nix/make-content-addressable.cc
index f5bdc7e65..12f303a10 100644
--- a/src/nix/make-content-addressable.cc
+++ b/src/nix/make-content-addressable.cc
@@ -25,7 +25,7 @@ struct CmdMakeContentAddressable : StorePathsCommand, MixJSON
;
}
- void run(ref<Store> store, StorePaths storePaths) override
+ void run(ref<Store> store, StorePaths && storePaths) override
{
auto paths = store->topoSortPaths(StorePathSet(storePaths.begin(), storePaths.end()));
diff --git a/src/nix/path-info.cc b/src/nix/path-info.cc
index 518cd5568..3743d7504 100644
--- a/src/nix/path-info.cc
+++ b/src/nix/path-info.cc
@@ -79,7 +79,7 @@ struct CmdPathInfo : StorePathsCommand, MixJSON
std::cout << fmt("\t%6.1f%c", res, idents.at(power));
}
- void run(ref<Store> store, StorePaths storePaths) override
+ void run(ref<Store> store, StorePaths && storePaths) override
{
size_t pathLen = 0;
for (auto & storePath : storePaths)
diff --git a/src/nix/path-info.md b/src/nix/path-info.md
index 76a83e39d..7a1714ba4 100644
--- a/src/nix/path-info.md
+++ b/src/nix/path-info.md
@@ -82,7 +82,7 @@ This command shows information about the store paths produced by
By default, this command only prints the store paths. You can get
additional information by passing flags such as `--closure-size`,
---size`, `--sigs` or `--json`.
+`--size`, `--sigs` or `--json`.
> **Warning**
>
diff --git a/src/nix/prefetch.cc b/src/nix/prefetch.cc
index 9c2309a5f..768d37595 100644
--- a/src/nix/prefetch.cc
+++ b/src/nix/prefetch.cc
@@ -199,26 +199,24 @@ static int main_nix_prefetch_url(int argc, char * * argv)
state->forceAttrs(v);
/* Extract the URL. */
- auto attr = v.attrs->find(state->symbols.create("urls"));
- if (attr == v.attrs->end())
- throw Error("attribute set does not contain a 'urls' attribute");
- state->forceList(*attr->value);
- if (attr->value->listSize() < 1)
+ auto & attr = v.attrs->need(state->symbols.create("urls"));
+ state->forceList(*attr.value);
+ if (attr.value->listSize() < 1)
throw Error("'urls' list is empty");
- url = state->forceString(*attr->value->listElems()[0]);
+ url = state->forceString(*attr.value->listElems()[0]);
/* Extract the hash mode. */
- attr = v.attrs->find(state->symbols.create("outputHashMode"));
- if (attr == v.attrs->end())
+ auto attr2 = v.attrs->get(state->symbols.create("outputHashMode"));
+ if (!attr2)
printInfo("warning: this does not look like a fetchurl call");
else
- unpack = state->forceString(*attr->value) == "recursive";
+ unpack = state->forceString(*attr2->value) == "recursive";
/* Extract the name. */
if (!name) {
- attr = v.attrs->find(state->symbols.create("name"));
- if (attr != v.attrs->end())
- name = state->forceString(*attr->value);
+ auto attr3 = v.attrs->get(state->symbols.create("name"));
+            if (attr3)
+ name = state->forceString(*attr3->value);
}
}
diff --git a/src/nix/profile-history.md b/src/nix/profile-history.md
index d0fe40c82..f0bfe5037 100644
--- a/src/nix/profile-history.md
+++ b/src/nix/profile-history.md
@@ -6,10 +6,10 @@ R""(
```console
# nix profile history
- Version 508 -> 509:
+ Version 508 (2020-04-10):
flake:nixpkgs#legacyPackages.x86_64-linux.awscli: ∅ -> 1.17.13
- Version 509 -> 510:
+ Version 509 (2020-05-16) <- 508:
flake:nixpkgs#legacyPackages.x86_64-linux.awscli: 1.17.13 -> 1.18.211
```
diff --git a/src/nix/profile-rollback.md b/src/nix/profile-rollback.md
new file mode 100644
index 000000000..6bb75aa5e
--- /dev/null
+++ b/src/nix/profile-rollback.md
@@ -0,0 +1,26 @@
+R""(
+
+# Examples
+
+* Roll back your default profile to the previous version:
+
+ ```console
+ # nix profile rollback
+ switching profile from version 519 to 518
+ ```
+
+* Switch your default profile to version 510:
+
+ ```console
+ # nix profile rollback --to 510
+ switching profile from version 518 to 510
+ ```
+
+# Description
+
+This command switches a profile to the most recent version older
+than the currently active version, or if `--to` *N* is given, to
+version *N* of the profile. To see the available versions of a
+profile, use `nix profile history`.
+
+)""
diff --git a/src/nix/profile-wipe-history.md b/src/nix/profile-wipe-history.md
new file mode 100644
index 000000000..b4b262864
--- /dev/null
+++ b/src/nix/profile-wipe-history.md
@@ -0,0 +1,20 @@
+R""(
+
+# Examples
+
+* Delete all versions of the default profile older than 100 days:
+
+ ```console
+ # nix profile wipe-history --profile /tmp/profile --older-than 100d
+ removing profile version 515
+ removing profile version 514
+ ```
+
+# Description
+
+This command deletes non-current versions of a profile, making it
+impossible to roll back to these versions. By default, all non-current
+versions are deleted. With `--older-than` *N*`d`, all non-current
+versions older than *N* days are deleted.
+
+)""
diff --git a/src/nix/profile.cc b/src/nix/profile.cc
index 8cef6d0b6..96a20f673 100644
--- a/src/nix/profile.cc
+++ b/src/nix/profile.cc
@@ -12,6 +12,7 @@
#include <nlohmann/json.hpp>
#include <regex>
+#include <iomanip>
using namespace nix;
@@ -97,10 +98,8 @@ struct ProfileManifest
else if (pathExists(profile + "/manifest.nix")) {
// FIXME: needed because of pure mode; ugly.
- if (state.allowedPaths) {
- state.allowedPaths->insert(state.store->followLinksToStore(profile));
- state.allowedPaths->insert(state.store->followLinksToStore(profile + "/manifest.nix"));
- }
+ state.allowPath(state.store->followLinksToStore(profile));
+ state.allowPath(state.store->followLinksToStore(profile + "/manifest.nix"));
auto drvInfos = queryInstalled(state, state.store->followLinksToStore(profile));
@@ -259,11 +258,11 @@ struct CmdProfileInstall : InstallablesCommand, MixDefaultProfile
ProfileElement element;
std::visit(overloaded {
- [&](BuiltPath::Opaque bo) {
+ [&](const BuiltPath::Opaque & bo) {
pathsToBuild.push_back(bo);
element.storePaths.insert(bo.path);
},
- [&](BuiltPath::Built bfd) {
+ [&](const BuiltPath::Built & bfd) {
// TODO: Why are we querying if we know the output
// names already? Is it just to figure out what the
// default one is?
@@ -528,10 +527,11 @@ struct CmdProfileHistory : virtual StoreCommand, EvalCommand, MixDefaultProfile
if (!first) std::cout << "\n";
first = false;
- if (prevGen)
- std::cout << fmt("Version %d -> %d:\n", prevGen->first.number, gen.number);
- else
- std::cout << fmt("Version %d:\n", gen.number);
+ std::cout << fmt("Version %s%d" ANSI_NORMAL " (%s)%s:\n",
+ gen.number == curGen ? ANSI_GREEN : ANSI_BOLD,
+ gen.number,
+ std::put_time(std::gmtime(&gen.creationTime), "%Y-%m-%d"),
+ prevGen ? fmt(" <- %d", prevGen->first.number) : "");
ProfileManifest::printDiff(
prevGen ? prevGen->second : ProfileManifest(),
@@ -543,6 +543,76 @@ struct CmdProfileHistory : virtual StoreCommand, EvalCommand, MixDefaultProfile
}
};
+struct CmdProfileRollback : virtual StoreCommand, MixDefaultProfile, MixDryRun
+{
+ std::optional<GenerationNumber> version;
+
+ CmdProfileRollback()
+ {
+ addFlag({
+ .longName = "to",
+ .description = "The profile version to roll back to.",
+ .labels = {"version"},
+ .handler = {&version},
+ });
+ }
+
+ std::string description() override
+ {
+ return "roll back to the previous version or a specified version of a profile";
+ }
+
+ std::string doc() override
+ {
+ return
+ #include "profile-rollback.md"
+ ;
+ }
+
+ void run(ref<Store> store) override
+ {
+ switchGeneration(*profile, version, dryRun);
+ }
+};
+
+struct CmdProfileWipeHistory : virtual StoreCommand, MixDefaultProfile, MixDryRun
+{
+ std::optional<std::string> minAge;
+
+ CmdProfileWipeHistory()
+ {
+ addFlag({
+ .longName = "older-than",
+ .description =
+ "Delete versions older than the specified age. *age* "
+ "must be in the format *N*`d`, where *N* denotes a number "
+ "of days.",
+ .labels = {"age"},
+ .handler = {&minAge},
+ });
+ }
+
+ std::string description() override
+ {
+ return "delete non-current versions of a profile";
+ }
+
+ std::string doc() override
+ {
+ return
+ #include "profile-wipe-history.md"
+ ;
+ }
+
+ void run(ref<Store> store) override
+ {
+ if (minAge)
+ deleteGenerationsOlderThan(*profile, *minAge, dryRun);
+ else
+ deleteOldGenerations(*profile, dryRun);
+ }
+};
+
struct CmdProfile : NixMultiCommand
{
CmdProfile()
@@ -553,6 +623,8 @@ struct CmdProfile : NixMultiCommand
{"list", []() { return make_ref<CmdProfileList>(); }},
{"diff-closures", []() { return make_ref<CmdProfileDiffClosures>(); }},
{"history", []() { return make_ref<CmdProfileHistory>(); }},
+ {"rollback", []() { return make_ref<CmdProfileRollback>(); }},
+ {"wipe-history", []() { return make_ref<CmdProfileWipeHistory>(); }},
})
{ }
diff --git a/src/nix/realisation.cc b/src/nix/realisation.cc
index d59e594df..c9a7157cd 100644
--- a/src/nix/realisation.cc
+++ b/src/nix/realisation.cc
@@ -44,9 +44,9 @@ struct CmdRealisationInfo : BuiltPathsCommand, MixJSON
Category category() override { return catSecondary; }
- void run(ref<Store> store, BuiltPaths paths) override
+ void run(ref<Store> store, BuiltPaths && paths) override
{
- settings.requireExperimentalFeature("ca-derivations");
+ settings.requireExperimentalFeature(Xp::CaDerivations);
RealisedPath::Set realisations;
for (auto & builtPath : paths) {
diff --git a/src/nix/registry.cc b/src/nix/registry.cc
index 6a92576c7..c496f94f8 100644
--- a/src/nix/registry.cc
+++ b/src/nix/registry.cc
@@ -226,6 +226,7 @@ struct CmdRegistry : virtual NixMultiCommand
void run() override
{
+ settings.requireExperimentalFeature(Xp::Flakes);
if (!command)
throw UsageError("'nix registry' requires a sub-command.");
command->second->prepare();
diff --git a/src/nix/registry.md b/src/nix/registry.md
index 557e5795b..d5c9ef442 100644
--- a/src/nix/registry.md
+++ b/src/nix/registry.md
@@ -2,7 +2,7 @@ R""(
# Description
-`nix flake` provides subcommands for managing *flake
+`nix registry` provides subcommands for managing *flake
registries*. Flake registries are a convenience feature that allows
you to refer to flakes using symbolic identifiers such as `nixpkgs`,
rather than full URLs such as `git://github.com/NixOS/nixpkgs`. You
@@ -41,7 +41,7 @@ A registry is a JSON file with the following format:
```json
{
"version": 2,
- [
+ "flakes": [
{
"from": {
"type": "indirect",
diff --git a/src/nix/repl.cc b/src/nix/repl.cc
index b711f4163..f453343f3 100644
--- a/src/nix/repl.cc
+++ b/src/nix/repl.cc
@@ -110,11 +110,13 @@ string runNix(Path program, const Strings & args,
{
auto subprocessEnv = getEnv();
subprocessEnv["NIX_CONFIG"] = globalConfig.toKeyValue();
- RunOptions opts(settings.nixBinDir+ "/" + program, args);
- opts.input = input;
- opts.environment = subprocessEnv;
- auto res = runProgram(opts);
+ auto res = runProgram(RunOptions {
+        .program = settings.nixBinDir + "/" + program,
+ .args = args,
+ .environment = subprocessEnv,
+ .input = input,
+ });
if (!statusOk(res.first))
throw ExecError(res.first, fmt("program '%1%' %2%", program, statusToString(res.first)));
@@ -198,13 +200,13 @@ namespace {
void NixRepl::mainLoop(const std::vector<std::string> & files)
{
string error = ANSI_RED "error:" ANSI_NORMAL " ";
- std::cout << "Welcome to Nix version " << nixVersion << ". Type :? for help." << std::endl << std::endl;
+ notice("Welcome to Nix " + nixVersion + ". Type :? for help.\n");
for (auto & i : files)
loadedFiles.push_back(i);
reloadFiles();
- if (!loadedFiles.empty()) std::cout << std::endl;
+ if (!loadedFiles.empty()) notice("");
// Allow nix-repl specific settings in .inputrc
rl_readline_name = "nix-repl";
@@ -277,6 +279,7 @@ bool NixRepl::getLine(string & input, const std::string &prompt)
};
setupSignals();
+ Finally resetTerminal([&]() { rl_deprep_terminal(); });
char * s = readline(prompt.c_str());
Finally doFree([&]() { free(s); });
restoreSignals();
@@ -354,6 +357,8 @@ StringSet NixRepl::completePrefix(string prefix)
// Quietly ignore evaluation errors.
} catch (UndefinedVarError & e) {
// Quietly ignore undefined variable errors.
+ } catch (BadURL & e) {
+ // Quietly ignore BadURL flake-related errors.
}
}
@@ -394,6 +399,8 @@ bool NixRepl::processLine(string line)
{
if (line == "") return true;
+ _isInterrupted = false;
+
string command, arg;
if (line[0] == ':') {
@@ -423,7 +430,8 @@ bool NixRepl::processLine(string line)
<< " :s <expr> Build dependencies of derivation, then start nix-shell\n"
<< " :t <expr> Describe result of evaluation\n"
<< " :u <expr> Build derivation, then start nix-shell\n"
- << " :doc <expr> Show documentation of a builtin function\n";
+ << " :doc <expr> Show documentation of a builtin function\n"
+ << " :log <expr> Show logs for a derivation\n";
}
else if (command == ":a" || command == ":add") {
@@ -467,7 +475,10 @@ bool NixRepl::processLine(string line)
auto args = editorFor(pos);
auto editor = args.front();
args.pop_front();
- runProgram(editor, true, args);
+
+ // runProgram redirects stdout to a StringSink,
+ // using runProgram2 to allow editors to display their UI
+ runProgram2(RunOptions { .program = editor, .searchPath = true, .args = args });
// Reload right after exiting the editor
state->resetFileCache();
@@ -477,9 +488,10 @@ bool NixRepl::processLine(string line)
else if (command == ":t") {
Value v;
evalString(arg, v);
- std::cout << showType(v) << std::endl;
+ logger->cout(showType(v));
+ }
- } else if (command == ":u") {
+ else if (command == ":u") {
Value v, f, result;
evalString(arg, v);
evalString("drv: (import <nixpkgs> {}).runCommand \"shell\" { buildInputs = [ drv ]; } \"\"", f);
@@ -489,26 +501,41 @@ bool NixRepl::processLine(string line)
runNix("nix-shell", {state->store->printStorePath(drvPath)});
}
- else if (command == ":b" || command == ":i" || command == ":s") {
+ else if (command == ":b" || command == ":i" || command == ":s" || command == ":log") {
Value v;
evalString(arg, v);
StorePath drvPath = getDerivationPath(v);
Path drvPathRaw = state->store->printStorePath(drvPath);
if (command == ":b") {
- /* We could do the build in this process using buildPaths(),
- but doing it in a child makes it easier to recover from
- problems / SIGINT. */
- try {
- runNix("nix", {"build", "--no-link", drvPathRaw});
- auto drv = state->store->readDerivation(drvPath);
- std::cout << std::endl << "this derivation produced the following outputs:" << std::endl;
- for (auto & i : drv.outputsAndOptPaths(*state->store))
- std::cout << fmt(" %s -> %s\n", i.first, state->store->printStorePath(*i.second.second));
- } catch (ExecError &) {
- }
+ state->store->buildPaths({DerivedPath::Built{drvPath}});
+ auto drv = state->store->readDerivation(drvPath);
+ logger->cout("\nThis derivation produced the following outputs:");
+ for (auto & [outputName, outputPath] : state->store->queryDerivationOutputMap(drvPath))
+ logger->cout(" %s -> %s", outputName, state->store->printStorePath(outputPath));
} else if (command == ":i") {
runNix("nix-env", {"-i", drvPathRaw});
+ } else if (command == ":log") {
+ settings.readOnlyMode = true;
+ Finally roModeReset([&]() {
+ settings.readOnlyMode = false;
+ });
+ auto subs = getDefaultSubstituters();
+
+ subs.push_front(state->store);
+
+ bool foundLog = false;
+ RunPager pager;
+ for (auto & sub : subs) {
+ auto log = sub->getBuildLog(drvPath);
+ if (log) {
+ printInfo("got build log for '%s' from '%s'", drvPathRaw, sub->getUri());
+ logger->writeToStdout(*log);
+ foundLog = true;
+ break;
+ }
+ }
+ if (!foundLog) throw Error("build log of '%s' is not available", drvPathRaw);
} else {
runNix("nix-shell", {drvPathRaw});
}
@@ -539,9 +566,9 @@ bool NixRepl::processLine(string line)
+ concatStringsSep(" ", args) + "\n\n";
}
- markdown += trim(stripIndentation(doc->doc));
+ markdown += stripIndentation(doc->doc);
- std::cout << renderMarkdownToTerminal(markdown);
+ logger->cout(trim(renderMarkdownToTerminal(markdown)));
} else
throw Error("value does not have documentation");
}
@@ -624,9 +651,9 @@ void NixRepl::reloadFiles()
bool first = true;
for (auto & i : old) {
- if (!first) std::cout << std::endl;
+ if (!first) notice("");
first = false;
- std::cout << format("Loading '%1%'...") % i << std::endl;
+ notice("Loading '%1%'...", i);
loadFile(i);
}
}
@@ -637,7 +664,7 @@ void NixRepl::addAttrsToScope(Value & attrs)
state->forceAttrs(attrs);
for (auto & i : *attrs.attrs)
addVarToScope(i.name, *i.value);
- std::cout << format("Added %1% variables.") % attrs.attrs->size() << std::endl;
+ notice("Added %1% variables.", attrs.attrs->size());
}
@@ -645,7 +672,10 @@ void NixRepl::addVarToScope(const Symbol & name, Value & v)
{
if (displ >= envSize)
throw Error("environment full; cannot add more variables");
- staticEnv.vars[name] = displ;
+ if (auto oldVar = staticEnv.find(name); oldVar != staticEnv.vars.end())
+ staticEnv.vars.erase(oldVar);
+ staticEnv.vars.emplace_back(name, displ);
+ staticEnv.sort();
env->values[displ++] = &v;
varNames.insert((string) name);
}
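`addVarToScope` now keeps the REPL's static environment as a sorted sequence of bindings, dropping any previous binding for the same name before re-sorting so later lookups can binary-search. A standalone sketch of that replace-then-sort idiom follows; the container and types are illustrative, not the real `StaticEnv`.

```cpp
#include <algorithm>
#include <cassert>
#include <string>
#include <utility>
#include <vector>

using Binding = std::pair<std::string, int>;

// Replace any existing binding for 'name', then keep the vector sorted by name.
void addVar(std::vector<Binding> & vars, const std::string & name, int displ)
{
    auto old = std::find_if(vars.begin(), vars.end(),
        [&](const Binding & b) { return b.first == name; });
    if (old != vars.end()) vars.erase(old);
    vars.emplace_back(name, displ);
    std::sort(vars.begin(), vars.end(),
        [](const Binding & a, const Binding & b) { return a.first < b.first; });
}

int main()
{
    std::vector<Binding> vars;
    addVar(vars, "x", 0);
    addVar(vars, "a", 1);
    addVar(vars, "x", 2);          // rebinding 'x' replaces the old entry
    assert(vars.size() == 2);
    assert(vars.front().first == "a");
    assert(vars.back() == Binding("x", 2));
}
```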
@@ -705,7 +735,7 @@ std::ostream & NixRepl::printValue(std::ostream & str, Value & v, unsigned int m
break;
case nString:
- str << ANSI_YELLOW;
+ str << ANSI_WARNING;
printStringValue(str, v.string.s);
str << ANSI_NORMAL;
break;
@@ -768,12 +798,12 @@ std::ostream & NixRepl::printValue(std::ostream & str, Value & v, unsigned int m
str << "[ ";
if (maxDepth > 0)
- for (unsigned int n = 0; n < v.listSize(); ++n) {
- if (seen.find(v.listElems()[n]) != seen.end())
+ for (auto elem : v.listItems()) {
+ if (seen.count(elem))
str << "«repeated»";
else
try {
- printValue(str, *v.listElems()[n], maxDepth - 1, seen);
+ printValue(str, *elem, maxDepth - 1, seen);
} catch (AssertionError & e) {
str << ANSI_RED "«error: " << e.msg() << "»" ANSI_NORMAL;
}
diff --git a/src/nix/repl.md b/src/nix/repl.md
index bba60f871..9b6f2bee3 100644
--- a/src/nix/repl.md
+++ b/src/nix/repl.md
@@ -35,14 +35,17 @@ R""(
nix-repl> emacs.drvPath
"/nix/store/lp0sjrhgg03y2n0l10n70rg0k7hhyz0l-emacs-27.1.drv"
- nix-repl> drv = runCommand "hello" { buildInputs = [ hello ]; } "hello > $out"
+ nix-repl> drv = runCommand "hello" { buildInputs = [ hello ]; } "hello; hello > $out"
- nix-repl> :b x
+ nix-repl> :b drv
this derivation produced the following outputs:
out -> /nix/store/0njwbgwmkwls0w5dv9mpc1pq5fj39q0l-hello
nix-repl> builtins.readFile drv
"Hello, world!\n"
+
+ nix-repl> :log drv
+ Hello, world!
```
# Description
diff --git a/src/nix/run.cc b/src/nix/run.cc
index 7597b61f7..b01fdebaa 100644
--- a/src/nix/run.cc
+++ b/src/nix/run.cc
@@ -167,7 +167,7 @@ struct CmdRun : InstallableCommand
Strings getDefaultFlakeAttrPathPrefixes() override
{
- Strings res{"apps." + settings.thisSystem.get() + ".", "packages"};
+ Strings res{"apps." + settings.thisSystem.get() + "."};
for (auto & s : SourceExprCommand::getDefaultFlakeAttrPathPrefixes())
res.push_back(s);
return res;
diff --git a/src/nix/search.cc b/src/nix/search.cc
index c52a48d4e..0d8fdd5c2 100644
--- a/src/nix/search.cc
+++ b/src/nix/search.cc
@@ -62,6 +62,7 @@ struct CmdSearch : InstallableCommand, MixJSON
void run(ref<Store> store) override
{
settings.readOnlyMode = true;
+ evalSettings.enableImportFromDerivation.setDefault(false);
// Empty search string should match all packages
// Use "^" here instead of ".*" due to differences in resulting highlighting
diff --git a/src/nix/show-derivation.cc b/src/nix/show-derivation.cc
index 2588a011d..c614be68d 100644
--- a/src/nix/show-derivation.cc
+++ b/src/nix/show-derivation.cc
@@ -65,18 +65,18 @@ struct CmdShowDerivation : InstallablesCommand
auto & outputName = _outputName; // work around clang bug
auto outputObj { outputsObj.object(outputName) };
std::visit(overloaded {
- [&](DerivationOutputInputAddressed doi) {
+ [&](const DerivationOutputInputAddressed & doi) {
outputObj.attr("path", store->printStorePath(doi.path));
},
- [&](DerivationOutputCAFixed dof) {
+ [&](const DerivationOutputCAFixed & dof) {
outputObj.attr("path", store->printStorePath(dof.path(*store, drv.name, outputName)));
outputObj.attr("hashAlgo", dof.hash.printMethodAlgo());
outputObj.attr("hash", dof.hash.hash.to_string(Base16, false));
},
- [&](DerivationOutputCAFloating dof) {
+ [&](const DerivationOutputCAFloating & dof) {
outputObj.attr("hashAlgo", makeFileIngestionPrefix(dof.method) + printHashType(dof.hashType));
},
- [&](DerivationOutputDeferred) {},
+ [&](const DerivationOutputDeferred &) {},
}, output.output);
}
}
diff --git a/src/nix/sigs.cc b/src/nix/sigs.cc
index c64b472b6..3d659d6d2 100644
--- a/src/nix/sigs.cc
+++ b/src/nix/sigs.cc
@@ -27,7 +27,7 @@ struct CmdCopySigs : StorePathsCommand
return "copy store path signatures from substituters";
}
- void run(ref<Store> store, StorePaths storePaths) override
+ void run(ref<Store> store, StorePaths && storePaths) override
{
if (substituterUris.empty())
throw UsageError("you must specify at least one substituter using '-s'");
@@ -113,7 +113,7 @@ struct CmdSign : StorePathsCommand
return "sign store paths";
}
- void run(ref<Store> store, StorePaths storePaths) override
+ void run(ref<Store> store, StorePaths && storePaths) override
{
if (secretKeyFile.empty())
throw UsageError("you must specify a secret key file using '-k'");
@@ -218,8 +218,7 @@ struct CmdKey : NixMultiCommand
void run() override
{
if (!command)
- throw UsageError("'nix flake' requires a sub-command.");
- settings.requireExperimentalFeature("flakes");
+ throw UsageError("'nix key' requires a sub-command.");
command->second->prepare();
command->second->run();
}
diff --git a/src/nix/store-delete.cc b/src/nix/store-delete.cc
index 10245978e..e4a3cb554 100644
--- a/src/nix/store-delete.cc
+++ b/src/nix/store-delete.cc
@@ -30,7 +30,7 @@ struct CmdStoreDelete : StorePathsCommand
;
}
- void run(ref<Store> store, std::vector<StorePath> storePaths) override
+ void run(ref<Store> store, std::vector<StorePath> && storePaths) override
{
for (auto & path : storePaths)
options.pathsToDelete.insert(path);
diff --git a/src/nix/store-repair.cc b/src/nix/store-repair.cc
index 1c7a4392e..8fcb3639a 100644
--- a/src/nix/store-repair.cc
+++ b/src/nix/store-repair.cc
@@ -17,7 +17,7 @@ struct CmdStoreRepair : StorePathsCommand
;
}
- void run(ref<Store> store, std::vector<StorePath> storePaths) override
+ void run(ref<Store> store, std::vector<StorePath> && storePaths) override
{
for (auto & path : storePaths)
store->repairPath(path);
diff --git a/src/nix/verify.cc b/src/nix/verify.cc
index f5a576064..e92df1303 100644
--- a/src/nix/verify.cc
+++ b/src/nix/verify.cc
@@ -59,7 +59,7 @@ struct CmdVerify : StorePathsCommand
;
}
- void run(ref<Store> store, StorePaths storePaths) override
+ void run(ref<Store> store, StorePaths && storePaths) override
{
std::vector<ref<Store>> substituters;
for (auto & s : substituterUris)