-rw-r--r--  doc/manual/release-notes/rl-2.0.xml  |  12
-rw-r--r--  flake.nix                            |  30
-rw-r--r--  shell.nix                            |  25
-rw-r--r--  src/libstore/derivations.cc          |   6
-rw-r--r--  src/libstore/derivations.hh          |   2
-rw-r--r--  src/libstore/fetchers/parse.cc       |   4
-rw-r--r--  src/libstore/local-store.cc          |  28
-rw-r--r--  src/libstore/ssh.cc                  |  10
-rw-r--r--  src/nix/eval-hydra-jobs.cc           | 105
-rw-r--r--  src/nix/installables.cc              |   2
-rw-r--r--  src/nix/progress-bar.cc              |   4
11 files changed, 143 insertions(+), 85 deletions(-)
diff --git a/doc/manual/release-notes/rl-2.0.xml b/doc/manual/release-notes/rl-2.0.xml
index fc9a77b08..4c683dd3d 100644
--- a/doc/manual/release-notes/rl-2.0.xml
+++ b/doc/manual/release-notes/rl-2.0.xml
@@ -503,14 +503,14 @@
</listitem>
<listitem>
- <para><emphasis>Pure evaluation mode</emphasis>. This is a variant
- of the existing restricted evaluation mode. In pure mode, the Nix
- evaluator forbids access to anything that could cause different
- evaluations of the same command line arguments to produce a
+ <para><emphasis>Pure evaluation mode</emphasis>. With the
+ <literal>--pure-eval</literal> flag, Nix enables a variant of the existing
+ restricted evaluation mode that forbids access to anything that could cause
+ different evaluations of the same command line arguments to produce a
different result. This includes builtin functions such as
<function>builtins.getEnv</function>, but more importantly,
- <emphasis>all</emphasis> filesystem or network access unless a
- content hash or commit hash is specified. For example, calls to
+ <emphasis>all</emphasis> filesystem or network access unless a content hash
+ or commit hash is specified. For example, calls to
<function>builtins.fetchGit</function> are only allowed if a
<varname>rev</varname> attribute is specified.</para>
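
As a minimal sketch of the behaviour described in that note (the URL and the all-zero commit hash below are placeholders, not values from this patch): under --pure-eval the first call is accepted because rev pins the result to a fixed revision, while the variant without rev is rejected.

    # Accepted in pure evaluation mode: rev pins the fetched tree.
    builtins.fetchGit {
      url = "https://github.com/NixOS/nixpkgs";          # placeholder URL
      rev = "0000000000000000000000000000000000000000";  # placeholder commit hash
    }

    # Rejected in pure evaluation mode: no rev, so the result could change
    # between evaluations.
    # builtins.fetchGit { url = "https://github.com/NixOS/nixpkgs"; }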
diff --git a/flake.nix b/flake.nix
index ff9f817c0..078f51fa4 100644
--- a/flake.nix
+++ b/flake.nix
@@ -471,21 +471,21 @@
name = "nix-${tarball.version}";
meta.description = "Release-critical builds";
constituents =
- [ tarball
- build.i686-linux
- build.x86_64-darwin
- build.x86_64-linux
- build.aarch64-linux
- binaryTarball.i686-linux
- binaryTarball.x86_64-darwin
- binaryTarball.x86_64-linux
- binaryTarball.aarch64-linux
- tests.remoteBuilds
- tests.nix-copy-closure
- tests.binaryTarball
- #tests.evalNixpkgs
- #tests.evalNixOS
- installerScript
+ [ "tarball"
+ "build.i686-linux"
+ "build.x86_64-darwin"
+ "build.x86_64-linux"
+ "build.aarch64-linux"
+ "binaryTarball.i686-linux"
+ "binaryTarball.x86_64-darwin"
+ "binaryTarball.x86_64-linux"
+ "binaryTarball.aarch64-linux"
+ "tests.remoteBuilds"
+ "tests.nix-copy-closure"
+ "tests.binaryTarball"
+ #"tests.evalNixpkgs"
+ #"tests.evalNixOS"
+ "installerScript"
];
};
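
The constituents above are now Hydra job names given as strings rather than references to the job derivations themselves; the named-constituents support added to src/nix/eval-hydra-jobs.cc later in this patch looks up each name after evaluation and rewrites the aggregate derivation to depend on the referenced jobs.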
diff --git a/shell.nix b/shell.nix
deleted file mode 100644
index e5a2b2c91..000000000
--- a/shell.nix
+++ /dev/null
@@ -1,25 +0,0 @@
-{ useClang ? false }:
-
-with import (builtins.fetchTarball https://github.com/NixOS/nixpkgs/archive/nixos-19.09.tar.gz) {};
-
-with import ./release-common.nix { inherit pkgs; };
-
-(if useClang then clangStdenv else stdenv).mkDerivation {
- name = "nix";
-
- buildInputs = buildDeps ++ tarballDeps ++ perlDeps ++ [ pkgs.rustfmt ];
-
- inherit configureFlags;
-
- enableParallelBuilding = true;
-
- installFlags = "sysconfdir=$(out)/etc";
-
- shellHook =
- ''
- export prefix=$(pwd)/inst
- configureFlags+=" --prefix=$prefix"
- PKG_CONFIG_PATH=$prefix/lib/pkgconfig:$PKG_CONFIG_PATH
- PATH=$prefix/bin:$PATH
- '';
-}
diff --git a/src/libstore/derivations.cc b/src/libstore/derivations.cc
index a554cb66d..5dba84aaf 100644
--- a/src/libstore/derivations.cc
+++ b/src/libstore/derivations.cc
@@ -65,7 +65,7 @@ bool BasicDerivation::isBuiltin() const
StorePath writeDerivation(ref<Store> store,
- const Derivation & drv, const string & name, RepairFlag repair)
+ const Derivation & drv, std::string_view name, RepairFlag repair)
{
auto references = cloneStorePathSet(drv.inputSrcs);
for (auto & i : drv.inputDrvs)
@@ -73,8 +73,8 @@ StorePath writeDerivation(ref<Store> store,
/* Note that the outputs of a derivation are *not* references
(that can be missing (of course) and should not necessarily be
held during a garbage collection). */
- string suffix = name + drvExtension;
- string contents = drv.unparse(*store, false);
+ auto suffix = std::string(name) + drvExtension;
+ auto contents = drv.unparse(*store, false);
return settings.readOnlyMode
? store->computeStorePathForText(suffix, contents, references)
: store->addTextToStore(suffix, contents, references, repair);
diff --git a/src/libstore/derivations.hh b/src/libstore/derivations.hh
index c2df66229..7222d25e5 100644
--- a/src/libstore/derivations.hh
+++ b/src/libstore/derivations.hh
@@ -79,7 +79,7 @@ class Store;
/* Write a derivation to the Nix store, and return its path. */
StorePath writeDerivation(ref<Store> store,
- const Derivation & drv, const string & name, RepairFlag repair = NoRepair);
+ const Derivation & drv, std::string_view name, RepairFlag repair = NoRepair);
/* Read a derivation from a file. */
Derivation readDerivation(const Store & store, const Path & drvPath);
diff --git a/src/libstore/fetchers/parse.cc b/src/libstore/fetchers/parse.cc
index 4f7cb3c6b..a5ad14c87 100644
--- a/src/libstore/fetchers/parse.cc
+++ b/src/libstore/fetchers/parse.cc
@@ -22,8 +22,8 @@ ParsedURL parseURL(const std::string & url)
if (std::regex_match(url, match, uriRegex)) {
auto & base = match[1];
std::string scheme = match[2];
- auto authority = match[4].matched
- ? std::optional<std::string>(match[5]) : std::nullopt;
+ auto authority = match[3].matched
+ ? std::optional<std::string>(match[3]) : std::nullopt;
std::string path = match[4].matched ? match[4] : match[5];
auto & query = match[6];
auto & fragment = match[7];
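
This makes the authority test and the authority value use the same capture group (3); the old code tested whether group 4 had matched but then read group 5, which, as the following line shows, hold the path component rather than the authority.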
diff --git a/src/libstore/local-store.cc b/src/libstore/local-store.cc
index a337ad0cc..e00556645 100644
--- a/src/libstore/local-store.cc
+++ b/src/libstore/local-store.cc
@@ -540,6 +540,18 @@ void LocalStore::checkDerivationOutputs(const StorePath & drvPath, const Derivat
std::string drvName(drvPath.name());
drvName = string(drvName, 0, drvName.size() - drvExtension.size());
+ auto check = [&](const StorePath & expected, const StorePath & actual, const std::string & varName)
+ {
+ if (actual != expected)
+ throw Error("derivation '%s' has incorrect output '%s', should be '%s'",
+ printStorePath(drvPath), printStorePath(actual), printStorePath(expected));
+ auto j = drv.env.find(varName);
+ if (j == drv.env.end() || parseStorePath(j->second) != actual)
+ throw Error("derivation '%s' has incorrect environment variable '%s', should be '%s'",
+ printStorePath(drvPath), varName, printStorePath(actual));
+ };
+
+
if (drv.isFixedOutput()) {
DerivationOutputs::const_iterator out = drv.outputs.find("out");
if (out == drv.outputs.end())
@@ -547,24 +559,14 @@ void LocalStore::checkDerivationOutputs(const StorePath & drvPath, const Derivat
bool recursive; Hash h;
out->second.parseHashInfo(recursive, h);
- auto outPath = makeFixedOutputPath(recursive, h, drvName);
- StringPairs::const_iterator j = drv.env.find("out");
- if (out->second.path != outPath || j == drv.env.end() || parseStorePath(j->second) != outPath)
- throw Error("derivation '%s' has incorrect output '%s', should be '%s'",
- printStorePath(drvPath), printStorePath(out->second.path), printStorePath(outPath));
+ check(makeFixedOutputPath(recursive, h, drvName), out->second.path, "out");
}
else {
Hash h = hashDerivationModulo(*this, drv, true);
-
- for (auto & i : drv.outputs) {
- auto outPath = makeOutputPath(i.first, h, drvName);
- StringPairs::const_iterator j = drv.env.find(i.first);
- if (i.second.path != outPath || j == drv.env.end() || parseStorePath(j->second) != outPath)
- throw Error("derivation '%s' has incorrect output '%s', should be '%s'",
- printStorePath(drvPath), printStorePath(i.second.path), printStorePath(outPath));
- }
+ for (auto & i : drv.outputs)
+ check(makeOutputPath(i.first, h, drvName), i.second.path, i.first);
}
}
diff --git a/src/libstore/ssh.cc b/src/libstore/ssh.cc
index 2ee7115c5..84548a6e4 100644
--- a/src/libstore/ssh.cc
+++ b/src/libstore/ssh.cc
@@ -33,6 +33,9 @@ std::unique_ptr<SSHMaster::Connection> SSHMaster::startCommand(const std::string
out.create();
auto conn = std::make_unique<Connection>();
+ ProcessOptions options;
+ options.dieWithParent = false;
+
conn->sshPid = startProcess([&]() {
restoreSignals();
@@ -64,7 +67,7 @@ std::unique_ptr<SSHMaster::Connection> SSHMaster::startCommand(const std::string
// could not exec ssh/bash
throw SysError("unable to execute '%s'", args.front());
- });
+ }, options);
in.readSide = -1;
@@ -91,6 +94,9 @@ Path SSHMaster::startMaster()
Pipe out;
out.create();
+ ProcessOptions options;
+ options.dieWithParent = false;
+
state->sshMaster = startProcess([&]() {
restoreSignals();
@@ -110,7 +116,7 @@ Path SSHMaster::startMaster()
execvp(args.begin()->c_str(), stringsToCharPtrs(args).data());
throw SysError("unable to execute '%s'", args.front());
- });
+ }, options);
out.writeSide = -1;
diff --git a/src/nix/eval-hydra-jobs.cc b/src/nix/eval-hydra-jobs.cc
index 18a681eb4..f66fb3571 100644
--- a/src/nix/eval-hydra-jobs.cc
+++ b/src/nix/eval-hydra-jobs.cc
@@ -143,15 +143,23 @@ struct CmdEvalHydraJobs : MixJSON, MixDryRun, InstallableCommand
auto a = v->attrs->get(state->symbols.create("constituents"));
if (!a)
throw EvalError("derivation must have a ‘constituents’ attribute");
+
+
PathSet context;
state->coerceToString(*a->pos, *a->value, context, true, false);
- PathSet drvs;
for (auto & i : context)
if (i.at(0) == '!') {
size_t index = i.find("!", 1);
- drvs.insert(string(i, index + 1));
+ job["constituents"].push_back(string(i, index + 1));
}
- job["constituents"] = concatStringsSep(" ", drvs);
+
+ state->forceList(*a->value, *a->pos);
+ for (unsigned int n = 0; n < a->value->listSize(); ++n) {
+ auto v = a->value->listElems()[n];
+ state->forceValue(*v);
+ if (v->type == tString)
+ job["namedConstituents"].push_back(state->forceStringNoCtx(*v));
+ }
}
/* Register the derivation as a GC root. !!! This
@@ -204,13 +212,16 @@ struct CmdEvalHydraJobs : MixJSON, MixDryRun, InstallableCommand
void run(ref<Store> store) override
{
+ settings.requireExperimentalFeature("eval-hydra-jobs");
+
if (!gcRootsDir) warn("'--gc-roots-dir' not specified");
struct State
{
std::set<std::string> todo{""};
std::set<std::string> active;
- nlohmann::json result;
+ nlohmann::json jobs;
+ std::exception_ptr exc;
};
std::condition_variable wakeup;
@@ -239,9 +250,10 @@ struct CmdEvalHydraJobs : MixJSON, MixDryRun, InstallableCommand
{
try {
worker(*to, *from);
- } catch (Error & e) {
- printError("unexpected worker error: %s", e.msg());
- _exit(1);
+ } catch (std::exception & e) {
+ nlohmann::json err;
+ err["error"] = e.what();
+ writeLine(to->get(), err.dump());
}
},
ProcessOptions { .allowVfork = false });
@@ -255,8 +267,10 @@ struct CmdEvalHydraJobs : MixJSON, MixDryRun, InstallableCommand
if (s == "restart") {
pid = -1;
continue;
- } else if (s != "next")
- throw Error("unexpected worker request: %s", s);
+ } else if (s != "next") {
+ auto json = nlohmann::json::parse(s);
+ throw Error("worker error: %s", (std::string) json["error"]);
+ }
/* Wait for a job name to become available. */
std::string attrPath;
@@ -264,7 +278,7 @@ struct CmdEvalHydraJobs : MixJSON, MixDryRun, InstallableCommand
while (true) {
checkInterrupt();
auto state(state_.lock());
- if (state->todo.empty() && state->active.empty()) {
+ if ((state->todo.empty() && state->active.empty()) || state->exc) {
writeLine(to.get(), "exit");
return;
}
@@ -277,6 +291,8 @@ struct CmdEvalHydraJobs : MixJSON, MixDryRun, InstallableCommand
state.wait(wakeup);
}
+ Activity act(*logger, lvlInfo, actUnknown, fmt("evaluating '%s'", attrPath));
+
/* Tell the worker to evaluate it. */
writeLine(to.get(), "do " + attrPath);
@@ -289,7 +305,7 @@ struct CmdEvalHydraJobs : MixJSON, MixDryRun, InstallableCommand
if (response.find("job") != response.end()) {
auto state(state_.lock());
if (json)
- state->result[attrPath] = response["job"];
+ state->jobs[attrPath] = response["job"];
else
std::cout << fmt("%d: %d\n", attrPath, (std::string) response["job"]["drvPath"]);
}
@@ -304,7 +320,7 @@ struct CmdEvalHydraJobs : MixJSON, MixDryRun, InstallableCommand
if (response.find("error") != response.end()) {
auto state(state_.lock());
if (json)
- state->result[attrPath]["error"] = response["error"];
+ state->jobs[attrPath]["error"] = response["error"];
else
printError("error in job '%s': %s",
attrPath, (std::string) response["error"]);
@@ -319,9 +335,10 @@ struct CmdEvalHydraJobs : MixJSON, MixDryRun, InstallableCommand
wakeup.notify_all();
}
}
- } catch (Error & e) {
- printError("unexpected handler thread error: %s", e.msg());
- abort();
+ } catch (...) {
+ auto state(state_.lock());
+ state->exc = std::current_exception();
+ wakeup.notify_all();
}
};
@@ -332,7 +349,63 @@ struct CmdEvalHydraJobs : MixJSON, MixDryRun, InstallableCommand
for (auto & thread : threads)
thread.join();
- if (json) std::cout << state_.lock()->result.dump(2) << "\n";
+ auto state(state_.lock());
+
+ if (state->exc)
+ std::rethrow_exception(state->exc);
+
+        /* For aggregate jobs that have named constituents
+ (i.e. constituents that are a job name rather than a
+ derivation), look up the referenced job and add it to the
+ dependencies of the aggregate derivation. */
+ for (auto i = state->jobs.begin(); i != state->jobs.end(); ++i) {
+ auto jobName = i.key();
+ auto & job = i.value();
+
+ auto named = job.find("namedConstituents");
+ if (named == job.end()) continue;
+
+ if (dryRun) {
+ for (std::string jobName2 : *named) {
+ auto job2 = state->jobs.find(jobName2);
+ if (job2 == state->jobs.end())
+ throw Error("aggregate job '%s' references non-existent job '%s'", jobName, jobName2);
+ std::string drvPath2 = (*job2)["drvPath"];
+ job["constituents"].push_back(drvPath2);
+ }
+ } else {
+ std::string drvPath = job["drvPath"];
+ auto drv = readDerivation(*store, drvPath);
+
+ for (std::string jobName2 : *named) {
+ auto job2 = state->jobs.find(jobName2);
+ if (job2 == state->jobs.end())
+ throw Error("aggregate job '%s' references non-existent job '%s'", jobName, jobName2);
+ std::string drvPath2 = (*job2)["drvPath"];
+ auto drv2 = readDerivation(*store, drvPath2);
+ job["constituents"].push_back(drvPath2);
+ drv.inputDrvs[store->parseStorePath(drvPath2)] = {drv2.outputs.begin()->first};
+ }
+
+ std::string drvName(store->parseStorePath(drvPath).name());
+ assert(hasSuffix(drvName, drvExtension));
+ drvName.resize(drvName.size() - drvExtension.size());
+ auto h = hashDerivationModulo(*store, drv, true);
+ auto outPath = store->makeOutputPath("out", h, drvName);
+ drv.env["out"] = store->printStorePath(outPath);
+ drv.outputs.insert_or_assign("out", DerivationOutput(outPath.clone(), "", ""));
+ auto newDrvPath = store->printStorePath(writeDerivation(store, drv, drvName));
+
+ debug("rewrote aggregate derivation %s -> %s", drvPath, newDrvPath);
+
+ job["drvPath"] = newDrvPath;
+ job["outputs"]["out"] = store->printStorePath(outPath);
+ }
+
+ job.erase("namedConstituents");
+ }
+
+ if (json) std::cout << state->jobs.dump(2) << "\n";
}
};
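
Taken together, the changes to this file gate the command behind the eval-hydra-jobs experimental feature, report worker and handler failures back to the coordinator (as a JSON "error" line and via State::exc) instead of calling _exit or abort, and resolve aggregate jobs' named constituents after evaluation by rewriting each aggregate derivation to depend on the referenced jobs' derivations.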
diff --git a/src/nix/installables.cc b/src/nix/installables.cc
index 071edf432..3bf4fa8f4 100644
--- a/src/nix/installables.cc
+++ b/src/nix/installables.cc
@@ -454,7 +454,7 @@ std::vector<std::shared_ptr<Installable>> SourceExprCommand::parseInstallables(
for (auto & s : ss) {
if (hasPrefix(s, "nixpkgs.")) {
bool static warned;
- warnOnce(warned, "the syntax 'nixpkgs.<attr>' is deprecated; use 'nixpkgs:<attr>' instead");
+ warnOnce(warned, "the syntax 'nixpkgs.<attr>' is deprecated; use 'nixpkgs#<attr>' instead");
result.push_back(std::make_shared<InstallableFlake>(*this,
FlakeRef::fromAttrs({{"type", "indirect"}, {"id", "nixpkgs"}}),
Strings{"legacyPackages." + settings.thisSystem.get() + "." + std::string(s, 8)}, Strings{}));
diff --git a/src/nix/progress-bar.cc b/src/nix/progress-bar.cc
index c445f31cc..26631416c 100644
--- a/src/nix/progress-bar.cc
+++ b/src/nix/progress-bar.cc
@@ -446,7 +446,9 @@ public:
void startProgressBar(bool printBuildLogs)
{
- logger = new ProgressBar(printBuildLogs, isatty(STDERR_FILENO));
+ logger = new ProgressBar(
+ printBuildLogs,
+ isatty(STDERR_FILENO) && getEnv("TERM").value_or("dumb") != "dumb");
}
void stopProgressBar()