author     John Ericson <John.Ericson@Obsidian.Systems>    2020-07-05 21:49:01 +0000
committer  John Ericson <John.Ericson@Obsidian.Systems>    2020-07-05 21:49:01 +0000
commit     a38ab99d576bb31288edb8a68c9564d962415662 (patch)
tree       883f2b1fdaaa5e241cd71e1f786c3f2aaf1813ba
parent     8313f0e939a99b1f715695c0e798cfb368dfc1f2 (diff)
parent     14227aeb327798a1446ddde59fc561c3d2e6b7a8 (diff)
Merge remote-tracking branch 'upstream/master' into derivation-header-include-order
-rw-r--r--  .github/dependabot.yml | 6
-rw-r--r--  .github/workflows/test.yml | 2
-rw-r--r--  doc/manual/installation/installing-binary.xml | 6
-rwxr-xr-x  mk/run_test.sh | 28
-rw-r--r--  mk/tests.mk | 44
-rw-r--r--  scripts/install-multi-user.sh | 4
-rw-r--r--  scripts/install-nix-from-closure.sh | 2
-rw-r--r--  src/libexpr/attr-path.cc | 2
-rw-r--r--  src/libexpr/attr-set.hh | 2
-rw-r--r--  src/libexpr/eval-inline.hh | 4
-rw-r--r--  src/libexpr/eval.cc | 45
-rw-r--r--  src/libexpr/eval.hh | 2
-rw-r--r--  src/libexpr/nixexpr.cc | 19
-rw-r--r--  src/libexpr/nixexpr.hh | 9
-rw-r--r--  src/libexpr/parser.y | 45
-rw-r--r--  src/libexpr/primops.cc | 109
-rw-r--r--  src/libexpr/primops/context.cc | 6
-rw-r--r--  src/libexpr/primops/fetchGit.cc | 4
-rw-r--r--  src/libexpr/primops/fetchMercurial.cc | 4
-rw-r--r--  src/libexpr/primops/fetchTree.cc | 6
-rw-r--r--  src/libexpr/primops/fromTOML.cc | 2
-rw-r--r--  src/libfetchers/cache.hh | 2
-rw-r--r--  src/libfetchers/tarball.cc | 5
-rw-r--r--  src/libmain/loggers.cc | 2
-rw-r--r--  src/libmain/progress-bar.cc | 2
-rw-r--r--  src/libmain/shared.cc | 4
-rw-r--r--  src/libstore/build.cc | 23
-rw-r--r--  src/libstore/builtins/fetchurl.cc | 9
-rw-r--r--  src/libstore/content-address.cc | 85
-rw-r--r--  src/libstore/content-address.hh | 56
-rw-r--r--  src/libstore/daemon.cc | 18
-rw-r--r--  src/libstore/derivations.cc | 30
-rw-r--r--  src/libstore/derivations.hh | 11
-rw-r--r--  src/libstore/globals.cc | 2
-rw-r--r--  src/libstore/globals.hh | 4
-rw-r--r--  src/libstore/legacy-ssh-store.cc | 4
-rw-r--r--  src/libstore/local-store.cc | 33
-rw-r--r--  src/libstore/local-store.hh | 2
-rw-r--r--  src/libstore/nar-info-disk-cache.cc | 4
-rw-r--r--  src/libstore/nar-info.cc | 9
-rw-r--r--  src/libstore/path.hh | 7
-rw-r--r--  src/libstore/remote-store.cc | 42
-rw-r--r--  src/libstore/remote-store.hh | 1
-rw-r--r--  src/libstore/store-api.cc | 85
-rw-r--r--  src/libstore/store-api.hh | 37
-rw-r--r--  src/libstore/worker-protocol.hh | 6
-rw-r--r--  src/libutil/archive.cc | 2
-rw-r--r--  src/libutil/error.cc | 251
-rw-r--r--  src/libutil/error.hh | 86
-rw-r--r--  src/libutil/fmt.hh | 17
-rw-r--r--  src/libutil/hash.cc | 29
-rw-r--r--  src/libutil/hash.hh | 2
-rw-r--r--  src/libutil/logging.cc | 10
-rw-r--r--  src/libutil/logging.hh | 11
-rw-r--r--  src/libutil/tests/logging.cc | 207
-rw-r--r--  src/nix-env/nix-env.cc | 4
-rw-r--r--  src/nix-store/nix-store.cc | 4
-rw-r--r--  src/nix/add-to-store.cc | 5
-rw-r--r--  src/nix/develop.cc | 10
-rw-r--r--  src/nix/hash.cc | 1
-rw-r--r--  src/nix/make-content-addressable.cc | 5
-rw-r--r--  src/nix/path-info.cc | 2
-rw-r--r--  src/nix/repl.cc | 6
-rw-r--r--  src/nix/search.cc | 2
-rw-r--r--  src/nix/verify.cc | 2
-rw-r--r--  tests/build-hook.nix | 32
-rw-r--r--  tests/build-remote.sh | 33
-rw-r--r--  tests/common.sh.in | 6
-rw-r--r--  tests/gc-auto.sh | 51
-rw-r--r--  tests/gc-concurrent.builder.sh | 5
-rw-r--r--  tests/gc-concurrent.nix | 3
-rw-r--r--  tests/gc-concurrent.sh | 21
-rw-r--r--  tests/gc-concurrent2.builder.sh | 2
-rw-r--r--  tests/init.sh | 1
-rw-r--r--  tests/local.mk | 2
-rw-r--r--  tests/misc.sh | 11
-rw-r--r--  tests/nix-shell.sh | 7
-rw-r--r--  tests/post-hook.sh | 2
-rw-r--r--  tests/recursive.sh | 2
-rw-r--r--  tests/structured-attrs.sh | 2
80 files changed, 1124 insertions, 546 deletions
diff --git a/.github/dependabot.yml b/.github/dependabot.yml
new file mode 100644
index 000000000..5ace4600a
--- /dev/null
+++ b/.github/dependabot.yml
@@ -0,0 +1,6 @@
+version: 2
+updates:
+  - package-ecosystem: "github-actions"
+    directory: "/"
+    schedule:
+      interval: "weekly"
diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml
index 7feefc855..7755466a0 100644
--- a/.github/workflows/test.yml
+++ b/.github/workflows/test.yml
@@ -10,5 +10,5 @@ jobs:
runs-on: ${{ matrix.os }}
steps:
- uses: actions/checkout@v2
- - uses: cachix/install-nix-action@v8
+ - uses: cachix/install-nix-action@v10
- run: nix-build release.nix --arg nix '{ outPath = ./.; revCount = 123; shortRev = "abcdefgh"; }' --arg systems '[ builtins.currentSystem ]' -A installerScript -A perlBindings
diff --git a/doc/manual/installation/installing-binary.xml b/doc/manual/installation/installing-binary.xml
index d25c46b85..64c7a37fb 100644
--- a/doc/manual/installation/installing-binary.xml
+++ b/doc/manual/installation/installing-binary.xml
@@ -97,7 +97,7 @@ $ rm -rf /nix
installation on your system:
</para>
- <screen>sh &lt;(curl https://nixos.org/nix/install) --daemon</screen>
+ <screen>sh &lt;(curl -L https://nixos.org/nix/install) --daemon</screen>
<para>
The multi-user installation of Nix will create build users between
@@ -178,7 +178,7 @@ sudo rm /Library/LaunchDaemons/org.nixos.nix-daemon.plist
is a bit of a misnomer). To use this approach, just install Nix with:
</para>
- <screen>$ sh &lt;(curl https://nixos.org/nix/install) --darwin-use-unencrypted-nix-store-volume</screen>
+ <screen>$ sh &lt;(curl -L https://nixos.org/nix/install) --darwin-use-unencrypted-nix-store-volume</screen>
<para>
If you don't like the sound of this, you'll want to weigh the
@@ -429,7 +429,7 @@ LABEL=Nix\040Store /nix apfs rw,nobrowse
NixOS.org installation script:
<screen>
- sh &lt;(curl https://nixos.org/nix/install)
+ sh &lt;(curl -L https://nixos.org/nix/install)
</screen>
</para>
diff --git a/mk/run_test.sh b/mk/run_test.sh
new file mode 100755
index 000000000..6af5b070a
--- /dev/null
+++ b/mk/run_test.sh
@@ -0,0 +1,28 @@
+#!/bin/sh
+
+set -u
+
+red=""
+green=""
+yellow=""
+normal=""
+
+post_run_msg="ran test $1..."
+if [ -t 1 ]; then
+ red=""
+ green=""
+ yellow=""
+ normal=""
+fi
+(cd $(dirname $1) && env ${TESTS_ENVIRONMENT} init.sh 2>/dev/null > /dev/null)
+log="$(cd $(dirname $1) && env ${TESTS_ENVIRONMENT} $(basename $1) 2>&1)"
+status=$?
+if [ $status -eq 0 ]; then
+ echo "$post_run_msg [${green}PASS$normal]"
+elif [ $status -eq 99 ]; then
+ echo "$post_run_msg [${yellow}SKIP$normal]"
+else
+ echo "$post_run_msg [${red}FAIL$normal]"
+ echo "$log" | sed 's/^/ /'
+ exit "$status"
+fi
diff --git a/mk/tests.mk b/mk/tests.mk
index 70c30661b..2e39bb694 100644
--- a/mk/tests.mk
+++ b/mk/tests.mk
@@ -1,45 +1,15 @@
# Run program $1 as part of ‘make installcheck’.
+
+test-deps =
+
define run-install-test
- installcheck: $1
+ installcheck: $1.test
- _installcheck-list += $1
+ .PHONY: $1.test
+ $1.test: $1 $(test-deps)
+ @env TEST_NAME=$(notdir $(basename $1)) TESTS_ENVIRONMENT="$(tests-environment)" mk/run_test.sh $1
endef
-# Color code from https://unix.stackexchange.com/a/10065
-installcheck:
- @total=0; failed=0; \
- red=""; \
- green=""; \
- yellow=""; \
- normal=""; \
- if [ -t 1 ]; then \
- red=""; \
- green=""; \
- yellow=""; \
- normal=""; \
- fi; \
- for i in $(_installcheck-list); do \
- total=$$((total + 1)); \
- printf "running test $$i..."; \
- log="$$(cd $$(dirname $$i) && $(tests-environment) $$(basename $$i) 2>&1)"; \
- status=$$?; \
- if [ $$status -eq 0 ]; then \
- echo " [$${green}PASS$$normal]"; \
- elif [ $$status -eq 99 ]; then \
- echo " [$${yellow}SKIP$$normal]"; \
- else \
- echo " [$${red}FAIL$$normal]"; \
- echo "$$log" | sed 's/^/ /'; \
- failed=$$((failed + 1)); \
- fi; \
- done; \
- if [ "$$failed" != 0 ]; then \
- echo "$${red}$$failed out of $$total tests failed $$normal"; \
- exit 1; \
- else \
- echo "$${green}All tests succeeded$$normal"; \
- fi
-
.PHONY: check installcheck
diff --git a/scripts/install-multi-user.sh b/scripts/install-multi-user.sh
index 157e8ddb4..00c9d540b 100644
--- a/scripts/install-multi-user.sh
+++ b/scripts/install-multi-user.sh
@@ -526,7 +526,7 @@ This script is going to call sudo a lot. Normally, it would show you
exactly what commands it is running and why. However, the script is
run in a headless fashion, like this:
- $ curl https://nixos.org/nix/install | sh
+ $ curl -L https://nixos.org/nix/install | sh
or maybe in a CI pipeline. Because of that, we're going to skip the
verbose output in the interest of brevity.
@@ -534,7 +534,7 @@ verbose output in the interest of brevity.
If you would like to
see the output, try like this:
- $ curl -o install-nix https://nixos.org/nix/install
+ $ curl -L -o install-nix https://nixos.org/nix/install
$ sh ./install-nix
EOF
diff --git a/scripts/install-nix-from-closure.sh b/scripts/install-nix-from-closure.sh
index 826ca8b8c..5824c2217 100644
--- a/scripts/install-nix-from-closure.sh
+++ b/scripts/install-nix-from-closure.sh
@@ -113,7 +113,7 @@ if [ "$(uname -s)" = "Darwin" ]; then
(
echo ""
echo "Installing on macOS >=10.15 requires relocating the store to an apfs volume."
- echo "Use sh <(curl https://nixos.org/nix/install) --darwin-use-unencrypted-nix-store-volume or run the preparation steps manually."
+ echo "Use sh <(curl -L https://nixos.org/nix/install) --darwin-use-unencrypted-nix-store-volume or run the preparation steps manually."
echo "See https://nixos.org/nix/manual/#sect-macos-installation"
echo ""
) >&2
diff --git a/src/libexpr/attr-path.cc b/src/libexpr/attr-path.cc
index 2e2a17b14..83854df49 100644
--- a/src/libexpr/attr-path.cc
+++ b/src/libexpr/attr-path.cc
@@ -130,7 +130,7 @@ Pos findDerivationFilename(EvalState & state, Value & v, std::string what)
Symbol file = state.symbols.create(filename);
- return { file, lineno, 0 };
+ return { foFile, file, lineno, 0 };
}
diff --git a/src/libexpr/attr-set.hh b/src/libexpr/attr-set.hh
index c601d09c2..7eaa16c59 100644
--- a/src/libexpr/attr-set.hh
+++ b/src/libexpr/attr-set.hh
@@ -78,7 +78,7 @@ public:
if (!a)
throw Error({
.hint = hintfmt("attribute '%s' missing", name),
- .nixCode = NixCode { .errPos = pos }
+ .errPos = pos
});
return *a;
diff --git a/src/libexpr/eval-inline.hh b/src/libexpr/eval-inline.hh
index 3d544c903..30f6ec7db 100644
--- a/src/libexpr/eval-inline.hh
+++ b/src/libexpr/eval-inline.hh
@@ -11,7 +11,7 @@ LocalNoInlineNoReturn(void throwEvalError(const Pos & pos, const char * s))
{
throw EvalError({
.hint = hintfmt(s),
- .nixCode = NixCode { .errPos = pos }
+ .errPos = pos
});
}
@@ -25,7 +25,7 @@ LocalNoInlineNoReturn(void throwTypeError(const Pos & pos, const char * s, const
{
throw TypeError({
.hint = hintfmt(s, showType(v)),
- .nixCode = NixCode { .errPos = pos }
+ .errPos = pos
});
}
diff --git a/src/libexpr/eval.cc b/src/libexpr/eval.cc
index b90a64357..c1a9af9b2 100644
--- a/src/libexpr/eval.cc
+++ b/src/libexpr/eval.cc
@@ -529,7 +529,7 @@ LocalNoInlineNoReturn(void throwEvalError(const Pos & pos, const char * s, const
{
throw EvalError({
.hint = hintfmt(s, s2),
- .nixCode = NixCode { .errPos = pos }
+ .errPos = pos
});
}
@@ -542,7 +542,7 @@ LocalNoInlineNoReturn(void throwEvalError(const Pos & pos, const char * s, const
{
throw EvalError({
.hint = hintfmt(s, s2, s3),
- .nixCode = NixCode { .errPos = pos }
+ .errPos = pos
});
}
@@ -551,7 +551,7 @@ LocalNoInlineNoReturn(void throwEvalError(const Pos & p1, const char * s, const
// p1 is where the error occurred; p2 is a position mentioned in the message.
throw EvalError({
.hint = hintfmt(s, sym, p2),
- .nixCode = NixCode { .errPos = p1 }
+ .errPos = p1
});
}
@@ -559,7 +559,7 @@ LocalNoInlineNoReturn(void throwTypeError(const Pos & pos, const char * s))
{
throw TypeError({
.hint = hintfmt(s),
- .nixCode = NixCode { .errPos = pos }
+ .errPos = pos
});
}
@@ -572,7 +572,7 @@ LocalNoInlineNoReturn(void throwTypeError(const Pos & pos, const char * s, const
{
throw TypeError({
.hint = hintfmt(s, fun.showNamePos(), s2),
- .nixCode = NixCode { .errPos = pos }
+ .errPos = pos
});
}
@@ -580,7 +580,7 @@ LocalNoInlineNoReturn(void throwAssertionError(const Pos & pos, const char * s,
{
throw AssertionError({
.hint = hintfmt(s, s1),
- .nixCode = NixCode { .errPos = pos }
+ .errPos = pos
});
}
@@ -588,23 +588,18 @@ LocalNoInlineNoReturn(void throwUndefinedVarError(const Pos & pos, const char *
{
throw UndefinedVarError({
.hint = hintfmt(s, s1),
- .nixCode = NixCode { .errPos = pos }
+ .errPos = pos
});
}
-LocalNoInline(void addErrorPrefix(Error & e, const char * s, const string & s2))
+LocalNoInline(void addErrorTrace(Error & e, const char * s, const string & s2))
{
- e.addPrefix(format(s) % s2);
+ e.addTrace(std::nullopt, s, s2);
}
-LocalNoInline(void addErrorPrefix(Error & e, const char * s, const ExprLambda & fun, const Pos & pos))
+LocalNoInline(void addErrorTrace(Error & e, const Pos & pos, const char * s, const string & s2))
{
- e.addPrefix(format(s) % fun.showNamePos() % pos);
-}
-
-LocalNoInline(void addErrorPrefix(Error & e, const char * s, const string & s2, const Pos & pos))
-{
- e.addPrefix(format(s) % s2 % pos);
+ e.addTrace(pos, s, s2);
}
@@ -818,7 +813,7 @@ void EvalState::evalFile(const Path & path_, Value & v)
try {
eval(e, v);
} catch (Error & e) {
- addErrorPrefix(e, "while evaluating the file '%1%':\n", path2);
+ addErrorTrace(e, "while evaluating the file '%1%':", path2);
throw;
}
@@ -1068,8 +1063,8 @@ void ExprSelect::eval(EvalState & state, Env & env, Value & v)
} catch (Error & e) {
if (pos2 && pos2->file != state.sDerivationNix)
- addErrorPrefix(e, "while evaluating the attribute '%1%' at %2%:\n",
- showAttrPath(state, env, attrPath), *pos2);
+ addErrorTrace(e, *pos2, "while evaluating the attribute '%1%'",
+ showAttrPath(state, env, attrPath));
throw;
}
@@ -1237,11 +1232,15 @@ void EvalState::callFunction(Value & fun, Value & arg, Value & v, const Pos & po
/* Evaluate the body. This is conditional on showTrace, because
catching exceptions makes this function not tail-recursive. */
- if (settings.showTrace)
+ if (loggerSettings.showTrace.get())
try {
lambda.body->eval(*this, env2, v);
} catch (Error & e) {
- addErrorPrefix(e, "while evaluating %1%, called from %2%:\n", lambda, pos);
+ addErrorTrace(e, lambda.pos, "while evaluating %s",
+ (lambda.name.set()
+ ? "'" + (string) lambda.name + "'"
+ : "anonymous lambdaction"));
+ addErrorTrace(e, pos, "from call site%s", "");
throw;
}
else
@@ -1516,7 +1515,7 @@ void EvalState::forceValueDeep(Value & v)
try {
recurse(*i.value);
} catch (Error & e) {
- addErrorPrefix(e, "while evaluating the attribute '%1%' at %2%:\n", i.name, *i.pos);
+ addErrorTrace(e, *i.pos, "while evaluating the attribute '%1%'", i.name);
throw;
}
}
@@ -1936,7 +1935,7 @@ string ExternalValueBase::coerceToString(const Pos & pos, PathSet & context, boo
{
throw TypeError({
.hint = hintfmt("cannot coerce %1% to a string", showType()),
- .nixCode = NixCode { .errPos = pos }
+ .errPos = pos
});
}
diff --git a/src/libexpr/eval.hh b/src/libexpr/eval.hh
index 863365259..0d52a7f63 100644
--- a/src/libexpr/eval.hh
+++ b/src/libexpr/eval.hh
@@ -250,7 +250,7 @@ private:
friend struct ExprAttrs;
friend struct ExprLet;
- Expr * parse(const char * text, const Path & path,
+ Expr * parse(const char * text, FileOrigin origin, const Path & path,
const Path & basePath, StaticEnv & staticEnv);
public:
diff --git a/src/libexpr/nixexpr.cc b/src/libexpr/nixexpr.cc
index b4b65883d..d5698011f 100644
--- a/src/libexpr/nixexpr.cc
+++ b/src/libexpr/nixexpr.cc
@@ -197,7 +197,22 @@ std::ostream & operator << (std::ostream & str, const Pos & pos)
if (!pos)
str << "undefined position";
else
- str << (format(ANSI_BOLD "%1%" ANSI_NORMAL ":%2%:%3%") % (string) pos.file % pos.line % pos.column).str();
+ {
+ auto f = format(ANSI_BOLD "%1%" ANSI_NORMAL ":%2%:%3%");
+ switch (pos.origin) {
+ case foFile:
+ f % (string) pos.file;
+ break;
+ case foStdin:
+ case foString:
+ f % "(string)";
+ break;
+ default:
+ throw Error("unhandled Pos origin!");
+ }
+ str << (f % pos.line % pos.column).str();
+ }
+
return str;
}
@@ -270,7 +285,7 @@ void ExprVar::bindVars(const StaticEnv & env)
if (withLevel == -1)
throw UndefinedVarError({
.hint = hintfmt("undefined variable '%1%'", name),
- .nixCode = NixCode { .errPos = pos }
+ .errPos = pos
});
fromWith = true;
this->level = withLevel;
diff --git a/src/libexpr/nixexpr.hh b/src/libexpr/nixexpr.hh
index ec6fd3190..e4cbc660f 100644
--- a/src/libexpr/nixexpr.hh
+++ b/src/libexpr/nixexpr.hh
@@ -24,11 +24,12 @@ MakeError(RestrictedPathError, Error);
struct Pos
{
+ FileOrigin origin;
Symbol file;
unsigned int line, column;
- Pos() : line(0), column(0) { };
- Pos(const Symbol & file, unsigned int line, unsigned int column)
- : file(file), line(line), column(column) { };
+ Pos() : origin(foString), line(0), column(0) { };
+ Pos(FileOrigin origin, const Symbol & file, unsigned int line, unsigned int column)
+ : origin(origin), file(file), line(line), column(column) { };
operator bool() const
{
return line != 0;
@@ -238,7 +239,7 @@ struct ExprLambda : Expr
if (!arg.empty() && formals && formals->argNames.find(arg) != formals->argNames.end())
throw ParseError({
.hint = hintfmt("duplicate formal function argument '%1%'", arg),
- .nixCode = NixCode { .errPos = pos }
+ .errPos = pos
});
};
void setName(Symbol & name);
diff --git a/src/libexpr/parser.y b/src/libexpr/parser.y
index a639be64e..878f06c96 100644
--- a/src/libexpr/parser.y
+++ b/src/libexpr/parser.y
@@ -30,7 +30,8 @@ namespace nix {
SymbolTable & symbols;
Expr * result;
Path basePath;
- Symbol path;
+ Symbol file;
+ FileOrigin origin;
ErrorInfo error;
Symbol sLetBody;
ParseData(EvalState & state)
@@ -65,18 +66,17 @@ namespace nix {
static void dupAttr(const AttrPath & attrPath, const Pos & pos, const Pos & prevPos)
{
throw ParseError({
- .hint = hintfmt("attribute '%1%' already defined at %2%",
+ .hint = hintfmt("attribute '%1%' already defined at %2%",
showAttrPath(attrPath), prevPos),
- .nixCode = NixCode { .errPos = pos },
+ .errPos = pos
});
}
-
static void dupAttr(Symbol attr, const Pos & pos, const Pos & prevPos)
{
throw ParseError({
.hint = hintfmt("attribute '%1%' already defined at %2%", attr, prevPos),
- .nixCode = NixCode { .errPos = pos },
+ .errPos = pos
});
}
@@ -148,7 +148,7 @@ static void addFormal(const Pos & pos, Formals * formals, const Formal & formal)
throw ParseError({
.hint = hintfmt("duplicate formal function argument '%1%'",
formal.name),
- .nixCode = NixCode { .errPos = pos },
+ .errPos = pos
});
formals->formals.push_front(formal);
}
@@ -246,7 +246,7 @@ static Expr * stripIndentation(const Pos & pos, SymbolTable & symbols, vector<Ex
static inline Pos makeCurPos(const YYLTYPE & loc, ParseData * data)
{
- return Pos(data->path, loc.first_line, loc.first_column);
+ return Pos(data->origin, data->file, loc.first_line, loc.first_column);
}
#define CUR_POS makeCurPos(*yylocp, data)
@@ -259,7 +259,7 @@ void yyerror(YYLTYPE * loc, yyscan_t scanner, ParseData * data, const char * err
{
data->error = {
.hint = hintfmt(error),
- .nixCode = NixCode { .errPos = makeCurPos(*loc, data) }
+ .errPos = makeCurPos(*loc, data)
};
}
@@ -339,7 +339,7 @@ expr_function
{ if (!$2->dynamicAttrs.empty())
throw ParseError({
.hint = hintfmt("dynamic attributes not allowed in let"),
- .nixCode = NixCode { .errPos = CUR_POS },
+ .errPos = CUR_POS
});
$$ = new ExprLet($2, $4);
}
@@ -419,7 +419,7 @@ expr_simple
if (noURLLiterals)
throw ParseError({
.hint = hintfmt("URL literals are disabled"),
- .nixCode = NixCode { .errPos = CUR_POS }
+ .errPos = CUR_POS
});
$$ = new ExprString(data->symbols.create($1));
}
@@ -492,7 +492,7 @@ attrs
} else
throw ParseError({
.hint = hintfmt("dynamic attributes not allowed in inherit"),
- .nixCode = NixCode { .errPos = makeCurPos(@2, data) },
+ .errPos = makeCurPos(@2, data)
});
}
| { $$ = new AttrPath; }
@@ -569,13 +569,24 @@ formal
namespace nix {
-Expr * EvalState::parse(const char * text,
+Expr * EvalState::parse(const char * text, FileOrigin origin,
const Path & path, const Path & basePath, StaticEnv & staticEnv)
{
yyscan_t scanner;
ParseData data(*this);
+ data.origin = origin;
+ switch (origin) {
+ case foFile:
+ data.file = data.symbols.create(path);
+ break;
+ case foStdin:
+ case foString:
+ data.file = data.symbols.create(text);
+ break;
+ default:
+ assert(false);
+ }
data.basePath = basePath;
- data.path = data.symbols.create(path);
yylex_init(&scanner);
yy_scan_string(text, scanner);
@@ -625,13 +636,13 @@ Expr * EvalState::parseExprFromFile(const Path & path)
Expr * EvalState::parseExprFromFile(const Path & path, StaticEnv & staticEnv)
{
- return parse(readFile(path).c_str(), path, dirOf(path), staticEnv);
+ return parse(readFile(path).c_str(), foFile, path, dirOf(path), staticEnv);
}
Expr * EvalState::parseExprFromString(std::string_view s, const Path & basePath, StaticEnv & staticEnv)
{
- return parse(s.data(), "(string)", basePath, staticEnv);
+ return parse(s.data(), foString, "", basePath, staticEnv);
}
@@ -644,7 +655,7 @@ Expr * EvalState::parseExprFromString(std::string_view s, const Path & basePath)
Expr * EvalState::parseStdin()
{
//Activity act(*logger, lvlTalkative, format("parsing standard input"));
- return parseExprFromString(drainFD(0), absPath("."));
+ return parse(drainFD(0).data(), foStdin, "", absPath("."), staticBaseEnv);
}
@@ -693,7 +704,7 @@ Path EvalState::findFile(SearchPath & searchPath, const string & path, const Pos
? "cannot look up '<%s>' in pure evaluation mode (use '--impure' to override)"
: "file '%s' was not found in the Nix search path (add it using $NIX_PATH or -I)",
path),
- .nixCode = NixCode { .errPos = pos }
+ .errPos = pos
});
}
diff --git a/src/libexpr/primops.cc b/src/libexpr/primops.cc
index 16b181f3a..f9e03a09f 100644
--- a/src/libexpr/primops.cc
+++ b/src/libexpr/primops.cc
@@ -96,7 +96,7 @@ static void prim_scopedImport(EvalState & state, const Pos & pos, Value * * args
} catch (InvalidPathError & e) {
throw EvalError({
.hint = hintfmt("cannot import '%1%', since path '%2%' is not valid", path, e.path),
- .nixCode = NixCode { .errPos = pos }
+ .errPos = pos
});
}
@@ -177,7 +177,7 @@ void prim_importNative(EvalState & state, const Pos & pos, Value * * args, Value
.hint = hintfmt(
"cannot import '%1%', since path '%2%' is not valid",
path, e.path),
- .nixCode = NixCode { .errPos = pos }
+ .errPos = pos
});
}
@@ -215,7 +215,7 @@ void prim_exec(EvalState & state, const Pos & pos, Value * * args, Value & v)
if (count == 0) {
throw EvalError({
.hint = hintfmt("at least one argument to 'exec' required"),
- .nixCode = NixCode { .errPos = pos }
+ .errPos = pos
});
}
PathSet context;
@@ -230,7 +230,7 @@ void prim_exec(EvalState & state, const Pos & pos, Value * * args, Value & v)
throw EvalError({
.hint = hintfmt("cannot execute '%1%', since path '%2%' is not valid",
program, e.path),
- .nixCode = NixCode { .errPos = pos }
+ .errPos = pos
});
}
@@ -239,13 +239,13 @@ void prim_exec(EvalState & state, const Pos & pos, Value * * args, Value & v)
try {
parsed = state.parseExprFromString(output, pos.file);
} catch (Error & e) {
- e.addPrefix(fmt("While parsing the output from '%1%', at %2%\n", program, pos));
+ e.addTrace(pos, "While parsing the output from '%1%'", program);
throw;
}
try {
state.eval(parsed, v);
} catch (Error & e) {
- e.addPrefix(fmt("While evaluating the output from '%1%', at %2%\n", program, pos));
+ e.addTrace(pos, "While evaluating the output from '%1%'", program);
throw;
}
}
@@ -385,7 +385,7 @@ static void prim_genericClosure(EvalState & state, const Pos & pos, Value * * ar
if (startSet == args[0]->attrs->end())
throw EvalError({
.hint = hintfmt("attribute 'startSet' required"),
- .nixCode = NixCode { .errPos = pos }
+ .errPos = pos
});
state.forceList(*startSet->value, pos);
@@ -399,7 +399,7 @@ static void prim_genericClosure(EvalState & state, const Pos & pos, Value * * ar
if (op == args[0]->attrs->end())
throw EvalError({
.hint = hintfmt("attribute 'operator' required"),
- .nixCode = NixCode { .errPos = pos }
+ .errPos = pos
});
state.forceValue(*op->value, pos);
@@ -421,7 +421,7 @@ static void prim_genericClosure(EvalState & state, const Pos & pos, Value * * ar
if (key == e->attrs->end())
throw EvalError({
.hint = hintfmt("attribute 'key' required"),
- .nixCode = NixCode { .errPos = pos }
+ .errPos = pos
});
state.forceValue(*key->value, pos);
@@ -471,7 +471,7 @@ static void prim_addErrorContext(EvalState & state, const Pos & pos, Value * * a
v = *args[1];
} catch (Error & e) {
PathSet context;
- e.addPrefix(format("%1%\n") % state.coerceToString(pos, *args[0], context));
+ e.addTrace(std::nullopt, state.coerceToString(pos, *args[0], context));
throw;
}
}
@@ -556,14 +556,14 @@ static void prim_derivationStrict(EvalState & state, const Pos & pos, Value * *
if (attr == args[0]->attrs->end())
throw EvalError({
.hint = hintfmt("required attribute 'name' missing"),
- .nixCode = NixCode { .errPos = pos }
+ .errPos = pos
});
string drvName;
Pos & posDrvName(*attr->pos);
try {
drvName = state.forceStringNoCtx(*attr->value, pos);
} catch (Error & e) {
- e.addPrefix(fmt("while evaluating the derivation attribute 'name' at %1%:\n", posDrvName));
+ e.addTrace(posDrvName, "while evaluating the derivation attribute 'name'");
throw;
}
@@ -603,7 +603,7 @@ static void prim_derivationStrict(EvalState & state, const Pos & pos, Value * *
else
throw EvalError({
.hint = hintfmt("invalid value '%s' for 'outputHashMode' attribute", s),
- .nixCode = NixCode { .errPos = posDrvName }
+ .errPos = posDrvName
});
};
@@ -613,7 +613,7 @@ static void prim_derivationStrict(EvalState & state, const Pos & pos, Value * *
if (outputs.find(j) != outputs.end())
throw EvalError({
.hint = hintfmt("duplicate derivation output '%1%'", j),
- .nixCode = NixCode { .errPos = posDrvName }
+ .errPos = posDrvName
});
/* !!! Check whether j is a valid attribute
name. */
@@ -623,14 +623,14 @@ static void prim_derivationStrict(EvalState & state, const Pos & pos, Value * *
if (j == "drv")
throw EvalError({
.hint = hintfmt("invalid derivation output name 'drv'" ),
- .nixCode = NixCode { .errPos = posDrvName }
+ .errPos = posDrvName
});
outputs.insert(j);
}
if (outputs.empty())
throw EvalError({
.hint = hintfmt("derivation cannot have an empty set of outputs"),
- .nixCode = NixCode { .errPos = posDrvName }
+ .errPos = posDrvName
});
};
@@ -696,8 +696,9 @@ static void prim_derivationStrict(EvalState & state, const Pos & pos, Value * *
}
} catch (Error & e) {
- e.addPrefix(format("while evaluating the attribute '%1%' of the derivation '%2%' at %3%:\n")
- % key % drvName % posDrvName);
+ e.addTrace(posDrvName,
+ "while evaluating the attribute '%1%' of the derivation '%2%'",
+ key, drvName);
throw;
}
}
@@ -745,20 +746,20 @@ static void prim_derivationStrict(EvalState & state, const Pos & pos, Value * *
if (drv.builder == "")
throw EvalError({
.hint = hintfmt("required attribute 'builder' missing"),
- .nixCode = NixCode { .errPos = posDrvName }
+ .errPos = posDrvName
});
if (drv.platform == "")
throw EvalError({
.hint = hintfmt("required attribute 'system' missing"),
- .nixCode = NixCode { .errPos = posDrvName }
+ .errPos = posDrvName
});
/* Check whether the derivation name is valid. */
if (isDerivation(drvName))
throw EvalError({
.hint = hintfmt("derivation names are not allowed to end in '%s'", drvExtension),
- .nixCode = NixCode { .errPos = posDrvName }
+ .errPos = posDrvName
});
if (outputHash) {
@@ -766,7 +767,7 @@ static void prim_derivationStrict(EvalState & state, const Pos & pos, Value * *
if (outputs.size() != 1 || *(outputs.begin()) != "out")
throw Error({
.hint = hintfmt("multiple outputs are not supported in fixed-output derivations"),
- .nixCode = NixCode { .errPos = posDrvName }
+ .errPos = posDrvName
});
std::optional<HashType> ht = parseHashTypeOpt(outputHashAlgo);
@@ -776,7 +777,7 @@ static void prim_derivationStrict(EvalState & state, const Pos & pos, Value * *
if (!jsonObject) drv.env["out"] = state.store->printStorePath(outPath);
drv.outputs.insert_or_assign("out", DerivationOutput {
.path = std::move(outPath),
- .hash = DerivationOutputHash {
+ .hash = FixedOutputHash {
.method = ingestionMethod,
.hash = std::move(h),
},
@@ -795,7 +796,7 @@ static void prim_derivationStrict(EvalState & state, const Pos & pos, Value * *
drv.outputs.insert_or_assign(i,
DerivationOutput {
.path = StorePath::dummy,
- .hash = std::optional<DerivationOutputHash> {},
+ .hash = std::optional<FixedOutputHash> {},
});
}
@@ -807,7 +808,7 @@ static void prim_derivationStrict(EvalState & state, const Pos & pos, Value * *
drv.outputs.insert_or_assign(i,
DerivationOutput {
.path = std::move(outPath),
- .hash = std::optional<DerivationOutputHash>(),
+ .hash = std::optional<FixedOutputHash>(),
});
}
}
@@ -880,7 +881,7 @@ static void prim_storePath(EvalState & state, const Pos & pos, Value * * args, V
if (!state.store->isInStore(path))
throw EvalError({
.hint = hintfmt("path '%1%' is not in the Nix store", path),
- .nixCode = NixCode { .errPos = pos }
+ .errPos = pos
});
Path path2 = state.store->toStorePath(path);
if (!settings.readOnlyMode)
@@ -901,7 +902,7 @@ static void prim_pathExists(EvalState & state, const Pos & pos, Value * * args,
.hint = hintfmt(
"cannot check the existence of '%1%', since path '%2%' is not valid",
path, e.path),
- .nixCode = NixCode { .errPos = pos }
+ .errPos = pos
});
}
@@ -947,7 +948,7 @@ static void prim_readFile(EvalState & state, const Pos & pos, Value * * args, Va
} catch (InvalidPathError & e) {
throw EvalError({
.hint = hintfmt("cannot read '%1%', since path '%2%' is not valid", path, e.path),
- .nixCode = NixCode { .errPos = pos }
+ .errPos = pos
});
}
string s = readFile(state.checkSourcePath(state.toRealPath(path, context)));
@@ -978,7 +979,7 @@ static void prim_findFile(EvalState & state, const Pos & pos, Value * * args, Va
if (i == v2.attrs->end())
throw EvalError({
.hint = hintfmt("attribute 'path' missing"),
- .nixCode = NixCode { .errPos = pos }
+ .errPos = pos
});
PathSet context;
@@ -989,7 +990,7 @@ static void prim_findFile(EvalState & state, const Pos & pos, Value * * args, Va
} catch (InvalidPathError & e) {
throw EvalError({
.hint = hintfmt("cannot find '%1%', since path '%2%' is not valid", path, e.path),
- .nixCode = NixCode { .errPos = pos }
+ .errPos = pos
});
}
@@ -1009,7 +1010,7 @@ static void prim_hashFile(EvalState & state, const Pos & pos, Value * * args, Va
if (!ht)
throw Error({
.hint = hintfmt("unknown hash type '%1%'", type),
- .nixCode = NixCode { .errPos = pos }
+ .errPos = pos
});
PathSet context; // discarded
@@ -1028,7 +1029,7 @@ static void prim_readDir(EvalState & state, const Pos & pos, Value * * args, Val
} catch (InvalidPathError & e) {
throw EvalError({
.hint = hintfmt("cannot read '%1%', since path '%2%' is not valid", path, e.path),
- .nixCode = NixCode { .errPos = pos }
+ .errPos = pos
});
}
@@ -1104,7 +1105,7 @@ static void prim_toFile(EvalState & state, const Pos & pos, Value * * args, Valu
"in 'toFile': the file named '%1%' must not contain a reference "
"to a derivation but contains (%2%)",
name, path),
- .nixCode = NixCode { .errPos = pos }
+ .errPos = pos
});
refs.insert(state.store->parseStorePath(path));
}
@@ -1175,7 +1176,7 @@ static void prim_filterSource(EvalState & state, const Pos & pos, Value * * args
if (!context.empty())
throw EvalError({
.hint = hintfmt("string '%1%' cannot refer to other paths", path),
- .nixCode = NixCode { .errPos = pos }
+ .errPos = pos
});
state.forceValue(*args[0], pos);
@@ -1184,7 +1185,7 @@ static void prim_filterSource(EvalState & state, const Pos & pos, Value * * args
.hint = hintfmt(
"first argument in call to 'filterSource' is not a function but %1%",
showType(*args[0])),
- .nixCode = NixCode { .errPos = pos }
+ .errPos = pos
});
addPath(state, pos, std::string(baseNameOf(path)), path, args[0], FileIngestionMethod::Recursive, Hash(), v);
@@ -1207,7 +1208,7 @@ static void prim_path(EvalState & state, const Pos & pos, Value * * args, Value
if (!context.empty())
throw EvalError({
.hint = hintfmt("string '%1%' cannot refer to other paths", path),
- .nixCode = NixCode { .errPos = *attr.pos }
+ .errPos = *attr.pos
});
} else if (attr.name == state.sName)
name = state.forceStringNoCtx(*attr.value, *attr.pos);
@@ -1221,13 +1222,13 @@ static void prim_path(EvalState & state, const Pos & pos, Value * * args, Value
else
throw EvalError({
.hint = hintfmt("unsupported argument '%1%' to 'addPath'", attr.name),
- .nixCode = NixCode { .errPos = *attr.pos }
+ .errPos = *attr.pos
});
}
if (path.empty())
throw EvalError({
.hint = hintfmt("'path' required"),
- .nixCode = NixCode { .errPos = pos }
+ .errPos = pos
});
if (name.empty())
name = baseNameOf(path);
@@ -1288,7 +1289,7 @@ void prim_getAttr(EvalState & state, const Pos & pos, Value * * args, Value & v)
if (i == args[1]->attrs->end())
throw EvalError({
.hint = hintfmt("attribute '%1%' missing", attr),
- .nixCode = NixCode { .errPos = pos }
+ .errPos = pos
});
// !!! add to stack trace?
if (state.countCalls && i->pos) state.attrSelects[*i->pos]++;
@@ -1371,7 +1372,7 @@ static void prim_listToAttrs(EvalState & state, const Pos & pos, Value * * args,
if (j == v2.attrs->end())
throw TypeError({
.hint = hintfmt("'name' attribute missing in a call to 'listToAttrs'"),
- .nixCode = NixCode { .errPos = pos }
+ .errPos = pos
});
string name = state.forceStringNoCtx(*j->value, pos);
@@ -1381,7 +1382,7 @@ static void prim_listToAttrs(EvalState & state, const Pos & pos, Value * * args,
if (j2 == v2.attrs->end())
throw TypeError({
.hint = hintfmt("'value' attribute missing in a call to 'listToAttrs'"),
- .nixCode = NixCode { .errPos = pos }
+ .errPos = pos
});
v.attrs->push_back(Attr(sym, j2->value, j2->pos));
}
@@ -1457,7 +1458,7 @@ static void prim_functionArgs(EvalState & state, const Pos & pos, Value * * args
if (args[0]->type != tLambda)
throw TypeError({
.hint = hintfmt("'functionArgs' requires a function"),
- .nixCode = NixCode { .errPos = pos }
+ .errPos = pos
});
if (!args[0]->lambda.fun->matchAttrs) {
@@ -1513,7 +1514,7 @@ static void elemAt(EvalState & state, const Pos & pos, Value & list, int n, Valu
if (n < 0 || (unsigned int) n >= list.listSize())
throw Error({
.hint = hintfmt("list index %1% is out of bounds", n),
- .nixCode = NixCode { .errPos = pos }
+ .errPos = pos
});
state.forceValue(*list.listElems()[n], pos);
v = *list.listElems()[n];
@@ -1543,7 +1544,7 @@ static void prim_tail(EvalState & state, const Pos & pos, Value * * args, Value
if (args[0]->listSize() == 0)
throw Error({
.hint = hintfmt("'tail' called on an empty list"),
- .nixCode = NixCode { .errPos = pos }
+ .errPos = pos
});
state.mkList(v, args[0]->listSize() - 1);
@@ -1688,7 +1689,7 @@ static void prim_genList(EvalState & state, const Pos & pos, Value * * args, Val
if (len < 0)
throw EvalError({
.hint = hintfmt("cannot create list of size %1%", len),
- .nixCode = NixCode { .errPos = pos }
+ .errPos = pos
});
state.mkList(v, len);
@@ -1850,7 +1851,7 @@ static void prim_div(EvalState & state, const Pos & pos, Value * * args, Value &
if (f2 == 0)
throw EvalError({
.hint = hintfmt("division by zero"),
- .nixCode = NixCode { .errPos = pos }
+ .errPos = pos
});
if (args[0]->type == tFloat || args[1]->type == tFloat) {
@@ -1862,7 +1863,7 @@ static void prim_div(EvalState & state, const Pos & pos, Value * * args, Value &
if (i1 == std::numeric_limits<NixInt>::min() && i2 == -1)
throw EvalError({
.hint = hintfmt("overflow in integer division"),
- .nixCode = NixCode { .errPos = pos }
+ .errPos = pos
});
mkInt(v, i1 / i2);
@@ -1923,7 +1924,7 @@ static void prim_substring(EvalState & state, const Pos & pos, Value * * args, V
if (start < 0)
throw EvalError({
.hint = hintfmt("negative start position in 'substring'"),
- .nixCode = NixCode { .errPos = pos }
+ .errPos = pos
});
mkString(v, (unsigned int) start >= s.size() ? "" : string(s, start, len), context);
@@ -1946,7 +1947,7 @@ static void prim_hashString(EvalState & state, const Pos & pos, Value * * args,
if (!ht)
throw Error({
.hint = hintfmt("unknown hash type '%1%'", type),
- .nixCode = NixCode { .errPos = pos }
+ .errPos = pos
});
PathSet context; // discarded
@@ -1992,12 +1993,12 @@ void prim_match(EvalState & state, const Pos & pos, Value * * args, Value & v)
// limit is _GLIBCXX_REGEX_STATE_LIMIT for libstdc++
throw EvalError({
.hint = hintfmt("memory limit exceeded by regular expression '%s'", re),
- .nixCode = NixCode { .errPos = pos }
+ .errPos = pos
});
} else {
throw EvalError({
.hint = hintfmt("invalid regular expression '%s'", re),
- .nixCode = NixCode { .errPos = pos }
+ .errPos = pos
});
}
}
@@ -2065,12 +2066,12 @@ static void prim_split(EvalState & state, const Pos & pos, Value * * args, Value
// limit is _GLIBCXX_REGEX_STATE_LIMIT for libstdc++
throw EvalError({
.hint = hintfmt("memory limit exceeded by regular expression '%s'", re),
- .nixCode = NixCode { .errPos = pos }
+ .errPos = pos
});
} else {
throw EvalError({
.hint = hintfmt("invalid regular expression '%s'", re),
- .nixCode = NixCode { .errPos = pos }
+ .errPos = pos
});
}
}
@@ -2104,7 +2105,7 @@ static void prim_replaceStrings(EvalState & state, const Pos & pos, Value * * ar
if (args[0]->listSize() != args[1]->listSize())
throw EvalError({
.hint = hintfmt("'from' and 'to' arguments to 'replaceStrings' have different lengths"),
- .nixCode = NixCode { .errPos = pos }
+ .errPos = pos
});
vector<string> from;
diff --git a/src/libexpr/primops/context.cc b/src/libexpr/primops/context.cc
index 301e8c5dd..dbb93bae6 100644
--- a/src/libexpr/primops/context.cc
+++ b/src/libexpr/primops/context.cc
@@ -148,7 +148,7 @@ static void prim_appendContext(EvalState & state, const Pos & pos, Value * * arg
if (!state.store->isStorePath(i.name))
throw EvalError({
.hint = hintfmt("Context key '%s' is not a store path", i.name),
- .nixCode = NixCode { .errPos = *i.pos }
+ .errPos = *i.pos
});
if (!settings.readOnlyMode)
state.store->ensurePath(state.store->parseStorePath(i.name));
@@ -165,7 +165,7 @@ static void prim_appendContext(EvalState & state, const Pos & pos, Value * * arg
if (!isDerivation(i.name)) {
throw EvalError({
.hint = hintfmt("Tried to add all-outputs context of %s, which is not a derivation, to a string", i.name),
- .nixCode = NixCode { .errPos = *i.pos }
+ .errPos = *i.pos
});
}
context.insert("=" + string(i.name));
@@ -178,7 +178,7 @@ static void prim_appendContext(EvalState & state, const Pos & pos, Value * * arg
if (iter->value->listSize() && !isDerivation(i.name)) {
throw EvalError({
.hint = hintfmt("Tried to add derivation output context of %s, which is not a derivation, to a string", i.name),
- .nixCode = NixCode { .errPos = *i.pos }
+ .errPos = *i.pos
});
}
for (unsigned int n = 0; n < iter->value->listSize(); ++n) {
diff --git a/src/libexpr/primops/fetchGit.cc b/src/libexpr/primops/fetchGit.cc
index dd7229a3d..36b0db2bd 100644
--- a/src/libexpr/primops/fetchGit.cc
+++ b/src/libexpr/primops/fetchGit.cc
@@ -37,14 +37,14 @@ static void prim_fetchGit(EvalState & state, const Pos & pos, Value * * args, Va
else
throw EvalError({
.hint = hintfmt("unsupported argument '%s' to 'fetchGit'", attr.name),
- .nixCode = NixCode { .errPos = *attr.pos }
+ .errPos = *attr.pos
});
}
if (url.empty())
throw EvalError({
.hint = hintfmt("'url' argument required"),
- .nixCode = NixCode { .errPos = pos }
+ .errPos = pos
});
} else
diff --git a/src/libexpr/primops/fetchMercurial.cc b/src/libexpr/primops/fetchMercurial.cc
index 9bace8f89..59166b777 100644
--- a/src/libexpr/primops/fetchMercurial.cc
+++ b/src/libexpr/primops/fetchMercurial.cc
@@ -40,14 +40,14 @@ static void prim_fetchMercurial(EvalState & state, const Pos & pos, Value * * ar
else
throw EvalError({
.hint = hintfmt("unsupported argument '%s' to 'fetchMercurial'", attr.name),
- .nixCode = NixCode { .errPos = *attr.pos }
+ .errPos = *attr.pos
});
}
if (url.empty())
throw EvalError({
.hint = hintfmt("'url' argument required"),
- .nixCode = NixCode { .errPos = pos }
+ .errPos = pos
});
} else
diff --git a/src/libexpr/primops/fetchTree.cc b/src/libexpr/primops/fetchTree.cc
index 9be93710a..01d6ad8b0 100644
--- a/src/libexpr/primops/fetchTree.cc
+++ b/src/libexpr/primops/fetchTree.cc
@@ -68,7 +68,7 @@ static void prim_fetchTree(EvalState & state, const Pos & pos, Value * * args, V
if (!attrs.count("type"))
throw Error({
.hint = hintfmt("attribute 'type' is missing in call to 'fetchTree'"),
- .nixCode = NixCode { .errPos = pos }
+ .errPos = pos
});
input = fetchers::inputFromAttrs(attrs);
@@ -112,14 +112,14 @@ static void fetch(EvalState & state, const Pos & pos, Value * * args, Value & v,
else
throw EvalError({
.hint = hintfmt("unsupported argument '%s' to '%s'", attr.name, who),
- .nixCode = NixCode { .errPos = *attr.pos }
+ .errPos = *attr.pos
});
}
if (!url)
throw EvalError({
.hint = hintfmt("'url' argument required"),
- .nixCode = NixCode { .errPos = pos }
+ .errPos = pos
});
} else
url = state.forceStringNoCtx(*args[0], pos);
diff --git a/src/libexpr/primops/fromTOML.cc b/src/libexpr/primops/fromTOML.cc
index 7615d1379..b00827a4b 100644
--- a/src/libexpr/primops/fromTOML.cc
+++ b/src/libexpr/primops/fromTOML.cc
@@ -83,7 +83,7 @@ static void prim_fromTOML(EvalState & state, const Pos & pos, Value * * args, Va
} catch (std::runtime_error & e) {
throw EvalError({
.hint = hintfmt("while parsing a TOML string: %s", e.what()),
- .nixCode = NixCode { .errPos = pos }
+ .errPos = pos
});
}
}
diff --git a/src/libfetchers/cache.hh b/src/libfetchers/cache.hh
index d76ab1233..3db4f081c 100644
--- a/src/libfetchers/cache.hh
+++ b/src/libfetchers/cache.hh
@@ -6,6 +6,8 @@ namespace nix::fetchers {
struct Cache
{
+ virtual ~Cache() { }
+
virtual void add(
ref<Store> store,
const Attrs & inAttrs,
diff --git a/src/libfetchers/tarball.cc b/src/libfetchers/tarball.cc
index ac83d52b9..f5356f0af 100644
--- a/src/libfetchers/tarball.cc
+++ b/src/libfetchers/tarball.cc
@@ -70,7 +70,10 @@ DownloadFileResult downloadFile(
ValidPathInfo info(store->makeFixedOutputPath(FileIngestionMethod::Flat, hash, name));
info.narHash = hashString(htSHA256, *sink.s);
info.narSize = sink.s->size();
- info.ca = makeFixedOutputCA(FileIngestionMethod::Flat, hash);
+ info.ca = FixedOutputHash {
+ .method = FileIngestionMethod::Flat,
+ .hash = hash,
+ };
auto source = StringSource { *sink.s };
store->addToStore(info, source, NoRepair, NoCheckSigs);
storePath = std::move(info.path);
diff --git a/src/libmain/loggers.cc b/src/libmain/loggers.cc
index c44bb6408..fa18f991d 100644
--- a/src/libmain/loggers.cc
+++ b/src/libmain/loggers.cc
@@ -26,7 +26,7 @@ Logger * makeDefaultLogger() {
case LogFormat::rawWithLogs:
return makeSimpleLogger(true);
case LogFormat::internalJson:
- return makeJSONLogger(*makeSimpleLogger());
+ return makeJSONLogger(*makeSimpleLogger(true));
case LogFormat::bar:
return makeProgressBar();
case LogFormat::barWithLogs:
diff --git a/src/libmain/progress-bar.cc b/src/libmain/progress-bar.cc
index 95a9187de..3f7d99a1d 100644
--- a/src/libmain/progress-bar.cc
+++ b/src/libmain/progress-bar.cc
@@ -131,7 +131,7 @@ public:
auto state(state_.lock());
std::stringstream oss;
- oss << ei;
+ showErrorInfo(oss, ei, loggerSettings.showTrace.get());
log(*state, ei.level, oss.str());
}
diff --git a/src/libmain/shared.cc b/src/libmain/shared.cc
index 1cb422967..52718c231 100644
--- a/src/libmain/shared.cc
+++ b/src/libmain/shared.cc
@@ -323,10 +323,8 @@ int handleExceptions(const string & programName, std::function<void()> fun)
printError("Try '%1% --help' for more information.", programName);
return 1;
} catch (BaseError & e) {
- if (settings.showTrace && e.prefix() != "")
- printError(e.prefix());
logError(e.info());
- if (e.prefix() != "" && !settings.showTrace)
+ if (e.hasTrace() && !loggerSettings.showTrace.get())
printError("(use '--show-trace' to show detailed location information)");
return e.status;
} catch (std::bad_alloc & e) {
diff --git a/src/libstore/build.cc b/src/libstore/build.cc
index 82a2ab831..347fe1b99 100644
--- a/src/libstore/build.cc
+++ b/src/libstore/build.cc
@@ -1950,8 +1950,11 @@ void linkOrCopy(const Path & from, const Path & to)
/* Hard-linking fails if we exceed the maximum link count on a
file (e.g. 32000 of ext3), which is quite possible after a
'nix-store --optimise'. FIXME: actually, why don't we just
- bind-mount in this case? */
- if (errno != EMLINK)
+ bind-mount in this case?
+
+ It can also fail with EPERM in BeegFS v7 and earlier versions
+ which don't allow hard-links to other directories */
+ if (errno != EMLINK && errno != EPERM)
throw SysError("linking '%s' to '%s'", to, from);
copyPath(from, to);
}
@@ -2750,8 +2753,8 @@ struct RestrictedStore : public LocalFSStore
void queryReferrers(const StorePath & path, StorePathSet & referrers) override
{ }
- StorePathSet queryDerivationOutputs(const StorePath & path) override
- { throw Error("queryDerivationOutputs"); }
+ OutputPathMap queryDerivationOutputMap(const StorePath & path) override
+ { throw Error("queryDerivationOutputMap"); }
std::optional<StorePath> queryPathFromHashPart(const std::string & hashPart) override
{ throw Error("queryPathFromHashPart"); }
@@ -3714,7 +3717,7 @@ void DerivationGoal::registerOutputs()
/* Check that fixed-output derivations produced the right
outputs (i.e., the content hash should match the specified
hash). */
- std::string ca;
+ std::optional<ContentAddress> ca;
if (fixedOutput) {
@@ -3764,7 +3767,10 @@ void DerivationGoal::registerOutputs()
else
assert(worker.store.parseStorePath(path) == dest);
- ca = makeFixedOutputCA(i.second.hash->method, h2);
+ ca = FixedOutputHash {
+ .method = i.second.hash->method,
+ .hash = h2,
+ };
}
/* Get rid of all weird permissions. This also checks that
@@ -3837,7 +3843,10 @@ void DerivationGoal::registerOutputs()
info.ca = ca;
worker.store.signPathInfo(info);
- if (!info.references.empty()) info.ca.clear();
+ if (!info.references.empty()) {
+ // FIXME don't we have an experimental feature for fixed output with references?
+ info.ca = {};
+ }
infos.emplace(i.first, std::move(info));
}
diff --git a/src/libstore/builtins/fetchurl.cc b/src/libstore/builtins/fetchurl.cc
index 1cfe4a46a..e630cf6f1 100644
--- a/src/libstore/builtins/fetchurl.cc
+++ b/src/libstore/builtins/fetchurl.cc
@@ -58,13 +58,16 @@ void builtinFetchurl(const BasicDerivation & drv, const std::string & netrcData)
}
};
+ /* We always have one output, and if it's a fixed-output derivation (as
+ checked below) it must be the only output */
+ auto & output = drv.outputs.begin()->second;
+
/* Try the hashed mirrors first. */
- if (getAttr("outputHashMode") == "flat")
+ if (output.hash && output.hash->method == FileIngestionMethod::Flat)
for (auto hashedMirror : settings.hashedMirrors.get())
try {
if (!hasSuffix(hashedMirror, "/")) hashedMirror += '/';
- auto ht = parseHashTypeOpt(getAttr("outputHashAlgo"));
- auto h = Hash(getAttr("outputHash"), ht);
+ auto & h = output.hash->hash;
fetch(hashedMirror + printHashType(*h.type) + "/" + h.to_string(Base16, false));
return;
} catch (Error & e) {
diff --git a/src/libstore/content-address.cc b/src/libstore/content-address.cc
new file mode 100644
index 000000000..3d753836f
--- /dev/null
+++ b/src/libstore/content-address.cc
@@ -0,0 +1,85 @@
+#include "content-address.hh"
+
+namespace nix {
+
+std::string FixedOutputHash::printMethodAlgo() const {
+ return makeFileIngestionPrefix(method) + printHashType(*hash.type);
+}
+
+std::string makeFileIngestionPrefix(const FileIngestionMethod m) {
+ switch (m) {
+ case FileIngestionMethod::Flat:
+ return "";
+ case FileIngestionMethod::Recursive:
+ return "r:";
+ default:
+ throw Error("impossible, caught both cases");
+ }
+}
+
+std::string makeFixedOutputCA(FileIngestionMethod method, const Hash & hash)
+{
+ return "fixed:"
+ + makeFileIngestionPrefix(method)
+ + hash.to_string(Base32, true);
+}
+
+// FIXME Put this somewhere?
+template<class... Ts> struct overloaded : Ts... { using Ts::operator()...; };
+template<class... Ts> overloaded(Ts...) -> overloaded<Ts...>;
+
+std::string renderContentAddress(ContentAddress ca) {
+ return std::visit(overloaded {
+ [](TextHash th) {
+ return "text:" + th.hash.to_string(Base32, true);
+ },
+ [](FixedOutputHash fsh) {
+ return makeFixedOutputCA(fsh.method, fsh.hash);
+ }
+ }, ca);
+}
+
+ContentAddress parseContentAddress(std::string_view rawCa) {
+ auto prefixSeparator = rawCa.find(':');
+ if (prefixSeparator != string::npos) {
+ auto prefix = string(rawCa, 0, prefixSeparator);
+ if (prefix == "text") {
+ auto hashTypeAndHash = rawCa.substr(prefixSeparator+1, string::npos);
+ Hash hash = Hash(string(hashTypeAndHash));
+ if (*hash.type != htSHA256) {
+ throw Error("parseContentAddress: the text hash should have type SHA256");
+ }
+ return TextHash { hash };
+ } else if (prefix == "fixed") {
+ // This has to be an inverse of makeFixedOutputCA
+ auto methodAndHash = rawCa.substr(prefixSeparator+1, string::npos);
+ if (methodAndHash.substr(0,2) == "r:") {
+ std::string_view hashRaw = methodAndHash.substr(2,string::npos);
+ return FixedOutputHash {
+ .method = FileIngestionMethod::Recursive,
+ .hash = Hash(string(hashRaw)),
+ };
+ } else {
+ std::string_view hashRaw = methodAndHash;
+ return FixedOutputHash {
+ .method = FileIngestionMethod::Flat,
+ .hash = Hash(string(hashRaw)),
+ };
+ }
+ } else {
+ throw Error("parseContentAddress: format not recognized; has to be text or fixed");
+ }
+ } else {
+ throw Error("Not a content address because it lacks an appropriate prefix");
+ }
+};
+
+std::optional<ContentAddress> parseContentAddressOpt(std::string_view rawCaOpt) {
+ return rawCaOpt == "" ? std::optional<ContentAddress> {} : parseContentAddress(rawCaOpt);
+};
+
+std::string renderContentAddress(std::optional<ContentAddress> ca) {
+ return ca ? renderContentAddress(*ca) : "";
+}
+
+}
diff --git a/src/libstore/content-address.hh b/src/libstore/content-address.hh
new file mode 100644
index 000000000..ba4797f5b
--- /dev/null
+++ b/src/libstore/content-address.hh
@@ -0,0 +1,56 @@
+#pragma once
+
+#include <variant>
+#include "hash.hh"
+
+namespace nix {
+
+enum struct FileIngestionMethod : uint8_t {
+ Flat = false,
+ Recursive = true
+};
+
+struct TextHash {
+ Hash hash;
+};
+
+/// Pair of a hash, and how the file system was ingested
+struct FixedOutputHash {
+ FileIngestionMethod method;
+ Hash hash;
+ std::string printMethodAlgo() const;
+};
+
+/*
+ We've accumulated several types of content-addressed paths over the years;
+ fixed-output derivations support multiple hash algorithms and serialisation
+ methods (flat file vs NAR). Thus, ‘ca’ has one of the following forms:
+
+ * ‘text:sha256:<sha256 hash of file contents>’: For paths
+ computed by makeTextPath() / addTextToStore().
+
+ * ‘fixed:<r?>:<ht>:<h>’: For paths computed by
+ makeFixedOutputPath() / addToStore().
+*/
+typedef std::variant<
+ TextHash, // for paths computed by makeTextPath() / addTextToStore
+ FixedOutputHash // for path computed by makeFixedOutputPath
+> ContentAddress;
+
+/* Compute the prefix to the hash algorithm which indicates how the files were
+ ingested. */
+std::string makeFileIngestionPrefix(const FileIngestionMethod m);
+
+/* Compute the content-addressability assertion (ValidPathInfo::ca)
+ for paths created by makeFixedOutputPath() / addToStore(). */
+std::string makeFixedOutputCA(FileIngestionMethod method, const Hash & hash);
+
+std::string renderContentAddress(ContentAddress ca);
+
+std::string renderContentAddress(std::optional<ContentAddress> ca);
+
+ContentAddress parseContentAddress(std::string_view rawCa);
+
+std::optional<ContentAddress> parseContentAddressOpt(std::string_view rawCaOpt);
+
+}
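
The header above only declares the new content-address API; as a quick orientation (not part of the commit), the sketch below round-trips a string of the ‘fixed:<r?>:<ht>:<h>’ form through parseContentAddress() and renderContentAddress() as defined in content-address.cc above. The base-32 digest is made up for illustration, and the snippet assumes it is compiled against libstore so that content-address.hh and hash.hh resolve.

#include "content-address.hh"
#include <iostream>

int main()
{
    using namespace nix;

    // A "fixed:r:..." string such as makeFixedOutputCA() produces for a
    // recursively (NAR) ingested path; the digest here is hypothetical.
    std::string rendered =
        "fixed:r:sha256:1b4sb93wp679q4zx9k1ignby1yna3z7c4c2ri3wphylbc2dwsys0";

    // parseContentAddress() yields the std::variant<TextHash, FixedOutputHash>
    // declared above; renderContentAddress() is its inverse.
    ContentAddress ca = parseContentAddress(rendered);
    std::cout << renderContentAddress(ca) << std::endl;

    return 0;
}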
diff --git a/src/libstore/daemon.cc b/src/libstore/daemon.cc
index e370e278c..a8fb62e0a 100644
--- a/src/libstore/daemon.cc
+++ b/src/libstore/daemon.cc
@@ -78,10 +78,10 @@ struct TunnelLogger : public Logger
if (ei.level > verbosity) return;
std::stringstream oss;
- oss << ei;
+ showErrorInfo(oss, ei, false);
StringSink buf;
- buf << STDERR_NEXT << oss.str() << "\n"; // (fs.s + "\n");
+ buf << STDERR_NEXT << oss.str() << "\n";
enqueueMsg(*buf.s);
}
@@ -347,6 +347,15 @@ static void performOp(TunnelLogger * logger, ref<Store> store,
break;
}
+ case wopQueryDerivationOutputMap: {
+ auto path = store->parseStorePath(readString(from));
+ logger->startWork();
+ OutputPathMap outputs = store->queryDerivationOutputMap(path);
+ logger->stopWork();
+ writeOutputPathMap(*store, to, outputs);
+ break;
+ }
+
case wopQueryDeriver: {
auto path = store->parseStorePath(readString(from));
logger->startWork();
@@ -652,7 +661,7 @@ static void performOp(TunnelLogger * logger, ref<Store> store,
if (GET_PROTOCOL_MINOR(clientVersion) >= 16) {
to << info->ultimate
<< info->sigs
- << info->ca;
+ << renderContentAddress(info->ca);
}
} else {
assert(GET_PROTOCOL_MINOR(clientVersion) >= 17);
@@ -710,7 +719,8 @@ static void performOp(TunnelLogger * logger, ref<Store> store,
info.references = readStorePaths<StorePathSet>(*store, from);
from >> info.registrationTime >> info.narSize >> info.ultimate;
info.sigs = readStrings<StringSet>(from);
- from >> info.ca >> repair >> dontCheckSigs;
+ info.ca = parseContentAddressOpt(readString(from));
+ from >> repair >> dontCheckSigs;
if (!trusted && dontCheckSigs)
dontCheckSigs = false;
if (!trusted)
diff --git a/src/libstore/derivations.cc b/src/libstore/derivations.cc
index 9f2c9b2cb..f0f5c95bf 100644
--- a/src/libstore/derivations.cc
+++ b/src/libstore/derivations.cc
@@ -7,11 +7,6 @@
namespace nix {
-std::string DerivationOutputHash::printMethodAlgo() const {
- return makeFileIngestionPrefix(method) + printHashType(*hash.type);
-}
-
-
const StorePath & BasicDerivation::findOutput(const string & id) const
{
auto i = outputs.find(id);
@@ -112,7 +107,7 @@ static DerivationOutput parseDerivationOutput(const Store & store, istringstream
expect(str, ","); const auto hash = parseString(str);
expect(str, ")");
- std::optional<DerivationOutputHash> fsh;
+ std::optional<FixedOutputHash> fsh;
if (hashAlgo != "") {
auto method = FileIngestionMethod::Flat;
if (string(hashAlgo, 0, 2) == "r:") {
@@ -120,7 +115,7 @@ static DerivationOutput parseDerivationOutput(const Store & store, istringstream
hashAlgo = string(hashAlgo, 2);
}
const HashType hashType = parseHashType(hashAlgo);
- fsh = DerivationOutputHash {
+ fsh = FixedOutputHash {
.method = std::move(method),
.hash = Hash(hash, hashType),
};
@@ -380,17 +375,17 @@ static DerivationOutput readDerivationOutput(Source & in, const Store & store)
{
auto path = store.parseStorePath(readString(in));
auto hashAlgo = readString(in);
- const auto hash = readString(in);
+ auto hash = readString(in);
- std::optional<DerivationOutputHash> fsh;
+ std::optional<FixedOutputHash> fsh;
if (hashAlgo != "") {
auto method = FileIngestionMethod::Flat;
if (string(hashAlgo, 0, 2) == "r:") {
method = FileIngestionMethod::Recursive;
hashAlgo = string(hashAlgo, 2);
}
- const HashType hashType = parseHashType(hashAlgo);
- fsh = DerivationOutputHash {
+ auto hashType = parseHashType(hashAlgo);
+ fsh = FixedOutputHash {
.method = std::move(method),
.hash = Hash(hash, hashType),
};
@@ -439,11 +434,16 @@ Source & readDerivation(Source & in, const Store & store, BasicDerivation & drv)
void writeDerivation(Sink & out, const Store & store, const BasicDerivation & drv)
{
out << drv.outputs.size();
- for (auto & i : drv.outputs)
+ for (auto & i : drv.outputs) {
out << i.first
- << store.printStorePath(i.second.path)
- << i.second.hash->printMethodAlgo()
- << i.second.hash->hash.to_string(Base16, false);
+ << store.printStorePath(i.second.path);
+ if (i.second.hash) {
+ out << i.second.hash->printMethodAlgo()
+ << i.second.hash->hash.to_string(Base16, false);
+ } else {
+ out << "" << "";
+ }
+ }
writeStorePaths(store, out, drv.inputSrcs);
out << drv.platform << drv.builder << drv.args;
out << drv.env.size();
diff --git a/src/libstore/derivations.hh b/src/libstore/derivations.hh
index d09db53f2..6a195d314 100644
--- a/src/libstore/derivations.hh
+++ b/src/libstore/derivations.hh
@@ -3,6 +3,7 @@
#include "path.hh"
#include "types.hh"
#include "hash.hh"
+#include "content-address.hh"
#include <map>
@@ -12,18 +13,10 @@ namespace nix {
/* Abstract syntax of derivations. */
-/// Pair of a hash, and how the file system was ingested
-struct DerivationOutputHash {
- FileIngestionMethod method;
- Hash hash;
- std::string printMethodAlgo() const;
-};
-
struct DerivationOutput
{
StorePath path;
- std::optional<DerivationOutputHash> hash; /* hash used for expected hash computation */
- void parseHashInfo(FileIngestionMethod & recursive, Hash & hash) const;
+ std::optional<FixedOutputHash> hash; /* hash used for expected hash computation */
};
typedef std::map<string, DerivationOutput> DerivationOutputs;
diff --git a/src/libstore/globals.cc b/src/libstore/globals.cc
index bee94cbd8..683fa5196 100644
--- a/src/libstore/globals.cc
+++ b/src/libstore/globals.cc
@@ -35,7 +35,7 @@ Settings::Settings()
, nixLibexecDir(canonPath(getEnv("NIX_LIBEXEC_DIR").value_or(NIX_LIBEXEC_DIR)))
, nixBinDir(canonPath(getEnv("NIX_BIN_DIR").value_or(NIX_BIN_DIR)))
, nixManDir(canonPath(NIX_MAN_DIR))
- , nixDaemonSocketFile(canonPath(nixStateDir + DEFAULT_SOCKET_PATH))
+ , nixDaemonSocketFile(canonPath(getEnv("NIX_DAEMON_SOCKET_PATH").value_or(nixStateDir + DEFAULT_SOCKET_PATH)))
{
buildUsersGroup = getuid() == 0 ? "nixbld" : "";
lockCPU = getEnv("NIX_AFFINITY_HACK") == "1";
diff --git a/src/libstore/globals.hh b/src/libstore/globals.hh
index 2fbcafff8..58cf08763 100644
--- a/src/libstore/globals.hh
+++ b/src/libstore/globals.hh
@@ -196,10 +196,6 @@ public:
/* Whether to lock the Nix client and worker to the same CPU. */
bool lockCPU;
- /* Whether to show a stack trace if Nix evaluation fails. */
- Setting<bool> showTrace{this, false, "show-trace",
- "Whether to show a stack trace on evaluation errors."};
-
Setting<SandboxMode> sandboxMode{this,
#if __linux__
smEnabled
diff --git a/src/libstore/legacy-ssh-store.cc b/src/libstore/legacy-ssh-store.cc
index 45c70fad6..5657aa593 100644
--- a/src/libstore/legacy-ssh-store.cc
+++ b/src/libstore/legacy-ssh-store.cc
@@ -114,7 +114,7 @@ struct LegacySSHStore : public Store
if (GET_PROTOCOL_MINOR(conn->remoteVersion) >= 4) {
auto s = readString(conn->from);
info->narHash = s.empty() ? Hash() : Hash(s);
- conn->from >> info->ca;
+ info->ca = parseContentAddressOpt(readString(conn->from));
info->sigs = readStrings<StringSet>(conn->from);
}
@@ -146,7 +146,7 @@ struct LegacySSHStore : public Store
<< info.narSize
<< info.ultimate
<< info.sigs
- << info.ca;
+ << renderContentAddress(info.ca);
try {
copyNAR(source, conn->to);
} catch (...) {
diff --git a/src/libstore/local-store.cc b/src/libstore/local-store.cc
index c6b55ff7c..eed225349 100644
--- a/src/libstore/local-store.cc
+++ b/src/libstore/local-store.cc
@@ -580,7 +580,7 @@ void LocalStore::checkDerivationOutputs(const StorePath & drvPath, const Derivat
uint64_t LocalStore::addValidPath(State & state,
const ValidPathInfo & info, bool checkOutputs)
{
- if (info.ca != "" && !info.isContentAddressed(*this))
+ if (info.ca.has_value() && !info.isContentAddressed(*this))
throw Error("cannot add path '%s' to the Nix store because it claims to be content-addressed but isn't",
printStorePath(info.path));
@@ -592,7 +592,7 @@ uint64_t LocalStore::addValidPath(State & state,
(info.narSize, info.narSize != 0)
(info.ultimate ? 1 : 0, info.ultimate)
(concatStringsSep(" ", info.sigs), !info.sigs.empty())
- (info.ca, !info.ca.empty())
+ (renderContentAddress(info.ca), (bool) info.ca)
.exec();
uint64_t id = sqlite3_last_insert_rowid(state.db);
@@ -666,7 +666,7 @@ void LocalStore::queryPathInfoUncached(const StorePath & path,
if (s) info->sigs = tokenizeString<StringSet>(s, " ");
s = (const char *) sqlite3_column_text(state->stmtQueryPathInfo, 7);
- if (s) info->ca = s;
+ if (s) info->ca = parseContentAddressOpt(s);
/* Get the references. */
auto useQueryReferences(state->stmtQueryReferences.use()(info->id));
@@ -689,7 +689,7 @@ void LocalStore::updatePathInfo(State & state, const ValidPathInfo & info)
(info.narHash.to_string(Base16, true))
(info.ultimate ? 1 : 0, info.ultimate)
(concatStringsSep(" ", info.sigs), !info.sigs.empty())
- (info.ca, !info.ca.empty())
+ (renderContentAddress(info.ca), (bool) info.ca)
(printStorePath(info.path))
.exec();
}
@@ -774,17 +774,20 @@ StorePathSet LocalStore::queryValidDerivers(const StorePath & path)
}
-StorePathSet LocalStore::queryDerivationOutputs(const StorePath & path)
+OutputPathMap LocalStore::queryDerivationOutputMap(const StorePath & path)
{
- return retrySQLite<StorePathSet>([&]() {
+ return retrySQLite<OutputPathMap>([&]() {
auto state(_state.lock());
auto useQueryDerivationOutputs(state->stmtQueryDerivationOutputs.use()
(queryValidPathId(*state, path)));
- StorePathSet outputs;
+ OutputPathMap outputs;
while (useQueryDerivationOutputs.next())
- outputs.insert(parseStorePath(useQueryDerivationOutputs.getStr(1)));
+ outputs.emplace(
+ useQueryDerivationOutputs.getStr(0),
+ parseStorePath(useQueryDerivationOutputs.getStr(1))
+ );
return outputs;
});
@@ -985,15 +988,15 @@ void LocalStore::addToStore(const ValidPathInfo & info, Source & source,
deletePath(realPath);
- if (info.ca != "" &&
- !((hasPrefix(info.ca, "text:") && !info.references.count(info.path))
- || info.references.empty()))
+ // text hashing has long been allowed to have non-self-references because it is used for drv files.
+ bool refersToSelf = info.references.count(info.path) > 0;
+ if (info.ca.has_value() && !info.references.empty() && !(std::holds_alternative<TextHash>(*info.ca) && !refersToSelf))
settings.requireExperimentalFeature("ca-references");
/* While restoring the path from the NAR, compute the hash
of the NAR. */
std::unique_ptr<AbstractHashSink> hashSink;
- if (info.ca == "" || !info.references.count(info.path))
+ if (!info.ca.has_value() || !info.references.count(info.path))
hashSink = std::make_unique<HashSink>(htSHA256);
else
hashSink = std::make_unique<HashModuloSink>(htSHA256, std::string(info.path.hashPart()));
@@ -1079,7 +1082,7 @@ StorePath LocalStore::addToStoreFromDump(const string & dump, const string & nam
ValidPathInfo info(dstPath);
info.narHash = hash.first;
info.narSize = hash.second;
- info.ca = makeFixedOutputCA(method, h);
+ info.ca = FixedOutputHash { .method = method, .hash = h };
registerValidPath(info);
}
@@ -1143,7 +1146,7 @@ StorePath LocalStore::addTextToStore(const string & name, const string & s,
info.narHash = narHash;
info.narSize = sink.s->size();
info.references = references;
- info.ca = "text:" + hash.to_string(Base32, true);
+ info.ca = TextHash { .hash = hash };
registerValidPath(info);
}
@@ -1254,7 +1257,7 @@ bool LocalStore::verifyStore(bool checkContents, RepairFlag repair)
printMsg(lvlTalkative, "checking contents of '%s'", printStorePath(i));
std::unique_ptr<AbstractHashSink> hashSink;
- if (info->ca == "" || !info->references.count(info->path))
+ if (!info->ca || !info->references.count(info->path))
hashSink = std::make_unique<HashSink>(*info->narHash.type);
else
hashSink = std::make_unique<HashModuloSink>(*info->narHash.type, std::string(info->path.hashPart()));
diff --git a/src/libstore/local-store.hh b/src/libstore/local-store.hh
index 872500957..03837bc5f 100644
--- a/src/libstore/local-store.hh
+++ b/src/libstore/local-store.hh
@@ -130,7 +130,7 @@ public:
StorePathSet queryValidDerivers(const StorePath & path) override;
- StorePathSet queryDerivationOutputs(const StorePath & path) override;
+ OutputPathMap queryDerivationOutputMap(const StorePath & path) override;
std::optional<StorePath> queryPathFromHashPart(const std::string & hashPart) override;
diff --git a/src/libstore/nar-info-disk-cache.cc b/src/libstore/nar-info-disk-cache.cc
index 552970248..012dea6ea 100644
--- a/src/libstore/nar-info-disk-cache.cc
+++ b/src/libstore/nar-info-disk-cache.cc
@@ -203,7 +203,7 @@ public:
narInfo->deriver = StorePath(queryNAR.getStr(9));
for (auto & sig : tokenizeString<Strings>(queryNAR.getStr(10), " "))
narInfo->sigs.insert(sig);
- narInfo->ca = queryNAR.getStr(11);
+ narInfo->ca = parseContentAddressOpt(queryNAR.getStr(11));
return {oValid, narInfo};
});
@@ -237,7 +237,7 @@ public:
(concatStringsSep(" ", info->shortRefs()))
(info->deriver ? std::string(info->deriver->to_string()) : "", (bool) info->deriver)
(concatStringsSep(" ", info->sigs))
- (info->ca)
+ (renderContentAddress(info->ca))
(time(0)).exec();
} else {
diff --git a/src/libstore/nar-info.cc b/src/libstore/nar-info.cc
index bb4448c90..04550ed97 100644
--- a/src/libstore/nar-info.cc
+++ b/src/libstore/nar-info.cc
@@ -67,8 +67,9 @@ NarInfo::NarInfo(const Store & store, const std::string & s, const std::string &
else if (name == "Sig")
sigs.insert(value);
else if (name == "CA") {
- if (!ca.empty()) corrupt();
- ca = value;
+ if (ca) corrupt();
+ // FIXME: allow blank ca or require skipping field?
+ ca = parseContentAddressOpt(value);
}
pos = eol + 1;
@@ -104,8 +105,8 @@ std::string NarInfo::to_string(const Store & store) const
for (auto sig : sigs)
res += "Sig: " + sig + "\n";
- if (!ca.empty())
- res += "CA: " + ca + "\n";
+ if (ca)
+ res += "CA: " + renderContentAddress(*ca) + "\n";
return res;
}
diff --git a/src/libstore/path.hh b/src/libstore/path.hh
index aaebd3ec3..e43a8b50c 100644
--- a/src/libstore/path.hh
+++ b/src/libstore/path.hh
@@ -1,5 +1,6 @@
#pragma once
+#include "content-address.hh"
#include "types.hh"
namespace nix {
@@ -61,15 +62,11 @@ public:
typedef std::set<StorePath> StorePathSet;
typedef std::vector<StorePath> StorePaths;
+typedef std::map<string, StorePath> OutputPathMap;
/* Extension of derivations in the Nix store. */
const std::string drvExtension = ".drv";
-enum struct FileIngestionMethod : uint8_t {
- Flat = false,
- Recursive = true
-};
-
struct StorePathWithOutputs
{
StorePath path;
diff --git a/src/libstore/remote-store.cc b/src/libstore/remote-store.cc
index f5f2ab7fd..118aadf7e 100644
--- a/src/libstore/remote-store.cc
+++ b/src/libstore/remote-store.cc
@@ -8,6 +8,7 @@
#include "derivations.hh"
#include "pool.hh"
#include "finally.hh"
+#include "logging.hh"
#include <sys/types.h>
#include <sys/stat.h>
@@ -38,6 +39,29 @@ void writeStorePaths(const Store & store, Sink & out, const StorePathSet & paths
out << store.printStorePath(i);
}
+std::map<string, StorePath> readOutputPathMap(const Store & store, Source & from)
+{
+ std::map<string, StorePath> pathMap;
+ auto rawInput = readStrings<Strings>(from);
+ if (rawInput.size() % 2)
+ throw Error("got an odd number of elements from the daemon when trying to read a output path map");
+ auto curInput = rawInput.begin();
+ while (curInput != rawInput.end()) {
+ auto thisKey = *curInput++;
+ auto thisValue = *curInput++;
+ pathMap.emplace(thisKey, store.parseStorePath(thisValue));
+ }
+ return pathMap;
+}
+
+void writeOutputPathMap(const Store & store, Sink & out, const std::map<string, StorePath> & pathMap)
+{
+ out << 2*pathMap.size();
+ for (auto & i : pathMap) {
+ out << i.first;
+ out << store.printStorePath(i.second);
+ }
+}
/* TODO: Separate these store impls into different files, give them better names */
RemoteStore::RemoteStore(const Params & params)
@@ -197,7 +221,7 @@ void RemoteStore::setOptions(Connection & conn)
overrides.erase(settings.maxSilentTime.name);
overrides.erase(settings.buildCores.name);
overrides.erase(settings.useSubstitutes.name);
- overrides.erase(settings.showTrace.name);
+ overrides.erase(loggerSettings.showTrace.name);
conn.to << overrides.size();
for (auto & i : overrides)
conn.to << i.first << i.second.value;
@@ -381,7 +405,7 @@ void RemoteStore::queryPathInfoUncached(const StorePath & path,
if (GET_PROTOCOL_MINOR(conn->daemonVersion) >= 16) {
conn->from >> info->ultimate;
info->sigs = readStrings<StringSet>(conn->from);
- conn->from >> info->ca;
+ info->ca = parseContentAddressOpt(readString(conn->from));
}
}
callback(std::move(info));
@@ -412,12 +436,24 @@ StorePathSet RemoteStore::queryValidDerivers(const StorePath & path)
StorePathSet RemoteStore::queryDerivationOutputs(const StorePath & path)
{
auto conn(getConnection());
+ if (GET_PROTOCOL_MINOR(conn->daemonVersion) >= 0x16) {
+ return Store::queryDerivationOutputs(path);
+ }
conn->to << wopQueryDerivationOutputs << printStorePath(path);
conn.processStderr();
return readStorePaths<StorePathSet>(*this, conn->from);
}
+OutputPathMap RemoteStore::queryDerivationOutputMap(const StorePath & path)
+{
+ auto conn(getConnection());
+ conn->to << wopQueryDerivationOutputMap << printStorePath(path);
+ conn.processStderr();
+ return readOutputPathMap(*this, conn->from);
+
+}
+
std::optional<StorePath> RemoteStore::queryPathFromHashPart(const std::string & hashPart)
{
auto conn(getConnection());
@@ -465,7 +501,7 @@ void RemoteStore::addToStore(const ValidPathInfo & info, Source & source,
<< info.narHash.to_string(Base16, false);
writeStorePaths(*this, conn->to, info.references);
conn->to << info.registrationTime << info.narSize
- << info.ultimate << info.sigs << info.ca
+ << info.ultimate << info.sigs << renderContentAddress(info.ca)
<< repair << !checkSigs;
bool tunnel = GET_PROTOCOL_MINOR(conn->daemonVersion) >= 21;
if (!tunnel) copyNAR(source, conn->to);
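
The output map added above travels over the wire as one flat string list, a count of 2 * map size followed by alternating output names and printed store paths, which readOutputPathMap then re-pairs (rejecting an odd element count). A standalone sketch of that encoding using plain std types; the names here are illustrative, not part of the Nix API:

    #include <cassert>
    #include <map>
    #include <stdexcept>
    #include <string>
    #include <vector>

    // Flatten a name->path map into the alternating-string form used on the wire.
    std::vector<std::string> flattenOutputMap(const std::map<std::string, std::string> & m)
    {
        std::vector<std::string> flat;
        for (auto & [name, path] : m) {
            flat.push_back(name);
            flat.push_back(path);
        }
        return flat; // the wire prefix would be flat.size(), i.e. 2 * m.size()
    }

    // Re-pair the flat list; an odd element count is a protocol error.
    std::map<std::string, std::string> unflattenOutputMap(const std::vector<std::string> & flat)
    {
        if (flat.size() % 2) throw std::runtime_error("odd number of elements in output path map");
        std::map<std::string, std::string> m;
        for (size_t i = 0; i + 1 < flat.size(); i += 2)
            m.emplace(flat[i], flat[i + 1]);
        return m;
    }

    int main()
    {
        std::map<std::string, std::string> outputs{
            {"out", "/nix/store/aaa-hello"},
            {"dev", "/nix/store/bbb-hello-dev"}};
        assert(unflattenOutputMap(flattenOutputMap(outputs)) == outputs);
    }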
diff --git a/src/libstore/remote-store.hh b/src/libstore/remote-store.hh
index 80c8e9f11..fb2052752 100644
--- a/src/libstore/remote-store.hh
+++ b/src/libstore/remote-store.hh
@@ -51,6 +51,7 @@ public:
StorePathSet queryDerivationOutputs(const StorePath & path) override;
+ OutputPathMap queryDerivationOutputMap(const StorePath & path) override;
std::optional<StorePath> queryPathFromHashPart(const std::string & hashPart) override;
StorePathSet querySubstitutablePaths(const StorePathSet & paths) override;
diff --git a/src/libstore/store-api.cc b/src/libstore/store-api.cc
index cffa40f8c..3cb3bae4f 100644
--- a/src/libstore/store-api.cc
+++ b/src/libstore/store-api.cc
@@ -242,6 +242,16 @@ bool Store::PathInfoCacheValue::isKnownNow()
return std::chrono::steady_clock::now() < time_point + ttl;
}
+StorePathSet Store::queryDerivationOutputs(const StorePath & path)
+{
+ auto outputMap = this->queryDerivationOutputMap(path);
+ StorePathSet outputPaths;
+ for (auto & i: outputMap) {
+ outputPaths.emplace(std::move(i.second));
+ }
+ return outputPaths;
+}
+
bool Store::isValidPath(const StorePath & storePath)
{
std::string hashPart(storePath.hashPart());
@@ -471,8 +481,8 @@ void Store::pathInfoToJSON(JSONPlaceholder & jsonOut, const StorePathSet & store
jsonRefs.elem(printStorePath(ref));
}
- if (info->ca != "")
- jsonPath.attr("ca", info->ca);
+ if (info->ca)
+ jsonPath.attr("ca", renderContentAddress(info->ca));
std::pair<uint64_t, uint64_t> closureSizes;
@@ -757,41 +767,35 @@ void ValidPathInfo::sign(const Store & store, const SecretKey & secretKey)
sigs.insert(secretKey.signDetached(fingerprint(store)));
}
+// FIXME Put this somewhere?
+template<class... Ts> struct overloaded : Ts... { using Ts::operator()...; };
+template<class... Ts> overloaded(Ts...) -> overloaded<Ts...>;
bool ValidPathInfo::isContentAddressed(const Store & store) const
{
- auto warn = [&]() {
- logWarning(
- ErrorInfo{
- .name = "Path not content-addressed",
- .hint = hintfmt("path '%s' claims to be content-addressed but isn't", store.printStorePath(path))
- });
- };
+ if (! ca) return false;
- if (hasPrefix(ca, "text:")) {
- Hash hash(ca.substr(5));
- if (store.makeTextPath(path.name(), hash, references) == path)
- return true;
- else
- warn();
- }
-
- else if (hasPrefix(ca, "fixed:")) {
- FileIngestionMethod recursive { ca.compare(6, 2, "r:") == 0 };
- Hash hash(ca.substr(recursive == FileIngestionMethod::Recursive ? 8 : 6));
- auto refs = references;
- bool hasSelfReference = false;
- if (refs.count(path)) {
- hasSelfReference = true;
- refs.erase(path);
+ auto caPath = std::visit(overloaded {
+ [&](TextHash th) {
+ return store.makeTextPath(path.name(), th.hash, references);
+ },
+ [&](FixedOutputHash fsh) {
+ auto refs = references;
+ bool hasSelfReference = false;
+ if (refs.count(path)) {
+ hasSelfReference = true;
+ refs.erase(path);
+ }
+ return store.makeFixedOutputPath(fsh.method, fsh.hash, path.name(), refs, hasSelfReference);
}
- if (store.makeFixedOutputPath(recursive, hash, path.name(), refs, hasSelfReference) == path)
- return true;
- else
- warn();
- }
+ }, *ca);
+
+ bool res = caPath == path;
+
+ if (!res)
+ printError("warning: path '%s' claims to be content-addressed but isn't", store.printStorePath(path));
- return false;
+ return res;
}
@@ -822,25 +826,6 @@ Strings ValidPathInfo::shortRefs() const
}
-std::string makeFileIngestionPrefix(const FileIngestionMethod m) {
- switch (m) {
- case FileIngestionMethod::Flat:
- return "";
- case FileIngestionMethod::Recursive:
- return "r:";
- default:
- throw Error("impossible, caught both cases");
- }
-}
-
-std::string makeFixedOutputCA(FileIngestionMethod method, const Hash & hash)
-{
- return "fixed:"
- + makeFileIngestionPrefix(method)
- + hash.to_string(Base32, true);
-}
-
-
Derivation Store::derivationFromPath(const StorePath & drvPath)
{
ensurePath(drvPath);
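
The 'overloaded' helper marked "FIXME Put this somewhere?" above is the usual C++17 idiom for assembling a std::visit visitor out of lambdas. A minimal self-contained illustration of the same trick, using stand-in structs rather than the real TextHash/FixedOutputHash types:

    #include <iostream>
    #include <string>
    #include <variant>

    template<class... Ts> struct overloaded : Ts... { using Ts::operator()...; };
    template<class... Ts> overloaded(Ts...) -> overloaded<Ts...>;   // deduction guide (C++17)

    struct TextLike  { std::string hash; };
    struct FixedLike { bool recursive; std::string hash; };
    using CALike = std::variant<TextLike, FixedLike>;

    std::string render(const CALike & ca)
    {
        return std::visit(overloaded {
            [](const TextLike & t)  { return "text:" + t.hash; },
            [](const FixedLike & f) { return std::string("fixed:") + (f.recursive ? "r:" : "") + f.hash; },
        }, ca);
    }

    int main()
    {
        std::cout << render(TextLike{"sha256-abc"}) << "\n"
                  << render(FixedLike{true, "sha256-def"}) << "\n";
    }

The explicit deduction guide is only needed before C++20, which added class template argument deduction for aggregates.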
diff --git a/src/libstore/store-api.hh b/src/libstore/store-api.hh
index dc47f4d6e..b122e05d6 100644
--- a/src/libstore/store-api.hh
+++ b/src/libstore/store-api.hh
@@ -2,6 +2,7 @@
#include "path.hh"
#include "hash.hh"
+#include "content-address.hh"
#include "serialise.hh"
#include "crypto.hh"
#include "lru-cache.hh"
@@ -18,6 +19,7 @@
#include <memory>
#include <string>
#include <chrono>
+#include <variant>
namespace nix {
@@ -109,7 +111,6 @@ struct SubstitutablePathInfo
typedef std::map<StorePath, SubstitutablePathInfo> SubstitutablePathInfos;
-
struct ValidPathInfo
{
StorePath path;
@@ -138,21 +139,11 @@ struct ValidPathInfo
that a particular output path was produced by a derivation; the
path then implies the contents.)
- Ideally, the content-addressability assertion would just be a
- Boolean, and the store path would be computed from
- the name component, ‘narHash’ and ‘references’. However,
- 1) we've accumulated several types of content-addressed paths
- over the years; and 2) fixed-output derivations support
- multiple hash algorithms and serialisation methods (flat file
- vs NAR). Thus, ‘ca’ has one of the following forms:
-
- * ‘text:sha256:<sha256 hash of file contents>’: For paths
- computed by makeTextPath() / addTextToStore().
-
- * ‘fixed:<r?>:<ht>:<h>’: For paths computed by
- makeFixedOutputPath() / addToStore().
+ Ideally, the content-addressability assertion would just be a Boolean,
+ and the store path would be computed from the name component, ‘narHash’
+ and ‘references’. However, we support many types of content addresses.
*/
- std::string ca;
+ std::optional<ContentAddress> ca;
bool operator == (const ValidPathInfo & i) const
{
@@ -427,8 +418,11 @@ public:
virtual StorePathSet queryValidDerivers(const StorePath & path) { return {}; };
/* Query the outputs of the derivation denoted by `path'. */
- virtual StorePathSet queryDerivationOutputs(const StorePath & path)
- { unsupported("queryDerivationOutputs"); }
+ virtual StorePathSet queryDerivationOutputs(const StorePath & path);
+
+ /* Query the mapping outputName=>outputPath for the given derivation */
+ virtual OutputPathMap queryDerivationOutputMap(const StorePath & path)
+ { unsupported("queryDerivationOutputMap"); }
/* Query the full store path given the hash part of a valid store
path, or empty if the path doesn't exist. */
@@ -837,15 +831,6 @@ std::optional<ValidPathInfo> decodeValidPathInfo(
std::istream & str,
bool hashGiven = false);
-/* Compute the prefix to the hash algorithm which indicates how the files were
- ingested. */
-std::string makeFileIngestionPrefix(const FileIngestionMethod m);
-
-/* Compute the content-addressability assertion (ValidPathInfo::ca)
- for paths created by makeFixedOutputPath() / addToStore(). */
-std::string makeFixedOutputCA(FileIngestionMethod method, const Hash & hash);
-
-
/* Split URI into protocol+hierarchy part and its parameter set. */
std::pair<std::string, Store::Params> splitUriAndParams(const std::string & uri);
diff --git a/src/libstore/worker-protocol.hh b/src/libstore/worker-protocol.hh
index ac42457fc..8b538f6da 100644
--- a/src/libstore/worker-protocol.hh
+++ b/src/libstore/worker-protocol.hh
@@ -6,7 +6,7 @@ namespace nix {
#define WORKER_MAGIC_1 0x6e697863
#define WORKER_MAGIC_2 0x6478696f
-#define PROTOCOL_VERSION 0x115
+#define PROTOCOL_VERSION 0x116
#define GET_PROTOCOL_MAJOR(x) ((x) & 0xff00)
#define GET_PROTOCOL_MINOR(x) ((x) & 0x00ff)
@@ -30,7 +30,7 @@ typedef enum {
wopSetOptions = 19,
wopCollectGarbage = 20,
wopQuerySubstitutablePathInfo = 21,
- wopQueryDerivationOutputs = 22,
+ wopQueryDerivationOutputs = 22, // obsolete
wopQueryAllValidPaths = 23,
wopQueryFailedPaths = 24,
wopClearFailedPaths = 25,
@@ -49,6 +49,7 @@ typedef enum {
wopNarFromPath = 38,
wopAddToStoreNar = 39,
wopQueryMissing = 40,
+ wopQueryDerivationOutputMap = 41,
} WorkerOp;
@@ -69,5 +70,6 @@ template<class T> T readStorePaths(const Store & store, Source & from);
void writeStorePaths(const Store & store, Sink & out, const StorePathSet & paths);
+void writeOutputPathMap(const Store & store, Sink & out, const OutputPathMap & paths);
}
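
For context on the version bump: the minor protocol version is just the low byte, so 0x115 to 0x116 takes it from 0x15 (21) to 0x16 (22), the value the new client-side check in remote-store.cc compares against before preferring wopQueryDerivationOutputMap. A trivial standalone check of that arithmetic; the function names are stand-ins for the macros above:

    #include <cassert>

    constexpr unsigned getProtocolMajor(unsigned x) { return x & 0xff00; }
    constexpr unsigned getProtocolMinor(unsigned x) { return x & 0x00ff; }

    int main()
    {
        static_assert(getProtocolMinor(0x115) == 0x15); // 21: older daemons
        static_assert(getProtocolMinor(0x116) == 0x16); // 22: daemons that understand wopQueryDerivationOutputMap
        assert(getProtocolMajor(0x116) == 0x100);
    }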
diff --git a/src/libutil/archive.cc b/src/libutil/archive.cc
index 6a8484705..51c88537e 100644
--- a/src/libutil/archive.cc
+++ b/src/libutil/archive.cc
@@ -262,7 +262,7 @@ static void parse(ParseSink & sink, Source & source, const Path & path)
names[name] = 0;
}
} else if (s == "node") {
- if (s.empty()) throw badArchive("entry name missing");
+ if (name.empty()) throw badArchive("entry name missing");
parse(sink, source, path + "/" + name);
} else
throw badArchive("unknown field " + s);
diff --git a/src/libutil/error.cc b/src/libutil/error.cc
index 0fad9ae42..a4ee7afc2 100644
--- a/src/libutil/error.cc
+++ b/src/libutil/error.cc
@@ -7,14 +7,11 @@
namespace nix {
-
const std::string nativeSystem = SYSTEM;
-// addPrefix is used for show-trace. Strings added with addPrefix
-// will print ahead of the error itself.
-BaseError & BaseError::addPrefix(const FormatOrString & fs)
+BaseError & BaseError::addTrace(std::optional<ErrPos> e, hintformat hint)
{
- prefix_ = fs.s + prefix_;
+ err.traces.push_front(Trace { .pos = e, .hint = hint});
return *this;
}
@@ -28,7 +25,7 @@ const string& BaseError::calcWhat() const
err.name = sname();
std::ostringstream oss;
- oss << err;
+ showErrorInfo(oss, err, false);
what_ = oss.str();
return *what_;
@@ -56,28 +53,114 @@ string showErrPos(const ErrPos &errPos)
}
}
-// if nixCode contains lines of code, print them to the ostream, indicating the error column.
-void printCodeLines(std::ostream &out, const string &prefix, const NixCode &nixCode)
+std::optional<LinesOfCode> getCodeLines(const ErrPos &errPos)
+{
+ if (errPos.line <= 0)
+ return std::nullopt;
+
+ if (errPos.origin == foFile) {
+ LinesOfCode loc;
+ try {
+ AutoCloseFD fd = open(errPos.file.c_str(), O_RDONLY | O_CLOEXEC);
+ if (!fd) {
+ logError(SysError("opening file '%1%'", errPos.file).info());
+ return std::nullopt;
+ }
+ else
+ {
+ // count the newlines.
+ int count = 0;
+ string line;
+ int pl = errPos.line - 1;
+ do
+ {
+ line = readLine(fd.get());
+ ++count;
+ if (count < pl)
+ {
+ ;
+ }
+ else if (count == pl) {
+ loc.prevLineOfCode = line;
+ } else if (count == pl + 1) {
+ loc.errLineOfCode = line;
+ } else if (count == pl + 2) {
+ loc.nextLineOfCode = line;
+ break;
+ }
+ } while (true);
+ return loc;
+ }
+ }
+ catch (EndOfFile &eof) {
+ if (loc.errLineOfCode.has_value())
+ return loc;
+ else
+ return std::nullopt;
+ }
+ catch (std::exception &e) {
+ printError("error reading nix file: %s\n%s", errPos.file, e.what());
+ return std::nullopt;
+ }
+ } else {
+ std::istringstream iss(errPos.file);
+ // count the newlines.
+ int count = 0;
+ string line;
+ int pl = errPos.line - 1;
+
+ LinesOfCode loc;
+
+ do
+ {
+ std::getline(iss, line);
+ ++count;
+ if (count < pl)
+ {
+ ;
+ }
+ else if (count == pl) {
+ loc.prevLineOfCode = line;
+ } else if (count == pl + 1) {
+ loc.errLineOfCode = line;
+ } else if (count == pl + 2) {
+ loc.nextLineOfCode = line;
+ break;
+ }
+
+ if (!iss.good())
+ break;
+ } while (true);
+
+ return loc;
+ }
+}
+
+// print lines of code to the ostream, indicating the error column.
+void printCodeLines(std::ostream &out,
+ const string &prefix,
+ const ErrPos &errPos,
+ const LinesOfCode &loc)
{
// previous line of code.
- if (nixCode.prevLineOfCode.has_value()) {
+ if (loc.prevLineOfCode.has_value()) {
out << std::endl
<< fmt("%1% %|2$5d|| %3%",
- prefix,
- (nixCode.errPos.line - 1),
- *nixCode.prevLineOfCode);
+ prefix,
+ (errPos.line - 1),
+ *loc.prevLineOfCode);
}
- if (nixCode.errLineOfCode.has_value()) {
+ if (loc.errLineOfCode.has_value()) {
// line of code containing the error.
out << std::endl
<< fmt("%1% %|2$5d|| %3%",
- prefix,
- (nixCode.errPos.line),
- *nixCode.errLineOfCode);
+ prefix,
+ (errPos.line),
+ *loc.errLineOfCode);
// error arrows for the column range.
- if (nixCode.errPos.column > 0) {
- int start = nixCode.errPos.column;
+ if (errPos.column > 0) {
+ int start = errPos.column;
std::string spaces;
for (int i = 0; i < start; ++i) {
spaces.append(" ");
@@ -87,23 +170,49 @@ void printCodeLines(std::ostream &out, const string &prefix, const NixCode &nixC
out << std::endl
<< fmt("%1% |%2%" ANSI_RED "%3%" ANSI_NORMAL,
- prefix,
- spaces,
- arrows);
+ prefix,
+ spaces,
+ arrows);
}
}
// next line of code.
- if (nixCode.nextLineOfCode.has_value()) {
+ if (loc.nextLineOfCode.has_value()) {
out << std::endl
<< fmt("%1% %|2$5d|| %3%",
- prefix,
- (nixCode.errPos.line + 1),
- *nixCode.nextLineOfCode);
+ prefix,
+ (errPos.line + 1),
+ *loc.nextLineOfCode);
+ }
+}
+
+void printAtPos(const string &prefix, const ErrPos &pos, std::ostream &out)
+{
+ if (pos)
+ {
+ switch (pos.origin) {
+ case foFile: {
+ out << prefix << ANSI_BLUE << "at: " << ANSI_YELLOW << showErrPos(pos) <<
+ ANSI_BLUE << " in file: " << ANSI_NORMAL << pos.file;
+ break;
+ }
+ case foString: {
+ out << prefix << ANSI_BLUE << "at: " << ANSI_YELLOW << showErrPos(pos) <<
+ ANSI_BLUE << " from string" << ANSI_NORMAL;
+ break;
+ }
+ case foStdin: {
+ out << prefix << ANSI_BLUE << "at: " << ANSI_YELLOW << showErrPos(pos) <<
+ ANSI_BLUE << " from stdin" << ANSI_NORMAL;
+ break;
+ }
+ default:
+ throw Error("invalid FileOrigin in errPos");
+ }
}
}
-std::ostream& operator<<(std::ostream &out, const ErrorInfo &einfo)
+std::ostream& showErrorInfo(std::ostream &out, const ErrorInfo &einfo, bool showTrace)
{
auto errwidth = std::max<size_t>(getWindowSize().second, 20);
string prefix = "";
@@ -158,8 +267,12 @@ std::ostream& operator<<(std::ostream &out, const ErrorInfo &einfo)
}
}
- auto ndl = prefix.length() + levelString.length() + 3 + einfo.name.length() + einfo.programName.value_or("").length();
- auto dashwidth = ndl > (errwidth - 3) ? 3 : errwidth - ndl;
+ auto ndl = prefix.length()
+ + filterANSIEscapes(levelString, true).length()
+ + 7
+ + einfo.name.length()
+ + einfo.programName.value_or("").length();
+ auto dashwidth = std::max<int>(errwidth - ndl, 3);
std::string dashes(dashwidth, '-');
@@ -179,16 +292,9 @@ std::ostream& operator<<(std::ostream &out, const ErrorInfo &einfo)
einfo.programName.value_or(""));
bool nl = false; // intersperse newline between sections.
- if (einfo.nixCode.has_value()) {
- if (einfo.nixCode->errPos.file != "") {
- // filename, line, column.
- out << std::endl << fmt("%1%in file: " ANSI_BLUE "%2% %3%" ANSI_NORMAL,
- prefix,
- einfo.nixCode->errPos.file,
- showErrPos(einfo.nixCode->errPos));
- } else {
- out << std::endl << fmt("%1%from command line argument", prefix);
- }
+ if (einfo.errPos.has_value() && (*einfo.errPos)) {
+ out << prefix << std::endl;
+ printAtPos(prefix, *einfo.errPos, out);
nl = true;
}
@@ -200,12 +306,16 @@ std::ostream& operator<<(std::ostream &out, const ErrorInfo &einfo)
nl = true;
}
- // lines of code.
- if (einfo.nixCode.has_value() && einfo.nixCode->errLineOfCode.has_value()) {
- if (nl)
- out << std::endl << prefix;
- printCodeLines(out, prefix, *einfo.nixCode);
- nl = true;
+ if (einfo.errPos.has_value() && (*einfo.errPos)) {
+ auto loc = getCodeLines(*einfo.errPos);
+
+ // lines of code.
+ if (loc.has_value()) {
+ if (nl)
+ out << std::endl << prefix;
+ printCodeLines(out, prefix, *einfo.errPos, *loc);
+ nl = true;
+ }
}
// hint
@@ -216,6 +326,59 @@ std::ostream& operator<<(std::ostream &out, const ErrorInfo &einfo)
nl = true;
}
+ // traces
+ if (showTrace && !einfo.traces.empty())
+ {
+ const string tracetitle(" show-trace ");
+
+ int fill = errwidth - tracetitle.length();
+ int lw = 0;
+ int rw = 0;
+ const int min_dashes = 3;
+ if (fill > min_dashes * 2) {
+ if (fill % 2 != 0) {
+ lw = fill / 2;
+ rw = lw + 1;
+ }
+ else
+ {
+ lw = rw = fill / 2;
+ }
+ }
+ else
+ lw = rw = min_dashes;
+
+ if (nl)
+ out << std::endl << prefix;
+
+ out << ANSI_BLUE << std::string(lw, '-') << tracetitle << std::string(rw, '-') << ANSI_NORMAL;
+
+ for (auto iter = einfo.traces.rbegin(); iter != einfo.traces.rend(); ++iter)
+ {
+ try {
+ out << std::endl << prefix;
+ out << ANSI_BLUE << "trace: " << ANSI_NORMAL << iter->hint.str();
+
+ nl = true;
+ if (*iter->pos) {
+ auto pos = iter->pos.value();
+ out << std::endl << prefix;
+
+ printAtPos(prefix, pos, out);
+ auto loc = getCodeLines(pos);
+ if (loc.has_value())
+ {
+ out << std::endl << prefix;
+ printCodeLines(out, prefix, pos, *loc);
+ out << std::endl << prefix;
+ }
+ }
+ } catch(const std::bad_optional_access& e) {
+ out << iter->hint.str() << std::endl;
+ }
+ }
+ }
+
return out;
}
}
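
getCodeLines above duplicates the same three-line-window logic in its file branch (readLine over a file descriptor) and its string branch (std::getline). That window logic, reduced to a standalone function over a std::string; a sketch, not the Nix implementation:

    #include <iostream>
    #include <optional>
    #include <sstream>
    #include <string>

    struct LinesOfCode {
        std::optional<std::string> prevLineOfCode, errLineOfCode, nextLineOfCode;
    };

    // Collect the line at errLine (1-based) plus its neighbours, as getCodeLines does for string input.
    LinesOfCode window(const std::string & source, int errLine)
    {
        LinesOfCode loc;
        std::istringstream iss(source);
        std::string line;
        for (int count = 1; std::getline(iss, line); ++count) {
            if (count == errLine - 1) loc.prevLineOfCode = line;
            else if (count == errLine) loc.errLineOfCode = line;
            else if (count == errLine + 1) { loc.nextLineOfCode = line; break; }
        }
        return loc;
    }

    int main()
    {
        auto loc = window("previous line of code\nthis is the problem line of code\nnext line of code\n", 2);
        std::cout << *loc.prevLineOfCode << "\n" << *loc.errLineOfCode << "\n" << *loc.nextLineOfCode << "\n";
    }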
diff --git a/src/libutil/error.hh b/src/libutil/error.hh
index 1e6102ce1..1b0fb43b8 100644
--- a/src/libutil/error.hh
+++ b/src/libutil/error.hh
@@ -25,20 +25,20 @@ namespace nix {
/*
-This file defines two main structs/classes used in nix error handling.
+ This file defines two main structs/classes used in nix error handling.
-ErrorInfo provides a standard payload of error information, with conversion to string
-happening in the logger rather than at the call site.
+ ErrorInfo provides a standard payload of error information, with conversion to string
+ happening in the logger rather than at the call site.
-BaseError is the ancestor of nix specific exceptions (and Interrupted), and contains
-an ErrorInfo.
+ BaseError is the ancestor of nix specific exceptions (and Interrupted), and contains
+ an ErrorInfo.
-ErrorInfo structs are sent to the logger as part of an exception, or directly with the
-logError or logWarning macros.
+ ErrorInfo structs are sent to the logger as part of an exception, or directly with the
+ logError or logWarning macros.
-See the error-demo.cc program for usage examples.
+ See the error-demo.cc program for usage examples.
-*/
+ */
typedef enum {
lvlError = 0,
@@ -50,11 +50,25 @@ typedef enum {
lvlVomit
} Verbosity;
+typedef enum {
+ foFile,
+ foStdin,
+ foString
+} FileOrigin;
+
+// the lines of code surrounding an error.
+struct LinesOfCode {
+ std::optional<string> prevLineOfCode;
+ std::optional<string> errLineOfCode;
+ std::optional<string> nextLineOfCode;
+};
+
// ErrPos indicates the location of an error in a nix file.
struct ErrPos {
int line = 0;
int column = 0;
string file;
+ FileOrigin origin;
operator bool() const
{
@@ -65,9 +79,14 @@ struct ErrPos {
template <class P>
ErrPos& operator=(const P &pos)
{
+ origin = pos.origin;
line = pos.line;
column = pos.column;
- file = pos.file;
+ // is file symbol null?
+ if (pos.file.set())
+ file = pos.file;
+ else
+ file = "";
return *this;
}
@@ -78,11 +97,9 @@ struct ErrPos {
}
};
-struct NixCode {
- ErrPos errPos;
- std::optional<string> prevLineOfCode;
- std::optional<string> errLineOfCode;
- std::optional<string> nextLineOfCode;
+struct Trace {
+ std::optional<ErrPos> pos;
+ hintformat hint;
};
struct ErrorInfo {
@@ -90,19 +107,19 @@ struct ErrorInfo {
string name;
string description;
std::optional<hintformat> hint;
- std::optional<NixCode> nixCode;
+ std::optional<ErrPos> errPos;
+ std::list<Trace> traces;
static std::optional<string> programName;
};
-std::ostream& operator<<(std::ostream &out, const ErrorInfo &einfo);
+std::ostream& showErrorInfo(std::ostream &out, const ErrorInfo &einfo, bool showTrace);
/* BaseError should generally not be caught, as it has Interrupted as
a subclass. Catch Error instead. */
class BaseError : public std::exception
{
protected:
- string prefix_; // used for location traces etc.
mutable ErrorInfo err;
mutable std::optional<string> what_;
@@ -113,23 +130,23 @@ public:
template<typename... Args>
BaseError(unsigned int status, const Args & ... args)
- : err { .level = lvlError,
- .hint = hintfmt(args...)
- }
+ : err {.level = lvlError,
+ .hint = hintfmt(args...)
+ }
, status(status)
{ }
template<typename... Args>
BaseError(const std::string & fs, const Args & ... args)
- : err { .level = lvlError,
- .hint = hintfmt(fs, args...)
- }
+ : err {.level = lvlError,
+ .hint = hintfmt(fs, args...)
+ }
{ }
BaseError(hintformat hint)
- : err { .level = lvlError,
- .hint = hint
- }
+ : err {.level = lvlError,
+ .hint = hint
+ }
{ }
BaseError(ErrorInfo && e)
@@ -150,10 +167,17 @@ public:
#endif
const string & msg() const { return calcWhat(); }
- const string & prefix() const { return prefix_; }
- BaseError & addPrefix(const FormatOrString & fs);
-
const ErrorInfo & info() { calcWhat(); return err; }
+
+ template<typename... Args>
+ BaseError & addTrace(std::optional<ErrPos> e, const string &fs, const Args & ... args)
+ {
+ return addTrace(e, hintfmt(fs, args...));
+ }
+
+ BaseError & addTrace(std::optional<ErrPos> e, hintformat hint);
+
+ bool hasTrace() const { return !err.traces.empty(); }
};
#define MakeError(newClass, superClass) \
@@ -173,7 +197,7 @@ public:
template<typename... Args>
SysError(const Args & ... args)
- :Error("")
+ : Error("")
{
errNo = errno;
auto hf = hintfmt(args...);
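
The practical effect of replacing addPrefix() with addTrace() is that context accumulates as structured trace entries instead of strings glued onto a prefix; the call sites changed later in this commit (nix-env.cc, search.cc) all use the same catch/annotate/rethrow shape. A self-contained sketch of that pattern with a stand-in exception type, not the real BaseError:

    #include <iostream>
    #include <list>
    #include <stdexcept>
    #include <string>

    // Stand-in for BaseError::addTrace(): context is kept as discrete entries,
    // newest first, rather than being prepended to a single prefix string.
    struct TracedError : std::runtime_error {
        std::list<std::string> traces;
        using std::runtime_error::runtime_error;
        TracedError & addTrace(std::string hint) { traces.push_front(std::move(hint)); return *this; }
    };

    void failingStep() { throw TracedError("assertion failed at (2:13)"); }

    int main()
    {
        try {
            try {
                failingStep();
            } catch (TracedError & e) {
                // same catch/annotate/rethrow shape as the nix-env.cc hunk further down
                e.addTrace("while evaluating the attribute 'foo'");
                throw;
            }
        } catch (TracedError & e) {
            std::cout << e.what() << "\n";
            for (auto & t : e.traces) std::cout << "trace: " << t << "\n";
        }
    }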
diff --git a/src/libutil/fmt.hh b/src/libutil/fmt.hh
index 12ab9c407..a39de041f 100644
--- a/src/libutil/fmt.hh
+++ b/src/libutil/fmt.hh
@@ -1,6 +1,7 @@
#pragma once
#include <boost/format.hpp>
+#include <boost/algorithm/string/replace.hpp>
#include <string>
#include "ansicolor.hh"
@@ -103,7 +104,9 @@ class hintformat
public:
hintformat(const string &format) :fmt(format)
{
- fmt.exceptions(boost::io::all_error_bits ^ boost::io::too_many_args_bit);
+ fmt.exceptions(boost::io::all_error_bits ^
+ boost::io::too_many_args_bit ^
+ boost::io::too_few_args_bit);
}
hintformat(const hintformat &hf)
@@ -117,6 +120,13 @@ public:
return *this;
}
+ template<class T>
+ hintformat& operator%(const normaltxt<T> &value)
+ {
+ fmt % value.value;
+ return *this;
+ }
+
std::string str() const
{
return fmt.str();
@@ -136,4 +146,9 @@ inline hintformat hintfmt(const std::string & fs, const Args & ... args)
return f;
}
+inline hintformat hintfmt(std::string plain_string)
+{
+ // we won't be receiving any args in this case, so just print the original string
+ return hintfmt("%s", normaltxt(plain_string));
+}
}
diff --git a/src/libutil/hash.cc b/src/libutil/hash.cc
index e49eb4569..1a3e7c5d8 100644
--- a/src/libutil/hash.cc
+++ b/src/libutil/hash.cc
@@ -19,7 +19,7 @@ namespace nix {
void Hash::init()
{
- if (!type) abort();
+ assert(type);
switch (*type) {
case htMD5: hashSize = md5HashSize; break;
case htSHA1: hashSize = sha1HashSize; break;
@@ -101,15 +101,15 @@ static string printHash32(const Hash & hash)
string printHash16or32(const Hash & hash)
{
+ assert(hash.type);
return hash.to_string(hash.type == htMD5 ? Base16 : Base32, false);
}
-HashType assertInitHashType(const Hash & h) {
- if (h.type)
- return *h.type;
- else
- abort();
+HashType assertInitHashType(const Hash & h)
+{
+ assert(h.type);
+ return *h.type;
}
std::string Hash::to_string(Base base, bool includeType) const
@@ -223,7 +223,7 @@ Hash newHashAllowEmpty(std::string hashStr, std::optional<HashType> ht)
if (!ht)
throw BadHash("empty hash requires explicit hash type");
Hash h(*ht);
- warn("found empty hash, assuming '%s'", h.to_string(Base::SRI, true));
+ warn("found empty hash, assuming '%s'", h.to_string(SRI, true));
return h;
} else
return Hash(hashStr, ht);
@@ -363,14 +363,15 @@ HashType parseHashType(const string & s)
string printHashType(HashType ht)
{
switch (ht) {
- case htMD5: return "md5"; break;
- case htSHA1: return "sha1"; break;
- case htSHA256: return "sha256"; break;
- case htSHA512: return "sha512"; break;
+ case htMD5: return "md5";
+ case htSHA1: return "sha1";
+ case htSHA256: return "sha256";
+ case htSHA512: return "sha512";
+ default:
+ // illegal hash type enum value internally, as opposed to external input
+ // which should be validated with a nice error message.
+ assert(false);
}
- // illegal hash type enum value internally, as opposed to external input
- // which should be validated with nice error message.
- abort();
}
}
diff --git a/src/libutil/hash.hh b/src/libutil/hash.hh
index 0d9916508..23259dced 100644
--- a/src/libutil/hash.hh
+++ b/src/libutil/hash.hh
@@ -10,7 +10,7 @@ namespace nix {
MakeError(BadHash, Error);
-enum HashType : char { htMD5, htSHA1, htSHA256, htSHA512 };
+enum HashType : char { htMD5 = 42, htSHA1, htSHA256, htSHA512 };
const int md5HashSize = 16;
diff --git a/src/libutil/logging.cc b/src/libutil/logging.cc
index 105fadb15..90c6afe81 100644
--- a/src/libutil/logging.cc
+++ b/src/libutil/logging.cc
@@ -1,5 +1,6 @@
#include "logging.hh"
#include "util.hh"
+#include "config.hh"
#include <atomic>
#include <nlohmann/json.hpp>
@@ -7,6 +8,10 @@
namespace nix {
+LoggerSettings loggerSettings;
+
+static GlobalConfig::Register r1(&loggerSettings);
+
static thread_local ActivityId curActivity = 0;
ActivityId getCurActivity()
@@ -72,10 +77,11 @@ public:
void logEI(const ErrorInfo & ei) override
{
std::stringstream oss;
- oss << ei;
+ showErrorInfo(oss, ei, loggerSettings.showTrace.get());
log(ei.level, oss.str());
}
+
void startActivity(ActivityId act, Verbosity lvl, ActivityType type,
const std::string & s, const Fields & fields, ActivityId parent)
@@ -173,7 +179,7 @@ struct JSONLogger : Logger {
void logEI(const ErrorInfo & ei) override
{
std::ostringstream oss;
- oss << ei;
+ showErrorInfo(oss, ei, loggerSettings.showTrace.get());
nlohmann::json json;
json["action"] = "msg";
diff --git a/src/libutil/logging.hh b/src/libutil/logging.hh
index b1583eced..09619aac6 100644
--- a/src/libutil/logging.hh
+++ b/src/libutil/logging.hh
@@ -2,6 +2,7 @@
#include "types.hh"
#include "error.hh"
+#include "config.hh"
namespace nix {
@@ -34,6 +35,16 @@ typedef enum {
typedef uint64_t ActivityId;
+struct LoggerSettings : Config
+{
+ Setting<bool> showTrace{this,
+ false,
+ "show-trace",
+ "Whether to show a stack trace on evaluation errors."};
+};
+
+extern LoggerSettings loggerSettings;
+
class Logger
{
friend struct Activity;
diff --git a/src/libutil/tests/logging.cc b/src/libutil/tests/logging.cc
index 4cb54995b..ef22e9966 100644
--- a/src/libutil/tests/logging.cc
+++ b/src/libutil/tests/logging.cc
@@ -1,6 +1,7 @@
#include "logging.hh"
#include "nixexpr.hh"
#include "util.hh"
+#include <fstream>
#include <gtest/gtest.h>
@@ -10,6 +11,13 @@ namespace nix {
* logEI
* --------------------------------------------------------------------------*/
+ const char *test_file =
+ "previous line of code\n"
+ "this is the problem line of code\n"
+ "next line of code\n";
+ const char *one_liner =
+ "this is the other problem line of code";
+
TEST(logEI, capturesBasicProperties) {
MakeError(TestError, Error);
@@ -42,7 +50,7 @@ namespace nix {
logger->logEI(ei);
auto str = testing::internal::GetCapturedStderr();
- ASSERT_STREQ(str.c_str(), "\x1B[31;1merror:\x1B[0m\x1B[34;1m --- TestError --- error-unit-test\x1B[0m\n\x1B[33;1m\x1B[0minitial error\x1B[0m; subsequent error message.\n");
+ ASSERT_STREQ(str.c_str(), "\x1B[31;1merror:\x1B[0m\x1B[34;1m --- TestError --- error-unit-test\x1B[0m\ninitial error; subsequent error message.\n");
}
}
@@ -60,8 +68,7 @@ namespace nix {
logError(e.info());
auto str = testing::internal::GetCapturedStderr();
- ASSERT_STREQ(str.c_str(), "\x1B[31;1merror:\x1B[0m\x1B[34;1m --- SysError --- error-unit-test\x1B[0m\n\x1B[33;1m\x1B[0mstatting file\x1B[0m: \x1B[33;1mBad file descriptor\x1B[0m\n");
-
+ ASSERT_STREQ(str.c_str(), "\x1B[31;1merror:\x1B[0m\x1B[34;1m --- SysError --- error-unit-test\x1B[0m\nstatting file: \x1B[33;1mBad file descriptor\x1B[0m\n");
}
}
@@ -69,9 +76,9 @@ namespace nix {
testing::internal::CaptureStderr();
logger->logEI({ .level = lvlInfo,
- .name = "Info name",
- .description = "Info description",
- });
+ .name = "Info name",
+ .description = "Info description",
+ });
auto str = testing::internal::GetCapturedStderr();
ASSERT_STREQ(str.c_str(), "\x1B[32;1minfo:\x1B[0m\x1B[34;1m --- Info name --- error-unit-test\x1B[0m\nInfo description\n");
@@ -85,7 +92,7 @@ namespace nix {
logger->logEI({ .level = lvlTalkative,
.name = "Talkative name",
.description = "Talkative description",
- });
+ });
auto str = testing::internal::GetCapturedStderr();
ASSERT_STREQ(str.c_str(), "\x1B[32;1mtalk:\x1B[0m\x1B[34;1m --- Talkative name --- error-unit-test\x1B[0m\nTalkative description\n");
@@ -99,7 +106,7 @@ namespace nix {
logger->logEI({ .level = lvlChatty,
.name = "Chatty name",
.description = "Talkative description",
- });
+ });
auto str = testing::internal::GetCapturedStderr();
ASSERT_STREQ(str.c_str(), "\x1B[32;1mchat:\x1B[0m\x1B[34;1m --- Chatty name --- error-unit-test\x1B[0m\nTalkative description\n");
@@ -113,7 +120,7 @@ namespace nix {
logger->logEI({ .level = lvlDebug,
.name = "Debug name",
.description = "Debug description",
- });
+ });
auto str = testing::internal::GetCapturedStderr();
ASSERT_STREQ(str.c_str(), "\x1B[33;1mdebug:\x1B[0m\x1B[34;1m --- Debug name --- error-unit-test\x1B[0m\nDebug description\n");
@@ -127,7 +134,7 @@ namespace nix {
logger->logEI({ .level = lvlVomit,
.name = "Vomit name",
.description = "Vomit description",
- });
+ });
auto str = testing::internal::GetCapturedStderr();
ASSERT_STREQ(str.c_str(), "\x1B[32;1mvomit:\x1B[0m\x1B[34;1m --- Vomit name --- error-unit-test\x1B[0m\nVomit description\n");
@@ -137,14 +144,13 @@ namespace nix {
* logError
* --------------------------------------------------------------------------*/
-
TEST(logError, logErrorWithoutHintOrCode) {
testing::internal::CaptureStderr();
logError({
.name = "name",
.description = "error description",
- });
+ });
auto str = testing::internal::GetCapturedStderr();
ASSERT_STREQ(str.c_str(), "\x1B[31;1merror:\x1B[0m\x1B[34;1m --- name --- error-unit-test\x1B[0m\nerror description\n");
@@ -152,7 +158,7 @@ namespace nix {
TEST(logError, logErrorWithPreviousAndNextLinesOfCode) {
SymbolTable testTable;
- auto problem_file = testTable.create("myfile.nix");
+ auto problem_file = testTable.create(test_file);
testing::internal::CaptureStderr();
@@ -160,53 +166,43 @@ namespace nix {
.name = "error name",
.description = "error with code lines",
.hint = hintfmt("this hint has %1% templated %2%!!",
- "yellow",
- "values"),
- .nixCode = NixCode {
- .errPos = Pos(problem_file, 40, 13),
- .prevLineOfCode = "previous line of code",
- .errLineOfCode = "this is the problem line of code",
- .nextLineOfCode = "next line of code",
- }});
-
+ "yellow",
+ "values"),
+ .errPos = Pos(foString, problem_file, 02, 13),
+ });
auto str = testing::internal::GetCapturedStderr();
- ASSERT_STREQ(str.c_str(), "\x1B[31;1merror:\x1B[0m\x1B[34;1m --- error name --- error-unit-test\x1B[0m\nin file: \x1B[34;1mmyfile.nix (40:13)\x1B[0m\n\nerror with code lines\n\n 39| previous line of code\n 40| this is the problem line of code\n | \x1B[31;1m^\x1B[0m\n 41| next line of code\n\nthis hint has \x1B[33;1myellow\x1B[0m templated \x1B[33;1mvalues\x1B[0m!!\n");
+ ASSERT_STREQ(str.c_str(), "\x1B[31;1merror:\x1B[0m\x1B[34;1m --- error name --- error-unit-test\x1B[0m\n\x1B[34;1mat: \x1B[33;1m(2:13)\x1B[34;1m from string\x1B[0m\n\nerror with code lines\n\n 1| previous line of code\n 2| this is the problem line of code\n | \x1B[31;1m^\x1B[0m\n 3| next line of code\n\nthis hint has \x1B[33;1myellow\x1B[0m templated \x1B[33;1mvalues\x1B[0m!!\n");
}
- TEST(logError, logErrorWithoutLinesOfCode) {
+ TEST(logError, logErrorWithInvalidFile) {
SymbolTable testTable;
- auto problem_file = testTable.create("myfile.nix");
+ auto problem_file = testTable.create("invalid filename");
testing::internal::CaptureStderr();
logError({
.name = "error name",
.description = "error without any code lines.",
.hint = hintfmt("this hint has %1% templated %2%!!",
- "yellow",
- "values"),
- .nixCode = NixCode {
- .errPos = Pos(problem_file, 40, 13)
- }});
+ "yellow",
+ "values"),
+ .errPos = Pos(foFile, problem_file, 02, 13)
+ });
auto str = testing::internal::GetCapturedStderr();
- ASSERT_STREQ(str.c_str(), "\x1B[31;1merror:\x1B[0m\x1B[34;1m --- error name --- error-unit-test\x1B[0m\nin file: \x1B[34;1mmyfile.nix (40:13)\x1B[0m\n\nerror without any code lines.\n\nthis hint has \x1B[33;1myellow\x1B[0m templated \x1B[33;1mvalues\x1B[0m!!\n");
+ ASSERT_STREQ(str.c_str(), "\x1B[31;1merror:\x1B[0m\x1B[34;1m --- SysError --- error-unit-test\x1B[0m\nopening file '\x1B[33;1minvalid filename\x1B[0m': \x1B[33;1mNo such file or directory\x1B[0m\n\x1B[31;1merror:\x1B[0m\x1B[34;1m --- error name --- error-unit-test\x1B[0m\n\x1B[34;1mat: \x1B[33;1m(2:13)\x1B[34;1m in file: \x1B[0minvalid filename\n\nerror without any code lines.\n\nthis hint has \x1B[33;1myellow\x1B[0m templated \x1B[33;1mvalues\x1B[0m!!\n");
}
TEST(logError, logErrorWithOnlyHintAndName) {
- SymbolTable testTable;
- auto problem_file = testTable.create("myfile.nix");
testing::internal::CaptureStderr();
logError({
.name = "error name",
.hint = hintfmt("hint %1%", "only"),
- .nixCode = NixCode {
- .errPos = Pos(problem_file, 40, 13)
- }});
+ });
auto str = testing::internal::GetCapturedStderr();
- ASSERT_STREQ(str.c_str(), "\x1B[31;1merror:\x1B[0m\x1B[34;1m --- error name --- error-unit-test\x1B[0m\nin file: \x1B[34;1mmyfile.nix (40:13)\x1B[0m\n\nhint \x1B[33;1monly\x1B[0m\n");
+ ASSERT_STREQ(str.c_str(), "\x1B[31;1merror:\x1B[0m\x1B[34;1m --- error name --- error-unit-test\x1B[0m\nhint \x1B[33;1monly\x1B[0m\n");
}
@@ -218,19 +214,19 @@ namespace nix {
testing::internal::CaptureStderr();
logWarning({
- .name = "name",
- .description = "error description",
- .hint = hintfmt("there was a %1%", "warning"),
- });
+ .name = "name",
+ .description = "warning description",
+ .hint = hintfmt("there was a %1%", "warning"),
+ });
auto str = testing::internal::GetCapturedStderr();
- ASSERT_STREQ(str.c_str(), "\x1B[33;1mwarning:\x1B[0m\x1B[34;1m --- name --- error-unit-test\x1B[0m\nerror description\n\nthere was a \x1B[33;1mwarning\x1B[0m\n");
+ ASSERT_STREQ(str.c_str(), "\x1B[33;1mwarning:\x1B[0m\x1B[34;1m --- name --- error-unit-test\x1B[0m\nwarning description\n\nthere was a \x1B[33;1mwarning\x1B[0m\n");
}
TEST(logWarning, logWarningWithFileLineNumAndCode) {
SymbolTable testTable;
- auto problem_file = testTable.create("myfile.nix");
+ auto problem_file = testTable.create(test_file);
testing::internal::CaptureStderr();
@@ -238,18 +234,125 @@ namespace nix {
.name = "warning name",
.description = "warning description",
.hint = hintfmt("this hint has %1% templated %2%!!",
- "yellow",
- "values"),
- .nixCode = NixCode {
- .errPos = Pos(problem_file, 40, 13),
- .prevLineOfCode = std::nullopt,
- .errLineOfCode = "this is the problem line of code",
- .nextLineOfCode = std::nullopt
- }});
+ "yellow",
+ "values"),
+ .errPos = Pos(foStdin, problem_file, 2, 13),
+ });
+
+
+ auto str = testing::internal::GetCapturedStderr();
+ ASSERT_STREQ(str.c_str(), "\x1B[33;1mwarning:\x1B[0m\x1B[34;1m --- warning name --- error-unit-test\x1B[0m\n\x1B[34;1mat: \x1B[33;1m(2:13)\x1B[34;1m from stdin\x1B[0m\n\nwarning description\n\n 1| previous line of code\n 2| this is the problem line of code\n | \x1B[31;1m^\x1B[0m\n 3| next line of code\n\nthis hint has \x1B[33;1myellow\x1B[0m templated \x1B[33;1mvalues\x1B[0m!!\n");
+ }
+
+ /* ----------------------------------------------------------------------------
+ * traces
+ * --------------------------------------------------------------------------*/
+
+ TEST(addTrace, showTracesWithShowTrace) {
+ SymbolTable testTable;
+ auto problem_file = testTable.create(test_file);
+
+ auto oneliner_file = testTable.create(one_liner);
+
+ auto e = AssertionError(ErrorInfo {
+ .name = "wat",
+ .description = "a well-known problem occurred",
+ .hint = hintfmt("it has been %1% days since our last error", "zero"),
+ .errPos = Pos(foString, problem_file, 2, 13),
+ });
+
+ e.addTrace(Pos(foStdin, oneliner_file, 1, 19), "while trying to compute %1%", 42);
+ e.addTrace(std::nullopt, "while doing something without a %1%", "pos");
+
+ testing::internal::CaptureStderr();
+
+ loggerSettings.showTrace.assign(true);
+
+ logError(e.info());
+
+ auto str = testing::internal::GetCapturedStderr();
+ ASSERT_STREQ(str.c_str(), "\x1B[31;1merror:\x1B[0m\x1B[34;1m --- AssertionError --- error-unit-test\x1B[0m\n\x1B[34;1mat: \x1B[33;1m(2:13)\x1B[34;1m from string\x1B[0m\n\na well-known problem occurred\n\n 1| previous line of code\n 2| this is the problem line of code\n | \x1B[31;1m^\x1B[0m\n 3| next line of code\n\nit has been \x1B[33;1mzero\x1B[0m days since our last error\n\x1B[34;1m---- show-trace ----\x1B[0m\n\x1B[34;1mtrace: \x1B[0mwhile trying to compute \x1B[33;1m42\x1B[0m\n\x1B[34;1mat: \x1B[33;1m(1:19)\x1B[34;1m from stdin\x1B[0m\n\n 1| this is the other problem line of code\n | \x1B[31;1m^\x1B[0m\n\n\x1B[34;1mtrace: \x1B[0mwhile doing something without a \x1B[33;1mpos\x1B[0m\n");
+ }
+
+ TEST(addTrace, hideTracesWithoutShowTrace) {
+ SymbolTable testTable;
+ auto problem_file = testTable.create(test_file);
+
+ auto oneliner_file = testTable.create(one_liner);
+
+ auto e = AssertionError(ErrorInfo {
+ .name = "wat",
+ .description = "a well-known problem occurred",
+ .hint = hintfmt("it has been %1% days since our last error", "zero"),
+ .errPos = Pos(foString, problem_file, 2, 13),
+ });
+
+ e.addTrace(Pos(foStdin, oneliner_file, 1, 19), "while trying to compute %1%", 42);
+ e.addTrace(std::nullopt, "while doing something without a %1%", "pos");
+ testing::internal::CaptureStderr();
+
+ loggerSettings.showTrace.assign(false);
+
+ logError(e.info());
auto str = testing::internal::GetCapturedStderr();
- ASSERT_STREQ(str.c_str(), "\x1B[33;1mwarning:\x1B[0m\x1B[34;1m --- warning name --- error-unit-test\x1B[0m\nin file: \x1B[34;1mmyfile.nix (40:13)\x1B[0m\n\nwarning description\n\n 40| this is the problem line of code\n | \x1B[31;1m^\x1B[0m\n\nthis hint has \x1B[33;1myellow\x1B[0m templated \x1B[33;1mvalues\x1B[0m!!\n");
+ ASSERT_STREQ(str.c_str(), "\x1B[31;1merror:\x1B[0m\x1B[34;1m --- AssertionError --- error-unit-test\x1B[0m\n\x1B[34;1mat: \x1B[33;1m(2:13)\x1B[34;1m from string\x1B[0m\n\na well-known problem occurred\n\n 1| previous line of code\n 2| this is the problem line of code\n | \x1B[31;1m^\x1B[0m\n 3| next line of code\n\nit has been \x1B[33;1mzero\x1B[0m days since our last error\n");
+ }
+
+ /* ----------------------------------------------------------------------------
+ * hintfmt
+ * --------------------------------------------------------------------------*/
+
+ TEST(hintfmt, percentStringWithoutArgs) {
+
+ const char *teststr = "this is 100%s correct!";
+
+ ASSERT_STREQ(
+ hintfmt(teststr).str().c_str(),
+ teststr);
+
+ }
+
+ TEST(hintfmt, fmtToHintfmt) {
+
+ ASSERT_STREQ(
+ hintfmt(fmt("the color of this this text is %1%", "not yellow")).str().c_str(),
+ "the color of this this text is not yellow");
+
+ }
+
+ TEST(hintfmt, tooFewArguments) {
+
+ ASSERT_STREQ(
+ hintfmt("only one arg %1% %2%", "fulfilled").str().c_str(),
+ "only one arg " ANSI_YELLOW "fulfilled" ANSI_NORMAL " ");
+
+ }
+
+ TEST(hintfmt, tooManyArguments) {
+
+ ASSERT_STREQ(
+ hintfmt("what about this %1% %2%", "%3%", "one", "two").str().c_str(),
+ "what about this " ANSI_YELLOW "%3%" ANSI_NORMAL " " ANSI_YELLOW "one" ANSI_NORMAL);
+
+ }
+
+ /* ----------------------------------------------------------------------------
+ * ErrPos
+ * --------------------------------------------------------------------------*/
+
+ TEST(errpos, invalidPos) {
+
+ // contains an invalid symbol, which we should not dereference!
+ Pos invalid;
+
+ // constructing without access violation.
+ ErrPos ep(invalid);
+
+ // assignment without access violation.
+ ep = invalid;
+
}
}
diff --git a/src/nix-env/nix-env.cc b/src/nix-env/nix-env.cc
index 8b0692035..c992b7d74 100644
--- a/src/nix-env/nix-env.cc
+++ b/src/nix-env/nix-env.cc
@@ -593,7 +593,7 @@ static void upgradeDerivations(Globals & globals,
} else newElems.push_back(i);
} catch (Error & e) {
- e.addPrefix(fmt("while trying to find an upgrade for '%s':\n", i.queryName()));
+ e.addTrace(std::nullopt, "while trying to find an upgrade for '%s'", i.queryName());
throw;
}
}
@@ -1185,7 +1185,7 @@ static void opQuery(Globals & globals, Strings opFlags, Strings opArgs)
} catch (AssertionError & e) {
printMsg(lvlTalkative, "skipping derivation named '%1%' which gives an assertion failure", i.queryName());
} catch (Error & e) {
- e.addPrefix(fmt("while querying the derivation named '%1%':\n", i.queryName()));
+ e.addTrace(std::nullopt, "while querying the derivation named '%1%'", i.queryName());
throw;
}
}
diff --git a/src/nix-store/nix-store.cc b/src/nix-store/nix-store.cc
index 4e02aa2bf..7d81bf54f 100644
--- a/src/nix-store/nix-store.cc
+++ b/src/nix-store/nix-store.cc
@@ -864,7 +864,7 @@ static void opServe(Strings opFlags, Strings opArgs)
out << info->narSize // downloadSize
<< info->narSize;
if (GET_PROTOCOL_MINOR(clientVersion) >= 4)
- out << (info->narHash ? info->narHash.to_string(Base32, true) : "") << info->ca << info->sigs;
+ out << (info->narHash ? info->narHash.to_string(Base32, true) : "") << renderContentAddress(info->ca) << info->sigs;
} catch (InvalidPath &) {
}
}
@@ -952,7 +952,7 @@ static void opServe(Strings opFlags, Strings opArgs)
info.references = readStorePaths<StorePathSet>(*store, in);
in >> info.registrationTime >> info.narSize >> info.ultimate;
info.sigs = readStrings<StringSet>(in);
- in >> info.ca;
+ info.ca = parseContentAddressOpt(readString(in));
if (info.narSize == 0)
throw Error("narInfo is too old and missing the narSize field");
diff --git a/src/nix/add-to-store.cc b/src/nix/add-to-store.cc
index f43f774c1..f9d6de16e 100644
--- a/src/nix/add-to-store.cc
+++ b/src/nix/add-to-store.cc
@@ -48,7 +48,10 @@ struct CmdAddToStore : MixDryRun, StoreCommand
ValidPathInfo info(store->makeFixedOutputPath(FileIngestionMethod::Recursive, narHash, *namePart));
info.narHash = narHash;
info.narSize = sink.s->size();
- info.ca = makeFixedOutputCA(FileIngestionMethod::Recursive, info.narHash);
+ info.ca = std::optional { FixedOutputHash {
+ .method = FileIngestionMethod::Recursive,
+ .hash = info.narHash,
+ } };
if (!dryRun) {
auto source = StringSource { *sink.s };
diff --git a/src/nix/develop.cc b/src/nix/develop.cc
index 8b85caf82..eb93f56fc 100644
--- a/src/nix/develop.cc
+++ b/src/nix/develop.cc
@@ -50,7 +50,7 @@ BuildEnvironment readEnvironment(const Path & path)
R"re((?:\$?'(?:[^'\\]|\\[abeEfnrtv\\'"?])*'))re";
static std::string indexedArrayRegex =
- R"re((?:\(( *\[[0-9]+]="(?:[^"\\]|\\.)*")**\)))re";
+ R"re((?:\(( *\[[0-9]+\]="(?:[^"\\]|\\.)*")*\)))re";
static std::regex varRegex(
"^(" + varNameRegex + ")=(" + simpleStringRegex + "|" + quotedStringRegex + "|" + indexedArrayRegex + ")\n");
@@ -135,13 +135,7 @@ StorePath getDerivationEnvironment(ref<Store> store, const StorePath & drvPath)
drv.inputSrcs.insert(std::move(getEnvShPath));
Hash h = hashDerivationModulo(*store, drv, true);
auto shellOutPath = store->makeOutputPath("out", h, drvName);
- drv.outputs.insert_or_assign("out", DerivationOutput {
- .path = shellOutPath,
- .hash = DerivationOutputHash {
- .method = FileIngestionMethod::Flat,
- .hash = Hash { },
- },
- });
+ drv.outputs.insert_or_assign("out", DerivationOutput { .path = shellOutPath });
drv.env["out"] = store->printStorePath(shellOutPath);
auto shellDrvPath2 = writeDerivation(store, drv, drvName);
diff --git a/src/nix/hash.cc b/src/nix/hash.cc
index f435192fc..b97c6d21f 100644
--- a/src/nix/hash.cc
+++ b/src/nix/hash.cc
@@ -1,5 +1,6 @@
#include "command.hh"
#include "hash.hh"
+#include "content-address.hh"
#include "legacy.hh"
#include "shared.hh"
#include "references.hh"
diff --git a/src/nix/make-content-addressable.cc b/src/nix/make-content-addressable.cc
index 0ebb8f13b..fb36fc410 100644
--- a/src/nix/make-content-addressable.cc
+++ b/src/nix/make-content-addressable.cc
@@ -82,7 +82,10 @@ struct CmdMakeContentAddressable : StorePathsCommand, MixJSON
if (hasSelfReference) info.references.insert(info.path);
info.narHash = narHash;
info.narSize = sink.s->size();
- info.ca = makeFixedOutputCA(FileIngestionMethod::Recursive, info.narHash);
+ info.ca = FixedOutputHash {
+ .method = FileIngestionMethod::Recursive,
+ .hash = info.narHash,
+ };
if (!json)
printInfo("rewrote '%s' to '%s'", pathS, store->printStorePath(info.path));
diff --git a/src/nix/path-info.cc b/src/nix/path-info.cc
index fb7bacc4c..b89a44f83 100644
--- a/src/nix/path-info.cc
+++ b/src/nix/path-info.cc
@@ -115,7 +115,7 @@ struct CmdPathInfo : StorePathsCommand, MixJSON
std::cout << '\t';
Strings ss;
if (info->ultimate) ss.push_back("ultimate");
- if (info->ca != "") ss.push_back("ca:" + info->ca);
+ if (info->ca) ss.push_back("ca:" + renderContentAddress(*info->ca));
for (auto & sig : info->sigs) ss.push_back(sig);
std::cout << concatStringsSep(" ", ss);
}
diff --git a/src/nix/repl.cc b/src/nix/repl.cc
index d5661f7b7..ab8ae41e8 100644
--- a/src/nix/repl.cc
+++ b/src/nix/repl.cc
@@ -211,12 +211,12 @@ void NixRepl::mainLoop(const std::vector<std::string> & files)
// input without clearing the input so far.
continue;
} else {
- printMsg(lvlError, error + "%1%%2%", (settings.showTrace ? e.prefix() : ""), e.msg());
+ printMsg(lvlError, e.msg());
}
} catch (Error & e) {
- printMsg(lvlError, error + "%1%%2%", (settings.showTrace ? e.prefix() : ""), e.msg());
+ printMsg(lvlError, e.msg());
} catch (Interrupted & e) {
- printMsg(lvlError, error + "%1%%2%", (settings.showTrace ? e.prefix() : ""), e.msg());
+ printMsg(lvlError, e.msg());
}
// We handled the current input fully, so we should clear it
diff --git a/src/nix/search.cc b/src/nix/search.cc
index ba72c1e79..93c3f3f83 100644
--- a/src/nix/search.cc
+++ b/src/nix/search.cc
@@ -216,7 +216,7 @@ struct CmdSearch : SourceExprCommand, MixJSON
} catch (AssertionError & e) {
} catch (Error & e) {
if (!toplevel) {
- e.addPrefix(fmt("While evaluating the attribute '%s':\n", attrPath));
+ e.addTrace(std::nullopt, "While evaluating the attribute '%s'", attrPath);
throw;
}
}
diff --git a/src/nix/verify.cc b/src/nix/verify.cc
index d1aba08e3..bb5e4529b 100644
--- a/src/nix/verify.cc
+++ b/src/nix/verify.cc
@@ -87,7 +87,7 @@ struct CmdVerify : StorePathsCommand
if (!noContents) {
std::unique_ptr<AbstractHashSink> hashSink;
- if (info->ca == "")
+ if (!info->ca)
hashSink = std::make_unique<HashSink>(*info->narHash.type);
else
hashSink = std::make_unique<HashModuloSink>(*info->narHash.type, std::string(info->path.hashPart()));
diff --git a/tests/build-hook.nix b/tests/build-hook.nix
index 8c5ca8cd3..a19c10dde 100644
--- a/tests/build-hook.nix
+++ b/tests/build-hook.nix
@@ -1,23 +1,39 @@
+{ busybox }:
+
with import ./config.nix;
let
+ mkDerivation = args:
+ derivation ({
+ inherit system;
+ builder = busybox;
+ args = ["sh" "-e" args.builder or (builtins.toFile "builder-${args.name}.sh" "if [ -e .attrs.sh ]; then source .attrs.sh; fi; eval \"$buildCommand\"")];
+ } // removeAttrs args ["builder" "meta"])
+ // { meta = args.meta or {}; };
+
input1 = mkDerivation {
- name = "build-hook-input-1";
- buildCommand = "mkdir $out; echo FOO > $out/foo";
+ shell = busybox;
+ name = "build-remote-input-1";
+ buildCommand = "echo FOO > $out";
requiredSystemFeatures = ["foo"];
};
input2 = mkDerivation {
- name = "build-hook-input-2";
- buildCommand = "mkdir $out; echo BAR > $out/bar";
+ shell = busybox;
+ name = "build-remote-input-2";
+ buildCommand = "echo BAR > $out";
};
in
mkDerivation {
- name = "build-hook";
- builder = ./dependencies.builder0.sh;
- input1 = " " + input1 + "/.";
- input2 = " ${input2}/.";
+ shell = busybox;
+ name = "build-remote";
+ buildCommand =
+ ''
+ read x < ${input1}
+ read y < ${input2}
+ echo $x$y > $out
+ '';
}
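The mkDerivation helper introduced above routes every test build through the busybox shell: derivation is invoked with builder = busybox and args = ["sh" "-e" <script>], where the default script sources .attrs.sh (when structured attributes are in use) and then evaluates $buildCommand. A minimal sketch of what that generated builder script amounts to, assuming a plain build whose buildCommand arrives as an ordinary environment variable:

    # builder-<name>.sh -- sketch of the script produced by builtins.toFile above
    if [ -e .attrs.sh ]; then
        source .attrs.sh        # pick up structured attributes, if present
    fi
    eval "$buildCommand"        # e.g. buildCommand='echo FOO > $out'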
diff --git a/tests/build-remote.sh b/tests/build-remote.sh
index a550f4460..4dfb753e1 100644
--- a/tests/build-remote.sh
+++ b/tests/build-remote.sh
@@ -3,22 +3,29 @@ source common.sh
clearStore
if ! canUseSandbox; then exit; fi
-if [[ ! $SHELL =~ /nix/store ]]; then exit; fi
+if ! [[ $busybox =~ busybox ]]; then exit; fi
-chmod -R u+w $TEST_ROOT/store0 || true
-chmod -R u+w $TEST_ROOT/store1 || true
-rm -rf $TEST_ROOT/store0 $TEST_ROOT/store1
+chmod -R u+w $TEST_ROOT/machine0 || true
+chmod -R u+w $TEST_ROOT/machine1 || true
+chmod -R u+w $TEST_ROOT/machine2 || true
+rm -rf $TEST_ROOT/machine0 $TEST_ROOT/machine1 $TEST_ROOT/machine2
+rm -f $TEST_ROOT/result
-nix build -f build-hook.nix -o $TEST_ROOT/result --max-jobs 0 \
- --sandbox-paths /nix/store --sandbox-build-dir /build-tmp \
- --builders "$TEST_ROOT/store0; $TEST_ROOT/store1 - - 1 1 foo" \
+unset NIX_STORE_DIR
+unset NIX_STATE_DIR
+
+# Note: ssh://localhost bypasses ssh, directly invoking nix-store as a
+# child process. This allows us to test LegacySSHStore::buildDerivation().
+nix build -L -v -f build-hook.nix -o $TEST_ROOT/result --max-jobs 0 \
+ --arg busybox $busybox \
+ --store $TEST_ROOT/machine0 \
+ --builders "ssh://localhost?remote-store=$TEST_ROOT/machine1; $TEST_ROOT/machine2 - - 1 1 foo" \
--system-features foo
-outPath=$TEST_ROOT/result
+outPath=$(readlink -f $TEST_ROOT/result)
-cat $outPath/foobar | grep FOOBAR
+cat $TEST_ROOT/machine0/$outPath | grep FOOBAR
-# Ensure that input1 was built on store1 due to the required feature.
-p=$(readlink -f $outPath/input-2)
-(! nix path-info --store $TEST_ROOT/store0 --all | grep builder-build-hook-input-1.sh)
-nix path-info --store $TEST_ROOT/store1 --all | grep builder-build-hook-input-1.sh
+# Ensure that input1 was built on machine2 due to the required feature.
+(! nix path-info --store $TEST_ROOT/machine1 --all | grep builder-build-remote-input-1.sh)
+nix path-info --store $TEST_ROOT/machine2 --all | grep builder-build-remote-input-1.sh
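The --builders value above uses Nix's machine-specification syntax: entries separated by ';', each of the form 'URI [systems] [ssh-key] [max-jobs] [speed-factor] [supported-features] [mandatory-features]', with '-' standing for the default. Read that way, the second entry works out roughly as follows (values taken from the command above; the field breakdown is an interpretation, not part of the test):

    # $TEST_ROOT/machine2 - - 1 1 foo
    #   URI                 $TEST_ROOT/machine2   (a plain local store standing in for a remote)
    #   systems             -                     (default: the local system type)
    #   ssh key             -                     (not needed for a local store)
    #   max jobs            1
    #   speed factor        1
    #   supported features  foo                   (so input1, which requires "foo", is scheduled here)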
diff --git a/tests/common.sh.in b/tests/common.sh.in
index dd7e61822..308126094 100644
--- a/tests/common.sh.in
+++ b/tests/common.sh.in
@@ -1,6 +1,6 @@
set -e
-export TEST_ROOT=$(realpath ${TMPDIR:-/tmp}/nix-test)
+export TEST_ROOT=$(realpath ${TMPDIR:-/tmp}/nix-test)/${TEST_NAME:-default}
export NIX_STORE_DIR
if ! NIX_STORE_DIR=$(readlink -f $TEST_ROOT/store 2> /dev/null); then
# Maybe the build directory is symlinked.
@@ -11,6 +11,7 @@ export NIX_LOCALSTATE_DIR=$TEST_ROOT/var
export NIX_LOG_DIR=$TEST_ROOT/var/log/nix
export NIX_STATE_DIR=$TEST_ROOT/var/nix
export NIX_CONF_DIR=$TEST_ROOT/etc
+export NIX_DAEMON_SOCKET_PATH=$TEST_ROOT/daemon-socket
unset NIX_USER_CONF_FILES
export _NIX_TEST_SHARED=$TEST_ROOT/shared
if [[ -n $NIX_STORE ]]; then
@@ -35,6 +36,7 @@ export xmllint="@xmllint@"
export SHELL="@bash@"
export PAGER=cat
export HAVE_SODIUM="@HAVE_SODIUM@"
+export busybox="@sandbox_shell@"
export version=@PACKAGE_VERSION@
export system=@system@
@@ -75,7 +77,7 @@ startDaemon() {
rm -f $NIX_STATE_DIR/daemon-socket/socket
nix-daemon &
for ((i = 0; i < 30; i++)); do
- if [ -e $NIX_STATE_DIR/daemon-socket/socket ]; then break; fi
+ if [ -e $NIX_DAEMON_SOCKET_PATH ]; then break; fi
sleep 1
done
pidDaemon=$!
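Suffixing TEST_ROOT with ${TEST_NAME:-default} gives each test script its own state tree, which is what allows build-remote.sh to unset NIX_STORE_DIR and NIX_STATE_DIR and create throwaway stores without stepping on other tests. A rough sketch of how the variables resolve, assuming TMPDIR is unset, /tmp resolves to itself, and the test runner exports TEST_NAME=build-remote:

    TEST_ROOT=/tmp/nix-test/build-remote
    NIX_STATE_DIR=$TEST_ROOT/var/nix
    NIX_CONF_DIR=$TEST_ROOT/etc
    NIX_DAEMON_SOCKET_PATH=$TEST_ROOT/daemon-socket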
diff --git a/tests/gc-auto.sh b/tests/gc-auto.sh
index de1e2cfe4..b282644ca 100644
--- a/tests/gc-auto.sh
+++ b/tests/gc-auto.sh
@@ -13,24 +13,32 @@ fake_free=$TEST_ROOT/fake-free
export _NIX_TEST_FREE_SPACE_FILE=$fake_free
echo 1100 > $fake_free
+fifoLock=$TEST_ROOT/fifoLock
+mkfifo "$fifoLock"
+
expr=$(cat <<EOF
with import ./config.nix; mkDerivation {
name = "gc-A";
buildCommand = ''
set -x
[[ \$(ls \$NIX_STORE/*-garbage? | wc -l) = 3 ]]
+
mkdir \$out
echo foo > \$out/bar
- echo 1...
- sleep 2
- echo 200 > ${fake_free}.tmp1
+
+ # Pretend that we've run out of space
+ echo 100 > ${fake_free}.tmp1
mv ${fake_free}.tmp1 $fake_free
- echo 2...
- sleep 2
- echo 3...
- sleep 2
- echo 4...
- [[ \$(ls \$NIX_STORE/*-garbage? | wc -l) = 1 ]]
+
+ # Wait for the GC to run
+ for i in {1..20}; do
+ echo ''\${i}...
+ if [[ \$(ls \$NIX_STORE/*-garbage? | wc -l) = 1 ]]; then
+ exit 0
+ fi
+ sleep 1
+ done
+ exit 1
'';
}
EOF
@@ -43,15 +51,9 @@ with import ./config.nix; mkDerivation {
set -x
mkdir \$out
echo foo > \$out/bar
- echo 1...
- sleep 2
- echo 200 > ${fake_free}.tmp2
- mv ${fake_free}.tmp2 $fake_free
- echo 2...
- sleep 2
- echo 3...
- sleep 2
- echo 4...
+
+ # Wait for the first build to finish
+ cat "$fifoLock"
'';
}
EOF
@@ -59,12 +61,19 @@ EOF
nix build -v -o $TEST_ROOT/result-A -L "($expr)" \
--min-free 1000 --max-free 2000 --min-free-check-interval 1 &
-pid=$!
+pid1=$!
nix build -v -o $TEST_ROOT/result-B -L "($expr2)" \
- --min-free 1000 --max-free 2000 --min-free-check-interval 1
+ --min-free 1000 --max-free 2000 --min-free-check-interval 1 &
+pid2=$!
-wait "$pid"
+# Once the first build is done, unblock the second one.
+# If the first build fails, postpone reporting the failure so that the
+# second one can still finish.
+wait "$pid1" || FIRSTBUILDSTATUS=$?
+echo "unlock" > $fifoLock
+( exit ${FIRSTBUILDSTATUS:-0} )
+wait "$pid2"
[[ foo = $(cat $TEST_ROOT/result-A/bar) ]]
[[ foo = $(cat $TEST_ROOT/result-B/bar) ]]
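The fifoLock replaces the timed sleeps with real synchronisation: cat on a FIFO blocks until a writer opens it and closes it again, so build B sits in cat "$fifoLock" until the driver script echoes into the FIFO once build A has been dealt with. A minimal standalone sketch of the pattern (the path and messages here are illustrative only):

    lock=$(mktemp -d)/lock.fifo
    mkfifo "$lock"
    ( cat "$lock" > /dev/null; echo "released" ) &   # reader blocks until a writer appears
    echo "doing the work the reader has to wait for"
    echo unlock > "$lock"                            # writer opens, writes, closes; reader sees EOF
    wait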
diff --git a/tests/gc-concurrent.builder.sh b/tests/gc-concurrent.builder.sh
index 0cd67df3a..bb6dcd4cf 100644
--- a/tests/gc-concurrent.builder.sh
+++ b/tests/gc-concurrent.builder.sh
@@ -1,7 +1,10 @@
+echo "Build started" > "$lockFifo"
+
mkdir $out
echo $(cat $input1/foo)$(cat $input2/bar) > $out/foobar
-sleep 10
+# Wait for someone to write to the FIFO
+cat "$lockFifo"
# $out should not have been GC'ed while we were sleeping, but just in
# case...
diff --git a/tests/gc-concurrent.nix b/tests/gc-concurrent.nix
index 21671ea2c..0aba1f983 100644
--- a/tests/gc-concurrent.nix
+++ b/tests/gc-concurrent.nix
@@ -1,5 +1,7 @@
with import ./config.nix;
+{ lockFifo ? null }:
+
rec {
input1 = mkDerivation {
@@ -16,6 +18,7 @@ rec {
name = "gc-concurrent";
builder = ./gc-concurrent.builder.sh;
inherit input1 input2;
+ inherit lockFifo;
};
test2 = mkDerivation {
diff --git a/tests/gc-concurrent.sh b/tests/gc-concurrent.sh
index d395930ca..2c6622c62 100644
--- a/tests/gc-concurrent.sh
+++ b/tests/gc-concurrent.sh
@@ -2,7 +2,10 @@ source common.sh
clearStore
-drvPath1=$(nix-instantiate gc-concurrent.nix -A test1)
+lockFifo1=$TEST_ROOT/test1.fifo
+mkfifo "$lockFifo1"
+
+drvPath1=$(nix-instantiate gc-concurrent.nix -A test1 --argstr lockFifo "$lockFifo1")
outPath1=$(nix-store -q $drvPath1)
drvPath2=$(nix-instantiate gc-concurrent.nix -A test2)
@@ -22,19 +25,16 @@ ln -s $outPath3 "$NIX_STATE_DIR"/gcroots/foo2
nix-store -rvv "$drvPath1" &
pid1=$!
-# Start build #2 in the background after 10 seconds.
-(sleep 10 && nix-store -rvv "$drvPath2") &
-pid2=$!
+# Wait for the build of $drvPath1 to start
+cat $lockFifo1
# Run the garbage collector while the build is running.
-sleep 6
nix-collect-garbage
-# Wait for build #1/#2 to finish.
+# Unlock the build of $drvPath1
+echo "" > $lockFifo1
echo waiting for pid $pid1 to finish...
wait $pid1
-echo waiting for pid $pid2 to finish...
-wait $pid2
# Check that the root of build #1 and its dependencies haven't been
# deleted. They should not be deleted by the GC because they were
@@ -42,8 +42,9 @@ wait $pid2
cat $outPath1/foobar
cat $outPath1/input-2/bar
-# Check that build #2 has succeeded. It should succeed because the
-# derivation is a GC root.
+# Check that the build of $drvPath2 succeeds.
+# It should succeed because the derivation is a GC root.
+nix-store -rvv "$drvPath2"
cat $outPath2/foobar
rm -f "$NIX_STATE_DIR"/gcroots/foo*
diff --git a/tests/gc-concurrent2.builder.sh b/tests/gc-concurrent2.builder.sh
index 4bfb33103..4f6c58b96 100644
--- a/tests/gc-concurrent2.builder.sh
+++ b/tests/gc-concurrent2.builder.sh
@@ -3,5 +3,3 @@ echo $(cat $input1/foo)$(cat $input2/bar)xyzzy > $out/foobar
# Check that the GC hasn't deleted the lock on our output.
test -e "$out.lock"
-
-sleep 6
diff --git a/tests/init.sh b/tests/init.sh
index c62c4856a..0c2c0e170 100644
--- a/tests/init.sh
+++ b/tests/init.sh
@@ -18,6 +18,7 @@ build-users-group =
keep-derivations = false
sandbox = false
experimental-features = nix-command flakes
+gc-reserved-space = 0
include nix.conf.extra
EOF
diff --git a/tests/local.mk b/tests/local.mk
index 536661af8..f3ac330d8 100644
--- a/tests/local.mk
+++ b/tests/local.mk
@@ -40,4 +40,4 @@ tests-environment = NIX_REMOTE= $(bash) -e
clean-files += $(d)/common.sh
-installcheck: $(d)/common.sh $(d)/config.nix $(d)/plugins/libplugintest.$(SO_EXT)
+test-deps += tests/common.sh tests/config.nix tests/plugins/libplugintest.$(SO_EXT)
diff --git a/tests/misc.sh b/tests/misc.sh
index fd4908e25..a81c9dbb1 100644
--- a/tests/misc.sh
+++ b/tests/misc.sh
@@ -16,6 +16,11 @@ nix-env --foo 2>&1 | grep "no operation"
nix-env -q --foo 2>&1 | grep "unknown flag"
# Eval Errors.
-eval_res=$(nix-instantiate --eval -E 'let a = {} // a; in a.foo' 2>&1 || true)
-echo $eval_res | grep "(string) (1:15)"
-echo $eval_res | grep "infinite recursion encountered"
+eval_arg_res=$(nix-instantiate --eval -E 'let a = {} // a; in a.foo' 2>&1 || true)
+echo $eval_arg_res | grep "at: (1:15) from string"
+echo $eval_arg_res | grep "infinite recursion encountered"
+
+eval_stdin_res=$(echo 'let a = {} // a; in a.foo' | nix-instantiate --eval -E - 2>&1 || true)
+echo $eval_stdin_res | grep "at: (1:15) from stdin"
+echo $eval_stdin_res | grep "infinite recursion encountered"
+
diff --git a/tests/nix-shell.sh b/tests/nix-shell.sh
index 235e2a5ff..650904057 100644
--- a/tests/nix-shell.sh
+++ b/tests/nix-shell.sh
@@ -55,3 +55,10 @@ chmod a+rx $TEST_ROOT/shell.shebang.rb
output=$($TEST_ROOT/shell.shebang.rb abc ruby)
[ "$output" = '-e load("'"$TEST_ROOT"'/shell.shebang.rb") -- abc ruby' ]
+
+# Test 'nix develop'.
+nix develop -f shell.nix shellDrv -c bash -c '[[ -n $stdenv ]]'
+
+# Test 'nix print-dev-env'.
+source <(nix print-dev-env -f shell.nix shellDrv)
+[[ -n $stdenv ]]
diff --git a/tests/post-hook.sh b/tests/post-hook.sh
index a02657215..aa3e6a574 100644
--- a/tests/post-hook.sh
+++ b/tests/post-hook.sh
@@ -2,6 +2,8 @@ source common.sh
clearStore
+rm -f $TEST_ROOT/result
+
export REMOTE_STORE=$TEST_ROOT/remote_store
# Build the dependencies and push them to the remote store
diff --git a/tests/recursive.sh b/tests/recursive.sh
index 394ae5ddb..2d4f83895 100644
--- a/tests/recursive.sh
+++ b/tests/recursive.sh
@@ -5,6 +5,8 @@ if [[ $(uname) != Linux ]]; then exit; fi
clearStore
+rm -f $TEST_ROOT/result
+
export unreachable=$(nix add-to-store ./recursive.sh)
nix --experimental-features 'nix-command recursive-nix' build -o $TEST_ROOT/result -L '(
diff --git a/tests/structured-attrs.sh b/tests/structured-attrs.sh
index 646bdb876..dcfe6d580 100644
--- a/tests/structured-attrs.sh
+++ b/tests/structured-attrs.sh
@@ -2,6 +2,8 @@ source common.sh
clearStore
+rm -f $TEST_ROOT/result
+
nix-build structured-attrs.nix -A all -o $TEST_ROOT/result
[[ $(cat $TEST_ROOT/result/foo) = bar ]]