-rw-r--r--  .github/ISSUE_TEMPLATE/installer.md | 36
-rw-r--r--  .github/assign-by-files.yml | 5
-rw-r--r--  .github/workflows/assign-reviewer.yml | 12
-rw-r--r--  .version | 2
-rw-r--r--  configure.ac | 2
-rw-r--r--  doc/manual/generate-options.nix | 66
-rw-r--r--  doc/manual/local.mk | 6
-rw-r--r--  doc/manual/redirects.js | 3
-rw-r--r--  doc/manual/src/SUMMARY.md.in | 1
-rw-r--r--  doc/manual/src/advanced-topics/diff-hook.md | 34
-rw-r--r--  doc/manual/src/advanced-topics/post-build-hook.md | 15
-rw-r--r--  doc/manual/src/command-ref/env-common.md | 41
-rw-r--r--  doc/manual/src/command-ref/nix-build.md | 12
-rw-r--r--  doc/manual/src/command-ref/nix-store.md | 7
-rw-r--r--  doc/manual/src/installation/installing-binary.md | 33
-rw-r--r--  doc/manual/src/release-notes/rl-2.12.md | 43
-rw-r--r--  doc/manual/src/release-notes/rl-next.md | 59
-rw-r--r--  flake.nix | 8
-rw-r--r--  misc/launchd/org.nixos.nix-daemon.plist.in | 2
-rw-r--r--  misc/systemd/nix-daemon.service.in | 2
-rw-r--r--  scripts/install-multi-user.sh | 9
-rw-r--r--  src/libcmd/common-eval-args.cc | 95
-rw-r--r--  src/libcmd/installables.cc | 80
-rw-r--r--  src/libcmd/repl.cc | 30
-rw-r--r--  src/libexpr/eval-cache.cc | 6
-rw-r--r--  src/libexpr/eval.cc | 60
-rw-r--r--  src/libexpr/eval.hh | 14
-rw-r--r--  src/libexpr/flake/config.cc | 2
-rw-r--r--  src/libexpr/flake/flake.cc | 53
-rw-r--r--  src/libexpr/flake/flake.hh | 6
-rw-r--r--  src/libexpr/flake/flakeref.hh | 2
-rw-r--r--  src/libexpr/flake/lockfile.cc | 48
-rw-r--r--  src/libexpr/flake/lockfile.hh | 9
-rw-r--r--  src/libexpr/get-drvs.cc | 2
-rw-r--r--  src/libexpr/nixexpr.cc | 73
-rw-r--r--  src/libexpr/nixexpr.hh | 29
-rw-r--r--  src/libexpr/parser.y | 65
-rw-r--r--  src/libexpr/primops.cc | 15
-rw-r--r--  src/libexpr/primops/fetchTree.cc | 2
-rw-r--r--  src/libexpr/tests/libexprtests.hh | 2
-rw-r--r--  src/libexpr/tests/primops.cc | 15
-rw-r--r--  src/libexpr/value-to-xml.cc | 3
-rw-r--r--  src/libfetchers/fetch-settings.hh | 7
-rw-r--r--  src/libfetchers/fetchers.cc | 6
-rw-r--r--  src/libfetchers/fetchers.hh | 13
-rw-r--r--  src/libfetchers/git.cc | 60
-rw-r--r--  src/libfetchers/github.cc | 53
-rw-r--r--  src/libfetchers/indirect.cc | 10
-rw-r--r--  src/libfetchers/mercurial.cc | 10
-rw-r--r--  src/libfetchers/path.cc | 8
-rw-r--r--  src/libfetchers/registry.cc | 3
-rw-r--r--  src/libfetchers/tarball.cc | 11
-rw-r--r--  src/libmain/progress-bar.cc | 12
-rw-r--r--  src/libstore/binary-cache-store.cc | 2
-rw-r--r--  src/libstore/build/derivation-goal.cc | 53
-rw-r--r--  src/libstore/build/derivation-goal.hh | 5
-rw-r--r--  src/libstore/build/entry-points.cc | 6
-rw-r--r--  src/libstore/build/local-derivation-goal.cc | 64
-rw-r--r--  src/libstore/daemon.cc | 1
-rw-r--r--  src/libstore/derivations.cc | 2
-rw-r--r--  src/libstore/derivations.hh | 2
-rw-r--r--  src/libstore/derived-path.cc | 24
-rw-r--r--  src/libstore/derived-path.hh | 2
-rw-r--r--  src/libstore/filetransfer.cc | 24
-rw-r--r--  src/libstore/filetransfer.hh | 5
-rw-r--r--  src/libstore/globals.hh | 27
-rw-r--r--  src/libstore/legacy-ssh-store.cc | 4
-rw-r--r--  src/libstore/lock.cc | 2
-rw-r--r--  src/libstore/remote-store.cc | 2
-rw-r--r--  src/libstore/sqlite.cc | 15
-rw-r--r--  src/libstore/sqlite.hh | 11
-rw-r--r--  src/libstore/store-api.cc | 8
-rw-r--r--  src/libstore/store-api.hh | 4
-rw-r--r--  src/libutil/archive.cc | 4
-rw-r--r--  src/libutil/archive.hh | 4
-rw-r--r--  src/libutil/canon-path.cc | 103
-rw-r--r--  src/libutil/canon-path.hh | 173
-rw-r--r--  src/libutil/error.cc | 170
-rw-r--r--  src/libutil/error.hh | 58
-rw-r--r--  src/libutil/fmt.hh | 2
-rw-r--r--  src/libutil/logging.cc | 44
-rw-r--r--  src/libutil/logging.hh | 8
-rw-r--r--  src/libutil/ref.hh | 5
-rw-r--r--  src/libutil/serialise.cc | 2
-rw-r--r--  src/libutil/serialise.hh | 14
-rw-r--r--  src/libutil/tests/canon-path.cc | 155
-rw-r--r--  src/libutil/util.cc | 15
-rw-r--r--  src/libutil/util.hh | 27
-rw-r--r--  src/nix-env/nix-env.cc | 6
-rw-r--r--  src/nix-store/nix-store.cc | 16
-rw-r--r--  src/nix/daemon.cc | 2
-rw-r--r--  src/nix/develop.cc | 2
-rw-r--r--  src/nix/flake-update.md | 2
-rw-r--r--  src/nix/flake.cc | 4
-rw-r--r--  src/nix/flake.md | 73
-rw-r--r--  src/nix/nix.md | 16
-rw-r--r--  src/nix/profile-list.md | 6
-rw-r--r--  src/nix/profile-upgrade.md | 6
-rw-r--r--  src/nix/profile.md | 3
-rw-r--r--  src/nix/registry.cc | 8
-rw-r--r--  tests/build.sh | 54
-rw-r--r--  tests/check.nix | 2
-rw-r--r--  tests/check.sh | 26
-rw-r--r--  tests/eval.sh | 4
-rw-r--r--  tests/fetchGit.sh | 1
-rw-r--r--  tests/flakes/absolute-paths.sh | 17
-rw-r--r--  tests/flakes/flakes.sh | 33
-rw-r--r--  tests/flakes/unlocked-override.sh | 30
-rwxr-xr-x  tests/function-trace.sh | 30
-rw-r--r--  tests/impure-derivations.sh | 1
-rw-r--r--  tests/lang.sh | 5
-rw-r--r--  tests/lang/eval-okay-closure.exp | 1
-rw-r--r--  tests/lang/eval-okay-functionargs.exp | 1
-rw-r--r--  tests/lang/eval-okay-path-antiquotation.exp | 1
-rw-r--r--  tests/lang/eval-okay-path.exp | 1
-rw-r--r--  tests/local.mk | 5
-rw-r--r--  tests/nix_path.sh | 3
-rw-r--r--  tests/restricted.sh | 2
-rw-r--r--  tests/toString-path.sh | 8
119 files changed, 1631 insertions, 987 deletions
diff --git a/.github/ISSUE_TEMPLATE/installer.md b/.github/ISSUE_TEMPLATE/installer.md
new file mode 100644
index 000000000..3768a49c9
--- /dev/null
+++ b/.github/ISSUE_TEMPLATE/installer.md
@@ -0,0 +1,36 @@
+---
+name: Installer issue
+about: Report problems with installation
+title: ''
+labels: installer
+assignees: ''
+
+---
+
+## Platform
+
+<!-- select the platform on which you tried to install Nix -->
+
+- [ ] Linux: <!-- state your distribution, e.g. Arch Linux, Ubuntu, ... -->
+- [ ] macOS
+- [ ] WSL
+
+## Additional information
+
+<!-- state special circumstances on your system or additional steps you have taken prior to installation -->
+
+## Output
+
+<details><summary>Output</summary>
+
+```log
+
+<!-- paste console output here and remove this comment -->
+
+```
+
+</details>
+
+## Priorities
+
+Add :+1: to [issues you find important](https://github.com/NixOS/nix/issues?q=is%3Aissue+is%3Aopen+sort%3Areactions-%2B1-desc).
diff --git a/.github/assign-by-files.yml b/.github/assign-by-files.yml
deleted file mode 100644
index f13b71776..000000000
--- a/.github/assign-by-files.yml
+++ /dev/null
@@ -1,5 +0,0 @@
----
-# This files is used by https://github.com/marketplace/actions/auto-assign-reviewer-by-files
-# to assign maintainers
-"doc/**/*":
- - fricklerhandwerk
diff --git a/.github/workflows/assign-reviewer.yml b/.github/workflows/assign-reviewer.yml
deleted file mode 100644
index 4371cbff4..000000000
--- a/.github/workflows/assign-reviewer.yml
+++ /dev/null
@@ -1,12 +0,0 @@
-name: "Auto Assign"
-on:
- - pull_request
-
-jobs:
- assign_reviewer:
- runs-on: ubuntu-latest
- steps:
- - uses: shufo/auto-assign-reviewer-by-files@v1.1.4
- with:
- config: ".github/assign-by-files.yml"
- token: ${{ secrets.GITHUB_TOKEN }}
diff --git a/.version b/.version
index 3ca2c9b2c..fb2c0766b 100644
--- a/.version
+++ b/.version
@@ -1 +1 @@
-2.12.0
\ No newline at end of file
+2.13.0
diff --git a/configure.ac b/configure.ac
index 64fa12fc7..c0e989d85 100644
--- a/configure.ac
+++ b/configure.ac
@@ -177,7 +177,7 @@ fi
PKG_CHECK_MODULES([OPENSSL], [libcrypto], [CXXFLAGS="$OPENSSL_CFLAGS $CXXFLAGS"])
-# Checks for libarchive
+# Look for libarchive.
PKG_CHECK_MODULES([LIBARCHIVE], [libarchive >= 3.1.2], [CXXFLAGS="$LIBARCHIVE_CFLAGS $CXXFLAGS"])
# Workaround until https://github.com/libarchive/libarchive/issues/1446 is fixed
if test "$shared" != yes; then
diff --git a/doc/manual/generate-options.nix b/doc/manual/generate-options.nix
index 680b709c8..a4ec36477 100644
--- a/doc/manual/generate-options.nix
+++ b/doc/manual/generate-options.nix
@@ -1,29 +1,41 @@
-with builtins;
-with import ./utils.nix;
+let
+ inherit (builtins) attrNames concatStringsSep isAttrs isBool;
+ inherit (import ./utils.nix) concatStrings squash splitLines;
+in
-options:
+optionsInfo:
+let
+ showOption = name:
+ let
+ inherit (optionsInfo.${name}) description documentDefault defaultValue aliases;
+ result = squash ''
+ - <span id="conf-${name}">[`${name}`](#conf-${name})</span>
-concatStrings (map
- (name:
- let option = options.${name}; in
- " - [`${name}`](#conf-${name})"
- + "<p id=\"conf-${name}\"></p>\n\n"
- + concatStrings (map (s: " ${s}\n") (splitLines option.description)) + "\n\n"
- + (if option.documentDefault
- then " **Default:** " + (
- if option.defaultValue == "" || option.defaultValue == []
- then "*empty*"
- else if isBool option.defaultValue
- then (if option.defaultValue then "`true`" else "`false`")
- else
- # n.b. a StringMap value type is specified as a string, but
- # this shows the value type. The empty stringmap is "null" in
- # JSON, but that converts to "{ }" here.
- (if isAttrs option.defaultValue then "`\"\"`"
- else "`" + toString option.defaultValue + "`")) + "\n\n"
- else " **Default:** *machine-specific*\n")
- + (if option.aliases != []
- then " **Deprecated alias:** " + (concatStringsSep ", " (map (s: "`${s}`") option.aliases)) + "\n\n"
- else "")
- )
- (attrNames options))
+ ${indent " " body}
+ '';
+ # separate body to cleanly handle indentation
+ body = ''
+ ${description}
+
+ **Default:** ${showDefault documentDefault defaultValue}
+
+ ${showAliases aliases}
+ '';
+ showDefault = documentDefault: defaultValue:
+ if documentDefault then
+ # a StringMap value type is specified as a string, but
+ # this shows the value type. The empty stringmap is `null` in
+ # JSON, but that converts to `{ }` here.
+ if defaultValue == "" || defaultValue == [] || isAttrs defaultValue
+ then "*empty*"
+ else if isBool defaultValue then
+ if defaultValue then "`true`" else "`false`"
+ else "`${toString defaultValue}`"
+ else "*machine-specific*";
+ showAliases = aliases:
+ if aliases == [] then "" else
+ "**Deprecated alias:** ${(concatStringsSep ", " (map (s: "`${s}`") aliases))}";
+ indent = prefix: s:
+ concatStringsSep "\n" (map (x: if x == "" then x else "${prefix}${x}") (splitLines s));
+ in result;
+in concatStrings (map showOption (attrNames optionsInfo))
diff --git a/doc/manual/local.mk b/doc/manual/local.mk
index 486dbd7a2..c0f69e00f 100644
--- a/doc/manual/local.mk
+++ b/doc/manual/local.mk
@@ -29,19 +29,19 @@ nix-eval = $(dummy-env) $(bindir)/nix eval --experimental-features nix-command -
$(d)/%.1: $(d)/src/command-ref/%.md
@printf "Title: %s\n\n" "$$(basename $@ .1)" > $^.tmp
@cat $^ >> $^.tmp
- $(trace-gen) lowdown -sT man -M section=1 $^.tmp -o $@
+ $(trace-gen) lowdown -sT man --nroff-nolinks -M section=1 $^.tmp -o $@
@rm $^.tmp
$(d)/%.8: $(d)/src/command-ref/%.md
@printf "Title: %s\n\n" "$$(basename $@ .8)" > $^.tmp
@cat $^ >> $^.tmp
- $(trace-gen) lowdown -sT man -M section=8 $^.tmp -o $@
+ $(trace-gen) lowdown -sT man --nroff-nolinks -M section=8 $^.tmp -o $@
@rm $^.tmp
$(d)/nix.conf.5: $(d)/src/command-ref/conf-file.md
@printf "Title: %s\n\n" "$$(basename $@ .5)" > $^.tmp
@cat $^ >> $^.tmp
- $(trace-gen) lowdown -sT man -M section=5 $^.tmp -o $@
+ $(trace-gen) lowdown -sT man --nroff-nolinks -M section=5 $^.tmp -o $@
@rm $^.tmp
$(d)/src/SUMMARY.md: $(d)/src/SUMMARY.md.in $(d)/src/command-ref/new-cli
diff --git a/doc/manual/redirects.js b/doc/manual/redirects.js
index 2e77edd0f..69f75d3a0 100644
--- a/doc/manual/redirects.js
+++ b/doc/manual/redirects.js
@@ -35,7 +35,6 @@ const redirects = {
"conf-build-max-jobs": "command-ref/conf-file.html#conf-build-max-jobs",
"conf-build-max-log-size": "command-ref/conf-file.html#conf-build-max-log-size",
"conf-build-max-silent-time": "command-ref/conf-file.html#conf-build-max-silent-time",
- "conf-build-repeat": "command-ref/conf-file.html#conf-build-repeat",
"conf-build-timeout": "command-ref/conf-file.html#conf-build-timeout",
"conf-build-use-chroot": "command-ref/conf-file.html#conf-build-use-chroot",
"conf-build-use-sandbox": "command-ref/conf-file.html#conf-build-use-sandbox",
@@ -47,7 +46,6 @@ const redirects = {
"conf-connect-timeout": "command-ref/conf-file.html#conf-connect-timeout",
"conf-cores": "command-ref/conf-file.html#conf-cores",
"conf-diff-hook": "command-ref/conf-file.html#conf-diff-hook",
- "conf-enforce-determinism": "command-ref/conf-file.html#conf-enforce-determinism",
"conf-env-keep-derivations": "command-ref/conf-file.html#conf-env-keep-derivations",
"conf-extra-binary-caches": "command-ref/conf-file.html#conf-extra-binary-caches",
"conf-extra-platforms": "command-ref/conf-file.html#conf-extra-platforms",
@@ -74,7 +72,6 @@ const redirects = {
"conf-plugin-files": "command-ref/conf-file.html#conf-plugin-files",
"conf-post-build-hook": "command-ref/conf-file.html#conf-post-build-hook",
"conf-pre-build-hook": "command-ref/conf-file.html#conf-pre-build-hook",
- "conf-repeat": "command-ref/conf-file.html#conf-repeat",
"conf-require-sigs": "command-ref/conf-file.html#conf-require-sigs",
"conf-restrict-eval": "command-ref/conf-file.html#conf-restrict-eval",
"conf-run-diff-hook": "command-ref/conf-file.html#conf-run-diff-hook",
diff --git a/doc/manual/src/SUMMARY.md.in b/doc/manual/src/SUMMARY.md.in
index 908e7e3d9..6a514fa2c 100644
--- a/doc/manual/src/SUMMARY.md.in
+++ b/doc/manual/src/SUMMARY.md.in
@@ -65,6 +65,7 @@
- [CLI guideline](contributing/cli-guideline.md)
- [Release Notes](release-notes/release-notes.md)
- [Release X.Y (202?-??-??)](release-notes/rl-next.md)
+ - [Release 2.12 (2022-12-06)](release-notes/rl-2.12.md)
- [Release 2.11 (2022-08-25)](release-notes/rl-2.11.md)
- [Release 2.10 (2022-07-11)](release-notes/rl-2.10.md)
- [Release 2.9 (2022-05-30)](release-notes/rl-2.9.md)
diff --git a/doc/manual/src/advanced-topics/diff-hook.md b/doc/manual/src/advanced-topics/diff-hook.md
index 161e64b2a..4a742c160 100644
--- a/doc/manual/src/advanced-topics/diff-hook.md
+++ b/doc/manual/src/advanced-topics/diff-hook.md
@@ -121,37 +121,3 @@ error:
are not valid, so checking is not possible
Run the build without `--check`, and then try with `--check` again.
-
-# Automatic and Optionally Enforced Determinism Verification
-
-Automatically verify every build at build time by executing the build
-multiple times.
-
-Setting `repeat` and `enforce-determinism` in your `nix.conf` permits
-the automated verification of every build Nix performs.
-
-The following configuration will run each build three times, and will
-require the build to be deterministic:
-
- enforce-determinism = true
- repeat = 2
-
-Setting `enforce-determinism` to false as in the following
-configuration will run the build multiple times, execute the build
-hook, but will allow the build to succeed even if it does not build
-reproducibly:
-
- enforce-determinism = false
- repeat = 1
-
-An example output of this configuration:
-
-```console
-$ nix-build ./test.nix -A unstable
-this derivation will be built:
- /nix/store/ch6llwpr2h8c3jmnf3f2ghkhx59aa97f-unstable.drv
-building '/nix/store/ch6llwpr2h8c3jmnf3f2ghkhx59aa97f-unstable.drv' (round 1/2)...
-building '/nix/store/ch6llwpr2h8c3jmnf3f2ghkhx59aa97f-unstable.drv' (round 2/2)...
-output '/nix/store/6xg356v9gl03hpbbg8gws77n19qanh02-unstable' of '/nix/store/ch6llwpr2h8c3jmnf3f2ghkhx59aa97f-unstable.drv' differs from '/nix/store/6xg356v9gl03hpbbg8gws77n19qanh02-unstable.check' from previous round
-/nix/store/6xg356v9gl03hpbbg8gws77n19qanh02-unstable
-```
diff --git a/doc/manual/src/advanced-topics/post-build-hook.md b/doc/manual/src/advanced-topics/post-build-hook.md
index fcb52d878..1479cc3a4 100644
--- a/doc/manual/src/advanced-topics/post-build-hook.md
+++ b/doc/manual/src/advanced-topics/post-build-hook.md
@@ -33,12 +33,17 @@ distribute the public key for verifying the authenticity of the paths.
example-nix-cache-1:1/cKDz3QCCOmwcztD2eV6Coggp6rqc9DGjWv7C0G+rM=
```
-Then, add the public key and the cache URL to your `nix.conf`'s
-`trusted-public-keys` and `substituters` options:
+Then update [`nix.conf`](../command-ref/conf-file.md) on any machine that will access the cache.
+Add the cache URL to [`substituters`](../command-ref/conf-file.md#conf-substituters) and the public key to [`trusted-public-keys`](../command-ref/conf-file.md#conf-trusted-public-keys):
substituters = https://cache.nixos.org/ s3://example-nix-cache
trusted-public-keys = cache.nixos.org-1:6NCHdD59X431o0gWypbMrAURkbJ16ZPMQFGspcDShjY= example-nix-cache-1:1/cKDz3QCCOmwcztD2eV6Coggp6rqc9DGjWv7C0G+rM=
+Machines that build for the cache must sign derivations using the private key.
+On those machines, add the path to the key file to the [`secret-key-files`](../command-ref/conf-file.md#conf-secret-key-files) field in their [`nix.conf`](../command-ref/conf-file.md):
+
+ secret-key-files = /etc/nix/key.private
+
We will restart the Nix daemon in a later step.
# Implementing the build hook
@@ -52,14 +57,12 @@ set -eu
set -f # disable globbing
export IFS=' '
-echo "Signing paths" $OUT_PATHS
-nix store sign --key-file /etc/nix/key.private $OUT_PATHS
echo "Uploading paths" $OUT_PATHS
-exec nix copy --to 's3://example-nix-cache' $OUT_PATHS
+exec nix copy --to "s3://example-nix-cache" $OUT_PATHS
```
> **Note**
->
+>
> The `$OUT_PATHS` variable is a space-separated list of Nix store
> paths. In this case, we expect and want the shell to perform word
> splitting to make each output path its own argument to `nix
diff --git a/doc/manual/src/command-ref/env-common.md b/doc/manual/src/command-ref/env-common.md
index 3f3eb6915..5845bdc43 100644
--- a/doc/manual/src/command-ref/env-common.md
+++ b/doc/manual/src/command-ref/env-common.md
@@ -7,42 +7,11 @@ Most Nix commands interpret the following environment variables:
`nix-shell`. It can have the values `pure` or `impure`.
- [`NIX_PATH`]{#env-NIX_PATH}\
- A colon-separated list of directories used to look up Nix
- expressions enclosed in angle brackets (i.e., `<path>`). For
- instance, the value
-
- /home/eelco/Dev:/etc/nixos
-
- will cause Nix to look for paths relative to `/home/eelco/Dev` and
- `/etc/nixos`, in this order. It is also possible to match paths
- against a prefix. For example, the value
-
- nixpkgs=/home/eelco/Dev/nixpkgs-branch:/etc/nixos
-
- will cause Nix to search for `<nixpkgs/path>` in
- `/home/eelco/Dev/nixpkgs-branch/path` and `/etc/nixos/nixpkgs/path`.
-
- If a path in the Nix search path starts with `http://` or
- `https://`, it is interpreted as the URL of a tarball that will be
- downloaded and unpacked to a temporary location. The tarball must
- consist of a single top-level directory. For example, setting
- `NIX_PATH` to
-
- nixpkgs=https://github.com/NixOS/nixpkgs/archive/master.tar.gz
-
- tells Nix to download and use the current contents of the
- `master` branch in the `nixpkgs` repository.
-
- The URLs of the tarballs from the official nixos.org channels (see
- [the manual for `nix-channel`](nix-channel.md)) can be abbreviated
- as `channel:<channel-name>`. For instance, the following two
- values of `NIX_PATH` are equivalent:
-
- nixpkgs=channel:nixos-21.05
- nixpkgs=https://nixos.org/channels/nixos-21.05/nixexprs.tar.xz
-
- The Nix search path can also be extended using the `-I` option to
- many Nix commands, which takes precedence over `NIX_PATH`.
+ A colon-separated list of directories used to look up the location of Nix
+ expressions using [paths](../language/values.md#type-path)
+ enclosed in angle brackets (i.e., `<path>`),
+ e.g. `/home/eelco/Dev:/etc/nixos`. It can be extended using the
+ [`-I` option](./opt-common#opt-I).
- [`NIX_IGNORE_SYMLINK_STORE`]{#env-NIX_IGNORE_SYMLINK_STORE}\
Normally, the Nix store directory (typically `/nix/store`) is not
diff --git a/doc/manual/src/command-ref/nix-build.md b/doc/manual/src/command-ref/nix-build.md
index 49c6f3f55..3a47feaae 100644
--- a/doc/manual/src/command-ref/nix-build.md
+++ b/doc/manual/src/command-ref/nix-build.md
@@ -53,16 +53,18 @@ All options not listed here are passed to `nix-store
--realise`, except for `--arg` and `--attr` / `-A` which are passed to
`nix-instantiate`.
- - [`--no-out-link`]{#opt-no-out-link}\
+ - <span id="opt-no-out-link">[`--no-out-link`](#opt-no-out-link)</span>
+
Do not create a symlink to the output path. Note that as a result
the output does not become a root of the garbage collector, and so
- might be deleted by `nix-store
- --gc`.
+ might be deleted by `nix-store --gc`.
+
+ - <span id="opt-dry-run">[`--dry-run`](#opt-dry-run)</span>
- - [`--dry-run`]{#opt-dry-run}\
Show what store paths would be built or downloaded.
- - [`--out-link`]{#opt-out-link} / `-o` *outlink*\
+ - <span id="opt-out-link">[`--out-link`](#opt-out-link)</span> / `-o` *outlink*
+
Change the name of the symlink to the output path created from
`result` to *outlink*.
diff --git a/doc/manual/src/command-ref/nix-store.md b/doc/manual/src/command-ref/nix-store.md
index 1251888e9..b712a7463 100644
--- a/doc/manual/src/command-ref/nix-store.md
+++ b/doc/manual/src/command-ref/nix-store.md
@@ -22,7 +22,8 @@ This section lists the options that are common to all operations. These
options are allowed for every subcommand, though they may not always
have an effect.
- - [`--add-root`]{#opt-add-root} *path*\
+ - <span id="opt-add-root">[`--add-root`](#opt-add-root)</span> *path*
+
Causes the result of a realisation (`--realise` and
`--force-realise`) to be registered as a root of the garbage
collector. *path* will be created as a symlink to the resulting
@@ -104,10 +105,6 @@ The following flags are available:
previous build, the new output path is left in
`/nix/store/name.check.`
- See also the `build-repeat` configuration option, which repeats a
- derivation a number of times and prevents its outputs from being
- registered as “valid” in the Nix store unless they are identical.
-
Special exit codes:
- `100`\
diff --git a/doc/manual/src/installation/installing-binary.md b/doc/manual/src/installation/installing-binary.md
index 31faeadc2..a9378681d 100644
--- a/doc/manual/src/installation/installing-binary.md
+++ b/doc/manual/src/installation/installing-binary.md
@@ -88,6 +88,29 @@ extension. The installer will also create `/etc/profile.d/nix.sh`.
### Linux
+If you are on Linux with systemd:
+
+1. Remove the Nix daemon service:
+
+ ```console
+ sudo systemctl stop nix-daemon.service
+ sudo systemctl disable nix-daemon.socket nix-daemon.service
+ sudo systemctl daemon-reload
+ ```
+
+1. Remove systemd service files:
+
+ ```console
+ sudo rm /etc/systemd/system/nix-daemon.service /etc/systemd/system/nix-daemon.socket
+ ```
+
+1. The installer script uses systemd-tmpfiles to create the socket directory.
+ You may also want to remove the configuration for that:
+
+ ```console
+ sudo rm /etc/tmpfiles.d/nix-daemon.conf
+ ```
+
Remove files created by Nix:
```console
@@ -103,16 +126,6 @@ done
sudo groupdel 30000
```
-If you are on Linux with systemd, remove the Nix daemon service:
-
-```console
-sudo systemctl stop nix-daemon.socket
-sudo systemctl stop nix-daemon.service
-sudo systemctl disable nix-daemon.socket
-sudo systemctl disable nix-daemon.service
-sudo systemctl daemon-reload
-```
-
There may also be references to Nix in
- `/etc/profile`
diff --git a/doc/manual/src/release-notes/rl-2.12.md b/doc/manual/src/release-notes/rl-2.12.md
new file mode 100644
index 000000000..e2045d7bf
--- /dev/null
+++ b/doc/manual/src/release-notes/rl-2.12.md
@@ -0,0 +1,43 @@
+# Release 2.12 (2022-12-06)
+
+* On Linux, Nix can now run builds in a user namespace where they run
+ as root (UID 0) and have 65,536 UIDs available.
+ <!-- FIXME: move this to its own section about system features -->
+ This is primarily useful for running containers such as `systemd-nspawn`
+ inside a Nix build. For an example, see [`tests/systemd-nspawn/nix`][nspawn].
+
+ [nspawn]: https://github.com/NixOS/nix/blob/67bcb99700a0da1395fa063d7c6586740b304598/tests/systemd-nspawn.nix.
+
+ A build can enable this by setting the derivation attribute:
+
+ ```
+ requiredSystemFeatures = [ "uid-range" ];
+ ```
+
+ The `uid-range` [system feature] requires the [`auto-allocate-uids`]
+ setting to be enabled.
+
+ [system feature]: ../command-ref/conf-file.md#conf-system-features
+
+* Nix can now automatically pick UIDs for builds, removing the need to
+ create `nixbld*` user accounts. See [`auto-allocate-uids`].
+
+ [`auto-allocate-uids`]: ../command-ref/conf-file.md#conf-auto-allocate-uids
+
+* On Linux, Nix has experimental support for running builds inside a
+ cgroup. See
+ [`use-cgroups`](../command-ref/conf-file.md#conf-use-cgroups).
+
+* `<nix/fetchurl.nix>` now accepts an additional argument `impure` which
+ defaults to `false`. If it is set to `true`, the `hash` and `sha256`
+ arguments will be ignored and the resulting derivation will have
+ `__impure` set to `true`, making it an impure derivation.
+
+* If `builtins.readFile` is called on a file with context, then only
+ the parts of the context that appear in the content of the file are
+ retained. This avoids a lot of spurious errors where strings end up
+ having a context just because they are read from a store path
+ ([#7260](https://github.com/NixOS/nix/pull/7260)).
+
+* `nix build --json` now prints some statistics about top-level
+ derivations, such as CPU statistics when cgroups are enabled.
diff --git a/doc/manual/src/release-notes/rl-next.md b/doc/manual/src/release-notes/rl-next.md
index bf51aa1f7..6c169bd09 100644
--- a/doc/manual/src/release-notes/rl-next.md
+++ b/doc/manual/src/release-notes/rl-next.md
@@ -1,48 +1,27 @@
# Release X.Y (202?-??-??)
-* `<nix/fetchurl.nix>` now accepts an additional argument `impure` which
- defaults to `false`. If it is set to `true`, the `hash` and `sha256`
- arguments will be ignored and the resulting derivation will have
- `__impure` set to `true`, making it an impure derivation.
+* The `repeat` and `enforce-determinism` options have been removed
+ since they had been broken under many circumstances for a long time.
-* If `builtins.readFile` is called on a file with context, then only the parts
- of that context that appear in the content of the file are retained.
- This avoids a lot of spurious errors where some benign strings end-up having
- a context just because they are read from a store path
- ([#7260](https://github.com/NixOS/nix/pull/7260)).
+* You can now use [flake references] in the [old command line interface], e.g.
-* Nix can now automatically pick UIDs for builds, removing the need to
- create `nixbld*` user accounts.
-
- See [`auto-allocate-uids`].
-
- [`auto-allocate-uids`]: (../command-ref/conf-file.md#conf-auto-allocate-uids)
-
-* On Linux, Nix can now run builds in a user namespace where the build
- runs as root (UID 0) and has 65,536 UIDs available.
-
- <!-- FIXME: move this to its own section about system features -->
-
- This is primarily useful for running containers such as `systemd-nspawn`
- inside a Nix build. For an example, see [`tests/systemd-nspawn/nix`][nspawn].
-
- [nspawn]: https://github.com/NixOS/nix/blob/67bcb99700a0da1395fa063d7c6586740b304598/tests/systemd-nspawn.nix.
-
- A build can enable this by by setting the derivation attribute:
+ [flake references]: ../command-ref/new-cli/nix3-flake.md#flake-references
+ [old command line interface]: ../command-ref/main-commands.md
```
- requiredSystemFeatures = [ "uid-range" ];
+ # nix-build flake:nixpkgs -A hello
+ # nix-build -I nixpkgs=flake:github:NixOS/nixpkgs/nixos-22.05 \
+ '<nixpkgs>' -A hello
+ # NIX_PATH=nixpkgs=flake:nixpkgs nix-build '<nixpkgs>' -A hello
```
- The `uid-range` [system feature] requires the [`auto-allocate-uids`]
- setting to be enabled.
-
- [system feature]: (../command-ref/conf-file.md#conf-system-features),
-
-* On Linux, Nix has experimental support for running builds inside a
- cgroup.
-
- See [`use-cgroups`](../command-ref/conf-file.md#conf-use-cgroups).
-
-* `nix build --json` now prints some statistics about top-level
- derivations, such as CPU statistics when cgroups are enabled.
+* Allow explicitly selecting outputs in a store derivation installable, just like we can do with other sorts of installables.
+ For example,
+ ```shell-session
+ $ nix-build /nix/store/gzaflydcr6sb3567hap9q6srzx8ggdgg-glibc-2.33-78.drv^dev
+ ```
+ now works just as
+ ```shell-session
+ $ nix-build glibc^dev
+ ```
+ does already.
diff --git a/flake.nix b/flake.nix
index d9d01da10..4ba3f04b0 100644
--- a/flake.nix
+++ b/flake.nix
@@ -9,14 +9,14 @@
let
- version = builtins.readFile ./.version + versionSuffix;
+ officialRelease = false;
+
+ version = nixpkgs.lib.fileContents ./.version + versionSuffix;
versionSuffix =
if officialRelease
then ""
else "pre${builtins.substring 0 8 (self.lastModifiedDate or self.lastModified or "19700101")}_${self.shortRev or "dirty"}";
- officialRelease = false;
-
linux64BitSystems = [ "x86_64-linux" "aarch64-linux" ];
linuxSystems = linux64BitSystems ++ [ "i686-linux" ];
systems = linuxSystems ++ [ "x86_64-darwin" "aarch64-darwin" ];
@@ -420,6 +420,8 @@
buildCross = nixpkgs.lib.genAttrs crossSystems (crossSystem:
nixpkgs.lib.genAttrs ["x86_64-linux"] (system: self.packages.${system}."nix-${crossSystem}"));
+ buildNoGc = nixpkgs.lib.genAttrs systems (system: self.packages.${system}.nix.overrideAttrs (a: { configureFlags = (a.configureFlags or []) ++ ["--enable-gc=no"];}));
+
# Perl bindings for various platforms.
perlBindings = nixpkgs.lib.genAttrs systems (system: self.packages.${system}.nix.perl-bindings);
diff --git a/misc/launchd/org.nixos.nix-daemon.plist.in b/misc/launchd/org.nixos.nix-daemon.plist.in
index da1970f69..5fa489b20 100644
--- a/misc/launchd/org.nixos.nix-daemon.plist.in
+++ b/misc/launchd/org.nixos.nix-daemon.plist.in
@@ -28,7 +28,7 @@
<key>SoftResourceLimits</key>
<dict>
<key>NumberOfFiles</key>
- <integer>4096</integer>
+ <integer>1048576</integer>
</dict>
</dict>
</plist>
diff --git a/misc/systemd/nix-daemon.service.in b/misc/systemd/nix-daemon.service.in
index e3ac42beb..f46413630 100644
--- a/misc/systemd/nix-daemon.service.in
+++ b/misc/systemd/nix-daemon.service.in
@@ -9,7 +9,7 @@ ConditionPathIsReadWrite=@localstatedir@/nix/daemon-socket
[Service]
ExecStart=@@bindir@/nix-daemon nix-daemon --daemon
KillMode=process
-LimitNOFILE=4096
+LimitNOFILE=1048576
[Install]
WantedBy=multi-user.target
diff --git a/scripts/install-multi-user.sh b/scripts/install-multi-user.sh
index 96c0f302b..ec82e0560 100644
--- a/scripts/install-multi-user.sh
+++ b/scripts/install-multi-user.sh
@@ -97,13 +97,10 @@ is_os_darwin() {
}
contact_us() {
- echo "You can open an issue at https://github.com/nixos/nix/issues"
+ echo "You can open an issue at"
+ echo "https://github.com/NixOS/nix/issues/new?labels=installer&template=installer.md"
echo ""
- echo "Or feel free to contact the team:"
- echo " - Matrix: #nix:nixos.org"
- echo " - IRC: in #nixos on irc.libera.chat"
- echo " - twitter: @nixos_org"
- echo " - forum: https://discourse.nixos.org"
+ echo "Or get in touch with the community: https://nixos.org/community"
}
get_help() {
echo "We'd love to help if you need it."
diff --git a/src/libcmd/common-eval-args.cc b/src/libcmd/common-eval-args.cc
index 140ed3b88..0e321e5e4 100644
--- a/src/libcmd/common-eval-args.cc
+++ b/src/libcmd/common-eval-args.cc
@@ -32,7 +32,77 @@ MixEvalArgs::MixEvalArgs()
addFlag({
.longName = "include",
.shortName = 'I',
- .description = "Add *path* to the list of locations used to look up `<...>` file names.",
+ .description = R"(
+ Add *path* to the Nix search path. The Nix search path is
+ initialized from the colon-separated [`NIX_PATH`](./env-common.md#env-NIX_PATH) environment
+ variable, and is used to look up the location of Nix expressions using [paths](../language/values.md#type-path) enclosed in angle
+ brackets (i.e., `<nixpkgs>`).
+
+ For instance, passing
+
+ ```
+ -I /home/eelco/Dev
+ -I /etc/nixos
+ ```
+
+ will cause Nix to look for paths relative to `/home/eelco/Dev` and
+ `/etc/nixos`, in that order. This is equivalent to setting the
+ `NIX_PATH` environment variable to
+
+ ```
+ /home/eelco/Dev:/etc/nixos
+ ```
+
+ It is also possible to match paths against a prefix. For example,
+ passing
+
+ ```
+ -I nixpkgs=/home/eelco/Dev/nixpkgs-branch
+ -I /etc/nixos
+ ```
+
+ will cause Nix to search for `<nixpkgs/path>` in
+ `/home/eelco/Dev/nixpkgs-branch/path` and `/etc/nixos/nixpkgs/path`.
+
+ If a path in the Nix search path starts with `http://` or `https://`,
+ it is interpreted as the URL of a tarball that will be downloaded and
+ unpacked to a temporary location. The tarball must consist of a single
+ top-level directory. For example, passing
+
+ ```
+ -I nixpkgs=https://github.com/NixOS/nixpkgs/archive/master.tar.gz
+ ```
+
+ tells Nix to download and use the current contents of the `master`
+ branch in the `nixpkgs` repository.
+
+ The URLs of the tarballs from the official `nixos.org` channels
+ (see [the manual page for `nix-channel`](../nix-channel.md)) can be
+ abbreviated as `channel:<channel-name>`. For instance, the
+ following two flags are equivalent:
+
+ ```
+ -I nixpkgs=channel:nixos-21.05
+ -I nixpkgs=https://nixos.org/channels/nixos-21.05/nixexprs.tar.xz
+ ```
+
+ You can also fetch source trees using [flake URLs](./nix3-flake.md#url-like-syntax) and add them to the
+ search path. For instance,
+
+ ```
+ -I nixpkgs=flake:nixpkgs
+ ```
+
+ specifies that the prefix `nixpkgs` shall refer to the source tree
+ downloaded from the `nixpkgs` entry in the flake registry. Similarly,
+
+ ```
+ -I nixpkgs=flake:github:NixOS/nixpkgs/nixos-22.05
+ ```
+
+ makes `<nixpkgs>` refer to a particular branch of the
+ `NixOS/nixpkgs` repository on GitHub.
+ )",
.category = category,
.labels = {"path"},
.handler = {[&](std::string s) { searchPath.push_back(s); }}
@@ -89,14 +159,25 @@ Bindings * MixEvalArgs::getAutoArgs(EvalState & state)
Path lookupFileArg(EvalState & state, std::string_view s)
{
- if (isUri(s)) {
- return state.store->toRealPath(
- fetchers::downloadTarball(
- state.store, resolveUri(s), "source", false).first.storePath);
- } else if (s.size() > 2 && s.at(0) == '<' && s.at(s.size() - 1) == '>') {
+ if (EvalSettings::isPseudoUrl(s)) {
+ auto storePath = fetchers::downloadTarball(
+ state.store, EvalSettings::resolvePseudoUrl(s), "source", false).first.storePath;
+ return state.store->toRealPath(storePath);
+ }
+
+ else if (hasPrefix(s, "flake:")) {
+ settings.requireExperimentalFeature(Xp::Flakes);
+ auto flakeRef = parseFlakeRef(std::string(s.substr(6)), {}, true, false);
+ auto storePath = flakeRef.resolve(state.store).fetchTree(state.store).first.storePath;
+ return state.store->toRealPath(storePath);
+ }
+
+ else if (s.size() > 2 && s.at(0) == '<' && s.at(s.size() - 1) == '>') {
Path p(s.substr(1, s.size() - 2));
return state.findFile(p);
- } else
+ }
+
+ else
return absPath(std::string(s));
}
diff --git a/src/libcmd/installables.cc b/src/libcmd/installables.cc
index dbe4a449d..5cdd3e12c 100644
--- a/src/libcmd/installables.cc
+++ b/src/libcmd/installables.cc
@@ -399,44 +399,56 @@ static StorePath getDeriver(
struct InstallableStorePath : Installable
{
ref<Store> store;
- StorePath storePath;
+ DerivedPath req;
InstallableStorePath(ref<Store> store, StorePath && storePath)
- : store(store), storePath(std::move(storePath)) { }
+ : store(store),
+ req(storePath.isDerivation()
+ ? (DerivedPath) DerivedPath::Built {
+ .drvPath = std::move(storePath),
+ .outputs = {},
+ }
+ : (DerivedPath) DerivedPath::Opaque {
+ .path = std::move(storePath),
+ })
+ { }
+
+ InstallableStorePath(ref<Store> store, DerivedPath && req)
+ : store(store), req(std::move(req))
+ { }
- std::string what() const override { return store->printStorePath(storePath); }
+ std::string what() const override
+ {
+ return req.to_string(*store);
+ }
DerivedPaths toDerivedPaths() override
{
- if (storePath.isDerivation()) {
- auto drv = store->readDerivation(storePath);
- return {
- DerivedPath::Built {
- .drvPath = storePath,
- .outputs = drv.outputNames(),
- }
- };
- } else {
- return {
- DerivedPath::Opaque {
- .path = storePath,
- }
- };
- }
+ return { req };
}
StorePathSet toDrvPaths(ref<Store> store) override
{
- if (storePath.isDerivation()) {
- return {storePath};
- } else {
- return {getDeriver(store, *this, storePath)};
- }
+ return std::visit(overloaded {
+ [&](const DerivedPath::Built & bfd) -> StorePathSet {
+ return { bfd.drvPath };
+ },
+ [&](const DerivedPath::Opaque & bo) -> StorePathSet {
+ return { getDeriver(store, *this, bo.path) };
+ },
+ }, req.raw());
}
std::optional<StorePath> getStorePath() override
{
- return storePath;
+ return std::visit(overloaded {
+ [&](const DerivedPath::Built & bfd) {
+ return bfd.drvPath;
+ },
+ [&](const DerivedPath::Opaque & bo) {
+ return bo.path;
+ },
+ }, req.raw());
}
};
@@ -781,7 +793,8 @@ std::vector<std::shared_ptr<Installable>> SourceExprCommand::parseInstallables(
if (file == "-") {
auto e = state->parseStdin();
state->eval(e, *vFile);
- } else if (file)
+ }
+ else if (file)
state->evalFile(lookupFileArg(*state, *file), *vFile);
else {
auto e = state->parseExprFromString(*expr, absPath("."));
@@ -802,7 +815,22 @@ std::vector<std::shared_ptr<Installable>> SourceExprCommand::parseInstallables(
for (auto & s : ss) {
std::exception_ptr ex;
- if (s.find('/') != std::string::npos) {
+ auto found = s.rfind('^');
+ if (found != std::string::npos) {
+ try {
+ result.push_back(std::make_shared<InstallableStorePath>(
+ store,
+ DerivedPath::Built::parse(*store, s.substr(0, found), s.substr(found + 1))));
+ continue;
+ } catch (BadStorePath &) {
+ } catch (...) {
+ if (!ex)
+ ex = std::current_exception();
+ }
+ }
+
+ found = s.find('/');
+ if (found != std::string::npos) {
try {
result.push_back(std::make_shared<InstallableStorePath>(store, store->followLinksToStorePath(s)));
continue;
diff --git a/src/libcmd/repl.cc b/src/libcmd/repl.cc
index 557952277..5400fcd69 100644
--- a/src/libcmd/repl.cc
+++ b/src/libcmd/repl.cc
@@ -215,17 +215,15 @@ static std::ostream & showDebugTrace(std::ostream & out, const PosTable & positi
out << dt.hint.str() << "\n";
// prefer direct pos, but if noPos then try the expr.
- auto pos = *dt.pos
- ? *dt.pos
- : positions[dt.expr.getPos() ? dt.expr.getPos() : noPos];
+ auto pos = dt.pos
+ ? dt.pos
+ : static_cast<std::shared_ptr<AbstractPos>>(positions[dt.expr.getPos() ? dt.expr.getPos() : noPos]);
if (pos) {
- printAtPos(pos, out);
-
- auto loc = getCodeLines(pos);
- if (loc.has_value()) {
+ out << pos;
+ if (auto loc = pos->getCodeLines()) {
out << "\n";
- printCodeLines(out, "", pos, *loc);
+ printCodeLines(out, "", *pos, *loc);
out << "\n";
}
}
@@ -589,15 +587,17 @@ bool NixRepl::processLine(std::string line)
Value v;
evalString(arg, v);
- const auto [file, line] = [&] () -> std::pair<std::string, uint32_t> {
+ const auto [path, line] = [&] () -> std::pair<Path, uint32_t> {
if (v.type() == nPath || v.type() == nString) {
PathSet context;
- auto filename = state->coerceToString(noPos, v, context).toOwned();
- state->symbols.create(filename);
- return {filename, 0};
+ auto path = state->coerceToPath(noPos, v, context);
+ return {path, 0};
} else if (v.isLambda()) {
auto pos = state->positions[v.lambda.fun->pos];
- return {pos.file, pos.line};
+ if (auto path = std::get_if<Path>(&pos.origin))
+ return {*path, pos.line};
+ else
+ throw Error("'%s' cannot be shown in an editor", pos);
} else {
// assume it's a derivation
return findPackageFilename(*state, v, arg);
@@ -605,7 +605,7 @@ bool NixRepl::processLine(std::string line)
}();
// Open in EDITOR
- auto args = editorFor(file, line);
+ auto args = editorFor(path, line);
auto editor = args.front();
args.pop_front();
@@ -787,7 +787,7 @@ void NixRepl::loadFlake(const std::string & flakeRefS)
flake::LockFlags {
.updateLockFile = false,
.useRegistries = !evalSettings.pureEval,
- .allowMutable = !evalSettings.pureEval,
+ .allowUnlocked = !evalSettings.pureEval,
}),
v);
addAttrsToScope(v);
diff --git a/src/libexpr/eval-cache.cc b/src/libexpr/eval-cache.cc
index b259eec63..3e2a8665e 100644
--- a/src/libexpr/eval-cache.cc
+++ b/src/libexpr/eval-cache.cc
@@ -645,17 +645,17 @@ NixInt AttrCursor::getInt()
cachedValue = root->db->getAttr(getKey());
if (cachedValue && !std::get_if<placeholder_t>(&cachedValue->second)) {
if (auto i = std::get_if<int_t>(&cachedValue->second)) {
- debug("using cached Integer attribute '%s'", getAttrPathStr());
+ debug("using cached integer attribute '%s'", getAttrPathStr());
return i->x;
} else
- throw TypeError("'%s' is not an Integer", getAttrPathStr());
+ throw TypeError("'%s' is not an integer", getAttrPathStr());
}
}
auto & v = forceValue();
if (v.type() != nInt)
- throw TypeError("'%s' is not an Integer", getAttrPathStr());
+ throw TypeError("'%s' is not an integer", getAttrPathStr());
return v.integer;
}
diff --git a/src/libexpr/eval.cc b/src/libexpr/eval.cc
index 76a10b9f8..084ccbee2 100644
--- a/src/libexpr/eval.cc
+++ b/src/libexpr/eval.cc
@@ -45,7 +45,7 @@ static char * allocString(size_t size)
#if HAVE_BOEHMGC
t = (char *) GC_MALLOC_ATOMIC(size);
#else
- t = malloc(size);
+ t = (char *) malloc(size);
#endif
if (!t) throw std::bad_alloc();
return t;
@@ -402,7 +402,8 @@ static Strings parseNixPath(const std::string & s)
}
if (*p == ':') {
- if (isUri(std::string(start2, s.end()))) {
+ auto prefix = std::string(start2, s.end());
+ if (EvalSettings::isPseudoUrl(prefix) || hasPrefix(prefix, "flake:")) {
++p;
while (p != s.end() && *p != ':') ++p;
}
@@ -470,9 +471,6 @@ EvalState::EvalState(
#if HAVE_BOEHMGC
, valueAllocCache(std::allocate_shared<void *>(traceable_allocator<void *>(), nullptr))
, env1AllocCache(std::allocate_shared<void *>(traceable_allocator<void *>(), nullptr))
-#else
- , valueAllocCache(std::make_shared<void *>(nullptr))
- , env1AllocCache(std::make_shared<void *>(nullptr))
#endif
, baseEnv(allocEnv(128))
, staticBaseEnv{std::make_shared<StaticEnv>(false, nullptr)}
@@ -822,7 +820,7 @@ void EvalState::runDebugRepl(const Error * error, const Env & env, const Expr &
? std::make_unique<DebugTraceStacker>(
*this,
DebugTrace {
- .pos = error->info().errPos ? *error->info().errPos : positions[expr.getPos()],
+ .pos = error->info().errPos ? error->info().errPos : static_cast<std::shared_ptr<AbstractPos>>(positions[expr.getPos()]),
.expr = expr,
.env = env,
.hint = error->info().msg,
@@ -1011,7 +1009,7 @@ void EvalState::throwMissingArgumentError(const PosIdx pos, const char * s, cons
void EvalState::addErrorTrace(Error & e, const char * s, const std::string & s2) const
{
- e.addTrace(std::nullopt, s, s2);
+ e.addTrace(nullptr, s, s2);
}
void EvalState::addErrorTrace(Error & e, const PosIdx pos, const char * s, const std::string & s2) const
@@ -1023,13 +1021,13 @@ static std::unique_ptr<DebugTraceStacker> makeDebugTraceStacker(
EvalState & state,
Expr & expr,
Env & env,
- std::optional<ErrPos> pos,
+ std::shared_ptr<AbstractPos> && pos,
const char * s,
const std::string & s2)
{
return std::make_unique<DebugTraceStacker>(state,
DebugTrace {
- .pos = pos,
+ .pos = std::move(pos),
.expr = expr,
.env = env,
.hint = hintfmt(s, s2),
@@ -1135,9 +1133,9 @@ void EvalState::mkThunk_(Value & v, Expr * expr)
void EvalState::mkPos(Value & v, PosIdx p)
{
auto pos = positions[p];
- if (!pos.file.empty()) {
+ if (auto path = std::get_if<Path>(&pos.origin)) {
auto attrs = buildBindings(3);
- attrs.alloc(sFile).mkString(pos.file);
+ attrs.alloc(sFile).mkString(*path);
attrs.alloc(sLine).mkInt(pos.line);
attrs.alloc(sColumn).mkInt(pos.column);
v.mkAttrs(attrs);
@@ -1245,7 +1243,7 @@ void EvalState::cacheFile(
*this,
*e,
this->baseEnv,
- e->getPos() ? std::optional(ErrPos(positions[e->getPos()])) : std::nullopt,
+ e->getPos() ? static_cast<std::shared_ptr<AbstractPos>>(positions[e->getPos()]) : nullptr,
"while evaluating the file '%1%':", resolvedPath)
: nullptr;
@@ -1516,10 +1514,13 @@ void ExprSelect::eval(EvalState & state, Env & env, Value & v)
state.forceValue(*vAttrs, (pos2 ? pos2 : this->pos ) );
} catch (Error & e) {
- auto pos2r = state.positions[pos2];
- if (pos2 && pos2r.file != state.derivationNixPath)
- state.addErrorTrace(e, pos2, "while evaluating the attribute '%1%'",
- showAttrPath(state, env, attrPath));
+ if (pos2) {
+ auto pos2r = state.positions[pos2];
+ auto origin = std::get_if<Path>(&pos2r.origin);
+ if (!(origin && *origin == state.derivationNixPath))
+ state.addErrorTrace(e, pos2, "while evaluating the attribute '%1%'",
+ showAttrPath(state, env, attrPath));
+ }
throw;
}
@@ -1659,7 +1660,7 @@ void EvalState::callFunction(Value & fun, size_t nrArgs, Value * * args, Value &
(lambda.name
? concatStrings("'", symbols[lambda.name], "'")
: "anonymous lambda"));
- addErrorTrace(e, pos, "from call site%s", "");
+ addErrorTrace(e, pos, "while evaluating call site%s", "");
}
throw;
}
@@ -1806,7 +1807,7 @@ void EvalState::autoCallFunction(Bindings & args, Value & fun, Value & res)
Nix attempted to evaluate a function as a top level expression; in
this case it must have its arguments supplied either by default
values, or passed explicitly with '--arg' or '--argstr'. See
-https://nixos.org/manual/nix/stable/expressions/language-constructs.html#functions.)", symbols[i.name],
+https://nixos.org/manual/nix/stable/language/constructs.html#functions.)", symbols[i.name],
*fun.lambda.env, *fun.lambda.fun);
}
}
@@ -2496,7 +2497,8 @@ void EvalState::printStats()
else
obj["name"] = nullptr;
if (auto pos = positions[fun->pos]) {
- obj["file"] = (std::string_view) pos.file;
+ if (auto path = std::get_if<Path>(&pos.origin))
+ obj["file"] = *path;
obj["line"] = pos.line;
obj["column"] = pos.column;
}
@@ -2510,7 +2512,8 @@ void EvalState::printStats()
for (auto & i : attrSelects) {
json obj = json::object();
if (auto pos = positions[i.first]) {
- obj["file"] = (const std::string &) pos.file;
+ if (auto path = std::get_if<Path>(&pos.origin))
+ obj["file"] = *path;
obj["line"] = pos.line;
obj["column"] = pos.column;
}
@@ -2583,6 +2586,23 @@ Strings EvalSettings::getDefaultNixPath()
return res;
}
+bool EvalSettings::isPseudoUrl(std::string_view s)
+{
+ if (s.compare(0, 8, "channel:") == 0) return true;
+ size_t pos = s.find("://");
+ if (pos == std::string::npos) return false;
+ std::string scheme(s, 0, pos);
+ return scheme == "http" || scheme == "https" || scheme == "file" || scheme == "channel" || scheme == "git" || scheme == "s3" || scheme == "ssh";
+}
+
+std::string EvalSettings::resolvePseudoUrl(std::string_view url)
+{
+ if (hasPrefix(url, "channel:"))
+ return "https://nixos.org/channels/" + std::string(url.substr(8)) + "/nixexprs.tar.xz";
+ else
+ return std::string(url);
+}
+
EvalSettings evalSettings;
static GlobalConfig::Register rEvalSettings(&evalSettings);
diff --git a/src/libexpr/eval.hh b/src/libexpr/eval.hh
index f07f15d43..21666339b 100644
--- a/src/libexpr/eval.hh
+++ b/src/libexpr/eval.hh
@@ -78,7 +78,7 @@ struct RegexCache;
std::shared_ptr<RegexCache> makeRegexCache();
struct DebugTrace {
- std::optional<ErrPos> pos;
+ std::shared_ptr<AbstractPos> pos;
const Expr & expr;
const Env & env;
hintformat hint;
@@ -457,8 +457,12 @@ private:
friend struct ExprAttrs;
friend struct ExprLet;
- Expr * parse(char * text, size_t length, FileOrigin origin, const PathView path,
- const PathView basePath, std::shared_ptr<StaticEnv> & staticEnv);
+ Expr * parse(
+ char * text,
+ size_t length,
+ Pos::Origin origin,
+ Path basePath,
+ std::shared_ptr<StaticEnv> & staticEnv);
public:
@@ -590,6 +594,10 @@ struct EvalSettings : Config
static Strings getDefaultNixPath();
+ static bool isPseudoUrl(std::string_view s);
+
+ static std::string resolvePseudoUrl(std::string_view url);
+
Setting<bool> enableNativeCode{this, false, "allow-unsafe-native-code-during-evaluation",
"Whether builtin functions that allow executing native code should be enabled."};
diff --git a/src/libexpr/flake/config.cc b/src/libexpr/flake/config.cc
index 6df95f1f0..89ddbde7e 100644
--- a/src/libexpr/flake/config.cc
+++ b/src/libexpr/flake/config.cc
@@ -56,7 +56,7 @@ void ConfigFile::apply()
auto tlname = get(trustedList, name);
if (auto saved = tlname ? get(*tlname, valueS) : nullptr) {
trusted = *saved;
- warn("Using saved setting for '%s = %s' from ~/.local/share/nix/trusted-settings.json.", name,valueS);
+ printInfo("Using saved setting for '%s = %s' from ~/.local/share/nix/trusted-settings.json.", name, valueS);
} else {
// FIXME: filter ANSI escapes, newlines, \r, etc.
if (std::tolower(logger->ask(fmt("do you want to allow configuration setting '%s' to be set to '" ANSI_RED "%s" ANSI_NORMAL "' (y/N)?", name, valueS)).value_or('n')) == 'y') {
diff --git a/src/libexpr/flake/flake.cc b/src/libexpr/flake/flake.cc
index 119c556ac..105d32467 100644
--- a/src/libexpr/flake/flake.cc
+++ b/src/libexpr/flake/flake.cc
@@ -143,7 +143,7 @@ static FlakeInput parseFlakeInput(EvalState & state,
} catch (Error & e) {
e.addTrace(
state.positions[attr.pos],
- hintfmt("in flake attribute '%s'", state.symbols[attr.name]));
+ hintfmt("while evaluating flake attribute '%s'", state.symbols[attr.name]));
throw;
}
}
@@ -152,7 +152,7 @@ static FlakeInput parseFlakeInput(EvalState & state,
try {
input.ref = FlakeRef::fromAttrs(attrs);
} catch (Error & e) {
- e.addTrace(state.positions[pos], hintfmt("in flake input"));
+ e.addTrace(state.positions[pos], hintfmt("while evaluating flake input"));
throw;
}
else {
@@ -220,7 +220,7 @@ static Flake getFlake(
Value vInfo;
state.evalFile(flakeFile, vInfo, true); // FIXME: symlink attack
- expectType(state, nAttrs, vInfo, state.positions.add({flakeFile, foFile}, 0, 0));
+ expectType(state, nAttrs, vInfo, state.positions.add({flakeFile}, 1, 1));
if (auto description = vInfo.attrs->get(state.sDescription)) {
expectType(state, nString, *description->value, description->pos);
@@ -353,7 +353,7 @@ LockedFlake lockFlake(
std::function<void(
const FlakeInputs & flakeInputs,
- std::shared_ptr<Node> node,
+ ref<Node> node,
const InputPath & inputPathPrefix,
std::shared_ptr<const Node> oldNode,
const InputPath & lockRootPath,
@@ -362,9 +362,15 @@ LockedFlake lockFlake(
computeLocks;
computeLocks = [&](
+ /* The inputs of this node, either from flake.nix or
+ flake.lock. */
const FlakeInputs & flakeInputs,
- std::shared_ptr<Node> node,
+ /* The node whose locks are to be updated.*/
+ ref<Node> node,
+ /* The path to this node in the lock file graph. */
const InputPath & inputPathPrefix,
+ /* The old node, if any, from which locks can be
+ copied. */
std::shared_ptr<const Node> oldNode,
const InputPath & lockRootPath,
const Path & parentPath,
@@ -452,7 +458,7 @@ LockedFlake lockFlake(
/* Copy the input from the old lock since its flakeref
didn't change and there is no override from a
higher level flake. */
- auto childNode = std::make_shared<LockedNode>(
+ auto childNode = make_ref<LockedNode>(
oldLock->lockedRef, oldLock->originalRef, oldLock->isFlake);
node->inputs.insert_or_assign(id, childNode);
@@ -481,7 +487,7 @@ LockedFlake lockFlake(
.isFlake = (*lockedNode)->isFlake,
});
} else if (auto follows = std::get_if<1>(&i.second)) {
- if (! trustLock) {
+ if (!trustLock) {
// It is possible that the flake has changed,
// so we must confirm all the follows that are in the lock file are also in the flake.
auto overridePath(inputPath);
@@ -521,8 +527,8 @@ LockedFlake lockFlake(
this input. */
debug("creating new input '%s'", inputPathS);
- if (!lockFlags.allowMutable && !input.ref->input.isLocked())
- throw Error("cannot update flake input '%s' in pure mode", inputPathS);
+ if (!lockFlags.allowUnlocked && !input.ref->input.isLocked())
+ throw Error("cannot update unlocked flake input '%s' in pure mode", inputPathS);
/* Note: in case of an --override-input, we use
the *original* ref (input2.ref) for the
@@ -544,7 +550,7 @@ LockedFlake lockFlake(
auto inputFlake = getFlake(state, localRef, useRegistries, flakeCache, inputPath);
- auto childNode = std::make_shared<LockedNode>(inputFlake.lockedRef, ref);
+ auto childNode = make_ref<LockedNode>(inputFlake.lockedRef, ref);
node->inputs.insert_or_assign(id, childNode);
@@ -564,15 +570,19 @@ LockedFlake lockFlake(
oldLock
? std::dynamic_pointer_cast<const Node>(oldLock)
: LockFile::read(
- inputFlake.sourceInfo->actualPath + "/" + inputFlake.lockedRef.subdir + "/flake.lock").root,
- oldLock ? lockRootPath : inputPath, localPath, false);
+ inputFlake.sourceInfo->actualPath + "/" + inputFlake.lockedRef.subdir + "/flake.lock").root.get_ptr(),
+ oldLock ? lockRootPath : inputPath,
+ localPath,
+ false);
}
else {
auto [sourceInfo, resolvedRef, lockedRef] = fetchOrSubstituteTree(
state, *input.ref, useRegistries, flakeCache);
- node->inputs.insert_or_assign(id,
- std::make_shared<LockedNode>(lockedRef, ref, false));
+
+ auto childNode = make_ref<LockedNode>(lockedRef, ref, false);
+
+ node->inputs.insert_or_assign(id, childNode);
}
}
@@ -587,8 +597,13 @@ LockedFlake lockFlake(
auto parentPath = canonPath(flake.sourceInfo->actualPath + "/" + flake.lockedRef.subdir, true);
computeLocks(
- flake.inputs, newLockFile.root, {},
- lockFlags.recreateLockFile ? nullptr : oldLockFile.root, {}, parentPath, false);
+ flake.inputs,
+ newLockFile.root,
+ {},
+ lockFlags.recreateLockFile ? nullptr : oldLockFile.root.get_ptr(),
+ {},
+ parentPath,
+ false);
for (auto & i : lockFlags.inputOverrides)
if (!overridesUsed.count(i.first))
@@ -611,9 +626,9 @@ LockedFlake lockFlake(
if (lockFlags.writeLockFile) {
if (auto sourcePath = topRef.input.getSourcePath()) {
- if (!newLockFile.isImmutable()) {
+ if (auto unlockedInput = newLockFile.isUnlocked()) {
if (fetchSettings.warnDirty)
- warn("will not write lock file of flake '%s' because it has a mutable input", topRef);
+ warn("will not write lock file of flake '%s' because it has an unlocked input ('%s')", topRef, *unlockedInput);
} else {
if (!lockFlags.updateLockFile)
throw Error("flake '%s' requires lock file changes but they're not allowed due to '--no-update-lock-file'", topRef);
@@ -737,7 +752,7 @@ static void prim_getFlake(EvalState & state, const PosIdx pos, Value * * args, V
.updateLockFile = false,
.writeLockFile = false,
.useRegistries = !evalSettings.pureEval && fetchSettings.useRegistries,
- .allowMutable = !evalSettings.pureEval,
+ .allowUnlocked = !evalSettings.pureEval,
}),
v);
}
diff --git a/src/libexpr/flake/flake.hh b/src/libexpr/flake/flake.hh
index 524b18af1..10301d8aa 100644
--- a/src/libexpr/flake/flake.hh
+++ b/src/libexpr/flake/flake.hh
@@ -108,11 +108,11 @@ struct LockFlags
bool applyNixConfig = false;
- /* Whether mutable flake references (i.e. those without a Git
+ /* Whether unlocked flake references (i.e. those without a Git
revision or similar) without a corresponding lock are
- allowed. Mutable flake references with a lock are always
+ allowed. Unlocked flake references with a lock are always
allowed. */
- bool allowMutable = true;
+ bool allowUnlocked = true;
/* Whether to commit changes to flake.lock. */
bool commitLockFile = false;
diff --git a/src/libexpr/flake/flakeref.hh b/src/libexpr/flake/flakeref.hh
index fe4f67193..a36d852a8 100644
--- a/src/libexpr/flake/flakeref.hh
+++ b/src/libexpr/flake/flakeref.hh
@@ -35,7 +35,7 @@ typedef std::string FlakeId;
struct FlakeRef
{
- /* fetcher-specific representation of the input, sufficient to
+ /* Fetcher-specific representation of the input, sufficient to
perform the fetch operation. */
fetchers::Input input;
diff --git a/src/libexpr/flake/lockfile.cc b/src/libexpr/flake/lockfile.cc
index 629d2e669..a3ed90e1f 100644
--- a/src/libexpr/flake/lockfile.cc
+++ b/src/libexpr/flake/lockfile.cc
@@ -31,7 +31,7 @@ FlakeRef getFlakeRef(
}
LockedNode::LockedNode(const nlohmann::json & json)
- : lockedRef(getFlakeRef(json, "locked", "info"))
+ : lockedRef(getFlakeRef(json, "locked", "info")) // FIXME: remove "info"
, originalRef(getFlakeRef(json, "original", nullptr))
, isFlake(json.find("flake") != json.end() ? (bool) json["flake"] : true)
{
@@ -49,15 +49,15 @@ std::shared_ptr<Node> LockFile::findInput(const InputPath & path)
{
auto pos = root;
- if (!pos) return {};
-
for (auto & elem : path) {
if (auto i = get(pos->inputs, elem)) {
if (auto node = std::get_if<0>(&*i))
pos = *node;
else if (auto follows = std::get_if<1>(&*i)) {
- pos = findInput(*follows);
- if (!pos) return {};
+ if (auto p = findInput(*follows))
+ pos = ref(p);
+ else
+ return {};
}
} else
return {};
@@ -72,7 +72,7 @@ LockFile::LockFile(const nlohmann::json & json, const Path & path)
if (version < 5 || version > 7)
throw Error("lock file '%s' has unsupported version %d", path, version);
- std::unordered_map<std::string, std::shared_ptr<Node>> nodeMap;
+ std::map<std::string, ref<Node>> nodeMap;
std::function<void(Node & node, const nlohmann::json & jsonNode)> getInputs;
@@ -93,12 +93,12 @@ LockFile::LockFile(const nlohmann::json & json, const Path & path)
auto jsonNode2 = nodes.find(inputKey);
if (jsonNode2 == nodes.end())
throw Error("lock file references missing node '%s'", inputKey);
- auto input = std::make_shared<LockedNode>(*jsonNode2);
+ auto input = make_ref<LockedNode>(*jsonNode2);
k = nodeMap.insert_or_assign(inputKey, input).first;
getInputs(*input, *jsonNode2);
}
- if (auto child = std::dynamic_pointer_cast<LockedNode>(k->second))
- node.inputs.insert_or_assign(i.key(), child);
+ if (auto child = k->second.dynamic_pointer_cast<LockedNode>())
+ node.inputs.insert_or_assign(i.key(), ref(child));
else
// FIXME: replace by follows node
throw Error("lock file contains cycle to root node");
@@ -122,9 +122,9 @@ nlohmann::json LockFile::toJSON() const
std::unordered_map<std::shared_ptr<const Node>, std::string> nodeKeys;
std::unordered_set<std::string> keys;
- std::function<std::string(const std::string & key, std::shared_ptr<const Node> node)> dumpNode;
+ std::function<std::string(const std::string & key, ref<const Node> node)> dumpNode;
- dumpNode = [&](std::string key, std::shared_ptr<const Node> node) -> std::string
+ dumpNode = [&](std::string key, ref<const Node> node) -> std::string
{
auto k = nodeKeys.find(node);
if (k != nodeKeys.end())
@@ -159,10 +159,11 @@ nlohmann::json LockFile::toJSON() const
n["inputs"] = std::move(inputs);
}
- if (auto lockedNode = std::dynamic_pointer_cast<const LockedNode>(node)) {
+ if (auto lockedNode = node.dynamic_pointer_cast<const LockedNode>()) {
n["original"] = fetchers::attrsToJSON(lockedNode->originalRef.toAttrs());
n["locked"] = fetchers::attrsToJSON(lockedNode->lockedRef.toAttrs());
- if (!lockedNode->isFlake) n["flake"] = false;
+ if (!lockedNode->isFlake)
+ n["flake"] = false;
}
nodes[key] = std::move(n);
@@ -201,13 +202,13 @@ void LockFile::write(const Path & path) const
writeFile(path, fmt("%s\n", *this));
}
-bool LockFile::isImmutable() const
+std::optional<FlakeRef> LockFile::isUnlocked() const
{
- std::unordered_set<std::shared_ptr<const Node>> nodes;
+ std::set<ref<const Node>> nodes;
- std::function<void(std::shared_ptr<const Node> node)> visit;
+ std::function<void(ref<const Node> node)> visit;
- visit = [&](std::shared_ptr<const Node> node)
+ visit = [&](ref<const Node> node)
{
if (!nodes.insert(node).second) return;
for (auto & i : node->inputs)
@@ -219,11 +220,12 @@ bool LockFile::isImmutable() const
for (auto & i : nodes) {
if (i == root) continue;
- auto lockedNode = std::dynamic_pointer_cast<const LockedNode>(i);
- if (lockedNode && !lockedNode->lockedRef.input.isLocked()) return false;
+ auto node = i.dynamic_pointer_cast<const LockedNode>();
+ if (node && !node->lockedRef.input.isLocked())
+ return node->lockedRef;
}
- return true;
+ return {};
}
bool LockFile::operator ==(const LockFile & other) const
@@ -247,12 +249,12 @@ InputPath parseInputPath(std::string_view s)
std::map<InputPath, Node::Edge> LockFile::getAllInputs() const
{
- std::unordered_set<std::shared_ptr<Node>> done;
+ std::set<ref<Node>> done;
std::map<InputPath, Node::Edge> res;
- std::function<void(const InputPath & prefix, std::shared_ptr<Node> node)> recurse;
+ std::function<void(const InputPath & prefix, ref<Node> node)> recurse;
- recurse = [&](const InputPath & prefix, std::shared_ptr<Node> node)
+ recurse = [&](const InputPath & prefix, ref<Node> node)
{
if (!done.insert(node).second) return;
diff --git a/src/libexpr/flake/lockfile.hh b/src/libexpr/flake/lockfile.hh
index 96f1edc76..02e9bdfbc 100644
--- a/src/libexpr/flake/lockfile.hh
+++ b/src/libexpr/flake/lockfile.hh
@@ -20,7 +20,7 @@ struct LockedNode;
type LockedNode. */
struct Node : std::enable_shared_from_this<Node>
{
- typedef std::variant<std::shared_ptr<LockedNode>, InputPath> Edge;
+ typedef std::variant<ref<LockedNode>, InputPath> Edge;
std::map<FlakeId, Edge> inputs;
@@ -47,11 +47,13 @@ struct LockedNode : Node
struct LockFile
{
- std::shared_ptr<Node> root = std::make_shared<Node>();
+ ref<Node> root = make_ref<Node>();
LockFile() {};
LockFile(const nlohmann::json & json, const Path & path);
+ typedef std::map<ref<const Node>, std::string> KeyMap;
+
nlohmann::json toJSON() const;
std::string to_string() const;
@@ -60,7 +62,8 @@ struct LockFile
void write(const Path & path) const;
- bool isImmutable() const;
+ /* Check whether this lock file has any unlocked inputs. */
+ std::optional<FlakeRef> isUnlocked() const;
bool operator ==(const LockFile & other) const;
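
The lockfile.cc/lockfile.hh hunks above migrate the node graph from std::shared_ptr<Node> to Nix's non-nullable ref<Node>, so "no node" must now be expressed through std::optional or an early return rather than a null pointer. Below is a minimal, simplified sketch of the idea behind such a non-nullable wrapper; it is an illustration only, not the actual ref implementation used in the tree.

    #include <memory>
    #include <stdexcept>
    #include <utility>

    // Simplified stand-in for a non-nullable shared pointer: constructing
    // it from a null shared_ptr throws, so every ref<T> is known valid.
    template<typename T>
    class ref
    {
        std::shared_ptr<T> p;
    public:
        explicit ref(std::shared_ptr<T> p) : p(std::move(p))
        {
            if (!this->p) throw std::invalid_argument("null pointer cast to ref");
        }

        T * operator ->() const { return p.get(); }
        T & operator *() const { return *p; }

        // Downcasts can fail, so they return a nullable shared_ptr again.
        template<typename U>
        std::shared_ptr<U> dynamic_pointer_cast() const
        {
            return std::dynamic_pointer_cast<U>(p);
        }
    };

    template<typename T, typename... Args>
    ref<T> make_ref(Args &&... args)
    {
        return ref<T>(std::make_shared<T>(std::forward<Args>(args)...));
    }

    struct Node { int x = 0; };

    int main()
    {
        auto n = make_ref<Node>();   // never null
        n->x = 42;
    }
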
diff --git a/src/libexpr/get-drvs.cc b/src/libexpr/get-drvs.cc
index 346741dd5..5ad5d1fd4 100644
--- a/src/libexpr/get-drvs.cc
+++ b/src/libexpr/get-drvs.cc
@@ -150,7 +150,7 @@ DrvInfo::Outputs DrvInfo::queryOutputs(bool withPaths, bool onlyOutputsToInstall
/* Check for `meta.outputsToInstall` and return `outputs` reduced to that. */
const Value * outTI = queryMeta("outputsToInstall");
if (!outTI) return outputs;
- const auto errMsg = Error("this derivation has bad 'meta.outputsToInstall'");
+ auto errMsg = Error("this derivation has bad 'meta.outputsToInstall'");
/* ^ this shows during `nix-env -i` right under the bad derivation */
if (!outTI->isList()) throw errMsg;
Outputs result;
diff --git a/src/libexpr/nixexpr.cc b/src/libexpr/nixexpr.cc
index 7c623a07d..eb6f062b4 100644
--- a/src/libexpr/nixexpr.cc
+++ b/src/libexpr/nixexpr.cc
@@ -8,6 +8,58 @@
namespace nix {
+struct PosAdapter : AbstractPos
+{
+ Pos::Origin origin;
+
+ PosAdapter(Pos::Origin origin)
+ : origin(std::move(origin))
+ {
+ }
+
+ std::optional<std::string> getSource() const override
+ {
+ return std::visit(overloaded {
+ [](const Pos::none_tag &) -> std::optional<std::string> {
+ return std::nullopt;
+ },
+ [](const Pos::Stdin & s) -> std::optional<std::string> {
+ // Get rid of the null terminators added by the parser.
+ return std::string(s.source->c_str());
+ },
+ [](const Pos::String & s) -> std::optional<std::string> {
+ // Get rid of the null terminators added by the parser.
+ return std::string(s.source->c_str());
+ },
+ [](const Path & path) -> std::optional<std::string> {
+ try {
+ return readFile(path);
+ } catch (Error &) {
+ return std::nullopt;
+ }
+ }
+ }, origin);
+ }
+
+ void print(std::ostream & out) const override
+ {
+ std::visit(overloaded {
+ [&](const Pos::none_tag &) { out << "«none»"; },
+ [&](const Pos::Stdin &) { out << "«stdin»"; },
+ [&](const Pos::String & s) { out << "«string»"; },
+ [&](const Path & path) { out << path; }
+ }, origin);
+ }
+};
+
+Pos::operator std::shared_ptr<AbstractPos>() const
+{
+ auto pos = std::make_shared<PosAdapter>(origin);
+ pos->line = line;
+ pos->column = column;
+ return pos;
+}
+
/* Displaying abstract syntax trees. */
static void showString(std::ostream & str, std::string_view s)
@@ -248,24 +300,10 @@ void ExprPos::show(const SymbolTable & symbols, std::ostream & str) const
std::ostream & operator << (std::ostream & str, const Pos & pos)
{
- if (!pos)
+ if (auto pos2 = (std::shared_ptr<AbstractPos>) pos) {
+ str << *pos2;
+ } else
str << "undefined position";
- else
- {
- auto f = format(ANSI_BOLD "%1%" ANSI_NORMAL ":%2%:%3%");
- switch (pos.origin) {
- case foFile:
- f % (const std::string &) pos.file;
- break;
- case foStdin:
- case foString:
- f % "(string)";
- break;
- default:
- throw Error("unhandled Pos origin!");
- }
- str << (f % pos.line % pos.column).str();
- }
return str;
}
@@ -289,7 +327,6 @@ std::string showAttrPath(const SymbolTable & symbols, const AttrPath & attrPath)
}
-
/* Computing levels/displacements for variables. */
void Expr::bindVars(EvalState & es, const std::shared_ptr<const StaticEnv> & env)
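
The PosAdapter added above dispatches on the Pos::Origin variant via std::visit and the common `overloaded` helper. A self-contained sketch of that pattern, using plain standard-library types rather than the Nix ones:

    #include <iostream>
    #include <string>
    #include <variant>

    // The usual C++17 "overloaded" helper: aggregates several lambdas into
    // one callable with an overloaded operator().
    template<class... Ts> struct overloaded : Ts... { using Ts::operator()...; };
    template<class... Ts> overloaded(Ts...) -> overloaded<Ts...>;

    struct none_tag { };
    using Origin = std::variant<none_tag, std::string /* file path */>;

    void print(std::ostream & out, const Origin & origin)
    {
        std::visit(overloaded {
            [&](const none_tag &) { out << "«none»"; },
            [&](const std::string & path) { out << path; },
        }, origin);
    }

    int main()
    {
        print(std::cout, Origin{none_tag{}});
        std::cout << '\n';
        print(std::cout, Origin{std::string("/etc/nixos/flake.nix")});
        std::cout << '\n';
    }
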
diff --git a/src/libexpr/nixexpr.hh b/src/libexpr/nixexpr.hh
index 5eb022770..ac7ce021e 100644
--- a/src/libexpr/nixexpr.hh
+++ b/src/libexpr/nixexpr.hh
@@ -23,15 +23,22 @@ MakeError(MissingArgumentError, EvalError);
MakeError(RestrictedPathError, Error);
/* Position objects. */
-
struct Pos
{
- std::string file;
- FileOrigin origin;
uint32_t line;
uint32_t column;
+ struct none_tag { };
+ struct Stdin { ref<std::string> source; };
+ struct String { ref<std::string> source; };
+
+ typedef std::variant<none_tag, Stdin, String, Path> Origin;
+
+ Origin origin;
+
explicit operator bool() const { return line > 0; }
+
+ operator std::shared_ptr<AbstractPos>() const;
};
class PosIdx {
@@ -47,7 +54,11 @@ public:
explicit operator bool() const { return id > 0; }
- bool operator<(const PosIdx other) const { return id < other.id; }
+ bool operator <(const PosIdx other) const { return id < other.id; }
+
+ bool operator ==(const PosIdx other) const { return id == other.id; }
+
+ bool operator !=(const PosIdx other) const { return id != other.id; }
};
class PosTable
@@ -61,13 +72,13 @@ public:
// current origins.back() can be reused or not.
mutable uint32_t idx = std::numeric_limits<uint32_t>::max();
- explicit Origin(uint32_t idx): idx(idx), file{}, origin{} {}
+ // Used for searching in PosTable::[].
+ explicit Origin(uint32_t idx): idx(idx), origin{Pos::none_tag()} {}
public:
- const std::string file;
- const FileOrigin origin;
+ const Pos::Origin origin;
- Origin(std::string file, FileOrigin origin): file(std::move(file)), origin(origin) {}
+ Origin(Pos::Origin origin): origin(origin) {}
};
struct Offset {
@@ -107,7 +118,7 @@ public:
[] (const auto & a, const auto & b) { return a.idx < b.idx; });
const auto origin = *std::prev(pastOrigin);
const auto offset = offsets[idx];
- return {origin.file, origin.origin, offset.line, offset.column};
+ return {offset.line, offset.column, origin.origin};
}
};
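
The PosTable::operator[] change above relies on aggregate initialization of the reordered Pos struct, whose members are now line, column, origin in that order. A small standalone mirror of that layout, with simplified types, showing why the initializer order matters:

    #include <cassert>
    #include <string>
    #include <variant>

    struct none_tag { };
    using Origin = std::variant<none_tag, std::string>;

    // Mirrors the new member order: line, column, then origin.
    struct Pos
    {
        unsigned line;
        unsigned column;
        Origin origin;
    };

    int main()
    {
        Origin origin = std::string("release.nix");
        // Aggregate initialization follows declaration order, hence
        // {line, column, origin} in the PosTable accessor.
        Pos pos{12, 5, origin};
        assert(pos.line == 12 && pos.column == 5);
        assert(std::holds_alternative<std::string>(pos.origin));
    }
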
diff --git a/src/libexpr/parser.y b/src/libexpr/parser.y
index 7c9b5a2db..e07909f8e 100644
--- a/src/libexpr/parser.y
+++ b/src/libexpr/parser.y
@@ -34,11 +34,6 @@ namespace nix {
Path basePath;
PosTable::Origin origin;
std::optional<ErrorInfo> error;
- ParseData(EvalState & state, PosTable::Origin origin)
- : state(state)
- , symbols(state.symbols)
- , origin(std::move(origin))
- { };
};
struct ParserFormals {
@@ -643,29 +638,26 @@ formal
#include "filetransfer.hh"
#include "fetchers.hh"
#include "store-api.hh"
+#include "flake/flake.hh"
namespace nix {
-Expr * EvalState::parse(char * text, size_t length, FileOrigin origin,
- const PathView path, const PathView basePath, std::shared_ptr<StaticEnv> & staticEnv)
+Expr * EvalState::parse(
+ char * text,
+ size_t length,
+ Pos::Origin origin,
+ Path basePath,
+ std::shared_ptr<StaticEnv> & staticEnv)
{
yyscan_t scanner;
- std::string file;
- switch (origin) {
- case foFile:
- file = path;
- break;
- case foStdin:
- case foString:
- file = text;
- break;
- default:
- assert(false);
- }
- ParseData data(*this, {file, origin});
- data.basePath = basePath;
+ ParseData data {
+ .state = *this,
+ .symbols = symbols,
+ .basePath = std::move(basePath),
+ .origin = {origin},
+ };
yylex_init(&scanner);
yy_scan_buffer(text, length, scanner);
@@ -717,14 +709,15 @@ Expr * EvalState::parseExprFromFile(const Path & path, std::shared_ptr<StaticEnv
auto buffer = readFile(path);
// readFile should have left some extra space for terminators
buffer.append("\0\0", 2);
- return parse(buffer.data(), buffer.size(), foFile, path, dirOf(path), staticEnv);
+ return parse(buffer.data(), buffer.size(), path, dirOf(path), staticEnv);
}
-Expr * EvalState::parseExprFromString(std::string s, const Path & basePath, std::shared_ptr<StaticEnv> & staticEnv)
+Expr * EvalState::parseExprFromString(std::string s_, const Path & basePath, std::shared_ptr<StaticEnv> & staticEnv)
{
- s.append("\0\0", 2);
- return parse(s.data(), s.size(), foString, "", basePath, staticEnv);
+ auto s = make_ref<std::string>(std::move(s_));
+ s->append("\0\0", 2);
+ return parse(s->data(), s->size(), Pos::String{.source = s}, basePath, staticEnv);
}
@@ -740,7 +733,8 @@ Expr * EvalState::parseStdin()
auto buffer = drainFD(0);
// drainFD should have left some extra space for terminators
buffer.append("\0\0", 2);
- return parse(buffer.data(), buffer.size(), foStdin, "", absPath("."), staticBaseEnv);
+ auto s = make_ref<std::string>(std::move(buffer));
+ return parse(s->data(), s->size(), Pos::Stdin{.source = s}, absPath("."), staticBaseEnv);
}
@@ -805,17 +799,28 @@ std::pair<bool, std::string> EvalState::resolveSearchPathElem(const SearchPathEl
std::pair<bool, std::string> res;
- if (isUri(elem.second)) {
+ if (EvalSettings::isPseudoUrl(elem.second)) {
try {
- res = { true, store->toRealPath(fetchers::downloadTarball(
- store, resolveUri(elem.second), "source", false).first.storePath) };
+ auto storePath = fetchers::downloadTarball(
+ store, EvalSettings::resolvePseudoUrl(elem.second), "source", false).first.storePath;
+ res = { true, store->toRealPath(storePath) };
} catch (FileTransferError & e) {
logWarning({
.msg = hintfmt("Nix search path entry '%1%' cannot be downloaded, ignoring", elem.second)
});
res = { false, "" };
}
- } else {
+ }
+
+ else if (hasPrefix(elem.second, "flake:")) {
+ settings.requireExperimentalFeature(Xp::Flakes);
+ auto flakeRef = parseFlakeRef(elem.second.substr(6), {}, true, false);
+        debug("fetching flake search path element '%s'", elem.second);
+ auto storePath = flakeRef.resolve(store).fetchTree(store).first.storePath;
+ res = { true, store->toRealPath(storePath) };
+ }
+
+ else {
auto path = absPath(elem.second);
if (pathExists(path))
res = { true, path };
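
The search-path handling above now goes through EvalSettings::isPseudoUrl / resolvePseudoUrl instead of the isUri / resolveUri helpers that this patch removes from filetransfer.cc further down. Their actual bodies are not shown in this diff; the following is a hypothetical sketch of what such a resolver can look like, modelled on the removed resolveUri:

    #include <cassert>
    #include <string>
    #include <string_view>

    // Hypothetical stand-ins; the real helpers live on EvalSettings.
    bool isPseudoUrl(std::string_view s)
    {
        return s.rfind("channel:", 0) == 0
            || s.rfind("http://", 0) == 0
            || s.rfind("https://", 0) == 0;
    }

    std::string resolvePseudoUrl(std::string_view s)
    {
        // Map the deprecated 'channel:<name>' shorthand to a tarball URL,
        // as the removed resolveUri() in filetransfer.cc used to do.
        if (s.rfind("channel:", 0) == 0)
            return "https://nixos.org/channels/" + std::string(s.substr(8)) + "/nixexprs.tar.xz";
        return std::string(s);
    }

    int main()
    {
        assert(isPseudoUrl("channel:nixos-22.05"));
        assert(resolvePseudoUrl("channel:nixos-22.05")
            == "https://nixos.org/channels/nixos-22.05/nixexprs.tar.xz");
    }
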
diff --git a/src/libexpr/primops.cc b/src/libexpr/primops.cc
index 8a4c19f7c..7efe50324 100644
--- a/src/libexpr/primops.cc
+++ b/src/libexpr/primops.cc
@@ -368,8 +368,7 @@ void prim_exec(EvalState & state, const PosIdx pos, Value * * args, Value & v)
auto output = runProgram(program, true, commandArgs);
Expr * parsed;
try {
- auto base = state.positions[pos];
- parsed = state.parseExprFromString(std::move(output), base.file);
+ parsed = state.parseExprFromString(std::move(output), "/");
} catch (Error & e) {
e.addTrace(state.positions[pos], "While parsing the output from '%1%'", program);
throw;
@@ -798,7 +797,7 @@ static void prim_addErrorContext(EvalState & state, const PosIdx pos, Value * *
v = *args[1];
} catch (Error & e) {
PathSet context;
- e.addTrace(std::nullopt, state.coerceToString(pos, *args[0], context).toOwned());
+ e.addTrace(nullptr, state.coerceToString(pos, *args[0], context).toOwned());
throw;
}
}
@@ -1461,10 +1460,10 @@ static RegisterPrimOp primop_storePath({
static void prim_pathExists(EvalState & state, const PosIdx pos, Value * * args, Value & v)
{
/* We don’t check the path right now, because we don’t want to
- throw if the path isn’t allowed, but just return false (and we
- can’t just catch the exception here because we still want to
- throw if something in the evaluation of `*args[0]` tries to
- access an unauthorized path). */
+ throw if the path isn’t allowed, but just return false (and we
+ can’t just catch the exception here because we still want to
+ throw if something in the evaluation of `*args[0]` tries to
+ access an unauthorized path). */
auto path = realisePath(state, pos, *args[0], { .checkForPureEval = false });
try {
@@ -4018,7 +4017,7 @@ void EvalState::createBaseEnv()
// the parser needs two NUL bytes as terminators; one of them
// is implied by being a C string.
"\0";
- eval(parse(code, sizeof(code), foFile, derivationNixPath, "/", staticBaseEnv), *vDerivation);
+ eval(parse(code, sizeof(code), derivationNixPath, "/", staticBaseEnv), *vDerivation);
}
diff --git a/src/libexpr/primops/fetchTree.cc b/src/libexpr/primops/fetchTree.cc
index 84e7f5c02..680446787 100644
--- a/src/libexpr/primops/fetchTree.cc
+++ b/src/libexpr/primops/fetchTree.cc
@@ -220,8 +220,6 @@ static void fetch(EvalState & state, const PosIdx pos, Value * * args, Value & v
} else
url = state.forceStringNoCtx(*args[0], pos);
- url = resolveUri(*url);
-
state.checkURI(*url);
if (name == "")
diff --git a/src/libexpr/tests/libexprtests.hh b/src/libexpr/tests/libexprtests.hh
index 4f6915882..5bb5e66d3 100644
--- a/src/libexpr/tests/libexprtests.hh
+++ b/src/libexpr/tests/libexprtests.hh
@@ -123,7 +123,7 @@ namespace nix {
MATCHER_P(IsAttrsOfSize, n, fmt("Is a set of size [%1%]", n)) {
if (arg.type() != nAttrs) {
- *result_listener << "Expexted set got " << arg.type();
+ *result_listener << "Expected set got " << arg.type();
return false;
} else if (arg.attrs->size() != (size_t)n) {
*result_listener << "Expected a set with " << n << " attributes but got " << arg.attrs->size();
diff --git a/src/libexpr/tests/primops.cc b/src/libexpr/tests/primops.cc
index 16cf66d2c..49fbc5e98 100644
--- a/src/libexpr/tests/primops.cc
+++ b/src/libexpr/tests/primops.cc
@@ -151,20 +151,7 @@ namespace nix {
// The `y` attribute is at position
const char* expr = "builtins.unsafeGetAttrPos \"y\" { y = \"x\"; }";
auto v = eval(expr);
- ASSERT_THAT(v, IsAttrsOfSize(3));
-
- auto file = v.attrs->find(createSymbol("file"));
- ASSERT_NE(file, nullptr);
- // FIXME: The file when running these tests is the input string?!?
- ASSERT_THAT(*file->value, IsStringEq(expr));
-
- auto line = v.attrs->find(createSymbol("line"));
- ASSERT_NE(line, nullptr);
- ASSERT_THAT(*line->value, IsIntEq(1));
-
- auto column = v.attrs->find(createSymbol("column"));
- ASSERT_NE(column, nullptr);
- ASSERT_THAT(*column->value, IsIntEq(33));
+ ASSERT_THAT(v, IsNull());
}
TEST_F(PrimOpTest, hasAttr) {
diff --git a/src/libexpr/value-to-xml.cc b/src/libexpr/value-to-xml.cc
index 7c3bf9492..3f6222768 100644
--- a/src/libexpr/value-to-xml.cc
+++ b/src/libexpr/value-to-xml.cc
@@ -24,7 +24,8 @@ static void printValueAsXML(EvalState & state, bool strict, bool location,
static void posToXML(EvalState & state, XMLAttrs & xmlAttrs, const Pos & pos)
{
- xmlAttrs["path"] = pos.file;
+ if (auto path = std::get_if<Path>(&pos.origin))
+ xmlAttrs["path"] = *path;
xmlAttrs["line"] = (format("%1%") % pos.line).str();
xmlAttrs["column"] = (format("%1%") % pos.column).str();
}
diff --git a/src/libfetchers/fetch-settings.hh b/src/libfetchers/fetch-settings.hh
index 6452143a1..f33cbdcfc 100644
--- a/src/libfetchers/fetch-settings.hh
+++ b/src/libfetchers/fetch-settings.hh
@@ -71,7 +71,12 @@ struct FetchSettings : public Config
"Whether to warn about dirty Git/Mercurial trees."};
Setting<std::string> flakeRegistry{this, "https://channels.nixos.org/flake-registry.json", "flake-registry",
- "Path or URI of the global flake registry."};
+ R"(
+ Path or URI of the global flake registry.
+
+ When empty, disables the global flake registry.
+ )"};
+
Setting<bool> useRegistries{this, true, "use-registries",
"Whether to use flake registries to resolve flake references."};
diff --git a/src/libfetchers/fetchers.cc b/src/libfetchers/fetchers.cc
index 6957d2da4..c767e72e5 100644
--- a/src/libfetchers/fetchers.cc
+++ b/src/libfetchers/fetchers.cc
@@ -266,7 +266,7 @@ std::optional<time_t> Input::getLastModified() const
return {};
}
-ParsedURL InputScheme::toURL(const Input & input)
+ParsedURL InputScheme::toURL(const Input & input) const
{
throw Error("don't know how to convert input '%s' to a URL", attrsToJSON(input.attrs));
}
@@ -274,7 +274,7 @@ ParsedURL InputScheme::toURL(const Input & input)
Input InputScheme::applyOverrides(
const Input & input,
std::optional<std::string> ref,
- std::optional<Hash> rev)
+ std::optional<Hash> rev) const
{
if (ref)
throw Error("don't know how to set branch/tag name of input '%s' to '%s'", input.to_string(), *ref);
@@ -293,7 +293,7 @@ void InputScheme::markChangedFile(const Input & input, std::string_view file, st
assert(false);
}
-void InputScheme::clone(const Input & input, const Path & destDir)
+void InputScheme::clone(const Input & input, const Path & destDir) const
{
throw Error("do not know how to clone input '%s'", input.to_string());
}
diff --git a/src/libfetchers/fetchers.hh b/src/libfetchers/fetchers.hh
index bc9a76b0b..17da37f47 100644
--- a/src/libfetchers/fetchers.hh
+++ b/src/libfetchers/fetchers.hh
@@ -107,26 +107,25 @@ public:
* recognized. The Input object contains the information the fetcher
* needs to actually perform the "fetch()" when called.
*/
-
struct InputScheme
{
virtual ~InputScheme()
{ }
- virtual std::optional<Input> inputFromURL(const ParsedURL & url) = 0;
+ virtual std::optional<Input> inputFromURL(const ParsedURL & url) const = 0;
- virtual std::optional<Input> inputFromAttrs(const Attrs & attrs) = 0;
+ virtual std::optional<Input> inputFromAttrs(const Attrs & attrs) const = 0;
- virtual ParsedURL toURL(const Input & input);
+ virtual ParsedURL toURL(const Input & input) const;
- virtual bool hasAllInfo(const Input & input) = 0;
+ virtual bool hasAllInfo(const Input & input) const = 0;
virtual Input applyOverrides(
const Input & input,
std::optional<std::string> ref,
- std::optional<Hash> rev);
+ std::optional<Hash> rev) const;
- virtual void clone(const Input & input, const Path & destDir);
+ virtual void clone(const Input & input, const Path & destDir) const;
virtual std::optional<Path> getSourcePath(const Input & input);
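
Marking the InputScheme virtual methods const, as the hunks above and in the fetcher implementations below do, lets callers hold and use a scheme through a const reference. A minimal sketch of that effect, with illustrative types only:

    #include <iostream>
    #include <string>

    struct Scheme
    {
        virtual ~Scheme() { }
        // const-qualified: may be called on a const Scheme.
        virtual std::string toURL() const = 0;
    };

    struct GitScheme : Scheme
    {
        std::string toURL() const override { return "git+https://example.org/repo.git"; }
    };

    // Accepting a const reference only compiles because toURL() is const.
    void dump(const Scheme & scheme)
    {
        std::cout << scheme.toURL() << '\n';
    }

    int main()
    {
        GitScheme s;
        dump(s);
    }
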
diff --git a/src/libfetchers/git.cc b/src/libfetchers/git.cc
index 7b7a1be35..1f7d7c07d 100644
--- a/src/libfetchers/git.cc
+++ b/src/libfetchers/git.cc
@@ -18,6 +18,7 @@
using namespace std::string_literals;
namespace nix::fetchers {
+
namespace {
// Explicit initial branch of our bare repo to suppress warnings from new version of git.
@@ -26,23 +27,23 @@ namespace {
// old version of git, which will ignore unrecognized `-c` options.
const std::string gitInitialBranch = "__nix_dummy_branch";
-bool isCacheFileWithinTtl(const time_t now, const struct stat & st)
+bool isCacheFileWithinTtl(time_t now, const struct stat & st)
{
return st.st_mtime + settings.tarballTtl > now;
}
-bool touchCacheFile(const Path& path, const time_t& touch_time)
+bool touchCacheFile(const Path & path, time_t touch_time)
{
- struct timeval times[2];
- times[0].tv_sec = touch_time;
- times[0].tv_usec = 0;
- times[1].tv_sec = touch_time;
- times[1].tv_usec = 0;
+ struct timeval times[2];
+ times[0].tv_sec = touch_time;
+ times[0].tv_usec = 0;
+ times[1].tv_sec = touch_time;
+ times[1].tv_usec = 0;
- return lutimes(path.c_str(), times) == 0;
+ return lutimes(path.c_str(), times) == 0;
}
-Path getCachePath(std::string key)
+Path getCachePath(std::string_view key)
{
return getCacheDir() + "/nix/gitv3/" +
hashString(htSHA256, key).to_string(Base32, false);
@@ -57,13 +58,12 @@ Path getCachePath(std::string key)
// ...
std::optional<std::string> readHead(const Path & path)
{
- auto [exit_code, output] = runProgram(RunOptions {
+ auto [status, output] = runProgram(RunOptions {
.program = "git",
+ // FIXME: use 'HEAD' to avoid returning all refs
.args = {"ls-remote", "--symref", path},
});
- if (exit_code != 0) {
- return std::nullopt;
- }
+ if (status != 0) return std::nullopt;
std::string_view line = output;
line = line.substr(0, line.find("\n"));
@@ -82,12 +82,11 @@ std::optional<std::string> readHead(const Path & path)
}
// Persist the HEAD ref from the remote repo in the local cached repo.
-bool storeCachedHead(const std::string& actualUrl, const std::string& headRef)
+bool storeCachedHead(const std::string & actualUrl, const std::string & headRef)
{
Path cacheDir = getCachePath(actualUrl);
- auto gitDir = ".";
try {
- runProgram("git", true, { "-C", cacheDir, "--git-dir", gitDir, "symbolic-ref", "--", "HEAD", headRef });
+ runProgram("git", true, { "-C", cacheDir, "--git-dir", ".", "symbolic-ref", "--", "HEAD", headRef });
} catch (ExecError &e) {
if (!WIFEXITED(e.status)) throw;
return false;
@@ -96,7 +95,7 @@ bool storeCachedHead(const std::string& actualUrl, const std::string& headRef)
return true;
}
-std::optional<std::string> readHeadCached(const std::string& actualUrl)
+std::optional<std::string> readHeadCached(const std::string & actualUrl)
{
// Create a cache path to store the branch of the HEAD ref. Append something
// in front of the URL to prevent collision with the repository itself.
@@ -110,16 +109,15 @@ std::optional<std::string> readHeadCached(const std::string& actualUrl)
cachedRef = readHead(cacheDir);
if (cachedRef != std::nullopt &&
*cachedRef != gitInitialBranch &&
- isCacheFileWithinTtl(now, st)) {
+ isCacheFileWithinTtl(now, st))
+ {
debug("using cached HEAD ref '%s' for repo '%s'", *cachedRef, actualUrl);
return cachedRef;
}
}
auto ref = readHead(actualUrl);
- if (ref) {
- return ref;
- }
+ if (ref) return ref;
if (cachedRef) {
// If the cached git ref is expired in fetch() below, and the 'git fetch'
@@ -250,7 +248,7 @@ std::pair<StorePath, Input> fetchFromWorkdir(ref<Store> store, Input & input, co
struct GitInputScheme : InputScheme
{
- std::optional<Input> inputFromURL(const ParsedURL & url) override
+ std::optional<Input> inputFromURL(const ParsedURL & url) const override
{
if (url.scheme != "git" &&
url.scheme != "git+http" &&
@@ -265,7 +263,7 @@ struct GitInputScheme : InputScheme
Attrs attrs;
attrs.emplace("type", "git");
- for (auto &[name, value] : url.query) {
+ for (auto & [name, value] : url.query) {
if (name == "rev" || name == "ref")
attrs.emplace(name, value);
else if (name == "shallow" || name == "submodules")
@@ -279,7 +277,7 @@ struct GitInputScheme : InputScheme
return inputFromAttrs(attrs);
}
- std::optional<Input> inputFromAttrs(const Attrs & attrs) override
+ std::optional<Input> inputFromAttrs(const Attrs & attrs) const override
{
if (maybeGetStrAttr(attrs, "type") != "git") return {};
@@ -302,7 +300,7 @@ struct GitInputScheme : InputScheme
return input;
}
- ParsedURL toURL(const Input & input) override
+ ParsedURL toURL(const Input & input) const override
{
auto url = parseURL(getStrAttr(input.attrs, "url"));
if (url.scheme != "git") url.scheme = "git+" + url.scheme;
@@ -313,7 +311,7 @@ struct GitInputScheme : InputScheme
return url;
}
- bool hasAllInfo(const Input & input) override
+ bool hasAllInfo(const Input & input) const override
{
bool maybeDirty = !input.getRef();
bool shallow = maybeGetBoolAttr(input.attrs, "shallow").value_or(false);
@@ -325,7 +323,7 @@ struct GitInputScheme : InputScheme
Input applyOverrides(
const Input & input,
std::optional<std::string> ref,
- std::optional<Hash> rev) override
+ std::optional<Hash> rev) const override
{
auto res(input);
if (rev) res.attrs.insert_or_assign("rev", rev->gitRev());
@@ -335,7 +333,7 @@ struct GitInputScheme : InputScheme
return res;
}
- void clone(const Input & input, const Path & destDir) override
+ void clone(const Input & input, const Path & destDir) const override
{
auto [isLocal, actualUrl] = getActualUrl(input);
@@ -603,9 +601,9 @@ struct GitInputScheme : InputScheme
{
throw Error(
"Cannot find Git revision '%s' in ref '%s' of repository '%s'! "
- "Please make sure that the " ANSI_BOLD "rev" ANSI_NORMAL " exists on the "
- ANSI_BOLD "ref" ANSI_NORMAL " you've specified or add " ANSI_BOLD
- "allRefs = true;" ANSI_NORMAL " to " ANSI_BOLD "fetchGit" ANSI_NORMAL ".",
+ "Please make sure that the " ANSI_BOLD "rev" ANSI_NORMAL " exists on the "
+ ANSI_BOLD "ref" ANSI_NORMAL " you've specified or add " ANSI_BOLD
+ "allRefs = true;" ANSI_NORMAL " to " ANSI_BOLD "fetchGit" ANSI_NORMAL ".",
input.getRev()->gitRev(),
*input.getRef(),
actualUrl
diff --git a/src/libfetchers/github.cc b/src/libfetchers/github.cc
index 2115ce2f5..1ed09d30d 100644
--- a/src/libfetchers/github.cc
+++ b/src/libfetchers/github.cc
@@ -26,11 +26,11 @@ std::regex hostRegex(hostRegexS, std::regex::ECMAScript);
struct GitArchiveInputScheme : InputScheme
{
- virtual std::string type() = 0;
+ virtual std::string type() const = 0;
virtual std::optional<std::pair<std::string, std::string>> accessHeaderFromToken(const std::string & token) const = 0;
- std::optional<Input> inputFromURL(const ParsedURL & url) override
+ std::optional<Input> inputFromURL(const ParsedURL & url) const override
{
if (url.scheme != type()) return {};
@@ -100,7 +100,7 @@ struct GitArchiveInputScheme : InputScheme
return input;
}
- std::optional<Input> inputFromAttrs(const Attrs & attrs) override
+ std::optional<Input> inputFromAttrs(const Attrs & attrs) const override
{
if (maybeGetStrAttr(attrs, "type") != type()) return {};
@@ -116,7 +116,7 @@ struct GitArchiveInputScheme : InputScheme
return input;
}
- ParsedURL toURL(const Input & input) override
+ ParsedURL toURL(const Input & input) const override
{
auto owner = getStrAttr(input.attrs, "owner");
auto repo = getStrAttr(input.attrs, "repo");
@@ -132,7 +132,7 @@ struct GitArchiveInputScheme : InputScheme
};
}
- bool hasAllInfo(const Input & input) override
+ bool hasAllInfo(const Input & input) const override
{
return input.getRev() && maybeGetIntAttr(input.attrs, "lastModified");
}
@@ -140,7 +140,7 @@ struct GitArchiveInputScheme : InputScheme
Input applyOverrides(
const Input & _input,
std::optional<std::string> ref,
- std::optional<Hash> rev) override
+ std::optional<Hash> rev) const override
{
auto input(_input);
if (rev && ref)
@@ -227,7 +227,7 @@ struct GitArchiveInputScheme : InputScheme
struct GitHubInputScheme : GitArchiveInputScheme
{
- std::string type() override { return "github"; }
+ std::string type() const override { return "github"; }
std::optional<std::pair<std::string, std::string>> accessHeaderFromToken(const std::string & token) const override
{
@@ -240,14 +240,29 @@ struct GitHubInputScheme : GitArchiveInputScheme
return std::pair<std::string, std::string>("Authorization", fmt("token %s", token));
}
+ std::string getHost(const Input & input) const
+ {
+ return maybeGetStrAttr(input.attrs, "host").value_or("github.com");
+ }
+
+ std::string getOwner(const Input & input) const
+ {
+ return getStrAttr(input.attrs, "owner");
+ }
+
+ std::string getRepo(const Input & input) const
+ {
+ return getStrAttr(input.attrs, "repo");
+ }
+
Hash getRevFromRef(nix::ref<Store> store, const Input & input) const override
{
- auto host = maybeGetStrAttr(input.attrs, "host").value_or("github.com");
+ auto host = getHost(input);
auto url = fmt(
host == "github.com"
? "https://api.%s/repos/%s/%s/commits/%s"
: "https://%s/api/v3/repos/%s/%s/commits/%s",
- host, getStrAttr(input.attrs, "owner"), getStrAttr(input.attrs, "repo"), *input.getRef());
+ host, getOwner(input), getRepo(input), *input.getRef());
Headers headers = makeHeadersWithAuthTokens(host);
@@ -262,8 +277,10 @@ struct GitHubInputScheme : GitArchiveInputScheme
DownloadUrl getDownloadUrl(const Input & input) const override
{
- auto host = maybeGetStrAttr(input.attrs, "host").value_or("github.com");
+ auto host = getHost(input);
+
Headers headers = makeHeadersWithAuthTokens(host);
+
// If we have no auth headers then we default to the public archive
// urls so we do not run into rate limits.
const auto urlFmt =
@@ -273,17 +290,17 @@ struct GitHubInputScheme : GitArchiveInputScheme
? "https://%s/%s/%s/archive/%s.tar.gz"
: "https://api.%s/repos/%s/%s/tarball/%s";
- const auto url = fmt(urlFmt, host, getStrAttr(input.attrs, "owner"), getStrAttr(input.attrs, "repo"),
+ const auto url = fmt(urlFmt, host, getOwner(input), getRepo(input),
input.getRev()->to_string(Base16, false));
return DownloadUrl { url, headers };
}
- void clone(const Input & input, const Path & destDir) override
+ void clone(const Input & input, const Path & destDir) const override
{
- auto host = maybeGetStrAttr(input.attrs, "host").value_or("github.com");
+ auto host = getHost(input);
Input::fromURL(fmt("git+https://%s/%s/%s.git",
- host, getStrAttr(input.attrs, "owner"), getStrAttr(input.attrs, "repo")))
+ host, getOwner(input), getRepo(input)))
.applyOverrides(input.getRef(), input.getRev())
.clone(destDir);
}
@@ -291,7 +308,7 @@ struct GitHubInputScheme : GitArchiveInputScheme
struct GitLabInputScheme : GitArchiveInputScheme
{
- std::string type() override { return "gitlab"; }
+ std::string type() const override { return "gitlab"; }
std::optional<std::pair<std::string, std::string>> accessHeaderFromToken(const std::string & token) const override
{
@@ -346,7 +363,7 @@ struct GitLabInputScheme : GitArchiveInputScheme
return DownloadUrl { url, headers };
}
- void clone(const Input & input, const Path & destDir) override
+ void clone(const Input & input, const Path & destDir) const override
{
auto host = maybeGetStrAttr(input.attrs, "host").value_or("gitlab.com");
// FIXME: get username somewhere
@@ -359,7 +376,7 @@ struct GitLabInputScheme : GitArchiveInputScheme
struct SourceHutInputScheme : GitArchiveInputScheme
{
- std::string type() override { return "sourcehut"; }
+ std::string type() const override { return "sourcehut"; }
std::optional<std::pair<std::string, std::string>> accessHeaderFromToken(const std::string & token) const override
{
@@ -433,7 +450,7 @@ struct SourceHutInputScheme : GitArchiveInputScheme
return DownloadUrl { url, headers };
}
- void clone(const Input & input, const Path & destDir) override
+ void clone(const Input & input, const Path & destDir) const override
{
auto host = maybeGetStrAttr(input.attrs, "host").value_or("git.sr.ht");
Input::fromURL(fmt("git+https://%s/%s/%s",
diff --git a/src/libfetchers/indirect.cc b/src/libfetchers/indirect.cc
index 9288fc6cf..b99504a16 100644
--- a/src/libfetchers/indirect.cc
+++ b/src/libfetchers/indirect.cc
@@ -7,7 +7,7 @@ std::regex flakeRegex("[a-zA-Z][a-zA-Z0-9_-]*", std::regex::ECMAScript);
struct IndirectInputScheme : InputScheme
{
- std::optional<Input> inputFromURL(const ParsedURL & url) override
+ std::optional<Input> inputFromURL(const ParsedURL & url) const override
{
if (url.scheme != "flake") return {};
@@ -50,7 +50,7 @@ struct IndirectInputScheme : InputScheme
return input;
}
- std::optional<Input> inputFromAttrs(const Attrs & attrs) override
+ std::optional<Input> inputFromAttrs(const Attrs & attrs) const override
{
if (maybeGetStrAttr(attrs, "type") != "indirect") return {};
@@ -68,7 +68,7 @@ struct IndirectInputScheme : InputScheme
return input;
}
- ParsedURL toURL(const Input & input) override
+ ParsedURL toURL(const Input & input) const override
{
ParsedURL url;
url.scheme = "flake";
@@ -78,7 +78,7 @@ struct IndirectInputScheme : InputScheme
return url;
}
- bool hasAllInfo(const Input & input) override
+ bool hasAllInfo(const Input & input) const override
{
return false;
}
@@ -86,7 +86,7 @@ struct IndirectInputScheme : InputScheme
Input applyOverrides(
const Input & _input,
std::optional<std::string> ref,
- std::optional<Hash> rev) override
+ std::optional<Hash> rev) const override
{
auto input(_input);
if (rev) input.attrs.insert_or_assign("rev", rev->gitRev());
diff --git a/src/libfetchers/mercurial.cc b/src/libfetchers/mercurial.cc
index 5c5671681..86e8f81f4 100644
--- a/src/libfetchers/mercurial.cc
+++ b/src/libfetchers/mercurial.cc
@@ -43,7 +43,7 @@ static std::string runHg(const Strings & args, const std::optional<std::string>
struct MercurialInputScheme : InputScheme
{
- std::optional<Input> inputFromURL(const ParsedURL & url) override
+ std::optional<Input> inputFromURL(const ParsedURL & url) const override
{
if (url.scheme != "hg+http" &&
url.scheme != "hg+https" &&
@@ -69,7 +69,7 @@ struct MercurialInputScheme : InputScheme
return inputFromAttrs(attrs);
}
- std::optional<Input> inputFromAttrs(const Attrs & attrs) override
+ std::optional<Input> inputFromAttrs(const Attrs & attrs) const override
{
if (maybeGetStrAttr(attrs, "type") != "hg") return {};
@@ -89,7 +89,7 @@ struct MercurialInputScheme : InputScheme
return input;
}
- ParsedURL toURL(const Input & input) override
+ ParsedURL toURL(const Input & input) const override
{
auto url = parseURL(getStrAttr(input.attrs, "url"));
url.scheme = "hg+" + url.scheme;
@@ -98,7 +98,7 @@ struct MercurialInputScheme : InputScheme
return url;
}
- bool hasAllInfo(const Input & input) override
+ bool hasAllInfo(const Input & input) const override
{
// FIXME: ugly, need to distinguish between dirty and clean
// default trees.
@@ -108,7 +108,7 @@ struct MercurialInputScheme : InputScheme
Input applyOverrides(
const Input & input,
std::optional<std::string> ref,
- std::optional<Hash> rev) override
+ std::optional<Hash> rev) const override
{
auto res(input);
if (rev) res.attrs.insert_or_assign("rev", rev->gitRev());
diff --git a/src/libfetchers/path.cc b/src/libfetchers/path.cc
index f0ef97da5..61541e69d 100644
--- a/src/libfetchers/path.cc
+++ b/src/libfetchers/path.cc
@@ -6,7 +6,7 @@ namespace nix::fetchers {
struct PathInputScheme : InputScheme
{
- std::optional<Input> inputFromURL(const ParsedURL & url) override
+ std::optional<Input> inputFromURL(const ParsedURL & url) const override
{
if (url.scheme != "path") return {};
@@ -32,7 +32,7 @@ struct PathInputScheme : InputScheme
return input;
}
- std::optional<Input> inputFromAttrs(const Attrs & attrs) override
+ std::optional<Input> inputFromAttrs(const Attrs & attrs) const override
{
if (maybeGetStrAttr(attrs, "type") != "path") return {};
@@ -54,7 +54,7 @@ struct PathInputScheme : InputScheme
return input;
}
- ParsedURL toURL(const Input & input) override
+ ParsedURL toURL(const Input & input) const override
{
auto query = attrsToQuery(input.attrs);
query.erase("path");
@@ -66,7 +66,7 @@ struct PathInputScheme : InputScheme
};
}
- bool hasAllInfo(const Input & input) override
+ bool hasAllInfo(const Input & input) const override
{
return true;
}
diff --git a/src/libfetchers/registry.cc b/src/libfetchers/registry.cc
index acd1ff866..43c03beec 100644
--- a/src/libfetchers/registry.cc
+++ b/src/libfetchers/registry.cc
@@ -153,6 +153,9 @@ static std::shared_ptr<Registry> getGlobalRegistry(ref<Store> store)
{
static auto reg = [&]() {
auto path = fetchSettings.flakeRegistry.get();
+ if (path == "") {
+ return std::make_shared<Registry>(Registry::Global); // empty registry
+ }
if (!hasPrefix(path, "/")) {
auto storePath = downloadFile(store, path, "flake-registry.json", false).storePath;
diff --git a/src/libfetchers/tarball.cc b/src/libfetchers/tarball.cc
index 6c551bd93..e9686262a 100644
--- a/src/libfetchers/tarball.cc
+++ b/src/libfetchers/tarball.cc
@@ -185,7 +185,7 @@ struct CurlInputScheme : InputScheme
virtual bool isValidURL(const ParsedURL & url) const = 0;
- std::optional<Input> inputFromURL(const ParsedURL & url) override
+ std::optional<Input> inputFromURL(const ParsedURL & url) const override
{
if (!isValidURL(url))
return std::nullopt;
@@ -203,7 +203,7 @@ struct CurlInputScheme : InputScheme
return input;
}
- std::optional<Input> inputFromAttrs(const Attrs & attrs) override
+ std::optional<Input> inputFromAttrs(const Attrs & attrs) const override
{
auto type = maybeGetStrAttr(attrs, "type");
if (type != inputType()) return {};
@@ -220,16 +220,17 @@ struct CurlInputScheme : InputScheme
return input;
}
- ParsedURL toURL(const Input & input) override
+ ParsedURL toURL(const Input & input) const override
{
auto url = parseURL(getStrAttr(input.attrs, "url"));
- // NAR hashes are preferred over file hashes since tar/zip files // don't have a canonical representation.
+ // NAR hashes are preferred over file hashes since tar/zip
+ // files don't have a canonical representation.
if (auto narHash = input.getNarHash())
url.query.insert_or_assign("narHash", narHash->to_string(SRI, true));
return url;
}
- bool hasAllInfo(const Input & input) override
+ bool hasAllInfo(const Input & input) const override
{
return true;
}
diff --git a/src/libmain/progress-bar.cc b/src/libmain/progress-bar.cc
index 961f4e18a..e9205a5e5 100644
--- a/src/libmain/progress-bar.cc
+++ b/src/libmain/progress-bar.cc
@@ -132,7 +132,7 @@ public:
log(*state, lvl, fs.s);
}
- void logEI(const ErrorInfo &ei) override
+ void logEI(const ErrorInfo & ei) override
{
auto state(state_.lock());
@@ -180,10 +180,12 @@ public:
auto machineName = getS(fields, 1);
if (machineName != "")
i->s += fmt(" on " ANSI_BOLD "%s" ANSI_NORMAL, machineName);
- auto curRound = getI(fields, 2);
- auto nrRounds = getI(fields, 3);
- if (nrRounds != 1)
- i->s += fmt(" (round %d/%d)", curRound, nrRounds);
+
+ // Used to be curRound and nrRounds, but the
+ // implementation was broken for a long time.
+ if (getI(fields, 2) != 1 || getI(fields, 3) != 1) {
+ throw Error("log message indicated repeating builds, but this is not currently implemented");
+ }
i->name = DrvName(name).name;
}
diff --git a/src/libstore/binary-cache-store.cc b/src/libstore/binary-cache-store.cc
index 12d0c32fb..149d414d3 100644
--- a/src/libstore/binary-cache-store.cc
+++ b/src/libstore/binary-cache-store.cc
@@ -346,7 +346,7 @@ void BinaryCacheStore::narFromPath(const StorePath & storePath, Sink & sink)
try {
getFile(info->url, *decompressor);
} catch (NoSuchBinaryCacheFile & e) {
- throw SubstituteGone(e.info());
+ throw SubstituteGone(std::move(e.info()));
}
decompressor->finish();
diff --git a/src/libstore/build/derivation-goal.cc b/src/libstore/build/derivation-goal.cc
index 5aed51bcd..d3b995a4f 100644
--- a/src/libstore/build/derivation-goal.cc
+++ b/src/libstore/build/derivation-goal.cc
@@ -134,7 +134,7 @@ void DerivationGoal::killChild()
void DerivationGoal::timedOut(Error && ex)
{
killChild();
- done(BuildResult::TimedOut, {}, ex);
+ done(BuildResult::TimedOut, {}, std::move(ex));
}
@@ -571,10 +571,6 @@ void DerivationGoal::inputsRealised()
/* What type of derivation are we building? */
derivationType = drv->type();
- /* Don't repeat fixed-output derivations since they're already
- verified by their output hash.*/
- nrRounds = derivationType.isFixed() ? 1 : settings.buildRepeat + 1;
-
/* Okay, try to build. Note that here we don't wait for a build
slot to become available, since we don't need one if there is a
build hook. */
@@ -589,12 +585,11 @@ void DerivationGoal::started()
auto msg = fmt(
buildMode == bmRepair ? "repairing outputs of '%s'" :
buildMode == bmCheck ? "checking outputs of '%s'" :
- nrRounds > 1 ? "building '%s' (round %d/%d)" :
- "building '%s'", worker.store.printStorePath(drvPath), curRound, nrRounds);
+ "building '%s'", worker.store.printStorePath(drvPath));
fmt("building '%s'", worker.store.printStorePath(drvPath));
if (hook) msg += fmt(" on '%s'", machineName);
act = std::make_unique<Activity>(*logger, lvlInfo, actBuild, msg,
- Logger::Fields{worker.store.printStorePath(drvPath), hook ? machineName : "", curRound, nrRounds});
+ Logger::Fields{worker.store.printStorePath(drvPath), hook ? machineName : "", 1, 1});
mcRunningBuilds = std::make_unique<MaintainCount<uint64_t>>(worker.runningBuilds);
worker.updateProgress();
}
@@ -948,14 +943,6 @@ void DerivationGoal::buildDone()
cleanupPostOutputsRegisteredModeNonCheck();
- /* Repeat the build if necessary. */
- if (curRound++ < nrRounds) {
- outputLocks.unlock();
- state = &DerivationGoal::tryToBuild;
- worker.wakeUp(shared_from_this());
- return;
- }
-
/* It is now safe to delete the lock files, since all future
lockers will see that the output paths are valid; they will
not create new lock files with the same names as the old
@@ -984,7 +971,7 @@ void DerivationGoal::buildDone()
BuildResult::PermanentFailure;
}
- done(st, {}, e);
+ done(st, {}, std::move(e));
return;
}
}
@@ -1016,22 +1003,34 @@ void DerivationGoal::resolvedFinished()
throw Error(
"derivation '%s' doesn't have expected output '%s' (derivation-goal.cc/resolvedFinished,resolve)",
worker.store.printStorePath(drvPath), wantedOutput);
- auto realisation = get(resolvedResult.builtOutputs, DrvOutput { *resolvedHash, wantedOutput });
- if (!realisation)
- throw Error(
- "derivation '%s' doesn't have expected output '%s' (derivation-goal.cc/resolvedFinished,realisation)",
- worker.store.printStorePath(resolvedDrvGoal->drvPath), wantedOutput);
+
+ auto realisation = [&]{
+ auto take1 = get(resolvedResult.builtOutputs, DrvOutput { *resolvedHash, wantedOutput });
+ if (take1) return *take1;
+
+        /* The above `get` should work. But due to stateful tracking of
+           outputs in resolvedResult, it can get out of sync with the
+           store, which is our actual source of truth. For now we just
+           check the store directly if that fails. */
+ auto take2 = worker.evalStore.queryRealisation(DrvOutput { *resolvedHash, wantedOutput });
+ if (take2) return *take2;
+
+ throw Error(
+ "derivation '%s' doesn't have expected output '%s' (derivation-goal.cc/resolvedFinished,realisation)",
+ worker.store.printStorePath(resolvedDrvGoal->drvPath), wantedOutput);
+ }();
+
if (drv->type().isPure()) {
- auto newRealisation = *realisation;
+ auto newRealisation = realisation;
newRealisation.id = DrvOutput { initialOutput->outputHash, wantedOutput };
newRealisation.signatures.clear();
if (!drv->type().isFixed())
- newRealisation.dependentRealisations = drvOutputReferences(worker.store, *drv, realisation->outPath);
+ newRealisation.dependentRealisations = drvOutputReferences(worker.store, *drv, realisation.outPath);
signRealisation(newRealisation);
worker.store.registerDrvOutput(newRealisation);
}
- outputPaths.insert(realisation->outPath);
- builtOutputs.emplace(realisation->id, *realisation);
+ outputPaths.insert(realisation.outPath);
+ builtOutputs.emplace(realisation.id, realisation);
}
runPostBuildHook(
@@ -1435,7 +1434,7 @@ void DerivationGoal::done(
fs << worker.store.printStorePath(drvPath) << "\t" << buildResult.toString() << std::endl;
}
- amDone(buildResult.success() ? ecSuccess : ecFailed, ex);
+ amDone(buildResult.success() ? ecSuccess : ecFailed, std::move(ex));
}
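
The resolvedFinished() rewrite above computes `realisation` with an immediately invoked lambda that tries the in-memory build result first and falls back to querying the store. A standalone sketch of that fallback-via-lambda pattern, with hypothetical lookup functions:

    #include <iostream>
    #include <optional>
    #include <stdexcept>
    #include <string>

    // Hypothetical lookups standing in for builtOutputs / queryRealisation.
    std::optional<std::string> lookupInMemory(const std::string & key)
    {
        return std::nullopt; // pretend the in-memory map is out of sync
    }

    std::optional<std::string> lookupInStore(const std::string & key)
    {
        return "/nix/store/hash-" + key;
    }

    int main()
    {
        std::string key = "out";

        // Immediately invoked lambda: the first successful lookup wins,
        // otherwise we fail loudly, and `realisation` is never left unset.
        auto realisation = [&] {
            if (auto r = lookupInMemory(key)) return *r;
            if (auto r = lookupInStore(key)) return *r;
            throw std::runtime_error("no realisation for '" + key + "'");
        }();

        std::cout << realisation << '\n';
    }
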
diff --git a/src/libstore/build/derivation-goal.hh b/src/libstore/build/derivation-goal.hh
index 2d8bfd592..d33e04cbc 100644
--- a/src/libstore/build/derivation-goal.hh
+++ b/src/libstore/build/derivation-goal.hh
@@ -115,11 +115,6 @@ struct DerivationGoal : public Goal
BuildMode buildMode;
- /* The current round, if we're building multiple times. */
- size_t curRound = 1;
-
- size_t nrRounds;
-
std::unique_ptr<MaintainCount<uint64_t>> mcExpectedBuilds, mcRunningBuilds;
std::unique_ptr<Activity> act;
diff --git a/src/libstore/build/entry-points.cc b/src/libstore/build/entry-points.cc
index bea7363db..e1b80165e 100644
--- a/src/libstore/build/entry-points.cc
+++ b/src/libstore/build/entry-points.cc
@@ -30,7 +30,7 @@ void Store::buildPaths(const std::vector<DerivedPath> & reqs, BuildMode buildMod
if (ex)
logError(i->ex->info());
else
- ex = i->ex;
+ ex = std::move(i->ex);
}
if (i->exitCode != Goal::ecSuccess) {
if (auto i2 = dynamic_cast<DerivationGoal *>(i.get())) failed.insert(i2->drvPath);
@@ -40,7 +40,7 @@ void Store::buildPaths(const std::vector<DerivedPath> & reqs, BuildMode buildMod
if (failed.size() == 1 && ex) {
ex->status = worker.exitStatus();
- throw *ex;
+ throw std::move(*ex);
} else if (!failed.empty()) {
if (ex) logError(ex->info());
throw Error(worker.exitStatus(), "build of %s failed", showPaths(failed));
@@ -109,7 +109,7 @@ void Store::ensurePath(const StorePath & path)
if (goal->exitCode != Goal::ecSuccess) {
if (goal->ex) {
goal->ex->status = worker.exitStatus();
- throw *goal->ex;
+ throw std::move(*goal->ex);
} else
throw Error(worker.exitStatus(), "path '%s' does not exist and cannot be created", printStorePath(path));
}
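
The entry-points.cc hunks above switch to `throw std::move(*ex)` so the stored error is moved rather than copied when it is rethrown. A small sketch of moving an exception object out of a std::optional before throwing, using an illustrative error type:

    #include <iostream>
    #include <optional>
    #include <stdexcept>
    #include <string>
    #include <utility>

    struct BuildError : std::runtime_error
    {
        using std::runtime_error::runtime_error;
        int status = 1;
    };

    int main()
    {
        std::optional<BuildError> ex;
        ex = BuildError("build of foo.drv failed");
        ex->status = 100;

        try {
            // std::move avoids copying the error object; the thrown
            // exception is move-constructed from *ex.
            throw std::move(*ex);
        } catch (BuildError & e) {
            std::cout << e.what() << " (status " << e.status << ")\n";
        }
    }
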
diff --git a/src/libstore/build/local-derivation-goal.cc b/src/libstore/build/local-derivation-goal.cc
index d2798888b..dccd096ec 100644
--- a/src/libstore/build/local-derivation-goal.cc
+++ b/src/libstore/build/local-derivation-goal.cc
@@ -230,7 +230,7 @@ void LocalDerivationGoal::tryLocalBuild() {
outputLocks.unlock();
buildUser.reset();
worker.permanentFailure = true;
- done(BuildResult::InputRejected, {}, e);
+ done(BuildResult::InputRejected, {}, std::move(e));
return;
}
@@ -2260,7 +2260,6 @@ DrvOutputs LocalDerivationGoal::registerOutputs()
InodesSeen inodesSeen;
Path checkSuffix = ".check";
- bool keepPreviousRound = settings.keepFailed || settings.runDiffHook;
std::exception_ptr delayedException;
@@ -2688,10 +2687,8 @@ DrvOutputs LocalDerivationGoal::registerOutputs()
debug("unreferenced input: '%1%'", worker.store.printStorePath(i));
}
- if (curRound == nrRounds) {
- localStore.optimisePath(actualPath, NoRepair); // FIXME: combine with scanForReferences()
- worker.markContentsGood(newInfo.path);
- }
+ localStore.optimisePath(actualPath, NoRepair); // FIXME: combine with scanForReferences()
+ worker.markContentsGood(newInfo.path);
newInfo.deriver = drvPath;
newInfo.ultimate = true;
@@ -2720,61 +2717,6 @@ DrvOutputs LocalDerivationGoal::registerOutputs()
/* Apply output checks. */
checkOutputs(infos);
- /* Compare the result with the previous round, and report which
- path is different, if any.*/
- if (curRound > 1 && prevInfos != infos) {
- assert(prevInfos.size() == infos.size());
- for (auto i = prevInfos.begin(), j = infos.begin(); i != prevInfos.end(); ++i, ++j)
- if (!(*i == *j)) {
- buildResult.isNonDeterministic = true;
- Path prev = worker.store.printStorePath(i->second.path) + checkSuffix;
- bool prevExists = keepPreviousRound && pathExists(prev);
- hintformat hint = prevExists
- ? hintfmt("output '%s' of '%s' differs from '%s' from previous round",
- worker.store.printStorePath(i->second.path), worker.store.printStorePath(drvPath), prev)
- : hintfmt("output '%s' of '%s' differs from previous round",
- worker.store.printStorePath(i->second.path), worker.store.printStorePath(drvPath));
-
- handleDiffHook(
- buildUser ? buildUser->getUID() : getuid(),
- buildUser ? buildUser->getGID() : getgid(),
- prev, worker.store.printStorePath(i->second.path),
- worker.store.printStorePath(drvPath), tmpDir);
-
- if (settings.enforceDeterminism)
- throw NotDeterministic(hint);
-
- printError(hint);
-
- curRound = nrRounds; // we know enough, bail out early
- }
- }
-
- /* If this is the first round of several, then move the output out of the way. */
- if (nrRounds > 1 && curRound == 1 && curRound < nrRounds && keepPreviousRound) {
- for (auto & [_, outputStorePath] : finalOutputs) {
- auto path = worker.store.printStorePath(outputStorePath);
- Path prev = path + checkSuffix;
- deletePath(prev);
- Path dst = path + checkSuffix;
- renameFile(path, dst);
- }
- }
-
- if (curRound < nrRounds) {
- prevInfos = std::move(infos);
- return {};
- }
-
- /* Remove the .check directories if we're done. FIXME: keep them
- if the result was not determistic? */
- if (curRound == nrRounds) {
- for (auto & [_, outputStorePath] : finalOutputs) {
- Path prev = worker.store.printStorePath(outputStorePath) + checkSuffix;
- deletePath(prev);
- }
- }
-
/* Register each output path as valid, and register the sets of
paths referenced by each of them. If there are cycles in the
outputs, this will fail. */
diff --git a/src/libstore/daemon.cc b/src/libstore/daemon.cc
index 48dd5c247..12596ba49 100644
--- a/src/libstore/daemon.cc
+++ b/src/libstore/daemon.cc
@@ -238,7 +238,6 @@ struct ClientSettings
}
else if (trusted
|| name == settings.buildTimeout.name
- || name == settings.buildRepeat.name
|| name == settings.maxSilentTime.name
|| name == settings.pollInterval.name
|| name == "connect-timeout"
diff --git a/src/libstore/derivations.cc b/src/libstore/derivations.cc
index fe99c3c5e..42a53912e 100644
--- a/src/libstore/derivations.cc
+++ b/src/libstore/derivations.cc
@@ -448,7 +448,7 @@ std::string Derivation::unparse(const Store & store, bool maskOutputs,
// FIXME: remove
-bool isDerivation(const std::string & fileName)
+bool isDerivation(std::string_view fileName)
{
return hasSuffix(fileName, drvExtension);
}
diff --git a/src/libstore/derivations.hh b/src/libstore/derivations.hh
index af198a767..f3cd87fb1 100644
--- a/src/libstore/derivations.hh
+++ b/src/libstore/derivations.hh
@@ -224,7 +224,7 @@ StorePath writeDerivation(Store & store,
Derivation parseDerivation(const Store & store, std::string && s, std::string_view name);
// FIXME: remove
-bool isDerivation(const std::string & fileName);
+bool isDerivation(std::string_view fileName);
/* Calculate the name that will be used for the store path for this
output.
diff --git a/src/libstore/derived-path.cc b/src/libstore/derived-path.cc
index 88b59f615..3fa5ae4f7 100644
--- a/src/libstore/derived-path.cc
+++ b/src/libstore/derived-path.cc
@@ -20,11 +20,12 @@ nlohmann::json DerivedPath::Built::toJSON(ref<Store> store) const {
// Fallback for the input-addressed derivation case: We expect to always be
// able to print the output paths, so let’s do it
const auto knownOutputs = store->queryPartialDerivationOutputMap(drvPath);
- for (const auto& output : outputs) {
+ for (const auto & output : outputs) {
auto knownOutput = get(knownOutputs, output);
- res["outputs"][output] = (knownOutput && *knownOutput)
- ? store->printStorePath(**knownOutput)
- : nullptr;
+ if (knownOutput && *knownOutput)
+ res["outputs"][output] = store->printStorePath(**knownOutput);
+ else
+ res["outputs"][output] = nullptr;
}
return res;
}
@@ -78,15 +79,16 @@ DerivedPath::Opaque DerivedPath::Opaque::parse(const Store & store, std::string_
return {store.parseStorePath(s)};
}
-DerivedPath::Built DerivedPath::Built::parse(const Store & store, std::string_view s)
+DerivedPath::Built DerivedPath::Built::parse(const Store & store, std::string_view drvS, std::string_view outputsS)
{
- size_t n = s.find("!");
- assert(n != s.npos);
- auto drvPath = store.parseStorePath(s.substr(0, n));
- auto outputsS = s.substr(n + 1);
+ auto drvPath = store.parseStorePath(drvS);
std::set<std::string> outputs;
- if (outputsS != "*")
+ if (outputsS != "*") {
outputs = tokenizeString<std::set<std::string>>(outputsS, ",");
+ if (outputs.empty())
+ throw Error(
+ "Explicit list of wanted outputs '%s' must not be empty. Consider using '*' as a wildcard meaning all outputs if no output in particular is wanted.", outputsS);
+ }
return {drvPath, outputs};
}
@@ -95,7 +97,7 @@ DerivedPath DerivedPath::parse(const Store & store, std::string_view s)
size_t n = s.find("!");
return n == s.npos
? (DerivedPath) DerivedPath::Opaque::parse(store, s)
- : (DerivedPath) DerivedPath::Built::parse(store, s);
+ : (DerivedPath) DerivedPath::Built::parse(store, s.substr(0, n), s.substr(n + 1));
}
RealisedPath::Set BuiltPath::toRealisedPaths(Store & store) const
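
Built::parse now receives the derivation part and the output list as two separate string views and rejects an empty explicit output list. A self-contained sketch of that parsing logic, using plain std::string in place of StorePath:

    #include <cassert>
    #include <set>
    #include <sstream>
    #include <stdexcept>
    #include <string>
    #include <string_view>

    struct Built
    {
        std::string drvPath;
        std::set<std::string> outputs; // empty set means "all outputs" ('*')
    };

    Built parseBuilt(std::string_view drvS, std::string_view outputsS)
    {
        std::set<std::string> outputs;
        if (outputsS != "*") {
            std::stringstream ss{std::string(outputsS)};
            for (std::string item; std::getline(ss, item, ',');)
                if (!item.empty()) outputs.insert(item);
            if (outputs.empty())
                throw std::invalid_argument("explicit list of wanted outputs must not be empty");
        }
        return {std::string(drvS), outputs};
    }

    Built parse(std::string_view s)
    {
        auto n = s.find('!');
        if (n == s.npos) throw std::invalid_argument("expected '<drv>!<outputs>'");
        return parseBuilt(s.substr(0, n), s.substr(n + 1));
    }

    int main()
    {
        auto b = parse("/nix/store/aaaa-hello.drv!out,dev");
        assert(b.outputs == (std::set<std::string>{"dev", "out"}));
        assert(parse("/nix/store/aaaa-hello.drv!*").outputs.empty());
    }
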
diff --git a/src/libstore/derived-path.hh b/src/libstore/derived-path.hh
index 878696136..706e5dcb4 100644
--- a/src/libstore/derived-path.hh
+++ b/src/libstore/derived-path.hh
@@ -47,7 +47,7 @@ struct DerivedPathBuilt {
std::set<std::string> outputs;
std::string to_string(const Store & store) const;
- static DerivedPathBuilt parse(const Store & store, std::string_view);
+ static DerivedPathBuilt parse(const Store & store, std::string_view, std::string_view);
nlohmann::json toJSON(ref<Store> store) const;
bool operator < (const DerivedPathBuilt & b) const
diff --git a/src/libstore/filetransfer.cc b/src/libstore/filetransfer.cc
index 5746c32a3..756bd4423 100644
--- a/src/libstore/filetransfer.cc
+++ b/src/libstore/filetransfer.cc
@@ -33,14 +33,6 @@ FileTransferSettings fileTransferSettings;
static GlobalConfig::Register rFileTransferSettings(&fileTransferSettings);
-std::string resolveUri(std::string_view uri)
-{
- if (uri.compare(0, 8, "channel:") == 0)
- return "https://nixos.org/channels/" + std::string(uri.substr(8)) + "/nixexprs.tar.xz";
- else
- return std::string(uri);
-}
-
struct curlFileTransfer : public FileTransfer
{
CURLM * curlm = 0;
@@ -142,9 +134,9 @@ struct curlFileTransfer : public FileTransfer
}
template<class T>
- void fail(const T & e)
+ void fail(T && e)
{
- failEx(std::make_exception_ptr(e));
+ failEx(std::make_exception_ptr(std::move(e)));
}
LambdaSink finalSink;
@@ -472,7 +464,7 @@ struct curlFileTransfer : public FileTransfer
fileTransfer.enqueueItem(shared_from_this());
}
else
- fail(exc);
+ fail(std::move(exc));
}
}
};
@@ -873,14 +865,4 @@ FileTransferError::FileTransferError(FileTransfer::Error error, std::optional<st
err.msg = hf;
}
-bool isUri(std::string_view s)
-{
- if (s.compare(0, 8, "channel:") == 0) return true;
- size_t pos = s.find("://");
- if (pos == std::string::npos) return false;
- std::string scheme(s, 0, pos);
- return scheme == "http" || scheme == "https" || scheme == "file" || scheme == "channel" || scheme == "git" || scheme == "s3" || scheme == "ssh";
-}
-
-
}
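
The fail() helper above now takes its argument by rvalue reference and moves it into std::make_exception_ptr, avoiding a copy of the exception object. A standalone sketch of packaging an exception into an exception_ptr that way, with illustrative names:

    #include <exception>
    #include <iostream>
    #include <stdexcept>
    #include <utility>

    std::exception_ptr stored;

    // Store the exception so a callback can rethrow it later; moving into
    // make_exception_ptr avoids copying the exception object.
    template<class T>
    void fail(T && e)
    {
        stored = std::make_exception_ptr(std::move(e));
    }

    int main()
    {
        fail(std::runtime_error("download of 'https://example.org/x.tar.gz' failed"));
        try {
            std::rethrow_exception(stored);
        } catch (std::exception & e) {
            std::cout << e.what() << '\n';
        }
    }
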
diff --git a/src/libstore/filetransfer.hh b/src/libstore/filetransfer.hh
index 40e7cf52c..07d58f53a 100644
--- a/src/libstore/filetransfer.hh
+++ b/src/libstore/filetransfer.hh
@@ -125,9 +125,4 @@ public:
FileTransferError(FileTransfer::Error error, std::optional<std::string> response, const Args & ... args);
};
-bool isUri(std::string_view s);
-
-/* Resolve deprecated 'channel:<foo>' URLs. */
-std::string resolveUri(std::string_view uri);
-
}
diff --git a/src/libstore/globals.hh b/src/libstore/globals.hh
index ca72ad31e..274a15dd7 100644
--- a/src/libstore/globals.hh
+++ b/src/libstore/globals.hh
@@ -281,7 +281,10 @@ public:
`NIX_REMOTE` is empty, the uid under which the Nix daemon runs if
`NIX_REMOTE` is `daemon`). Obviously, this should not be used in
multi-user settings with untrusted users.
- )"};
+
+ Defaults to `nixbld` when running as root, *empty* otherwise.
+ )",
+ {}, false};
Setting<bool> autoAllocateUids{this, false, "auto-allocate-uids",
R"(
@@ -373,11 +376,6 @@ public:
)",
{"build-max-log-size"}};
- /* When buildRepeat > 0 and verboseBuild == true, whether to print
- repeated builds (i.e. builds other than the first one) to
- stderr. Hack to prevent Hydra logs from being polluted. */
- bool printRepeatedBuilds = true;
-
Setting<unsigned int> pollInterval{this, 5, "build-poll-interval",
"How often (in seconds) to poll for locks."};
@@ -501,19 +499,6 @@ public:
Setting<bool> sandboxFallback{this, true, "sandbox-fallback",
"Whether to disable sandboxing when the kernel doesn't allow it."};
- Setting<size_t> buildRepeat{
- this, 0, "repeat",
- R"(
- How many times to repeat builds to check whether they are
- deterministic. The default value is 0. If the value is non-zero,
- every build is repeated the specified number of times. If the
- contents of any of the runs differs from the previous ones and
- `enforce-determinism` is true, the build is rejected and the
- resulting store paths are not registered as “valid” in Nix’s
- database.
- )",
- {"build-repeat"}};
-
#if __linux__
Setting<std::string> sandboxShmSize{
this, "50%", "sandbox-dev-shm-size",
@@ -577,10 +562,6 @@ public:
configuration file, and cannot be passed at the command line.
)"};
- Setting<bool> enforceDeterminism{
- this, true, "enforce-determinism",
- "Whether to fail if repeated builds produce different output. See `repeat`."};
-
Setting<Strings> trustedPublicKeys{
this,
{"cache.nixos.org-1:6NCHdD59X431o0gWypbMrAURkbJ16ZPMQFGspcDShjY="},
diff --git a/src/libstore/legacy-ssh-store.cc b/src/libstore/legacy-ssh-store.cc
index dd34b19c6..4d398b21d 100644
--- a/src/libstore/legacy-ssh-store.cc
+++ b/src/libstore/legacy-ssh-store.cc
@@ -255,8 +255,8 @@ private:
<< settings.maxLogSize;
if (GET_PROTOCOL_MINOR(conn.remoteVersion) >= 3)
conn.to
- << settings.buildRepeat
- << settings.enforceDeterminism;
+ << 0 // buildRepeat hasn't worked for ages anyway
+ << 0;
if (GET_PROTOCOL_MINOR(conn.remoteVersion) >= 7) {
conn.to << ((int) settings.keepFailed);
diff --git a/src/libstore/lock.cc b/src/libstore/lock.cc
index 2858137d6..d02d20b4c 100644
--- a/src/libstore/lock.cc
+++ b/src/libstore/lock.cc
@@ -185,7 +185,7 @@ std::unique_ptr<UserLock> acquireUserLock(uid_t nrIds, bool useChroot)
bool useBuildUsers()
{
#if __linux__
- static bool b = (settings.buildUsersGroup != "" || settings.startId.get() != 0) && getuid() == 0;
+ static bool b = (settings.buildUsersGroup != "" || settings.autoAllocateUids) && getuid() == 0;
return b;
#elif __APPLE__
static bool b = settings.buildUsersGroup != "" && getuid() == 0;
diff --git a/src/libstore/remote-store.cc b/src/libstore/remote-store.cc
index 96a29155c..48cf731a8 100644
--- a/src/libstore/remote-store.cc
+++ b/src/libstore/remote-store.cc
@@ -447,7 +447,7 @@ void RemoteStore::queryPathInfoUncached(const StorePath & path,
} catch (Error & e) {
// Ugly backwards compatibility hack.
if (e.msg().find("is not valid") != std::string::npos)
- throw InvalidPath(e.info());
+ throw InvalidPath(std::move(e.info()));
throw;
}
if (GET_PROTOCOL_MINOR(conn->daemonVersion) >= 17) {
diff --git a/src/libstore/sqlite.cc b/src/libstore/sqlite.cc
index 2090beabd..6c350888f 100644
--- a/src/libstore/sqlite.cc
+++ b/src/libstore/sqlite.cc
@@ -8,12 +8,15 @@
namespace nix {
-SQLiteError::SQLiteError(const char *path, int errNo, int extendedErrNo, hintformat && hf)
- : Error(""), path(path), errNo(errNo), extendedErrNo(extendedErrNo)
+SQLiteError::SQLiteError(const char *path, const char *errMsg, int errNo, int extendedErrNo, int offset, hintformat && hf)
+ : Error(""), path(path), errMsg(errMsg), errNo(errNo), extendedErrNo(extendedErrNo), offset(offset)
{
- err.msg = hintfmt("%s: %s (in '%s')",
+ auto offsetStr = (offset == -1) ? "" : "at offset " + std::to_string(offset) + ": ";
+ err.msg = hintfmt("%s: %s%s, %s (in '%s')",
normaltxt(hf.str()),
+ offsetStr,
sqlite3_errstr(extendedErrNo),
+ errMsg,
path ? path : "(in-memory)");
}
@@ -21,11 +24,13 @@ SQLiteError::SQLiteError(const char *path, int errNo, int extendedErrNo, hintfor
{
int err = sqlite3_errcode(db);
int exterr = sqlite3_extended_errcode(db);
+ int offset = sqlite3_error_offset(db);
auto path = sqlite3_db_filename(db, nullptr);
+ auto errMsg = sqlite3_errmsg(db);
if (err == SQLITE_BUSY || err == SQLITE_PROTOCOL) {
- auto exp = SQLiteBusy(path, err, exterr, std::move(hf));
+ auto exp = SQLiteBusy(path, errMsg, err, exterr, offset, std::move(hf));
exp.err.msg = hintfmt(
err == SQLITE_PROTOCOL
? "SQLite database '%s' is busy (SQLITE_PROTOCOL)"
@@ -33,7 +38,7 @@ SQLiteError::SQLiteError(const char *path, int errNo, int extendedErrNo, hintfor
path ? path : "(in-memory)");
throw exp;
} else
- throw SQLiteError(path, err, exterr, std::move(hf));
+ throw SQLiteError(path, errMsg, err, exterr, offset, std::move(hf));
}
SQLite::SQLite(const Path & path, bool create)
diff --git a/src/libstore/sqlite.hh b/src/libstore/sqlite.hh
index 1d1c553ea..1853731a2 100644
--- a/src/libstore/sqlite.hh
+++ b/src/libstore/sqlite.hh
@@ -98,21 +98,22 @@ struct SQLiteTxn
struct SQLiteError : Error
{
- const char *path;
- int errNo, extendedErrNo;
+ std::string path;
+ std::string errMsg;
+ int errNo, extendedErrNo, offset;
template<typename... Args>
[[noreturn]] static void throw_(sqlite3 * db, const std::string & fs, const Args & ... args) {
throw_(db, hintfmt(fs, args...));
}
- SQLiteError(const char *path, int errNo, int extendedErrNo, hintformat && hf);
+ SQLiteError(const char *path, const char *errMsg, int errNo, int extendedErrNo, int offset, hintformat && hf);
protected:
template<typename... Args>
- SQLiteError(const char *path, int errNo, int extendedErrNo, const std::string & fs, const Args & ... args)
- : SQLiteError(path, errNo, extendedErrNo, hintfmt(fs, args...))
+ SQLiteError(const char *path, const char *errMsg, int errNo, int extendedErrNo, int offset, const std::string & fs, const Args & ... args)
+      : SQLiteError(path, errMsg, errNo, extendedErrNo, offset, hintfmt(fs, args...))
{ }
[[noreturn]] static void throw_(sqlite3 * db, hintformat && hf);
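A minimal standalone sketch against the raw SQLite C API (not taken from this change) showing what the new `errMsg` and `offset` fields add over the generic `sqlite3_errstr` string; `sqlite3_error_offset` requires SQLite ≥ 3.38 and returns -1 when no offset applies:

```cpp
#include <sqlite3.h>
#include <cstdio>

int main()
{
    sqlite3 * db = nullptr;
    sqlite3_open(":memory:", &db);

    // Deliberately malformed SQL so the prepare step fails.
    sqlite3_stmt * stmt = nullptr;
    if (sqlite3_prepare_v2(db, "selct * from t", -1, &stmt, nullptr) != SQLITE_OK) {
        // errmsg is specific (something like: near "selct": syntax error),
        // errstr is the generic per-code string, and the offset (when not -1)
        // points into the SQL text.
        std::printf("errmsg: %s\n", sqlite3_errmsg(db));
        std::printf("errstr: %s\n", sqlite3_errstr(sqlite3_extended_errcode(db)));
        std::printf("offset: %d\n", sqlite3_error_offset(db));
    }

    sqlite3_close(db);
}
```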
diff --git a/src/libstore/store-api.cc b/src/libstore/store-api.cc
index 8811ab578..80b60ca1b 100644
--- a/src/libstore/store-api.cc
+++ b/src/libstore/store-api.cc
@@ -19,21 +19,21 @@ using json = nlohmann::json;
namespace nix {
-bool Store::isInStore(const Path & path) const
+bool Store::isInStore(PathView path) const
{
return isInDir(path, storeDir);
}
-std::pair<StorePath, Path> Store::toStorePath(const Path & path) const
+std::pair<StorePath, Path> Store::toStorePath(PathView path) const
{
if (!isInStore(path))
throw Error("path '%1%' is not in the Nix store", path);
- Path::size_type slash = path.find('/', storeDir.size() + 1);
+ auto slash = path.find('/', storeDir.size() + 1);
if (slash == Path::npos)
return {parseStorePath(path), ""};
else
- return {parseStorePath(std::string_view(path).substr(0, slash)), path.substr(slash)};
+ return {parseStorePath(path.substr(0, slash)), (Path) path.substr(slash)};
}
diff --git a/src/libstore/store-api.hh b/src/libstore/store-api.hh
index 151ec10d6..4a88d7216 100644
--- a/src/libstore/store-api.hh
+++ b/src/libstore/store-api.hh
@@ -179,7 +179,7 @@ public:
/* Return true if ‘path’ is in the Nix store (but not the Nix
store itself). */
- bool isInStore(const Path & path) const;
+ bool isInStore(PathView path) const;
/* Return true if ‘path’ is a store path, i.e. a direct child of
the Nix store. */
@@ -187,7 +187,7 @@ public:
/* Split a path like /nix/store/<hash>-<name>/<bla> into
/nix/store/<hash>-<name> and /<bla>. */
- std::pair<StorePath, Path> toStorePath(const Path & path) const;
+ std::pair<StorePath, Path> toStorePath(PathView path) const;
/* Follow symlinks until we end up with a path in the Nix store. */
Path followLinksToStore(std::string_view path) const;
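A rough usage sketch of the `PathView` overloads (my own example, not part of the change): `toStorePath()` splits on the first slash after the `<hash>-<name>` component and does not require the path to exist. It assumes an ordinary store rooted at `/nix/store` and the usual libmain/libstore setup via `initNix()`:

```cpp
#include "shared.hh"     // for initNix()
#include "store-api.hh"

#include <iostream>

int main()
{
    nix::initNix();

    auto store = nix::openStore();

    // Illustrative store path; only the syntax matters here.
    auto [storePath, rest] = store->toStorePath(
        "/nix/store/gzaflydcr6sb3567hap9q6srzx8ggdgg-glibc-2.33-78/lib/libc.so.6");

    std::cout << store->printStorePath(storePath) << std::endl; // the /nix/store/<hash>-<name> part
    std::cout << rest << std::endl;                             // "/lib/libc.so.6"
}
```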
diff --git a/src/libutil/archive.cc b/src/libutil/archive.cc
index 4b0636129..0e2b9d12c 100644
--- a/src/libutil/archive.cc
+++ b/src/libutil/archive.cc
@@ -35,10 +35,6 @@ static ArchiveSettings archiveSettings;
static GlobalConfig::Register rArchiveSettings(&archiveSettings);
-const std::string narVersionMagic1 = "nix-archive-1";
-
-static std::string caseHackSuffix = "~nix~case~hack~";
-
PathFilter defaultPathFilter = [](const Path &) { return true; };
diff --git a/src/libutil/archive.hh b/src/libutil/archive.hh
index ac4183bf5..e42dea540 100644
--- a/src/libutil/archive.hh
+++ b/src/libutil/archive.hh
@@ -103,7 +103,9 @@ void copyNAR(Source & source, Sink & sink);
void copyPath(const Path & from, const Path & to);
-extern const std::string narVersionMagic1;
+inline constexpr std::string_view narVersionMagic1 = "nix-archive-1";
+
+inline constexpr std::string_view caseHackSuffix = "~nix~case~hack~";
}
diff --git a/src/libutil/canon-path.cc b/src/libutil/canon-path.cc
new file mode 100644
index 000000000..b132b4262
--- /dev/null
+++ b/src/libutil/canon-path.cc
@@ -0,0 +1,103 @@
+#include "canon-path.hh"
+#include "util.hh"
+
+namespace nix {
+
+CanonPath CanonPath::root = CanonPath("/");
+
+CanonPath::CanonPath(std::string_view raw)
+ : path(absPath((Path) raw, "/"))
+{ }
+
+CanonPath::CanonPath(std::string_view raw, const CanonPath & root)
+ : path(absPath((Path) raw, root.abs()))
+{ }
+
+std::optional<CanonPath> CanonPath::parent() const
+{
+ if (isRoot()) return std::nullopt;
+ return CanonPath(unchecked_t(), path.substr(0, std::max((size_t) 1, path.rfind('/'))));
+}
+
+void CanonPath::pop()
+{
+ assert(!isRoot());
+ path.resize(std::max((size_t) 1, path.rfind('/')));
+}
+
+bool CanonPath::isWithin(const CanonPath & parent) const
+{
+ return !(
+ path.size() < parent.path.size()
+ || path.substr(0, parent.path.size()) != parent.path
+ || (parent.path.size() > 1 && path.size() > parent.path.size()
+ && path[parent.path.size()] != '/'));
+}
+
+CanonPath CanonPath::removePrefix(const CanonPath & prefix) const
+{
+ assert(isWithin(prefix));
+ if (prefix.isRoot()) return *this;
+ if (path.size() == prefix.path.size()) return root;
+ return CanonPath(unchecked_t(), path.substr(prefix.path.size()));
+}
+
+void CanonPath::extend(const CanonPath & x)
+{
+ if (x.isRoot()) return;
+ if (isRoot())
+ path += x.rel();
+ else
+ path += x.abs();
+}
+
+CanonPath CanonPath::operator + (const CanonPath & x) const
+{
+ auto res = *this;
+ res.extend(x);
+ return res;
+}
+
+void CanonPath::push(std::string_view c)
+{
+ assert(c.find('/') == c.npos);
+ assert(c != "." && c != "..");
+ if (!isRoot()) path += '/';
+ path += c;
+}
+
+CanonPath CanonPath::operator + (std::string_view c) const
+{
+ auto res = *this;
+ res.push(c);
+ return res;
+}
+
+bool CanonPath::isAllowed(const std::set<CanonPath> & allowed) const
+{
+ /* Check if `this` is an exact match or the parent of an
+ allowed path. */
+ auto lb = allowed.lower_bound(*this);
+ if (lb != allowed.end()) {
+ if (lb->isWithin(*this))
+ return true;
+ }
+
+ /* Check if a parent of `this` is allowed. */
+ auto path = *this;
+ while (!path.isRoot()) {
+ path.pop();
+ if (allowed.count(path))
+ return true;
+ }
+
+ return false;
+}
+
+std::ostream & operator << (std::ostream & stream, const CanonPath & path)
+{
+ stream << path.abs();
+ return stream;
+}
+
+}
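The `lower_bound` trick above works because `operator <` (defined in the accompanying header) sorts a directory immediately before its children. A small self-contained sketch of the resulting access-check semantics; the paths are illustrative, not taken from the change:

```cpp
#include "canon-path.hh"

#include <cassert>
#include <set>

int main()
{
    using nix::CanonPath;

    std::set<CanonPath> allowed { CanonPath("/etc/nix"), CanonPath("/home/alice/src") };

    // Paths below an allowed path are allowed...
    assert(CanonPath("/etc/nix/nix.conf").isAllowed(allowed));
    // ...and so are their parents, so that the allowed paths stay reachable.
    assert(CanonPath("/etc").isAllowed(allowed));
    // A sibling that merely shares a string prefix is rejected.
    assert(!CanonPath("/etc/nixos").isAllowed(allowed));
}
```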
diff --git a/src/libutil/canon-path.hh b/src/libutil/canon-path.hh
new file mode 100644
index 000000000..c5e7f0596
--- /dev/null
+++ b/src/libutil/canon-path.hh
@@ -0,0 +1,173 @@
+#pragma once
+
+#include <string>
+#include <optional>
+#include <cassert>
+#include <iostream>
+#include <set>
+
+namespace nix {
+
+/* A canonical representation of a path. It ensures the following:
+
+ - It always starts with a slash.
+
+ - It never ends with a slash, except if the path is "/".
+
+ - A slash is never followed by a slash (i.e. no empty components).
+
+ - There are no components equal to '.' or '..'.
+
+ Note that the path does not need to correspond to an actually
+ existing path, and there is no guarantee that symlinks are
+ resolved.
+*/
+class CanonPath
+{
+ std::string path;
+
+public:
+
+ /* Construct a canon path from a non-canonical path. Any '.', '..'
+ or empty components are removed. */
+ CanonPath(std::string_view raw);
+
+ explicit CanonPath(const char * raw)
+ : CanonPath(std::string_view(raw))
+ { }
+
+ struct unchecked_t { };
+
+ CanonPath(unchecked_t _, std::string path)
+ : path(std::move(path))
+ { }
+
+ static CanonPath root;
+
+ /* If `raw` starts with a slash, return
+ `CanonPath(raw)`. Otherwise return a `CanonPath` representing
+ `root + "/" + raw`. */
+ CanonPath(std::string_view raw, const CanonPath & root);
+
+ bool isRoot() const
+ { return path.size() <= 1; }
+
+ explicit operator std::string_view() const
+ { return path; }
+
+ const std::string & abs() const
+ { return path; }
+
+ /* Like abs(), but return an empty string if this path is
+ '/'. Thus the returned string never ends in a slash. */
+ const std::string & absOrEmpty() const
+ {
+ const static std::string epsilon;
+ return isRoot() ? epsilon : path;
+ }
+
+ const char * c_str() const
+ { return path.c_str(); }
+
+ std::string_view rel() const
+ { return ((std::string_view) path).substr(1); }
+
+ struct Iterator
+ {
+ std::string_view remaining;
+ size_t slash;
+
+ Iterator(std::string_view remaining)
+ : remaining(remaining)
+ , slash(remaining.find('/'))
+ { }
+
+ bool operator != (const Iterator & x) const
+ { return remaining.data() != x.remaining.data(); }
+
+ const std::string_view operator * () const
+ { return remaining.substr(0, slash); }
+
+ void operator ++ ()
+ {
+ if (slash == remaining.npos)
+ remaining = remaining.substr(remaining.size());
+ else {
+ remaining = remaining.substr(slash + 1);
+ slash = remaining.find('/');
+ }
+ }
+ };
+
+ Iterator begin() const { return Iterator(rel()); }
+ Iterator end() const { return Iterator(rel().substr(path.size() - 1)); }
+
+ std::optional<CanonPath> parent() const;
+
+ /* Remove the last component. Panics if this path is the root. */
+ void pop();
+
+ std::optional<std::string_view> dirOf() const
+ {
+ if (isRoot()) return std::nullopt;
+ return path.substr(0, path.rfind('/'));
+ }
+
+ std::optional<std::string_view> baseName() const
+ {
+ if (isRoot()) return std::nullopt;
+ return ((std::string_view) path).substr(path.rfind('/') + 1);
+ }
+
+ bool operator == (const CanonPath & x) const
+ { return path == x.path; }
+
+ bool operator != (const CanonPath & x) const
+ { return path != x.path; }
+
+ /* Compare paths lexicographically except that path separators
+ are sorted before any other character. That is, in the sorted order
+ a directory is always followed directly by its children. For
+ instance, 'foo' < 'foo/bar' < 'foo!'. */
+ bool operator < (const CanonPath & x) const
+ {
+ auto i = path.begin();
+ auto j = x.path.begin();
+ for ( ; i != path.end() && j != x.path.end(); ++i, ++j) {
+ auto c_i = *i;
+ if (c_i == '/') c_i = 0;
+ auto c_j = *j;
+ if (c_j == '/') c_j = 0;
+ if (c_i < c_j) return true;
+ if (c_i > c_j) return false;
+ }
+ return i == path.end() && j != x.path.end();
+ }
+
+ /* Return true if `this` is equal to `parent` or a child of
+ `parent`. */
+ bool isWithin(const CanonPath & parent) const;
+
+ CanonPath removePrefix(const CanonPath & prefix) const;
+
+ /* Append another path to this one. */
+ void extend(const CanonPath & x);
+
+ /* Concatenate two paths. */
+ CanonPath operator + (const CanonPath & x) const;
+
+ /* Add a path component to this one. It must not contain any slashes. */
+ void push(std::string_view c);
+
+ CanonPath operator + (std::string_view c) const;
+
+ /* Check whether access to this path is allowed, which is the case
+ if 1) `this` is within any of the `allowed` paths; or 2) any of
+ the `allowed` paths are within `this`. (The latter condition
+ ensures access to the parents of allowed paths.) */
+ bool isAllowed(const std::set<CanonPath> & allowed) const;
+};
+
+std::ostream & operator << (std::ostream & stream, const CanonPath & path);
+
+}
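A short sketch (not part of the change) of the invariants documented above: construction canonicalises away empty, `.` and `..` components, and iteration walks the resulting components without re-parsing:

```cpp
#include "canon-path.hh"

#include <cassert>
#include <string>

int main()
{
    using nix::CanonPath;

    CanonPath p("foo/./bar//baz/..");
    assert(p.abs() == "/foo/bar");   // no trailing slash, no '.', '..' or empty components

    p.push("default.nix");
    assert(p.abs() == "/foo/bar/default.nix");

    std::string joined;
    for (auto & component : p) {
        joined += component;
        joined += ' ';
    }
    assert(joined == "foo bar default.nix ");
}
```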
diff --git a/src/libutil/error.cc b/src/libutil/error.cc
index 9172f67a6..1a1aecea5 100644
--- a/src/libutil/error.cc
+++ b/src/libutil/error.cc
@@ -9,9 +9,9 @@ namespace nix {
const std::string nativeSystem = SYSTEM;
-void BaseError::addTrace(std::optional<ErrPos> e, hintformat hint)
+void BaseError::addTrace(std::shared_ptr<AbstractPos> && e, hintformat hint)
{
- err.traces.push_front(Trace { .pos = e, .hint = hint });
+ err.traces.push_front(Trace { .pos = std::move(e), .hint = hint });
}
// c++ std::exception descendants must have a 'const char* what()' function.
@@ -30,91 +30,46 @@ const std::string & BaseError::calcWhat() const
std::optional<std::string> ErrorInfo::programName = std::nullopt;
-std::ostream & operator<<(std::ostream & os, const hintformat & hf)
+std::ostream & operator <<(std::ostream & os, const hintformat & hf)
{
return os << hf.str();
}
-std::string showErrPos(const ErrPos & errPos)
+std::ostream & operator <<(std::ostream & str, const AbstractPos & pos)
{
- if (errPos.line > 0) {
- if (errPos.column > 0) {
- return fmt("%d:%d", errPos.line, errPos.column);
- } else {
- return fmt("%d", errPos.line);
- }
- }
- else {
- return "";
- }
+ pos.print(str);
+ str << ":" << pos.line;
+ if (pos.column > 0)
+ str << ":" << pos.column;
+ return str;
}
-std::optional<LinesOfCode> getCodeLines(const ErrPos & errPos)
+std::optional<LinesOfCode> AbstractPos::getCodeLines() const
{
- if (errPos.line <= 0)
+ if (line == 0)
return std::nullopt;
- if (errPos.origin == foFile) {
- LinesOfCode loc;
- try {
- // FIXME: when running as the daemon, make sure we don't
- // open a file to which the client doesn't have access.
- AutoCloseFD fd = open(errPos.file.c_str(), O_RDONLY | O_CLOEXEC);
- if (!fd) return {};
-
- // count the newlines.
- int count = 0;
- std::string line;
- int pl = errPos.line - 1;
- do
- {
- line = readLine(fd.get());
- ++count;
- if (count < pl)
- ;
- else if (count == pl)
- loc.prevLineOfCode = line;
- else if (count == pl + 1)
- loc.errLineOfCode = line;
- else if (count == pl + 2) {
- loc.nextLineOfCode = line;
- break;
- }
- } while (true);
- return loc;
- }
- catch (EndOfFile & eof) {
- if (loc.errLineOfCode.has_value())
- return loc;
- else
- return std::nullopt;
- }
- catch (std::exception & e) {
- return std::nullopt;
- }
- } else {
- std::istringstream iss(errPos.file);
+ if (auto source = getSource()) {
+
+ std::istringstream iss(*source);
// count the newlines.
int count = 0;
- std::string line;
- int pl = errPos.line - 1;
+ std::string curLine;
+ int pl = line - 1;
LinesOfCode loc;
- do
- {
- std::getline(iss, line);
+ do {
+ std::getline(iss, curLine);
++count;
if (count < pl)
- {
;
- }
else if (count == pl) {
- loc.prevLineOfCode = line;
+ loc.prevLineOfCode = curLine;
} else if (count == pl + 1) {
- loc.errLineOfCode = line;
+ loc.errLineOfCode = curLine;
} else if (count == pl + 2) {
- loc.nextLineOfCode = line;
+ loc.nextLineOfCode = curLine;
break;
}
@@ -124,12 +79,14 @@ std::optional<LinesOfCode> getCodeLines(const ErrPos & errPos)
return loc;
}
+
+ return std::nullopt;
}
// print lines of code to the ostream, indicating the error column.
void printCodeLines(std::ostream & out,
const std::string & prefix,
- const ErrPos & errPos,
+ const AbstractPos & errPos,
const LinesOfCode & loc)
{
// previous line of code.
@@ -176,28 +133,6 @@ void printCodeLines(std::ostream & out,
}
}
-void printAtPos(const ErrPos & pos, std::ostream & out)
-{
- if (pos) {
- switch (pos.origin) {
- case foFile: {
- out << fmt(ANSI_BLUE "at " ANSI_WARNING "%s:%s" ANSI_NORMAL ":", pos.file, showErrPos(pos));
- break;
- }
- case foString: {
- out << fmt(ANSI_BLUE "at " ANSI_WARNING "«string»:%s" ANSI_NORMAL ":", showErrPos(pos));
- break;
- }
- case foStdin: {
- out << fmt(ANSI_BLUE "at " ANSI_WARNING "«stdin»:%s" ANSI_NORMAL ":", showErrPos(pos));
- break;
- }
- default:
- throw Error("invalid FileOrigin in errPos");
- }
- }
-}
-
static std::string indent(std::string_view indentFirst, std::string_view indentRest, std::string_view s)
{
std::string res;
@@ -262,49 +197,48 @@ std::ostream & showErrorInfo(std::ostream & out, const ErrorInfo & einfo, bool s
prefix += ":" ANSI_NORMAL " ";
std::ostringstream oss;
- oss << einfo.msg << "\n";
- if (einfo.errPos.has_value() && *einfo.errPos) {
- oss << "\n";
- printAtPos(*einfo.errPos, oss);
+ auto noSource = ANSI_ITALIC " (source not available)" ANSI_NORMAL "\n";
- auto loc = getCodeLines(*einfo.errPos);
+ // traces
+ if (showTrace && !einfo.traces.empty()) {
+ for (const auto & trace : einfo.traces) {
+ oss << "\n" << "… " << trace.hint.str() << "\n";
+
+ if (trace.pos) {
+ oss << "\n" << ANSI_BLUE << "at " ANSI_WARNING << *trace.pos << ANSI_NORMAL << ":";
+
+ if (auto loc = trace.pos->getCodeLines()) {
+ oss << "\n";
+ printCodeLines(oss, "", *trace.pos, *loc);
+ oss << "\n";
+ } else
+ oss << noSource;
+ }
+ }
+ oss << "\n" << prefix;
+ }
+
+ oss << einfo.msg << "\n";
+
+ if (einfo.errPos) {
+ oss << "\n" << ANSI_BLUE << "at " ANSI_WARNING << *einfo.errPos << ANSI_NORMAL << ":";
- // lines of code.
- if (loc.has_value()) {
+ if (auto loc = einfo.errPos->getCodeLines()) {
oss << "\n";
printCodeLines(oss, "", *einfo.errPos, *loc);
oss << "\n";
- }
+ } else
+ oss << noSource;
}
auto suggestions = einfo.suggestions.trim();
- if (! suggestions.suggestions.empty()){
+ if (!suggestions.suggestions.empty()) {
oss << "Did you mean " <<
suggestions.trim() <<
"?" << std::endl;
}
- // traces
- if (showTrace && !einfo.traces.empty()) {
- for (auto iter = einfo.traces.rbegin(); iter != einfo.traces.rend(); ++iter) {
- oss << "\n" << "… " << iter->hint.str() << "\n";
-
- if (iter->pos.has_value() && (*iter->pos)) {
- auto pos = iter->pos.value();
- oss << "\n";
- printAtPos(pos, oss);
-
- auto loc = getCodeLines(pos);
- if (loc.has_value()) {
- oss << "\n";
- printCodeLines(oss, "", pos, *loc);
- oss << "\n";
- }
- }
- }
- }
-
out << indent(prefix, std::string(filterANSIEscapes(prefix, true).size(), ' '), chomp(oss.str()));
return out;
diff --git a/src/libutil/error.hh b/src/libutil/error.hh
index 3d1479c54..c3bb8c0df 100644
--- a/src/libutil/error.hh
+++ b/src/libutil/error.hh
@@ -54,13 +54,6 @@ typedef enum {
lvlVomit
} Verbosity;
-/* adjust Pos::origin bit width when adding stuff here */
-typedef enum {
- foFile,
- foStdin,
- foString
-} FileOrigin;
-
// the lines of code surrounding an error.
struct LinesOfCode {
std::optional<std::string> prevLineOfCode;
@@ -68,54 +61,37 @@ struct LinesOfCode {
std::optional<std::string> nextLineOfCode;
};
-// ErrPos indicates the location of an error in a nix file.
-struct ErrPos {
- int line = 0;
- int column = 0;
- std::string file;
- FileOrigin origin;
+/* An abstract type that represents a location in a source file. */
+struct AbstractPos
+{
+ uint32_t line = 0;
+ uint32_t column = 0;
- operator bool() const
- {
- return line != 0;
- }
+ /* Return the contents of the source file. */
+ virtual std::optional<std::string> getSource() const
+    { return std::nullopt; }
- // convert from the Pos struct, found in libexpr.
- template <class P>
- ErrPos & operator=(const P & pos)
- {
- origin = pos.origin;
- line = pos.line;
- column = pos.column;
- file = pos.file;
- return *this;
- }
+ virtual void print(std::ostream & out) const = 0;
- template <class P>
- ErrPos(const P & p)
- {
- *this = p;
- }
+ std::optional<LinesOfCode> getCodeLines() const;
};
-std::optional<LinesOfCode> getCodeLines(const ErrPos & errPos);
+std::ostream & operator << (std::ostream & str, const AbstractPos & pos);
void printCodeLines(std::ostream & out,
const std::string & prefix,
- const ErrPos & errPos,
+ const AbstractPos & errPos,
const LinesOfCode & loc);
-void printAtPos(const ErrPos & pos, std::ostream & out);
-
struct Trace {
- std::optional<ErrPos> pos;
+ std::shared_ptr<AbstractPos> pos;
hintformat hint;
};
struct ErrorInfo {
Verbosity level;
hintformat msg;
- std::optional<ErrPos> errPos;
+ std::shared_ptr<AbstractPos> errPos;
std::list<Trace> traces;
Suggestions suggestions;
@@ -177,12 +153,12 @@ public:
const ErrorInfo & info() const { calcWhat(); return err; }
template<typename... Args>
- void addTrace(std::optional<ErrPos> e, const std::string & fs, const Args & ... args)
+ void addTrace(std::shared_ptr<AbstractPos> && e, const std::string & fs, const Args & ... args)
{
- addTrace(e, hintfmt(fs, args...));
+ addTrace(std::move(e), hintfmt(fs, args...));
}
- void addTrace(std::optional<ErrPos> e, hintformat hint);
+ void addTrace(std::shared_ptr<AbstractPos> && e, hintformat hint);
bool hasTrace() const { return !err.traces.empty(); }
};
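`AbstractPos` is deliberately abstract: concrete position types only have to implement `print()` and can optionally supply the source text for `getCodeLines()`. A hypothetical minimal subclass; the name and fields are illustrative, not from this change:

```cpp
#include "error.hh"

#include <memory>
#include <sstream>

// Illustrative position type for code parsed from an in-memory string.
struct StringPos : nix::AbstractPos
{
    std::string source;

    std::optional<std::string> getSource() const override
    { return source; }

    void print(std::ostream & out) const override
    { out << "«string»"; }
};

void example()
{
    auto pos = std::make_shared<StringPos>();
    pos->source = "let x = 1;\nin x + y\n";
    pos->line = 2;
    pos->column = 6;

    std::ostringstream str;
    str << *pos;                     // prints «string»:2:6
    auto loc = pos->getCodeLines();  // surrounding lines for error rendering

    nix::Error e("undefined variable 'y'");
    e.addTrace(pos, "while evaluating the example");
}
```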
diff --git a/src/libutil/fmt.hh b/src/libutil/fmt.hh
index 7664e5c04..e879fd3b8 100644
--- a/src/libutil/fmt.hh
+++ b/src/libutil/fmt.hh
@@ -148,7 +148,7 @@ inline hintformat hintfmt(const std::string & fs, const Args & ... args)
return f;
}
-inline hintformat hintfmt(std::string plain_string)
+inline hintformat hintfmt(const std::string & plain_string)
{
// we won't be receiving any args in this case, so just print the original string
return hintfmt("%s", normaltxt(plain_string));
diff --git a/src/libutil/logging.cc b/src/libutil/logging.cc
index cb2b15b41..904ba6ebe 100644
--- a/src/libutil/logging.cc
+++ b/src/libutil/logging.cc
@@ -105,14 +105,6 @@ public:
Verbosity verbosity = lvlInfo;
-void warnOnce(bool & haveWarned, const FormatOrString & fs)
-{
- if (!haveWarned) {
- warn(fs.s);
- haveWarned = true;
- }
-}
-
void writeToStderr(std::string_view s)
{
try {
@@ -130,15 +122,30 @@ Logger * makeSimpleLogger(bool printBuildLogs)
return new SimpleLogger(printBuildLogs);
}
-std::atomic<uint64_t> nextId{(uint64_t) getpid() << 32};
+std::atomic<uint64_t> nextId{0};
Activity::Activity(Logger & logger, Verbosity lvl, ActivityType type,
const std::string & s, const Logger::Fields & fields, ActivityId parent)
- : logger(logger), id(nextId++)
+ : logger(logger), id(nextId++ + (((uint64_t) getpid()) << 32))
{
logger.startActivity(id, lvl, type, s, fields, parent);
}
+void to_json(nlohmann::json & json, std::shared_ptr<AbstractPos> pos)
+{
+ if (pos) {
+ json["line"] = pos->line;
+ json["column"] = pos->column;
+ std::ostringstream str;
+ pos->print(str);
+ json["file"] = str.str();
+ } else {
+ json["line"] = nullptr;
+ json["column"] = nullptr;
+ json["file"] = nullptr;
+ }
+}
+
struct JSONLogger : Logger {
Logger & prevLogger;
@@ -185,27 +192,14 @@ struct JSONLogger : Logger {
json["level"] = ei.level;
json["msg"] = oss.str();
json["raw_msg"] = ei.msg.str();
-
- if (ei.errPos.has_value() && (*ei.errPos)) {
- json["line"] = ei.errPos->line;
- json["column"] = ei.errPos->column;
- json["file"] = ei.errPos->file;
- } else {
- json["line"] = nullptr;
- json["column"] = nullptr;
- json["file"] = nullptr;
- }
+ to_json(json, ei.errPos);
if (loggerSettings.showTrace.get() && !ei.traces.empty()) {
nlohmann::json traces = nlohmann::json::array();
for (auto iter = ei.traces.rbegin(); iter != ei.traces.rend(); ++iter) {
nlohmann::json stackFrame;
stackFrame["raw_msg"] = iter->hint.str();
- if (iter->pos.has_value() && (*iter->pos)) {
- stackFrame["line"] = iter->pos->line;
- stackFrame["column"] = iter->pos->column;
- stackFrame["file"] = iter->pos->file;
- }
+ to_json(stackFrame, iter->pos);
traces.push_back(stackFrame);
}
diff --git a/src/libutil/logging.hh b/src/libutil/logging.hh
index d0817b4a9..4642c49f7 100644
--- a/src/libutil/logging.hh
+++ b/src/libutil/logging.hh
@@ -82,7 +82,7 @@ public:
log(lvlInfo, fs);
}
- virtual void logEI(const ErrorInfo &ei) = 0;
+ virtual void logEI(const ErrorInfo & ei) = 0;
void logEI(Verbosity lvl, ErrorInfo ei)
{
@@ -225,7 +225,11 @@ inline void warn(const std::string & fs, const Args & ... args)
logger->warn(f.str());
}
-void warnOnce(bool & haveWarned, const FormatOrString & fs);
+#define warnOnce(haveWarned, args...) \
+ if (!haveWarned) { \
+ haveWarned = true; \
+ warn(args); \
+ }
void writeToStderr(std::string_view s);
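Since `warnOnce` is now a macro forwarding its arguments to `warn()`, call sites can pass format arguments directly; the flag must be an lvalue the macro can set. A small usage sketch with illustrative names:

```cpp
#include "logging.hh"

void reportDeprecatedOption(const std::string & name)
{
    // A function-local static keeps the "warn only once" state per call site.
    static bool haveWarned = false;
    warnOnce(haveWarned, "option '%s' is deprecated and will be ignored", name);
}
```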
diff --git a/src/libutil/ref.hh b/src/libutil/ref.hh
index bf26321db..7d38b059c 100644
--- a/src/libutil/ref.hh
+++ b/src/libutil/ref.hh
@@ -83,6 +83,11 @@ public:
return p != other.p;
}
+ bool operator < (const ref<T> & other) const
+ {
+ return p < other.p;
+ }
+
private:
template<typename T2, typename... Args>
diff --git a/src/libutil/serialise.cc b/src/libutil/serialise.cc
index 2c3597775..c653db9d0 100644
--- a/src/libutil/serialise.cc
+++ b/src/libutil/serialise.cc
@@ -338,7 +338,7 @@ Sink & operator << (Sink & sink, const StringSet & s)
Sink & operator << (Sink & sink, const Error & ex)
{
- auto info = ex.info();
+ auto & info = ex.info();
sink
<< "Error"
<< info.level
diff --git a/src/libutil/serialise.hh b/src/libutil/serialise.hh
index 84847835a..7da5b07fd 100644
--- a/src/libutil/serialise.hh
+++ b/src/libutil/serialise.hh
@@ -331,17 +331,9 @@ T readNum(Source & source)
unsigned char buf[8];
source((char *) buf, sizeof(buf));
- uint64_t n =
- ((uint64_t) buf[0]) |
- ((uint64_t) buf[1] << 8) |
- ((uint64_t) buf[2] << 16) |
- ((uint64_t) buf[3] << 24) |
- ((uint64_t) buf[4] << 32) |
- ((uint64_t) buf[5] << 40) |
- ((uint64_t) buf[6] << 48) |
- ((uint64_t) buf[7] << 56);
-
- if (n > (uint64_t)std::numeric_limits<T>::max())
+ auto n = readLittleEndian<uint64_t>(buf);
+
+ if (n > (uint64_t) std::numeric_limits<T>::max())
throw SerialisationError("serialised integer %d is too large for type '%s'", n, typeid(T).name());
return (T) n;
diff --git a/src/libutil/tests/canon-path.cc b/src/libutil/tests/canon-path.cc
new file mode 100644
index 000000000..c1c5adadf
--- /dev/null
+++ b/src/libutil/tests/canon-path.cc
@@ -0,0 +1,155 @@
+#include "canon-path.hh"
+
+#include <gtest/gtest.h>
+
+namespace nix {
+
+ TEST(CanonPath, basic) {
+ {
+ CanonPath p("/");
+ ASSERT_EQ(p.abs(), "/");
+ ASSERT_EQ(p.rel(), "");
+ ASSERT_EQ(p.baseName(), std::nullopt);
+ ASSERT_EQ(p.dirOf(), std::nullopt);
+ ASSERT_FALSE(p.parent());
+ }
+
+ {
+ CanonPath p("/foo//");
+ ASSERT_EQ(p.abs(), "/foo");
+ ASSERT_EQ(p.rel(), "foo");
+ ASSERT_EQ(*p.baseName(), "foo");
+ ASSERT_EQ(*p.dirOf(), ""); // FIXME: do we want this?
+ ASSERT_EQ(p.parent()->abs(), "/");
+ }
+
+ {
+ CanonPath p("foo/bar");
+ ASSERT_EQ(p.abs(), "/foo/bar");
+ ASSERT_EQ(p.rel(), "foo/bar");
+ ASSERT_EQ(*p.baseName(), "bar");
+ ASSERT_EQ(*p.dirOf(), "/foo");
+ ASSERT_EQ(p.parent()->abs(), "/foo");
+ }
+
+ {
+ CanonPath p("foo//bar/");
+ ASSERT_EQ(p.abs(), "/foo/bar");
+ ASSERT_EQ(p.rel(), "foo/bar");
+ ASSERT_EQ(*p.baseName(), "bar");
+ ASSERT_EQ(*p.dirOf(), "/foo");
+ }
+ }
+
+ TEST(CanonPath, pop) {
+ CanonPath p("foo/bar/x");
+ ASSERT_EQ(p.abs(), "/foo/bar/x");
+ p.pop();
+ ASSERT_EQ(p.abs(), "/foo/bar");
+ p.pop();
+ ASSERT_EQ(p.abs(), "/foo");
+ p.pop();
+ ASSERT_EQ(p.abs(), "/");
+ }
+
+ TEST(CanonPath, removePrefix) {
+ CanonPath p1("foo/bar");
+ CanonPath p2("foo/bar/a/b/c");
+ ASSERT_EQ(p2.removePrefix(p1).abs(), "/a/b/c");
+ ASSERT_EQ(p1.removePrefix(p1).abs(), "/");
+ ASSERT_EQ(p1.removePrefix(CanonPath("/")).abs(), "/foo/bar");
+ }
+
+ TEST(CanonPath, iter) {
+ {
+ CanonPath p("a//foo/bar//");
+ std::vector<std::string_view> ss;
+ for (auto & c : p) ss.push_back(c);
+ ASSERT_EQ(ss, std::vector<std::string_view>({"a", "foo", "bar"}));
+ }
+
+ {
+ CanonPath p("/");
+ std::vector<std::string_view> ss;
+ for (auto & c : p) ss.push_back(c);
+ ASSERT_EQ(ss, std::vector<std::string_view>());
+ }
+ }
+
+ TEST(CanonPath, concat) {
+ {
+ CanonPath p1("a//foo/bar//");
+ CanonPath p2("xyzzy/bla");
+ ASSERT_EQ((p1 + p2).abs(), "/a/foo/bar/xyzzy/bla");
+ }
+
+ {
+ CanonPath p1("/");
+ CanonPath p2("/a/b");
+ ASSERT_EQ((p1 + p2).abs(), "/a/b");
+ }
+
+ {
+ CanonPath p1("/a/b");
+ CanonPath p2("/");
+ ASSERT_EQ((p1 + p2).abs(), "/a/b");
+ }
+
+ {
+ CanonPath p("/foo/bar");
+ ASSERT_EQ((p + "x").abs(), "/foo/bar/x");
+ }
+
+ {
+ CanonPath p("/");
+ ASSERT_EQ((p + "foo" + "bar").abs(), "/foo/bar");
+ }
+ }
+
+ TEST(CanonPath, within) {
+ {
+ ASSERT_TRUE(CanonPath("foo").isWithin(CanonPath("foo")));
+ ASSERT_FALSE(CanonPath("foo").isWithin(CanonPath("bar")));
+ ASSERT_FALSE(CanonPath("foo").isWithin(CanonPath("fo")));
+ ASSERT_TRUE(CanonPath("foo/bar").isWithin(CanonPath("foo")));
+ ASSERT_FALSE(CanonPath("foo").isWithin(CanonPath("foo/bar")));
+ ASSERT_TRUE(CanonPath("/foo/bar/default.nix").isWithin(CanonPath("/")));
+ ASSERT_TRUE(CanonPath("/").isWithin(CanonPath("/")));
+ }
+ }
+
+ TEST(CanonPath, sort) {
+ ASSERT_FALSE(CanonPath("foo") < CanonPath("foo"));
+ ASSERT_TRUE (CanonPath("foo") < CanonPath("foo/bar"));
+ ASSERT_TRUE (CanonPath("foo/bar") < CanonPath("foo!"));
+ ASSERT_FALSE(CanonPath("foo!") < CanonPath("foo"));
+ ASSERT_TRUE (CanonPath("foo") < CanonPath("foo!"));
+ }
+
+ TEST(CanonPath, allowed) {
+ {
+ std::set<CanonPath> allowed {
+ CanonPath("foo/bar"),
+ CanonPath("foo!"),
+ CanonPath("xyzzy"),
+ CanonPath("a/b/c"),
+ };
+
+ ASSERT_TRUE (CanonPath("foo/bar").isAllowed(allowed));
+ ASSERT_TRUE (CanonPath("foo/bar/bla").isAllowed(allowed));
+ ASSERT_TRUE (CanonPath("foo").isAllowed(allowed));
+ ASSERT_FALSE(CanonPath("bar").isAllowed(allowed));
+ ASSERT_FALSE(CanonPath("bar/a").isAllowed(allowed));
+ ASSERT_TRUE (CanonPath("a").isAllowed(allowed));
+ ASSERT_TRUE (CanonPath("a/b").isAllowed(allowed));
+ ASSERT_TRUE (CanonPath("a/b/c").isAllowed(allowed));
+ ASSERT_TRUE (CanonPath("a/b/c/d").isAllowed(allowed));
+ ASSERT_TRUE (CanonPath("a/b/c/d/e").isAllowed(allowed));
+ ASSERT_FALSE(CanonPath("a/b/a").isAllowed(allowed));
+ ASSERT_FALSE(CanonPath("a/b/d").isAllowed(allowed));
+ ASSERT_FALSE(CanonPath("aaa").isAllowed(allowed));
+ ASSERT_FALSE(CanonPath("zzz").isAllowed(allowed));
+ ASSERT_TRUE (CanonPath("/").isAllowed(allowed));
+ }
+ }
+}
diff --git a/src/libutil/util.cc b/src/libutil/util.cc
index 4f2caaa40..993dc1cb6 100644
--- a/src/libutil/util.cc
+++ b/src/libutil/util.cc
@@ -1594,6 +1594,21 @@ std::string stripIndentation(std::string_view s)
}
+std::pair<std::string_view, std::string_view> getLine(std::string_view s)
+{
+ auto newline = s.find('\n');
+
+ if (newline == s.npos) {
+ return {s, ""};
+ } else {
+ auto line = s.substr(0, newline);
+ if (!line.empty() && line[line.size() - 1] == '\r')
+ line = line.substr(0, line.size() - 1);
+ return {line, s.substr(newline + 1)};
+ }
+}
+
+
//////////////////////////////////////////////////////////////////////
static Sync<std::pair<unsigned short, unsigned short>> windowSize{{0, 0}};
diff --git a/src/libutil/util.hh b/src/libutil/util.hh
index 94d8cc555..9b149de80 100644
--- a/src/libutil/util.hh
+++ b/src/libutil/util.hh
@@ -510,6 +510,18 @@ std::optional<N> string2Float(const std::string_view s)
}
+/* Convert a little-endian integer to host order. */
+template<typename T>
+T readLittleEndian(unsigned char * p)
+{
+ T x = 0;
+ for (size_t i = 0; i < sizeof(x); ++i, ++p) {
+ x |= ((T) *p) << (i * 8);
+ }
+ return x;
+}
+
+
/* Return true iff `s' starts with `prefix'. */
bool hasPrefix(std::string_view s, std::string_view prefix);
@@ -563,6 +575,12 @@ std::string base64Decode(std::string_view s);
std::string stripIndentation(std::string_view s);
+/* Get the prefix of 's' up to and excluding the next line break (LF
+ optionally preceded by CR), and the remainder following the line
+ break. */
+std::pair<std::string_view, std::string_view> getLine(std::string_view s);
+
+
/* Get a value for the specified key from an associative container. */
template <class T>
const typename T::mapped_type * get(const T & map, const typename T::key_type & key)
@@ -737,4 +755,13 @@ inline std::string operator + (std::string && s, std::string_view s2)
return std::move(s);
}
+inline std::string operator + (std::string_view s1, const char * s2)
+{
+ std::string s;
+ s.reserve(s1.size() + strlen(s2));
+ s.append(s1);
+ s.append(s2);
+ return s;
+}
+
}
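A quick self-contained sketch (values are mine) of the two new helpers: `readLittleEndian` is what the `readNum` simplification in `serialise.hh` builds on, and `getLine` tolerates a trailing CR as described above:

```cpp
#include "util.hh"

#include <cassert>
#include <cstdint>

int main()
{
    using namespace nix;

    // 0x0102030405060708 stored in little-endian byte order.
    unsigned char buf[8] = { 0x08, 0x07, 0x06, 0x05, 0x04, 0x03, 0x02, 0x01 };
    assert(readLittleEndian<uint64_t>(buf) == 0x0102030405060708ULL);

    // The CR before the LF is stripped; the remainder starts after the LF.
    auto [line, rest] = getLine("first\r\nsecond\n");
    assert(line == "first");
    assert(rest == "second\n");
}
```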
diff --git a/src/nix-env/nix-env.cc b/src/nix-env/nix-env.cc
index 776c5f6db..31823a966 100644
--- a/src/nix-env/nix-env.cc
+++ b/src/nix-env/nix-env.cc
@@ -647,7 +647,7 @@ static void upgradeDerivations(Globals & globals,
} else newElems.push_back(i);
} catch (Error & e) {
- e.addTrace(std::nullopt, "while trying to find an upgrade for '%s'", i.queryName());
+ e.addTrace(nullptr, "while trying to find an upgrade for '%s'", i.queryName());
throw;
}
}
@@ -958,7 +958,7 @@ static void queryJSON(Globals & globals, std::vector<DrvInfo> & elems, bool prin
} catch (AssertionError & e) {
printMsg(lvlTalkative, "skipping derivation named '%1%' which gives an assertion failure", i.queryName());
} catch (Error & e) {
- e.addTrace(std::nullopt, "while querying the derivation named '%1%'", i.queryName());
+ e.addTrace(nullptr, "while querying the derivation named '%1%'", i.queryName());
throw;
}
}
@@ -1262,7 +1262,7 @@ static void opQuery(Globals & globals, Strings opFlags, Strings opArgs)
} catch (AssertionError & e) {
printMsg(lvlTalkative, "skipping derivation named '%1%' which gives an assertion failure", i.queryName());
} catch (Error & e) {
- e.addTrace(std::nullopt, "while querying the derivation named '%1%'", i.queryName());
+ e.addTrace(nullptr, "while querying the derivation named '%1%'", i.queryName());
throw;
}
}
diff --git a/src/nix-store/nix-store.cc b/src/nix-store/nix-store.cc
index b59a6d026..3bbefedbe 100644
--- a/src/nix-store/nix-store.cc
+++ b/src/nix-store/nix-store.cc
@@ -808,14 +808,23 @@ static void opServe(Strings opFlags, Strings opArgs)
if (GET_PROTOCOL_MINOR(clientVersion) >= 2)
settings.maxLogSize = readNum<unsigned long>(in);
if (GET_PROTOCOL_MINOR(clientVersion) >= 3) {
- settings.buildRepeat = readInt(in);
- settings.enforceDeterminism = readInt(in);
+ auto nrRepeats = readInt(in);
+ if (nrRepeats != 0) {
+ throw Error("client requested repeating builds, but this is not currently implemented");
+ }
+            // Ignore 'enforceDeterminism'. It used to be true by
+            // default, but it only ever had an effect when
+            // `nrRepeats > 0`. We have already asserted that
+            // `nrRepeats` is in fact 0, so ignoring it does not
+            // change what the client asked for.
+ readInt(in);
+
settings.runDiffHook = true;
}
if (GET_PROTOCOL_MINOR(clientVersion) >= 7) {
settings.keepFailed = (bool) readInt(in);
}
- settings.printRepeatedBuilds = false;
};
while (true) {
@@ -926,7 +935,6 @@ static void opServe(Strings opFlags, Strings opArgs)
worker_proto::write(*store, out, status.builtOutputs);
}
-
break;
}
diff --git a/src/nix/daemon.cc b/src/nix/daemon.cc
index 940923d3b..c527fdb0a 100644
--- a/src/nix/daemon.cc
+++ b/src/nix/daemon.cc
@@ -257,7 +257,7 @@ static void daemonLoop()
} catch (Interrupted & e) {
return;
} catch (Error & error) {
- ErrorInfo ei = error.info();
+ auto ei = error.info();
// FIXME: add to trace?
ei.msg = hintfmt("error processing connection: %1%", ei.msg.str());
logError(ei);
diff --git a/src/nix/develop.cc b/src/nix/develop.cc
index 4de109754..6c3a9c6c6 100644
--- a/src/nix/develop.cc
+++ b/src/nix/develop.cc
@@ -192,10 +192,12 @@ static StorePath getDerivationEnvironment(ref<Store> store, ref<Store> evalStore
drv.env.erase("allowedRequisites");
drv.env.erase("disallowedReferences");
drv.env.erase("disallowedRequisites");
+ drv.env.erase("name");
/* Rehash and write the derivation. FIXME: would be nice to use
'buildDerivation', but that's privileged. */
drv.name += "-env";
+ drv.env.emplace("name", drv.name);
drv.inputSrcs.insert(std::move(getEnvShPath));
if (settings.isExperimentalFeatureEnabled(Xp::CaDerivations)) {
for (auto & output : drv.outputs) {
diff --git a/src/nix/flake-update.md b/src/nix/flake-update.md
index 2ee8a707d..8c6042d94 100644
--- a/src/nix/flake-update.md
+++ b/src/nix/flake-update.md
@@ -16,7 +16,7 @@ R""(
# Description
This command recreates the lock file of a flake (`flake.lock`), thus
-updating the lock for every mutable input (like `nixpkgs`) to its
+updating the lock for every unlocked input (like `nixpkgs`) to its
current version. This is equivalent to passing `--recreate-lock-file`
to any command that operates on a flake. That is,
diff --git a/src/nix/flake.cc b/src/nix/flake.cc
index 336f6723a..96f035117 100644
--- a/src/nix/flake.cc
+++ b/src/nix/flake.cc
@@ -215,7 +215,7 @@ struct CmdFlakeMetadata : FlakeCommand, MixJSON
if (!lockedFlake.lockFile.root->inputs.empty())
logger->cout(ANSI_BOLD "Inputs:" ANSI_NORMAL);
- std::unordered_set<std::shared_ptr<Node>> visited;
+ std::set<ref<Node>> visited;
std::function<void(const Node & node, const std::string & prefix)> recurse;
@@ -227,7 +227,7 @@ struct CmdFlakeMetadata : FlakeCommand, MixJSON
if (auto lockedNode = std::get_if<0>(&input.second)) {
logger->cout("%s" ANSI_BOLD "%s" ANSI_NORMAL ": %s",
prefix + (last ? treeLast : treeConn), input.first,
- *lockedNode ? (*lockedNode)->lockedRef : flake.lockedRef);
+ (*lockedNode)->lockedRef);
bool firstVisit = visited.insert(*lockedNode).second;
diff --git a/src/nix/flake.md b/src/nix/flake.md
index a1ab43281..810e9ebea 100644
--- a/src/nix/flake.md
+++ b/src/nix/flake.md
@@ -18,51 +18,56 @@ values such as packages or NixOS modules provided by the flake).
Flake references (*flakerefs*) are a way to specify the location of a
flake. These have two different forms:
-* An attribute set representation, e.g.
- ```nix
- {
- type = "github";
- owner = "NixOS";
- repo = "nixpkgs";
- }
- ```
+## Attribute set representation
- The only required attribute is `type`. The supported types are
- listed below.
+Example:
-* A URL-like syntax, e.g.
+```nix
+{
+ type = "github";
+ owner = "NixOS";
+ repo = "nixpkgs";
+}
+```
- ```
- github:NixOS/nixpkgs
- ```
+The only required attribute is `type`. The supported types are
+listed below.
- These are used on the command line as a more convenient alternative
- to the attribute set representation. For instance, in the command
+## URL-like syntax
- ```console
- # nix build github:NixOS/nixpkgs#hello
- ```
+Example:
- `github:NixOS/nixpkgs` is a flake reference (while `hello` is an
- output attribute). They are also allowed in the `inputs` attribute
- of a flake, e.g.
+```
+github:NixOS/nixpkgs
+```
- ```nix
- inputs.nixpkgs.url = github:NixOS/nixpkgs;
- ```
+These are used on the command line as a more convenient alternative
+to the attribute set representation. For instance, in the command
- is equivalent to
+```console
+# nix build github:NixOS/nixpkgs#hello
+```
- ```nix
- inputs.nixpkgs = {
- type = "github";
- owner = "NixOS";
- repo = "nixpkgs";
- };
- ```
+`github:NixOS/nixpkgs` is a flake reference (while `hello` is an
+output attribute). They are also allowed in the `inputs` attribute
+of a flake, e.g.
+
+```nix
+inputs.nixpkgs.url = github:NixOS/nixpkgs;
+```
+
+is equivalent to
+
+```nix
+inputs.nixpkgs = {
+ type = "github";
+ owner = "NixOS";
+ repo = "nixpkgs";
+};
+```
-## Examples
+### Examples
Here are some examples of flake references in their URL-like representation:
diff --git a/src/nix/nix.md b/src/nix/nix.md
index d48682a94..723d3c87e 100644
--- a/src/nix/nix.md
+++ b/src/nix/nix.md
@@ -164,6 +164,13 @@ operate are determined as follows:
```
+   and likewise, using the store path of a "drv" file to specify the derivation:
+
+ ```console
+ # nix build '/nix/store/gzaflydcr6sb3567hap9q6srzx8ggdgg-glibc-2.33-78.drv^dev,static'
+ …
+ ```
+
* You can also specify that *all* outputs should be used using the
syntax *installable*`^*`. For example, the following shows the size
of all outputs of the `glibc` package in the binary cache:
@@ -177,6 +184,12 @@ operate are determined as follows:
/nix/store/q6580lr01jpcsqs4r5arlh4ki2c1m9rv-glibc-2.33-123-dev 44200560
```
+   and likewise, using the store path of a "drv" file to specify the derivation:
+
+ ```console
+ # nix path-info -S '/nix/store/gzaflydcr6sb3567hap9q6srzx8ggdgg-glibc-2.33-78.drv^*'
+ …
+ ```
* If you didn't specify the desired outputs, but the derivation has an
attribute `meta.outputsToInstall`, Nix will use those outputs. For
example, since the package `nixpkgs#libxml2` has this attribute:
@@ -189,6 +202,9 @@ operate are determined as follows:
a command like `nix shell nixpkgs#libxml2` will provide only those
two outputs by default.
+  Note that a store derivation (given by the store path of a `.drv` file)
+  doesn't have any attributes like `meta`, so this case doesn't apply to it.
+
* Otherwise, Nix will use all outputs of the derivation.
# Nix stores
diff --git a/src/nix/profile-list.md b/src/nix/profile-list.md
index bdab9a208..fa786162f 100644
--- a/src/nix/profile-list.md
+++ b/src/nix/profile-list.md
@@ -20,11 +20,11 @@ following fields:
* An integer that can be used to unambiguously identify the package in
invocations of `nix profile remove` and `nix profile upgrade`.
-* The original ("mutable") flake reference and output attribute path
+* The original ("unlocked") flake reference and output attribute path
used at installation time.
-* The immutable flake reference to which the mutable flake reference
- was resolved.
+* The locked flake reference to which the unlocked flake reference was
+ resolved.
* The store path(s) of the package.
diff --git a/src/nix/profile-upgrade.md b/src/nix/profile-upgrade.md
index e06e74abe..39cca428b 100644
--- a/src/nix/profile-upgrade.md
+++ b/src/nix/profile-upgrade.md
@@ -2,7 +2,7 @@ R""(
# Examples
-* Upgrade all packages that were installed using a mutable flake
+* Upgrade all packages that were installed using an unlocked flake
reference:
```console
@@ -32,9 +32,9 @@ the package was installed.
> **Warning**
>
-> This only works if you used a *mutable* flake reference at
+> This only works if you used an *unlocked* flake reference at
> installation time, e.g. `nixpkgs#hello`. It does not work if you
-> used an *immutable* flake reference
+> used a *locked* flake reference
> (e.g. `github:NixOS/nixpkgs/13d0c311e3ae923a00f734b43fd1d35b47d8943a#hello`),
> since in that case the "latest version" is always the same.
diff --git a/src/nix/profile.md b/src/nix/profile.md
index be3c5ba1a..273e02280 100644
--- a/src/nix/profile.md
+++ b/src/nix/profile.md
@@ -88,8 +88,7 @@ has the following fields:
the user at the time of installation (e.g. `nixpkgs`). This is also
the flake reference that will be used by `nix profile upgrade`.
-* `uri`: The immutable flake reference to which `originalUrl`
- resolved.
+* `uri`: The locked flake reference to which `originalUrl` resolved.
* `attrPath`: The flake output attribute that provided this
package. Note that this is not necessarily the attribute that the
diff --git a/src/nix/registry.cc b/src/nix/registry.cc
index c496f94f8..b5bdfba95 100644
--- a/src/nix/registry.cc
+++ b/src/nix/registry.cc
@@ -183,14 +183,12 @@ struct CmdRegistryPin : RegistryCommand, EvalCommand
void run(nix::ref<nix::Store> store) override
{
- if (locked.empty()) {
- locked = url;
- }
+ if (locked.empty()) locked = url;
auto registry = getRegistry();
auto ref = parseFlakeRef(url);
- auto locked_ref = parseFlakeRef(locked);
+ auto lockedRef = parseFlakeRef(locked);
registry->remove(ref.input);
- auto [tree, resolved] = locked_ref.resolve(store).input.fetch(store);
+ auto [tree, resolved] = lockedRef.resolve(store).input.fetch(store);
fetchers::Attrs extraAttrs;
if (ref.subdir != "") extraAttrs["dir"] = ref.subdir;
registry->add(ref.input, resolved, extraAttrs);
diff --git a/tests/build.sh b/tests/build.sh
index c7db039b4..036fb037e 100644
--- a/tests/build.sh
+++ b/tests/build.sh
@@ -8,13 +8,15 @@ set -o pipefail
nix build -f multiple-outputs.nix --json a b --no-link | jq --exit-status '
(.[0] |
(.drvPath | match(".*multiple-outputs-a.drv")) and
- (.outputs | keys | length == 2) and
- (.outputs.first | match(".*multiple-outputs-a-first")) and
- (.outputs.second | match(".*multiple-outputs-a-second")))
+ (.outputs |
+ (keys | length == 2) and
+ (.first | match(".*multiple-outputs-a-first")) and
+ (.second | match(".*multiple-outputs-a-second"))))
and (.[1] |
(.drvPath | match(".*multiple-outputs-b.drv")) and
- (.outputs | keys | length == 1) and
- (.outputs.out | match(".*multiple-outputs-b")))
+ (.outputs |
+ (keys | length == 1) and
+ (.out | match(".*multiple-outputs-b"))))
'
# Test output selection using the '^' syntax.
@@ -56,6 +58,48 @@ nix build -f multiple-outputs.nix --json 'e^*' --no-link | jq --exit-status '
(.outputs | keys == ["a", "b", "c"]))
'
+# Test building from a raw .drv store path rather than from a Nix expression.
+
+drv=$(nix eval -f multiple-outputs.nix --raw a.drvPath)
+if nix build "$drv^not-an-output" --no-link --json; then
+ fail "'not-an-output' should fail to build"
+fi
+
+if nix build "$drv^" --no-link --json; then
+ fail "'empty outputs list' should fail to build"
+fi
+
+if nix build "$drv^*nope" --no-link --json; then
+ fail "'* must be entire string' should fail to build"
+fi
+
+nix build "$drv^first" --no-link --json | jq --exit-status '
+ (.[0] |
+ (.drvPath | match(".*multiple-outputs-a.drv")) and
+ (.outputs |
+ (keys | length == 1) and
+ (.first | match(".*multiple-outputs-a-first")) and
+ (has("second") | not)))
+'
+
+nix build "$drv^first,second" --no-link --json | jq --exit-status '
+ (.[0] |
+ (.drvPath | match(".*multiple-outputs-a.drv")) and
+ (.outputs |
+ (keys | length == 2) and
+ (.first | match(".*multiple-outputs-a-first")) and
+ (.second | match(".*multiple-outputs-a-second"))))
+'
+
+nix build "$drv^*" --no-link --json | jq --exit-status '
+ (.[0] |
+ (.drvPath | match(".*multiple-outputs-a.drv")) and
+ (.outputs |
+ (keys | length == 2) and
+ (.first | match(".*multiple-outputs-a-first")) and
+ (.second | match(".*multiple-outputs-a-second"))))
+'
+
# Make sure that `--impure` works (regression test for https://github.com/NixOS/nix/issues/6488)
nix build --impure -f multiple-outputs.nix --json e --no-link | jq --exit-status '
(.[0] |
diff --git a/tests/check.nix b/tests/check.nix
index ed91ff845..ddab8eea9 100644
--- a/tests/check.nix
+++ b/tests/check.nix
@@ -44,7 +44,7 @@ with import ./config.nix;
};
hashmismatch = import <nix/fetchurl.nix> {
- url = "file://" + builtins.getEnv "TMPDIR" + "/dummy";
+ url = "file://" + builtins.getEnv "TEST_ROOT" + "/dummy";
sha256 = "0mdqa9w1p6cmli6976v4wi0sw9r4p5prkj7lzfd1877wk11c9c73";
};
diff --git a/tests/check.sh b/tests/check.sh
index 495202781..e77c0405d 100644
--- a/tests/check.sh
+++ b/tests/check.sh
@@ -40,14 +40,6 @@ nix-build check.nix -A deterministic --argstr checkBuildId $checkBuildId \
if grep -q 'may not be deterministic' $TEST_ROOT/log; then false; fi
checkBuildTempDirRemoved $TEST_ROOT/log
-nix build -f check.nix deterministic --rebuild --repeat 1 \
- --argstr checkBuildId $checkBuildId --keep-failed --no-link \
- 2> $TEST_ROOT/log
-if grep -q 'checking is not possible' $TEST_ROOT/log; then false; fi
-# Repeat is set to 1, ie. nix should build deterministic twice.
-if [ "$(grep "checking outputs" $TEST_ROOT/log | wc -l)" -ne 2 ]; then false; fi
-checkBuildTempDirRemoved $TEST_ROOT/log
-
nix-build check.nix -A nondeterministic --argstr checkBuildId $checkBuildId \
--no-out-link 2> $TEST_ROOT/log
checkBuildTempDirRemoved $TEST_ROOT/log
@@ -58,12 +50,6 @@ grep 'may not be deterministic' $TEST_ROOT/log
[ "$status" = "104" ]
checkBuildTempDirRemoved $TEST_ROOT/log
-nix build -f check.nix nondeterministic --rebuild --repeat 1 \
- --argstr checkBuildId $checkBuildId --keep-failed --no-link \
- 2> $TEST_ROOT/log || status=$?
-grep 'may not be deterministic' $TEST_ROOT/log
-checkBuildTempDirRemoved $TEST_ROOT/log
-
nix-build check.nix -A nondeterministic --argstr checkBuildId $checkBuildId \
--no-out-link --check --keep-failed 2> $TEST_ROOT/log || status=$?
grep 'may not be deterministic' $TEST_ROOT/log
@@ -72,12 +58,6 @@ if checkBuildTempDirRemoved $TEST_ROOT/log; then false; fi
clearStore
-nix-build dependencies.nix --no-out-link --repeat 3
-
-nix-build check.nix -A nondeterministic --no-out-link --repeat 1 2> $TEST_ROOT/log || status=$?
-[ "$status" = "1" ]
-grep 'differs from previous round' $TEST_ROOT/log
-
path=$(nix-build check.nix -A fetchurl --no-out-link)
chmod +w $path
@@ -91,13 +71,13 @@ nix-build check.nix -A fetchurl --no-out-link --check
nix-build check.nix -A fetchurl --no-out-link --repair
[[ $(cat $path) != foo ]]
-echo 'Hello World' > $TMPDIR/dummy
+echo 'Hello World' > $TEST_ROOT/dummy
nix-build check.nix -A hashmismatch --no-out-link || status=$?
[ "$status" = "102" ]
-echo -n > $TMPDIR/dummy
+echo -n > $TEST_ROOT/dummy
nix-build check.nix -A hashmismatch --no-out-link
-echo 'Hello World' > $TMPDIR/dummy
+echo 'Hello World' > $TEST_ROOT/dummy
nix-build check.nix -A hashmismatch --no-out-link --check || status=$?
[ "$status" = "102" ]
diff --git a/tests/eval.sh b/tests/eval.sh
index d74976019..ffae08a6a 100644
--- a/tests/eval.sh
+++ b/tests/eval.sh
@@ -29,3 +29,7 @@ nix-instantiate --eval -E 'assert 1 + 2 == 3; true'
[[ $(nix-instantiate -A attr --eval "./eval.nix") == '{ foo = "bar"; }' ]]
[[ $(nix-instantiate -A attr --eval --json "./eval.nix") == '{"foo":"bar"}' ]]
[[ $(nix-instantiate -A int --eval - < "./eval.nix") == 123 ]]
+
+# Check that symlink cycles don't cause a hang.
+ln -sfn cycle.nix $TEST_ROOT/cycle.nix
+(! nix eval --file $TEST_ROOT/cycle.nix)
diff --git a/tests/fetchGit.sh b/tests/fetchGit.sh
index 4ceba0293..da09c3f37 100644
--- a/tests/fetchGit.sh
+++ b/tests/fetchGit.sh
@@ -122,6 +122,7 @@ git -C $repo commit -m 'Bla3' -a
path4=$(nix eval --impure --refresh --raw --expr "(builtins.fetchGit file://$repo).outPath")
[[ $path2 = $path4 ]]
+status=0
nix eval --impure --raw --expr "(builtins.fetchGit { url = $repo; rev = \"$rev2\"; narHash = \"sha256-B5yIPHhEm0eysJKEsO7nqxprh9vcblFxpJG11gXJus1=\"; }).outPath" || status=$?
[[ "$status" = "102" ]]
diff --git a/tests/flakes/absolute-paths.sh b/tests/flakes/absolute-paths.sh
new file mode 100644
index 000000000..e7bfba12d
--- /dev/null
+++ b/tests/flakes/absolute-paths.sh
@@ -0,0 +1,17 @@
+source ./common.sh
+
+requireGit
+
+flake1Dir=$TEST_ROOT/flake1
+flake2Dir=$TEST_ROOT/flake2
+
+createGitRepo $flake1Dir
+cat > $flake1Dir/flake.nix <<EOF
+{
+ outputs = { self }: { x = builtins.readFile $(pwd)/absolute-paths.sh; };
+}
+EOF
+git -C $flake1Dir add flake.nix
+git -C $flake1Dir commit -m Initial
+
+nix eval --impure --json $flake1Dir#x
diff --git a/tests/flakes/flakes.sh b/tests/flakes/flakes.sh
index 267e2cd6f..07f1e6698 100644
--- a/tests/flakes/flakes.sh
+++ b/tests/flakes/flakes.sh
@@ -53,7 +53,11 @@ cat > $flake3Dir/flake.nix <<EOF
}
EOF
-git -C $flake3Dir add flake.nix
+cat > $flake3Dir/default.nix <<EOF
+{ x = 123; }
+EOF
+
+git -C $flake3Dir add flake.nix default.nix
git -C $flake3Dir commit -m 'Initial'
cat > $nonFlakeDir/README.md <<EOF
@@ -70,8 +74,10 @@ nix registry add --registry $registry flake3 git+file://$flake3Dir
nix registry add --registry $registry flake4 flake3
nix registry add --registry $registry nixpkgs flake1
-# Test 'nix flake list'.
+# Test 'nix registry list'.
[[ $(nix registry list | wc -l) == 5 ]]
+nix registry list | grep -q '^global'
+nix registry list | grep -q -v '^user' # nothing in user registry
# Test 'nix flake metadata'.
nix flake metadata flake1
@@ -109,11 +115,12 @@ nix build -o $TEST_ROOT/result git+file://$flake1Dir
nix build -o $flake1Dir/result git+file://$flake1Dir
nix path-info $flake1Dir/result
-# 'getFlake' on a mutable flakeref should fail in pure mode, but succeed in impure mode.
+# 'getFlake' on an unlocked flakeref should fail in pure mode, but
+# succeed in impure mode.
(! nix build -o $TEST_ROOT/result --expr "(builtins.getFlake \"$flake1Dir\").packages.$system.default")
nix build -o $TEST_ROOT/result --expr "(builtins.getFlake \"$flake1Dir\").packages.$system.default" --impure
-# 'getFlake' on an immutable flakeref should succeed even in pure mode.
+# 'getFlake' on a locked flakeref should succeed even in pure mode.
nix build -o $TEST_ROOT/result --expr "(builtins.getFlake \"git+file://$flake1Dir?rev=$hash2\").packages.$system.default"
# Building a flake with an unlocked dependency should fail in pure mode.
@@ -335,6 +342,16 @@ nix registry pin flake1 flake3
nix registry remove flake1
[[ $(nix registry list | wc -l) == 5 ]]
+# Test 'nix registry list' with a disabled global registry.
+nix registry add user-flake1 git+file://$flake1Dir
+nix registry add user-flake2 git+file://$flake2Dir
+[[ $(nix --flake-registry "" registry list | wc -l) == 2 ]]
+nix --flake-registry "" registry list | grep -q -v '^global' # nothing in global registry
+nix --flake-registry "" registry list | grep -q '^user'
+nix registry remove user-flake1
+nix registry remove user-flake2
+[[ $(nix registry list | wc -l) == 5 ]]
+
# Test 'nix flake clone'.
rm -rf $TEST_ROOT/flake1-v2
nix flake clone flake1 --dest $TEST_ROOT/flake1-v2
@@ -460,7 +477,7 @@ nix flake lock $flake3Dir --update-input flake2/flake1
# Test 'nix flake metadata --json'.
nix flake metadata $flake3Dir --json | jq .
-# Test flake in store does not evaluate
+# Test flake in store does not evaluate.
rm -rf $badFlakeDir
mkdir $badFlakeDir
echo INVALID > $badFlakeDir/flake.nix
@@ -468,3 +485,9 @@ nix store delete $(nix store add-path $badFlakeDir)
[[ $(nix path-info $(nix store add-path $flake1Dir)) =~ flake1 ]]
[[ $(nix path-info path:$(nix store add-path $flake1Dir)) =~ simple ]]
+
+# Test fetching flakerefs in the legacy CLI.
+[[ $(nix-instantiate --eval flake:flake3 -A x) = 123 ]]
+[[ $(nix-instantiate --eval flake:git+file://$flake3Dir -A x) = 123 ]]
+[[ $(nix-instantiate -I flake3=flake:flake3 --eval '<flake3>' -A x) = 123 ]]
+[[ $(NIX_PATH=flake3=flake:flake3 nix-instantiate --eval '<flake3>' -A x) = 123 ]]
diff --git a/tests/flakes/unlocked-override.sh b/tests/flakes/unlocked-override.sh
new file mode 100644
index 000000000..8abc8b7d3
--- /dev/null
+++ b/tests/flakes/unlocked-override.sh
@@ -0,0 +1,30 @@
+source ./common.sh
+
+requireGit
+
+flake1Dir=$TEST_ROOT/flake1
+flake2Dir=$TEST_ROOT/flake2
+
+createGitRepo $flake1Dir
+cat > $flake1Dir/flake.nix <<EOF
+{
+ outputs = { self }: { x = import ./x.nix; };
+}
+EOF
+echo 123 > $flake1Dir/x.nix
+git -C $flake1Dir add flake.nix x.nix
+git -C $flake1Dir commit -m Initial
+
+createGitRepo $flake2Dir
+cat > $flake2Dir/flake.nix <<EOF
+{
+ outputs = { self, flake1 }: { x = flake1.x; };
+}
+EOF
+git -C $flake2Dir add flake.nix
+
+[[ $(nix eval --json $flake2Dir#x --override-input flake1 $TEST_ROOT/flake1) = 123 ]]
+
+echo 456 > $flake1Dir/x.nix
+
+[[ $(nix eval --json $flake2Dir#x --override-input flake1 $TEST_ROOT/flake1) = 456 ]]
diff --git a/tests/function-trace.sh b/tests/function-trace.sh
index 0b7f49d82..b0d6c9d59 100755
--- a/tests/function-trace.sh
+++ b/tests/function-trace.sh
@@ -11,7 +11,7 @@ expect_trace() {
--expr "$expr" 2>&1 \
| grep "function-trace" \
| sed -e 's/ [0-9]*$//'
- );
+ )
echo -n "Tracing expression '$expr'"
set +e
@@ -32,40 +32,40 @@ expect_trace() {
# failure inside a tryEval
expect_trace 'builtins.tryEval (throw "example")' "
-function-trace entered (string):1:1 at
-function-trace entered (string):1:19 at
-function-trace exited (string):1:19 at
-function-trace exited (string):1:1 at
+function-trace entered «string»:1:1 at
+function-trace entered «string»:1:19 at
+function-trace exited «string»:1:19 at
+function-trace exited «string»:1:1 at
"
# Missing argument to a formal function
expect_trace '({ x }: x) { }' "
-function-trace entered (string):1:1 at
-function-trace exited (string):1:1 at
+function-trace entered «string»:1:1 at
+function-trace exited «string»:1:1 at
"
# Too many arguments to a formal function
expect_trace '({ x }: x) { x = "x"; y = "y"; }' "
-function-trace entered (string):1:1 at
-function-trace exited (string):1:1 at
+function-trace entered «string»:1:1 at
+function-trace exited «string»:1:1 at
"
# Not enough arguments to a lambda
expect_trace '(x: y: x + y) 1' "
-function-trace entered (string):1:1 at
-function-trace exited (string):1:1 at
+function-trace entered «string»:1:1 at
+function-trace exited «string»:1:1 at
"
# Too many arguments to a lambda
expect_trace '(x: x) 1 2' "
-function-trace entered (string):1:1 at
-function-trace exited (string):1:1 at
+function-trace entered «string»:1:1 at
+function-trace exited «string»:1:1 at
"
# Not a function
expect_trace '1 2' "
-function-trace entered (string):1:1 at
-function-trace exited (string):1:1 at
+function-trace entered «string»:1:1 at
+function-trace exited «string»:1:1 at
"
set -e
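
To see the new «string» position format outside the test harness, something like the following should work (a sketch; the trailing positions are elided because they vary per run):

    nix-instantiate --eval --option trace-function-calls true \
      --expr 'builtins.tryEval (throw "example")' 2>&1 | grep function-trace
    # function-trace entered «string»:1:1 at ...
    # function-trace exited «string»:1:1 at ...
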
diff --git a/tests/impure-derivations.sh b/tests/impure-derivations.sh
index 7ca9ce742..23a193833 100644
--- a/tests/impure-derivations.sh
+++ b/tests/impure-derivations.sh
@@ -12,6 +12,7 @@ clearStore
# Basic test of impure derivations: building one a second time should not use the previous result.
printf 0 > $TEST_ROOT/counter
+nix build --dry-run --json --file ./impure-derivations.nix impure.all
json=$(nix build -L --no-link --json --file ./impure-derivations.nix impure.all)
path1=$(echo $json | jq -r .[].outputs.out)
path1_stuff=$(echo $json | jq -r .[].outputs.stuff)
diff --git a/tests/lang.sh b/tests/lang.sh
index c0b0fc58c..95e795e2e 100644
--- a/tests/lang.sh
+++ b/tests/lang.sh
@@ -2,6 +2,7 @@ source common.sh
export TEST_VAR=foo # for eval-okay-getenv.nix
export NIX_REMOTE=dummy://
+export NIX_STORE_DIR=/nix/store
nix-instantiate --eval -E 'builtins.trace "Hello" 123' 2>&1 | grep -q Hello
nix-instantiate --eval -E 'builtins.addErrorContext "Hello" 123' 2>&1
@@ -50,10 +51,10 @@ for i in lang/eval-okay-*.nix; do
if test -e lang/$i.flags; then
flags=$(cat lang/$i.flags)
fi
- if ! expect 0 env NIX_PATH=lang/dir3:lang/dir4 nix-instantiate $flags --eval --strict lang/$i.nix > lang/$i.out; then
+ if ! expect 0 env NIX_PATH=lang/dir3:lang/dir4 HOME=/fake-home nix-instantiate $flags --eval --strict lang/$i.nix > lang/$i.out; then
echo "FAIL: $i should evaluate"
fail=1
- elif ! diff lang/$i.out lang/$i.exp; then
+ elif ! diff <(< lang/$i.out sed -e "s|$(pwd)|/pwd|g") lang/$i.exp; then
echo "FAIL: evaluation result of $i not as expected"
fail=1
fi
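
The sed filter added above makes the recorded outputs location-independent: any occurrence of the working directory in an evaluation result is rewritten to the literal /pwd (and HOME=/fake-home pins ~ expansion), so .exp files containing path values do not depend on where the tests are checked out. A minimal sketch of the same normalisation, with an illustrative expression:

    nix-instantiate --eval --expr 'builtins.toString ./foo' | sed -e "s|$(pwd)|/pwd|g"
    # "/pwd/foo"   -- the absolute path literal is rewritten to a stable prefix
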
diff --git a/tests/lang/eval-okay-closure.exp b/tests/lang/eval-okay-closure.exp
new file mode 100644
index 000000000..e7dbf9781
--- /dev/null
+++ b/tests/lang/eval-okay-closure.exp
@@ -0,0 +1 @@
+[ { foo = true; key = -13; } { foo = true; key = -12; } { foo = true; key = -11; } { foo = true; key = -9; } { foo = true; key = -8; } { foo = true; key = -7; } { foo = true; key = -5; } { foo = true; key = -4; } { foo = true; key = -3; } { key = -1; } { foo = true; key = 0; } { foo = true; key = 1; } { foo = true; key = 2; } { foo = true; key = 4; } { foo = true; key = 5; } { foo = true; key = 6; } { key = 8; } { foo = true; key = 9; } { foo = true; key = 10; } { foo = true; key = 13; } { foo = true; key = 14; } { foo = true; key = 15; } { key = 17; } { foo = true; key = 18; } { foo = true; key = 19; } { foo = true; key = 22; } { foo = true; key = 23; } { key = 26; } { foo = true; key = 27; } { foo = true; key = 28; } { foo = true; key = 31; } { foo = true; key = 32; } { key = 35; } { foo = true; key = 36; } { foo = true; key = 40; } { foo = true; key = 41; } { key = 44; } { foo = true; key = 45; } { foo = true; key = 49; } { key = 53; } { foo = true; key = 54; } { foo = true; key = 58; } { key = 62; } { foo = true; key = 67; } { key = 71; } { key = 80; } ]
diff --git a/tests/lang/eval-okay-functionargs.exp b/tests/lang/eval-okay-functionargs.exp
new file mode 100644
index 000000000..c1c9f8ffa
--- /dev/null
+++ b/tests/lang/eval-okay-functionargs.exp
@@ -0,0 +1 @@
+[ "stdenv" "fetchurl" "aterm-stdenv" "aterm-stdenv2" "libX11" "libXv" "mplayer-stdenv2.libXv-libX11" "mplayer-stdenv2.libXv-libX11_2" "nix-stdenv-aterm-stdenv" "nix-stdenv2-aterm2-stdenv2" ]
diff --git a/tests/lang/eval-okay-path-antiquotation.exp b/tests/lang/eval-okay-path-antiquotation.exp
new file mode 100644
index 000000000..5b8ea0243
--- /dev/null
+++ b/tests/lang/eval-okay-path-antiquotation.exp
@@ -0,0 +1 @@
+{ absolute = /foo; expr = /pwd/lang/foo/bar; home = /fake-home/foo; notfirst = /pwd/lang/bar/foo; simple = /pwd/lang/foo; slashes = /foo/bar; surrounded = /pwd/lang/a-foo-b; }
diff --git a/tests/lang/eval-okay-path.exp b/tests/lang/eval-okay-path.exp
new file mode 100644
index 000000000..3ce7f8283
--- /dev/null
+++ b/tests/lang/eval-okay-path.exp
@@ -0,0 +1 @@
+"/nix/store/ya937r4ydw0l6kayq8jkyqaips9c75jm-output"
diff --git a/tests/local.mk b/tests/local.mk
index 340817ec3..2f7f76261 100644
--- a/tests/local.mk
+++ b/tests/local.mk
@@ -7,6 +7,8 @@ nix_tests = \
flakes/follow-paths.sh \
flakes/bundle.sh \
flakes/check.sh \
+ flakes/unlocked-override.sh \
+ flakes/absolute-paths.sh \
ca/gc.sh \
gc.sh \
remote-store.sh \
@@ -110,7 +112,8 @@ nix_tests = \
fetchClosure.sh \
completions.sh \
impure-derivations.sh \
- path-from-hash-part.sh
+ path-from-hash-part.sh \
+ toString-path.sh
ifeq ($(HAVE_LIBCPUID), 1)
nix_tests += compute-levels.sh
diff --git a/tests/nix_path.sh b/tests/nix_path.sh
index d3657abf0..2b222b4a1 100644
--- a/tests/nix_path.sh
+++ b/tests/nix_path.sh
@@ -9,3 +9,6 @@ nix-instantiate --eval -E '<by-relative-path/simple.nix>' --restrict-eval
# Should ideally also test this, but there’s no pure way to do it, so just trust me that it works
# nix-instantiate --eval -E '<nixpkgs>' -I nixpkgs=channel:nixos-unstable --restrict-eval
+
+[[ $(nix-instantiate --find-file by-absolute-path/simple.nix) = $PWD/simple.nix ]]
+[[ $(nix-instantiate --find-file by-relative-path/simple.nix) = $PWD/simple.nix ]]
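
The two checks added above use --find-file, which resolves a name through the Nix search path the same way angle-bracket lookups do. A sketch with an illustrative -I entry:

    nix-instantiate -I foo=/some/dir --find-file foo/simple.nix
    # prints /some/dir/simple.nix, or fails if the file does not exist
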
diff --git a/tests/restricted.sh b/tests/restricted.sh
index 242b901dd..9bd16cf51 100644
--- a/tests/restricted.sh
+++ b/tests/restricted.sh
@@ -3,7 +3,7 @@ source common.sh
clearStore
nix-instantiate --restrict-eval --eval -E '1 + 2'
-(! nix-instantiate --restrict-eval ./restricted.nix)
+(! nix-instantiate --eval --restrict-eval ./restricted.nix)
(! nix-instantiate --eval --restrict-eval <(echo '1 + 2'))
nix-instantiate --restrict-eval ./simple.nix -I src=.
nix-instantiate --restrict-eval ./simple.nix -I src1=simple.nix -I src2=config.nix -I src3=./simple.builder.sh
diff --git a/tests/toString-path.sh b/tests/toString-path.sh
new file mode 100644
index 000000000..07eb87465
--- /dev/null
+++ b/tests/toString-path.sh
@@ -0,0 +1,8 @@
+source common.sh
+
+mkdir -p $TEST_ROOT/foo
+echo bla > $TEST_ROOT/foo/bar
+
+[[ $(nix eval --raw --impure --expr "builtins.readFile (builtins.toString (builtins.fetchTree { type = \"path\"; path = \"$TEST_ROOT/foo\"; } + \"/bar\"))") = bla ]]
+
+[[ $(nix eval --json --impure --expr "builtins.readDir (builtins.toString (builtins.fetchTree { type = \"path\"; path = \"$TEST_ROOT/foo\"; }))") = '{"bar":"regular"}' ]]
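
Both assertions rely on builtins.toString of a fetchTree result yielding a path that readFile and readDir can open directly. A minimal interactive sketch, with a placeholder directory:

    nix eval --impure --raw \
      --expr 'builtins.toString (builtins.fetchTree { type = "path"; path = "/tmp/foo"; })'
    # prints the path of the fetched tree; passing it to builtins.readDir works, as asserted above
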