-rw-r--r--  .github/workflows/test.yml | 8
-rw-r--r--  configure.ac | 2
-rw-r--r--  doc/manual/generate-manpage.nix | 2
-rw-r--r--  doc/manual/local.mk | 16
-rw-r--r--  doc/manual/src/SUMMARY.md.in | 1
-rw-r--r--  doc/manual/src/expressions/language-values.md | 7
-rw-r--r--  flake.lock | 13
-rw-r--r--  flake.nix | 30
-rw-r--r--  mk/libraries.mk | 4
-rw-r--r--  mk/programs.mk | 4
-rwxr-xr-x  scripts/create-darwin-volume.sh | 9
-rw-r--r--  scripts/install-darwin-multi-user.sh | 17
-rw-r--r--  scripts/install-multi-user.sh | 2
-rwxr-xr-x  scripts/install.in | 11
-rw-r--r--  src/libcmd/command.cc | 23
-rw-r--r--  src/libcmd/command.hh | 26
-rw-r--r--  src/libcmd/installables.cc | 63
-rw-r--r--  src/libcmd/local.mk | 2
-rw-r--r--  src/libcmd/markdown.cc | 8
-rw-r--r--  src/libexpr/attr-set.hh | 8
-rw-r--r--  src/libexpr/eval.cc | 96
-rw-r--r--  src/libexpr/eval.hh | 21
-rw-r--r--  src/libexpr/flake/flake.cc | 58
-rw-r--r--  src/libexpr/flake/flake.hh | 11
-rw-r--r--  src/libexpr/flake/flakeref.cc | 6
-rw-r--r--  src/libexpr/lexer.l | 94
-rw-r--r--  src/libexpr/local.mk | 2
-rw-r--r--  src/libexpr/parser.y | 25
-rw-r--r--  src/libexpr/primops.cc | 72
-rw-r--r--  src/libexpr/primops.hh | 4
-rw-r--r--  src/libexpr/primops/fetchTree.cc | 44
-rw-r--r--  src/libexpr/value-to-xml.cc | 2
-rw-r--r--  src/libfetchers/git.cc | 35
-rw-r--r--  src/libfetchers/github.cc | 8
-rw-r--r--  src/libfetchers/local.mk | 2
-rw-r--r--  src/libfetchers/mercurial.cc | 44
-rw-r--r--  src/libfetchers/path.cc | 16
-rw-r--r--  src/libmain/local.mk | 2
-rw-r--r--  src/libmain/shared.cc | 30
-rw-r--r--  src/libstore/binary-cache-store.cc | 4
-rw-r--r--  src/libstore/build/derivation-goal.cc | 18
-rw-r--r--  src/libstore/build/entry-points.cc | 6
-rw-r--r--  src/libstore/build/local-derivation-goal.cc | 84
-rw-r--r--  src/libstore/content-address.cc | 16
-rw-r--r--  src/libstore/daemon.cc | 10
-rw-r--r--  src/libstore/derivations.cc | 44
-rw-r--r--  src/libstore/derivations.hh | 4
-rw-r--r--  src/libstore/derived-path.cc | 8
-rw-r--r--  src/libstore/globals.cc | 2
-rw-r--r--  src/libstore/http-binary-cache-store.cc | 4
-rw-r--r--  src/libstore/legacy-ssh-store.cc | 21
-rw-r--r--  src/libstore/local-store.cc | 20
-rw-r--r--  src/libstore/local.mk | 2
-rw-r--r--  src/libstore/misc.cc | 4
-rw-r--r--  src/libstore/path-with-outputs.cc | 4
-rw-r--r--  src/libstore/profiles.cc | 37
-rw-r--r--  src/libstore/profiles.hh | 9
-rw-r--r--  src/libstore/remote-store.cc | 24
-rw-r--r--  src/libstore/remote-store.hh | 3
-rw-r--r--  src/libstore/s3-binary-cache-store.cc | 6
-rw-r--r--  src/libstore/sandbox-defaults.sb | 4
-rw-r--r--  src/libstore/ssh-store.cc | 5
-rw-r--r--  src/libstore/store-api.cc | 8
-rw-r--r--  src/libstore/store-api.hh | 7
-rw-r--r--  src/libstore/uds-remote-store.cc | 6
-rw-r--r--  src/libstore/uds-remote-store.hh | 6
-rw-r--r--  src/libutil/ansicolor.hh | 2
-rw-r--r--  src/libutil/args.cc | 1
-rw-r--r--  src/libutil/args.hh | 16
-rw-r--r--  src/libutil/config.cc | 5
-rw-r--r--  src/libutil/config.hh | 3
-rw-r--r--  src/libutil/error.cc | 10
-rw-r--r--  src/libutil/fmt.hh | 2
-rw-r--r--  src/libutil/local.mk | 2
-rw-r--r--  src/libutil/logging.cc | 2
-rw-r--r--  src/libutil/ref.hh | 43
-rw-r--r--  src/libutil/serialise.cc | 3
-rw-r--r--  src/libutil/tarfile.cc | 33
-rw-r--r--  src/libutil/tarfile.hh | 3
-rw-r--r--  src/libutil/tests/logging.cc | 4
-rw-r--r--  src/libutil/util.cc | 15
-rw-r--r--  src/libutil/util.hh | 14
-rw-r--r--  src/nix-env/nix-env.cc | 41
-rw-r--r--  src/nix-env/user-env.cc | 4
-rw-r--r--  src/nix/build.cc | 6
-rw-r--r--  src/nix/copy.cc | 2
-rw-r--r--  src/nix/develop.cc | 15
-rw-r--r--  src/nix/flake-show.md | 3
-rw-r--r--  src/nix/flake.cc | 137
-rw-r--r--  src/nix/local.mk | 4
-rw-r--r--  src/nix/log.cc | 4
-rw-r--r--  src/nix/main.cc | 46
-rw-r--r--  src/nix/make-content-addressable.cc | 2
-rw-r--r--  src/nix/path-info.cc | 2
-rw-r--r--  src/nix/path-info.md | 2
-rw-r--r--  src/nix/prefetch.cc | 22
-rw-r--r--  src/nix/profile-history.md | 4
-rw-r--r--  src/nix/profile-rollback.md | 26
-rw-r--r--  src/nix/profile-wipe-history.md | 20
-rw-r--r--  src/nix/profile.cc | 86
-rw-r--r--  src/nix/realisation.cc | 2
-rw-r--r--  src/nix/registry.md | 2
-rw-r--r--  src/nix/repl.cc | 12
-rw-r--r--  src/nix/search.cc | 1
-rw-r--r--  src/nix/show-derivation.cc | 8
-rw-r--r--  src/nix/sigs.cc | 4
-rw-r--r--  src/nix/store-delete.cc | 2
-rw-r--r--  src/nix/store-repair.cc | 2
-rw-r--r--  src/nix/verify.cc | 2
-rw-r--r--  tests/ca/signatures.sh | 4
-rw-r--r--  tests/fetchurl.sh | 14
-rw-r--r--  tests/fixed.nix | 8
-rw-r--r--  tests/fixed.sh | 5
-rw-r--r--  tests/flakes.sh | 16
-rw-r--r--  tests/lang/eval-fail-nonexist-path.nix (renamed from tests/lang/eval-fail-antiquoted-path.nix) | 0
-rw-r--r--  tests/lang/eval-okay-path-antiquotation.nix | 12
-rw-r--r--  tests/local-store.sh | 5
-rw-r--r--  tests/recursive.sh | 3
118 files changed, 1238 insertions, 642 deletions
diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml
index b2b1f07fb..abaff75ee 100644
--- a/.github/workflows/test.yml
+++ b/.github/workflows/test.yml
@@ -4,6 +4,7 @@ on:
push:
jobs:
tests:
+ needs: [check_cachix]
strategy:
matrix:
os: [ubuntu-latest, macos-latest]
@@ -13,9 +14,10 @@ jobs:
- uses: actions/checkout@v2.3.4
with:
fetch-depth: 0
- - uses: cachix/install-nix-action@v13
+ - uses: cachix/install-nix-action@v14
- run: echo CACHIX_NAME="$(echo $GITHUB_REPOSITORY-install-tests | tr "[A-Z]/" "[a-z]-")" >> $GITHUB_ENV
- uses: cachix/cachix-action@v10
+ if: needs.check_cachix.outputs.secret == 'true'
with:
name: '${{ env.CACHIX_NAME }}'
signingKey: '${{ secrets.CACHIX_SIGNING_KEY }}'
@@ -43,7 +45,7 @@ jobs:
with:
fetch-depth: 0
- run: echo CACHIX_NAME="$(echo $GITHUB_REPOSITORY-install-tests | tr "[A-Z]/" "[a-z]-")" >> $GITHUB_ENV
- - uses: cachix/install-nix-action@v13
+ - uses: cachix/install-nix-action@v14
- uses: cachix/cachix-action@v10
with:
name: '${{ env.CACHIX_NAME }}'
@@ -61,7 +63,7 @@ jobs:
steps:
- uses: actions/checkout@v2.3.4
- run: echo CACHIX_NAME="$(echo $GITHUB_REPOSITORY-install-tests | tr "[A-Z]/" "[a-z]-")" >> $GITHUB_ENV
- - uses: cachix/install-nix-action@v13
+ - uses: cachix/install-nix-action@v14
with:
install_url: '${{needs.installer.outputs.installerURL}}'
install_options: "--tarball-url-prefix https://${{ env.CACHIX_NAME }}.cachix.org/serve"
diff --git a/configure.ac b/configure.ac
index 2841a87c0..65478ecc5 100644
--- a/configure.ac
+++ b/configure.ac
@@ -260,6 +260,8 @@ AC_ARG_ENABLE(doc-gen, AS_HELP_STRING([--disable-doc-gen],[disable documentation
doc_generate=$enableval, doc_generate=yes)
AC_SUBST(doc_generate)
+# Look for lowdown library.
+PKG_CHECK_MODULES([LOWDOWN], [lowdown >= 0.8.0], [CXXFLAGS="$LOWDOWN_CFLAGS $CXXFLAGS"])
# Setuid installations.
AC_CHECK_FUNCS([setresuid setreuid lchown])
diff --git a/doc/manual/generate-manpage.nix b/doc/manual/generate-manpage.nix
index 964b57086..4fc9abea1 100644
--- a/doc/manual/generate-manpage.nix
+++ b/doc/manual/generate-manpage.nix
@@ -89,7 +89,7 @@ let
in
let
- manpages = processCommand { filename = "nix"; command = "nix"; def = command; };
+ manpages = processCommand { filename = "nix"; command = "nix"; def = builtins.fromJSON command; };
summary = concatStrings (map (manpage: " - [${manpage.command}](command-ref/new-cli/${manpage.name})\n") manpages);
in
(listToAttrs manpages) // { "SUMMARY.md" = summary; }
diff --git a/doc/manual/local.mk b/doc/manual/local.mk
index 93ec3cad0..a8c52f841 100644
--- a/doc/manual/local.mk
+++ b/doc/manual/local.mk
@@ -44,7 +44,7 @@ $(d)/src/SUMMARY.md: $(d)/src/SUMMARY.md.in $(d)/src/command-ref/new-cli
$(d)/src/command-ref/new-cli: $(d)/nix.json $(d)/generate-manpage.nix $(bindir)/nix
@rm -rf $@
- $(trace-gen) $(nix-eval) --write-to $@ --expr 'import doc/manual/generate-manpage.nix (builtins.fromJSON (builtins.readFile $<))'
+ $(trace-gen) $(nix-eval) --write-to $@ --expr 'import doc/manual/generate-manpage.nix (builtins.readFile $<)'
$(d)/src/command-ref/conf-file.md: $(d)/conf-file.json $(d)/generate-options.nix $(d)/src/command-ref/conf-file-prefix.md $(bindir)/nix
@cat doc/manual/src/command-ref/conf-file-prefix.md > $@.tmp
@@ -78,23 +78,23 @@ man: doc/manual/generated/man1/nix3-manpages
all: doc/manual/generated/man1/nix3-manpages
$(mandir)/man1/nix3-manpages: doc/manual/generated/man1/nix3-manpages
- @mkdir -p $$(dirname $@)
- $(trace-install) install -m 0644 $$(dirname $<)/* $$(dirname $@)
+ @mkdir -p $(DESTDIR)$$(dirname $@)
+ $(trace-install) install -m 0644 $$(dirname $<)/* $(DESTDIR)$$(dirname $@)
doc/manual/generated/man1/nix3-manpages: $(d)/src/command-ref/new-cli
- @mkdir -p $$(dirname $@)
+ @mkdir -p $(DESTDIR)$$(dirname $@)
$(trace-gen) for i in doc/manual/src/command-ref/new-cli/*.md; do \
name=$$(basename $$i .md); \
tmpFile=$$(mktemp); \
if [[ $$name = SUMMARY ]]; then continue; fi; \
printf "Title: %s\n\n" "$$name" > $$tmpFile; \
cat $$i >> $$tmpFile; \
- lowdown -sT man -M section=1 $$tmpFile -o $$(dirname $@)/$$name.1; \
+ lowdown -sT man -M section=1 $$tmpFile -o $(DESTDIR)$$(dirname $@)/$$name.1; \
rm $$tmpFile; \
done
- touch $@
+ @touch $@
-$(docdir)/manual/index.html: $(MANUAL_SRCS) $(d)/book.toml $(d)/custom.css $(d)/src/SUMMARY.md $(d)/src/command-ref/new-cli $(d)/src/command-ref/conf-file.md $(d)/src/expressions/builtins.md
- $(trace-gen) RUST_LOG=warn mdbook build doc/manual -d $(docdir)/manual
+$(docdir)/manual/index.html: $(MANUAL_SRCS) $(d)/book.toml $(d)/custom.css $(d)/src/SUMMARY.md $(d)/src/command-ref/new-cli $(d)/src/command-ref/conf-file.md $(d)/src/expressions/builtins.md $(call rwildcard, $(d)/src, *.md)
+ $(trace-gen) RUST_LOG=warn mdbook build doc/manual -d $(DESTDIR)$(docdir)/manual
endif
diff --git a/doc/manual/src/SUMMARY.md.in b/doc/manual/src/SUMMARY.md.in
index 448fee803..df9209c7d 100644
--- a/doc/manual/src/SUMMARY.md.in
+++ b/doc/manual/src/SUMMARY.md.in
@@ -70,6 +70,7 @@
- [Hacking](contributing/hacking.md)
- [CLI guideline](contributing/cli-guideline.md)
- [Release Notes](release-notes/release-notes.md)
+ - [Release 2.4 (2021-XX-XX)](release-notes/rl-2.4.md)
- [Release 2.3 (2019-09-04)](release-notes/rl-2.3.md)
- [Release 2.2 (2019-01-11)](release-notes/rl-2.2.md)
- [Release 2.1 (2018-09-02)](release-notes/rl-2.1.md)
diff --git a/doc/manual/src/expressions/language-values.md b/doc/manual/src/expressions/language-values.md
index ce31029cc..28fa23b58 100644
--- a/doc/manual/src/expressions/language-values.md
+++ b/doc/manual/src/expressions/language-values.md
@@ -139,6 +139,13 @@ Nix has the following basic data types:
environment variable `NIX_PATH` will be searched for the given file
or directory name.
+ Antiquotation is supported in any paths except those in angle brackets.
+ `./${foo}-${bar}.nix` is a more convenient way of writing
+ `./. + "/" + foo + "-" + bar + ".nix"` or `./. + "/${foo}-${bar}.nix"`. At
+ least one slash must appear *before* any antiquotations for this to be
+ recognized as a path. `a.${foo}/b.${bar}` is a syntactically valid division
+ operation. `./a.${foo}/b.${bar}` is a path.
+
- *Booleans* with values `true` and `false`.
- The null value, denoted as `null`.
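As an illustration of the path antiquotation rules described in the documentation hunk above, a small standalone Nix sketch (not part of the patch; the bindings `pkg` and `ext` are hypothetical):

    let
      pkg = "hello";
      ext = "nix";
    in {
      # A slash appears before the first antiquotation, so these are paths:
      p1 = ./${pkg}-2.10.${ext};    # equivalent to ./. + "/${pkg}-2.10.${ext}"
      p2 = ./a.${pkg}/b.${ext};     # also a path
      # Without the leading "./", a.${pkg}/b.${ext} would instead parse as a
      # division operation, and <...> search paths never support antiquotation.
    }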
diff --git a/flake.lock b/flake.lock
index 8c0d5a2d2..96a69345b 100644
--- a/flake.lock
+++ b/flake.lock
@@ -3,27 +3,26 @@
"lowdown-src": {
"flake": false,
"locked": {
- "lastModified": 1617481909,
- "narHash": "sha256-SqnfOFuLuVRRNeVJr1yeEPJue/qWoCp5N6o5Kr///p4=",
+ "lastModified": 1632468475,
+ "narHash": "sha256-NNOm9CbdA8cuwbvaBHslGbPTiU6bh1Ao+MpEPx4rSGo=",
"owner": "kristapsdz",
"repo": "lowdown",
- "rev": "148f9b2f586c41b7e36e73009db43ea68c7a1a4d",
+ "rev": "6bd668af3fd098bdd07a1bedd399564141e275da",
"type": "github"
},
"original": {
"owner": "kristapsdz",
- "ref": "VERSION_0_8_4",
"repo": "lowdown",
"type": "github"
}
},
"nixpkgs": {
"locked": {
- "lastModified": 1628689438,
- "narHash": "sha256-YMINW6YmubHZVdliGsAJpnnMYXRrvppv59LgwtnyYhs=",
+ "lastModified": 1632864508,
+ "narHash": "sha256-d127FIvGR41XbVRDPVvozUPQ/uRHbHwvfyKHwEt5xFM=",
"owner": "NixOS",
"repo": "nixpkgs",
- "rev": "f6551e1efa261568c82b76c3a582b2c2ceb1f53f",
+ "rev": "82891b5e2c2359d7e58d08849e4c89511ab94234",
"type": "github"
},
"original": {
diff --git a/flake.nix b/flake.nix
index ee76d9188..43123ca7e 100644
--- a/flake.nix
+++ b/flake.nix
@@ -2,7 +2,7 @@
description = "The purely functional package manager";
inputs.nixpkgs.url = "nixpkgs/nixos-21.05-small";
- inputs.lowdown-src = { url = "github:kristapsdz/lowdown/VERSION_0_8_4"; flake = false; };
+ inputs.lowdown-src = { url = "github:kristapsdz/lowdown"; flake = false; };
outputs = { self, nixpkgs, lowdown-src }:
@@ -70,7 +70,7 @@
[
buildPackages.bison
buildPackages.flex
- (lib.getBin buildPackages.lowdown)
+ (lib.getBin buildPackages.lowdown-nix)
buildPackages.mdbook
buildPackages.autoconf-archive
buildPackages.autoreconfHook
@@ -89,7 +89,7 @@
openssl sqlite
libarchive
boost
- lowdown
+ lowdown-nix
gmock
]
++ lib.optionals stdenv.isLinux [libseccomp]
@@ -178,8 +178,8 @@
installPhase = ''
mkdir -p $out
'';
- installCheckPhase = "make installcheck";
+ installCheckPhase = "make installcheck -j$NIX_BUILD_CORES -l$NIX_BUILD_CORES";
};
binaryTarball = buildPackages: nix: pkgs: let
@@ -259,11 +259,11 @@
# 'nix.perl-bindings' packages.
overlay = final: prev: {
- # An older version of Nix to test against when using the daemon.
- # Currently using `nixUnstable` as the stable one doesn't respect
- # `NIX_DAEMON_SOCKET_PATH` which is needed for the tests.
nixStable = prev.nix;
+ # Forward from the previous stage as we don’t want it to pick the lowdown override
+ nixUnstable = prev.nixUnstable;
+
nix = with final; with commonDeps pkgs; stdenv.mkDerivation {
name = "nix-${version}";
inherit version;
@@ -349,15 +349,8 @@
};
- lowdown = with final; stdenv.mkDerivation rec {
- name = "lowdown-0.8.4";
-
- /*
- src = fetchurl {
- url = "https://kristaps.bsd.lv/lowdown/snapshots/${name}.tar.gz";
- hash = "sha512-U9WeGoInT9vrawwa57t6u9dEdRge4/P+0wLxmQyOL9nhzOEUU2FRz2Be9H0dCjYE7p2v3vCXIYk40M+jjULATw==";
- };
- */
+ lowdown-nix = with final; stdenv.mkDerivation rec {
+ name = "lowdown-0.9.0";
src = lowdown-src;
@@ -502,10 +495,7 @@
# `NIX_DAEMON_SOCKET_PATH` which is required for the tests to work
# againstLatestStable = testNixVersions pkgs pkgs.nix pkgs.nixStable;
} "touch $out";
- } // (if system == "x86_64-linux" then (builtins.listToAttrs (map (crossSystem: {
- name = "binaryTarball-${crossSystem}";
- value = self.hydraJobs.binaryTarballCross.${system}.${crossSystem};
- }) crossSystems)) else {}));
+ });
packages = forAllSystems (system: {
inherit (nixpkgsFor.${system}) nix;
diff --git a/mk/libraries.mk b/mk/libraries.mk
index 07bd54dab..fd4d4ee72 100644
--- a/mk/libraries.mk
+++ b/mk/libraries.mk
@@ -91,7 +91,7 @@ define build-library
$(1)_PATH := $$(_d)/$$($(1)_NAME).$(SO_EXT)
$$($(1)_PATH): $$($(1)_OBJS) $$(_libs) | $$(_d)/
- $$(trace-ld) $(CXX) -o $$(abspath $$@) -shared $$(LDFLAGS) $$(GLOBAL_LDFLAGS) $$($(1)_OBJS) $$($(1)_LDFLAGS) $$($(1)_LDFLAGS_PROPAGATED) $$(foreach lib, $$($(1)_LIBS), $$($$(lib)_LDFLAGS_USE)) $$($(1)_LDFLAGS_UNINSTALLED)
+ $$(trace-ld) $(CXX) -o $$(abspath $$@) -shared $$(foreach lib, $$($(1)_LIBS), $$($$(lib)_LDFLAGS_USE)) $$($(1)_LDFLAGS_UNINSTALLED) $$(LDFLAGS) $$(GLOBAL_LDFLAGS) $$($(1)_OBJS) $$($(1)_LDFLAGS) $$($(1)_LDFLAGS_PROPAGATED)
ifndef HOST_DARWIN
$(1)_LDFLAGS_USE += -Wl,-rpath,$$(abspath $$(_d))
@@ -105,7 +105,7 @@ define build-library
$$(eval $$(call create-dir, $$($(1)_INSTALL_DIR)))
$$($(1)_INSTALL_PATH): $$($(1)_OBJS) $$(_libs_final) | $(DESTDIR)$$($(1)_INSTALL_DIR)/
- $$(trace-ld) $(CXX) -o $$@ -shared $$(LDFLAGS) $$(GLOBAL_LDFLAGS) $$($(1)_OBJS) $$($(1)_LDFLAGS) $$($(1)_LDFLAGS_PROPAGATED) $$(foreach lib, $$($(1)_LIBS), $$($$(lib)_LDFLAGS_USE_INSTALLED))
+ $$(trace-ld) $(CXX) -o $$@ -shared $$(foreach lib, $$($(1)_LIBS), $$($$(lib)_LDFLAGS_USE_INSTALLED)) $$(LDFLAGS) $$(GLOBAL_LDFLAGS) $$($(1)_OBJS) $$($(1)_LDFLAGS) $$($(1)_LDFLAGS_PROPAGATED)
$(1)_LDFLAGS_USE_INSTALLED += -L$$(DESTDIR)$$($(1)_INSTALL_DIR) -l$$(patsubst lib%,%,$$(strip $$($(1)_NAME)))
ifndef HOST_DARWIN
diff --git a/mk/programs.mk b/mk/programs.mk
index d0cf5baf0..70b09f0dd 100644
--- a/mk/programs.mk
+++ b/mk/programs.mk
@@ -32,7 +32,7 @@ define build-program
$$(eval $$(call create-dir, $$(_d)))
$$($(1)_PATH): $$($(1)_OBJS) $$(_libs) | $$(_d)/
- $$(trace-ld) $(CXX) -o $$@ $$(LDFLAGS) $$(GLOBAL_LDFLAGS) $$($(1)_OBJS) $$($(1)_LDFLAGS) $$(foreach lib, $$($(1)_LIBS), $$($$(lib)_LDFLAGS_USE))
+ $$(trace-ld) $(CXX) -o $$@ $$(foreach lib, $$($(1)_LIBS), $$($$(lib)_LDFLAGS_USE)) $$(LDFLAGS) $$(GLOBAL_LDFLAGS) $$($(1)_OBJS) $$($(1)_LDFLAGS)
$(1)_INSTALL_DIR ?= $$(bindir)
@@ -49,7 +49,7 @@ define build-program
_libs_final := $$(foreach lib, $$($(1)_LIBS), $$($$(lib)_INSTALL_PATH))
$(DESTDIR)$$($(1)_INSTALL_PATH): $$($(1)_OBJS) $$(_libs_final) | $(DESTDIR)$$($(1)_INSTALL_DIR)/
- $$(trace-ld) $(CXX) -o $$@ $$(LDFLAGS) $$(GLOBAL_LDFLAGS) $$($(1)_OBJS) $$($(1)_LDFLAGS) $$(foreach lib, $$($(1)_LIBS), $$($$(lib)_LDFLAGS_USE_INSTALLED))
+ $$(trace-ld) $(CXX) -o $$@ $$(foreach lib, $$($(1)_LIBS), $$($$(lib)_LDFLAGS_USE_INSTALLED)) $$(LDFLAGS) $$(GLOBAL_LDFLAGS) $$($(1)_OBJS) $$($(1)_LDFLAGS)
else
diff --git a/scripts/create-darwin-volume.sh b/scripts/create-darwin-volume.sh
index 8aff03199..b52232dd3 100755
--- a/scripts/create-darwin-volume.sh
+++ b/scripts/create-darwin-volume.sh
@@ -715,7 +715,8 @@ create_volume() {
# 6) getting special w/ awk may be fragile, but doing it to:
# - save time over running slow diskutil commands
# - skirt risk we grab wrong volume if multiple match
- /usr/sbin/diskutil apfs addVolume "$NIX_VOLUME_USE_DISK" "$NIX_VOLUME_FS" "$NIX_VOLUME_LABEL" -nomount | /usr/bin/awk '/Created new APFS Volume/ {print $5}'
+ _sudo "to create a new APFS volume '$NIX_VOLUME_LABEL' on $NIX_VOLUME_USE_DISK" \
+ /usr/sbin/diskutil apfs addVolume "$NIX_VOLUME_USE_DISK" "$NIX_VOLUME_FS" "$NIX_VOLUME_LABEL" -nomount | /usr/bin/awk '/Created new APFS Volume/ {print $5}'
}
volume_uuid_from_special() {
@@ -738,7 +739,6 @@ await_volume() {
setup_volume() {
local use_special use_uuid profile_packages
task "Creating a Nix volume" >&2
- # DOING: I'm tempted to wrap this call in a grep to get the new disk special without doing anything too complex, but this sudo wrapper *is* a little complex, so it'll be a PITA unless maybe we can skip sudo on this. Let's just try it without.
use_special="${NIX_VOLUME_USE_SPECIAL:-$(create_volume)}"
@@ -759,6 +759,11 @@ setup_volume() {
await_volume
+ if [ "$(/usr/sbin/diskutil info -plist "$NIX_ROOT" | xmllint --xpath "(/plist/dict/key[text()='GlobalPermissionsEnabled'])/following-sibling::*[1]" -)" = "<false/>" ]; then
+ _sudo "to set enableOwnership (enabling users to own files)" \
+ /usr/sbin/diskutil enableOwnership "$NIX_ROOT"
+ fi
+
# TODO: below is a vague kludge for now; I just don't know
# what if any safe action there is to take here. Also, the
# reminder isn't very helpful.
diff --git a/scripts/install-darwin-multi-user.sh b/scripts/install-darwin-multi-user.sh
index f8d6c5e8f..32a12f2ee 100644
--- a/scripts/install-darwin-multi-user.sh
+++ b/scripts/install-darwin-multi-user.sh
@@ -13,11 +13,22 @@ NIX_BUILD_USER_NAME_TEMPLATE="_nixbld%d"
read_only_root() {
# this touch command ~should~ always produce an error
# as of this change I confirmed /usr/bin/touch emits:
+ # "touch: /: Operation not permitted" Monterey
# "touch: /: Read-only file system" Catalina+ and Big Sur
# "touch: /: Permission denied" Mojave
# (not matching prefix for compat w/ coreutils touch in case using
# an explicit path causes problems; its prefix differs)
- [[ "$(/usr/bin/touch / 2>&1)" = *"Read-only file system" ]]
+ case "$(/usr/bin/touch / 2>&1)" in
+ *"Read-only file system") # Catalina, Big Sur
+ return 0
+ ;;
+ *"Operation not permitted") # Monterey
+ return 0
+ ;;
+ *)
+ return 1
+ ;;
+ esac
# Avoiding the slow semantic way to get this information (~330ms vs ~8ms)
# unless using touch causes problems. Just in case, that approach is:
@@ -206,4 +217,8 @@ poly_prepare_to_install() {
EOF
setup_darwin_volume
fi
+
+ if [ "$(diskutil info -plist /nix | xmllint --xpath "(/plist/dict/key[text()='GlobalPermissionsEnabled'])/following-sibling::*[1]" -)" = "<false/>" ]; then
+ failure "This script needs a /nix volume with global permissions! This may require running sudo diskutil enableOwnership /nix."
+ fi
}
diff --git a/scripts/install-multi-user.sh b/scripts/install-multi-user.sh
index d02c5cac0..513127a62 100644
--- a/scripts/install-multi-user.sh
+++ b/scripts/install-multi-user.sh
@@ -33,7 +33,7 @@ NIX_BUILD_USER_NAME_TEMPLATE="nixbld%d"
readonly NIX_ROOT="/nix"
readonly NIX_EXTRA_CONF=${NIX_EXTRA_CONF:-}
-readonly PROFILE_TARGETS=("/etc/bashrc" "/etc/profile.d/nix.sh" "/etc/zshenv" "/etc/bash.bashrc" "/etc/zsh/zshenv")
+readonly PROFILE_TARGETS=("/etc/bashrc" "/etc/profile.d/nix.sh" "/etc/zshrc" "/etc/bash.bashrc" "/etc/zsh/zshrc")
readonly PROFILE_BACKUP_SUFFIX=".backup-before-nix"
readonly PROFILE_NIX_FILE="$NIX_ROOT/var/nix/profiles/default/etc/profile.d/nix-daemon.sh"
diff --git a/scripts/install.in b/scripts/install.in
index ffc1f2785..5be4f9dda 100755
--- a/scripts/install.in
+++ b/scripts/install.in
@@ -76,14 +76,21 @@ fi
tarball=$tmpDir/nix-@nixVersion@-$system.tar.xz
-require_util curl "download the binary tarball"
require_util tar "unpack the binary tarball"
if [ "$(uname -s)" != "Darwin" ]; then
require_util xz "unpack the binary tarball"
fi
+if command -v wget > /dev/null 2>&1; then
+ fetch() { wget "$1" -O "$2"; }
+elif command -v curl > /dev/null 2>&1; then
+ fetch() { curl -L "$1" -o "$2"; }
+else
+ oops "you don't have wget or curl installed, which I need to download the binary tarball"
+fi
+
echo "downloading Nix @nixVersion@ binary tarball for $system from '$url' to '$tmpDir'..."
-curl -L "$url" -o "$tarball" || oops "failed to download '$url'"
+fetch "$url" "$tarball" || oops "failed to download '$url'"
if command -v sha256sum > /dev/null 2>&1; then
hash2="$(sha256sum -b "$tarball" | cut -c1-64)"
diff --git a/src/libcmd/command.cc b/src/libcmd/command.cc
index 2daf43aa7..fd3edfc46 100644
--- a/src/libcmd/command.cc
+++ b/src/libcmd/command.cc
@@ -120,7 +120,7 @@ void BuiltPathsCommand::run(ref<Store> store)
// XXX: This only computes the store path closure, ignoring
// intermediate realisations
StorePathSet pathsRoots, pathsClosure;
- for (auto & root: paths) {
+ for (auto & root : paths) {
auto rootFromThis = root.outPaths();
pathsRoots.insert(rootFromThis.begin(), rootFromThis.end());
}
@@ -138,17 +138,20 @@ StorePathsCommand::StorePathsCommand(bool recursive)
{
}
-void StorePathsCommand::run(ref<Store> store, BuiltPaths paths)
+void StorePathsCommand::run(ref<Store> store, BuiltPaths && paths)
{
- StorePaths storePaths;
- for (auto& builtPath : paths)
- for (auto& p : builtPath.outPaths())
- storePaths.push_back(p);
+ StorePathSet storePaths;
+ for (auto & builtPath : paths)
+ for (auto & p : builtPath.outPaths())
+ storePaths.insert(p);
- run(store, std::move(storePaths));
+ auto sorted = store->topoSortPaths(storePaths);
+ std::reverse(sorted.begin(), sorted.end());
+
+ run(store, std::move(sorted));
}
-void StorePathCommand::run(ref<Store> store, std::vector<StorePath> storePaths)
+void StorePathCommand::run(ref<Store> store, std::vector<StorePath> && storePaths)
{
if (storePaths.size() != 1)
throw UsageError("this command requires exactly one store path");
@@ -200,10 +203,10 @@ void MixProfile::updateProfile(const BuiltPaths & buildables)
for (auto & buildable : buildables) {
std::visit(overloaded {
- [&](BuiltPath::Opaque bo) {
+ [&](const BuiltPath::Opaque & bo) {
result.push_back(bo.path);
},
- [&](BuiltPath::Built bfd) {
+ [&](const BuiltPath::Built & bfd) {
for (auto & output : bfd.outputs) {
result.push_back(output.second);
}
diff --git a/src/libcmd/command.hh b/src/libcmd/command.hh
index f3625ed0d..07f398468 100644
--- a/src/libcmd/command.hh
+++ b/src/libcmd/command.hh
@@ -108,6 +108,8 @@ enum class Realise {
exists. */
Derivation,
/* Evaluate in dry-run mode. Postcondition: nothing. */
+ // FIXME: currently unused, but could be revived if we can
+ // evaluate derivations in-memory.
Nothing
};
@@ -167,7 +169,7 @@ public:
using StoreCommand::run;
- virtual void run(ref<Store> store, BuiltPaths paths) = 0;
+ virtual void run(ref<Store> store, BuiltPaths && paths) = 0;
void run(ref<Store> store) override;
@@ -180,9 +182,9 @@ struct StorePathsCommand : public BuiltPathsCommand
using BuiltPathsCommand::run;
- virtual void run(ref<Store> store, std::vector<StorePath> storePaths) = 0;
+ virtual void run(ref<Store> store, std::vector<StorePath> && storePaths) = 0;
- void run(ref<Store> store, BuiltPaths paths) override;
+ void run(ref<Store> store, BuiltPaths && paths) override;
};
/* A command that operates on exactly one store path. */
@@ -192,7 +194,7 @@ struct StorePathCommand : public StorePathsCommand
virtual void run(ref<Store> store, const StorePath & storePath) = 0;
- void run(ref<Store> store, std::vector<StorePath> storePaths) override;
+ void run(ref<Store> store, std::vector<StorePath> && storePaths) override;
};
/* A helper class for registering commands globally. */
@@ -223,15 +225,18 @@ static RegisterCommand registerCommand2(std::vector<std::string> && name)
return RegisterCommand(std::move(name), [](){ return make_ref<T>(); });
}
-BuiltPaths build(ref<Store> evalStore, ref<Store> store, Realise mode,
- std::vector<std::shared_ptr<Installable>> installables, BuildMode bMode = bmNormal);
+BuiltPaths build(
+ ref<Store> evalStore,
+ ref<Store> store, Realise mode,
+ const std::vector<std::shared_ptr<Installable>> & installables,
+ BuildMode bMode = bmNormal);
std::set<StorePath> toStorePaths(
ref<Store> evalStore,
ref<Store> store,
Realise mode,
OperateOn operateOn,
- std::vector<std::shared_ptr<Installable>> installables);
+ const std::vector<std::shared_ptr<Installable>> & installables);
StorePath toStorePath(
ref<Store> evalStore,
@@ -240,8 +245,9 @@ StorePath toStorePath(
OperateOn operateOn,
std::shared_ptr<Installable> installable);
-std::set<StorePath> toDerivations(ref<Store> store,
- std::vector<std::shared_ptr<Installable>> installables,
+std::set<StorePath> toDerivations(
+ ref<Store> store,
+ const std::vector<std::shared_ptr<Installable>> & installables,
bool useDeriver = false);
BuiltPaths toBuiltPaths(
@@ -249,7 +255,7 @@ BuiltPaths toBuiltPaths(
ref<Store> store,
Realise mode,
OperateOn operateOn,
- std::vector<std::shared_ptr<Installable>> installables);
+ const std::vector<std::shared_ptr<Installable>> & installables);
/* Helper function to generate args that invoke $EDITOR on
filename:lineno. */
diff --git a/src/libcmd/installables.cc b/src/libcmd/installables.cc
index 68e0469c3..0f0fcf39e 100644
--- a/src/libcmd/installables.cc
+++ b/src/libcmd/installables.cc
@@ -654,6 +654,17 @@ std::vector<std::shared_ptr<Installable>> SourceExprCommand::parseInstallables(
for (auto & s : ss) {
std::exception_ptr ex;
+ if (s.find('/') != std::string::npos) {
+ try {
+ result.push_back(std::make_shared<InstallableStorePath>(store, store->followLinksToStorePath(s)));
+ continue;
+ } catch (BadStorePath &) {
+ } catch (...) {
+ if (!ex)
+ ex = std::current_exception();
+ }
+ }
+
try {
auto [flakeRef, fragment] = parseFlakeRefWithFragment(s, absPath("."));
result.push_back(std::make_shared<InstallableFlake>(
@@ -668,25 +679,7 @@ std::vector<std::shared_ptr<Installable>> SourceExprCommand::parseInstallables(
ex = std::current_exception();
}
- if (s.find('/') != std::string::npos) {
- try {
- result.push_back(std::make_shared<InstallableStorePath>(store, store->followLinksToStorePath(s)));
- continue;
- } catch (BadStorePath &) {
- } catch (...) {
- if (!ex)
- ex = std::current_exception();
- }
- }
-
std::rethrow_exception(ex);
-
- /*
- throw Error(
- pathExists(s)
- ? "path '%s' is not a flake or a store path"
- : "don't know how to handle argument '%s'", s);
- */
}
}
@@ -704,13 +697,13 @@ std::shared_ptr<Installable> SourceExprCommand::parseInstallable(
BuiltPaths getBuiltPaths(ref<Store> evalStore, ref<Store> store, const DerivedPaths & hopefullyBuiltPaths)
{
BuiltPaths res;
- for (auto & b : hopefullyBuiltPaths)
+ for (const auto & b : hopefullyBuiltPaths)
std::visit(
overloaded{
- [&](DerivedPath::Opaque bo) {
+ [&](const DerivedPath::Opaque & bo) {
res.push_back(BuiltPath::Opaque{bo.path});
},
- [&](DerivedPath::Built bfd) {
+ [&](const DerivedPath::Built & bfd) {
OutputPathMap outputs;
auto drv = evalStore->readDerivation(bfd.drvPath);
auto outputHashes = staticOutputHashes(*evalStore, drv); // FIXME: expensive
@@ -750,8 +743,12 @@ BuiltPaths getBuiltPaths(ref<Store> evalStore, ref<Store> store, const DerivedPa
return res;
}
-BuiltPaths build(ref<Store> evalStore, ref<Store> store, Realise mode,
- std::vector<std::shared_ptr<Installable>> installables, BuildMode bMode)
+BuiltPaths build(
+ ref<Store> evalStore,
+ ref<Store> store,
+ Realise mode,
+ const std::vector<std::shared_ptr<Installable>> & installables,
+ BuildMode bMode)
{
if (mode == Realise::Nothing)
settings.readOnlyMode = true;
@@ -763,7 +760,7 @@ BuiltPaths build(ref<Store> evalStore, ref<Store> store, Realise mode,
pathsToBuild.insert(pathsToBuild.end(), b.begin(), b.end());
}
- if (mode == Realise::Nothing)
+ if (mode == Realise::Nothing || mode == Realise::Derivation)
printMissing(store, pathsToBuild, lvlError);
else if (mode == Realise::Outputs)
store->buildPaths(pathsToBuild, bMode, evalStore);
@@ -776,7 +773,7 @@ BuiltPaths toBuiltPaths(
ref<Store> store,
Realise mode,
OperateOn operateOn,
- std::vector<std::shared_ptr<Installable>> installables)
+ const std::vector<std::shared_ptr<Installable>> & installables)
{
if (operateOn == OperateOn::Output)
return build(evalStore, store, mode, installables);
@@ -795,7 +792,7 @@ StorePathSet toStorePaths(
ref<Store> evalStore,
ref<Store> store,
Realise mode, OperateOn operateOn,
- std::vector<std::shared_ptr<Installable>> installables)
+ const std::vector<std::shared_ptr<Installable>> & installables)
{
StorePathSet outPaths;
for (auto & path : toBuiltPaths(evalStore, store, mode, operateOn, installables)) {
@@ -819,15 +816,17 @@ StorePath toStorePath(
return *paths.begin();
}
-StorePathSet toDerivations(ref<Store> store,
- std::vector<std::shared_ptr<Installable>> installables, bool useDeriver)
+StorePathSet toDerivations(
+ ref<Store> store,
+ const std::vector<std::shared_ptr<Installable>> & installables,
+ bool useDeriver)
{
StorePathSet drvPaths;
- for (auto & i : installables)
- for (auto & b : i->toDerivedPaths())
+ for (const auto & i : installables)
+ for (const auto & b : i->toDerivedPaths())
std::visit(overloaded {
- [&](DerivedPath::Opaque bo) {
+ [&](const DerivedPath::Opaque & bo) {
if (!useDeriver)
throw Error("argument '%s' did not evaluate to a derivation", i->what());
auto derivers = store->queryValidDerivers(bo.path);
@@ -836,7 +835,7 @@ StorePathSet toDerivations(ref<Store> store,
// FIXME: use all derivers?
drvPaths.insert(*derivers.begin());
},
- [&](DerivedPath::Built bfd) {
+ [&](const DerivedPath::Built & bfd) {
drvPaths.insert(bfd.drvPath);
},
}, b.raw());
diff --git a/src/libcmd/local.mk b/src/libcmd/local.mk
index 0a684468e..8b0662753 100644
--- a/src/libcmd/local.mk
+++ b/src/libcmd/local.mk
@@ -8,7 +8,7 @@ libcmd_SOURCES := $(wildcard $(d)/*.cc)
libcmd_CXXFLAGS += -I src/libutil -I src/libstore -I src/libexpr -I src/libmain -I src/libfetchers
-libcmd_LDFLAGS = -llowdown -pthread
+libcmd_LDFLAGS += -llowdown -pthread
libcmd_LIBS = libstore libutil libexpr libmain libfetchers
diff --git a/src/libcmd/markdown.cc b/src/libcmd/markdown.cc
index d25113d93..29bb4d31e 100644
--- a/src/libcmd/markdown.cc
+++ b/src/libcmd/markdown.cc
@@ -12,7 +12,7 @@ std::string renderMarkdownToTerminal(std::string_view markdown)
struct lowdown_opts opts {
.type = LOWDOWN_TERM,
.maxdepth = 20,
- .cols = std::min(getWindowSize().second, (unsigned short) 80),
+ .cols = std::max(getWindowSize().second, (unsigned short) 80),
.hmargin = 0,
.vmargin = 0,
.feat = LOWDOWN_COMMONMARK | LOWDOWN_FENCED | LOWDOWN_DEFLIST | LOWDOWN_TABLES,
@@ -25,7 +25,7 @@ std::string renderMarkdownToTerminal(std::string_view markdown)
Finally freeDoc([&]() { lowdown_doc_free(doc); });
size_t maxn = 0;
- auto node = lowdown_doc_parse(doc, &maxn, markdown.data(), markdown.size());
+ auto node = lowdown_doc_parse(doc, &maxn, markdown.data(), markdown.size(), nullptr);
if (!node)
throw Error("cannot parse Markdown document");
Finally freeNode([&]() { lowdown_node_free(node); });
@@ -40,11 +40,11 @@ std::string renderMarkdownToTerminal(std::string_view markdown)
throw Error("cannot allocate Markdown output buffer");
Finally freeBuffer([&]() { lowdown_buf_free(buf); });
- int rndr_res = lowdown_term_rndr(buf, nullptr, renderer, node);
+ int rndr_res = lowdown_term_rndr(buf, renderer, node);
if (!rndr_res)
throw Error("allocation error while rendering Markdown");
- return std::string(buf->data, buf->size);
+ return filterANSIEscapes(std::string(buf->data, buf->size), !shouldANSI());
}
}
diff --git a/src/libexpr/attr-set.hh b/src/libexpr/attr-set.hh
index 1da8d91df..7d6ffc9f3 100644
--- a/src/libexpr/attr-set.hh
+++ b/src/libexpr/attr-set.hh
@@ -17,8 +17,8 @@ struct Attr
{
Symbol name;
Value * value;
- Pos * pos;
- Attr(Symbol name, Value * value, Pos * pos = &noPos)
+ ptr<Pos> pos;
+ Attr(Symbol name, Value * value, ptr<Pos> pos = ptr(&noPos))
: name(name), value(value), pos(pos) { };
Attr() : pos(&noPos) { };
bool operator < (const Attr & a) const
@@ -35,13 +35,13 @@ class Bindings
{
public:
typedef uint32_t size_t;
- Pos *pos;
+ ptr<Pos> pos;
private:
size_t size_, capacity_;
Attr attrs[0];
- Bindings(size_t capacity) : size_(0), capacity_(capacity) { }
+ Bindings(size_t capacity) : pos(&noPos), size_(0), capacity_(capacity) { }
Bindings(const Bindings & bindings) = delete;
public:
diff --git a/src/libexpr/eval.cc b/src/libexpr/eval.cc
index 327f7e974..800839a8d 100644
--- a/src/libexpr/eval.cc
+++ b/src/libexpr/eval.cc
@@ -465,6 +465,23 @@ EvalState::~EvalState()
}
+void EvalState::requireExperimentalFeatureOnEvaluation(
+ const std::string & feature,
+ const std::string_view fName,
+ const Pos & pos)
+{
+ if (!settings.isExperimentalFeatureEnabled(feature)) {
+ throw EvalError({
+ .msg = hintfmt(
+ "Cannot call '%2%' because experimental Nix feature '%1%' is disabled. You can enable it via '--extra-experimental-features %1%'.",
+ feature,
+ fName
+ ),
+ .errPos = pos
+ });
+ }
+}
+
Path EvalState::checkSourcePath(const Path & path_)
{
if (!allowedPaths) return path_;
@@ -770,7 +787,7 @@ inline Value * EvalState::lookupVar(Env * env, const ExprVar & var, bool noEval)
}
Bindings::iterator j = env->values[0]->attrs->find(var.name);
if (j != env->values[0]->attrs->end()) {
- if (countCalls && j->pos) attrSelects[*j->pos]++;
+ if (countCalls) attrSelects[*j->pos]++;
return j->value;
}
if (!env->prevWith)
@@ -825,9 +842,9 @@ void EvalState::mkThunk_(Value & v, Expr * expr)
}
-void EvalState::mkPos(Value & v, Pos * pos)
+void EvalState::mkPos(Value & v, ptr<Pos> pos)
{
- if (pos && pos->file.set()) {
+ if (pos->file.set()) {
mkAttrs(v, 3);
mkString(*allocAttr(v, sFile), pos->file);
mkInt(*allocAttr(v, sLine), pos->line);
@@ -895,23 +912,41 @@ void EvalState::evalFile(const Path & path_, Value & v, bool mustBeTrivial)
return;
}
- Path path2 = resolveExprPath(path);
- if ((i = fileEvalCache.find(path2)) != fileEvalCache.end()) {
+ Path resolvedPath = resolveExprPath(path);
+ if ((i = fileEvalCache.find(resolvedPath)) != fileEvalCache.end()) {
v = i->second;
return;
}
- printTalkative("evaluating file '%1%'", path2);
+ printTalkative("evaluating file '%1%'", resolvedPath);
Expr * e = nullptr;
- auto j = fileParseCache.find(path2);
+ auto j = fileParseCache.find(resolvedPath);
if (j != fileParseCache.end())
e = j->second;
if (!e)
- e = parseExprFromFile(checkSourcePath(path2));
+ e = parseExprFromFile(checkSourcePath(resolvedPath));
+
+ cacheFile(path, resolvedPath, e, v, mustBeTrivial);
+}
+
+
+void EvalState::resetFileCache()
+{
+ fileEvalCache.clear();
+ fileParseCache.clear();
+}
+
- fileParseCache[path2] = e;
+void EvalState::cacheFile(
+ const Path & path,
+ const Path & resolvedPath,
+ Expr * e,
+ Value & v,
+ bool mustBeTrivial)
+{
+ fileParseCache[resolvedPath] = e;
try {
// Enforce that 'flake.nix' is a direct attrset, not a
@@ -921,19 +956,12 @@ void EvalState::evalFile(const Path & path_, Value & v, bool mustBeTrivial)
throw EvalError("file '%s' must be an attribute set", path);
eval(e, v);
} catch (Error & e) {
- addErrorTrace(e, "while evaluating the file '%1%':", path2);
+ addErrorTrace(e, "while evaluating the file '%1%':", resolvedPath);
throw;
}
- fileEvalCache[path2] = v;
- if (path != path2) fileEvalCache[path] = v;
-}
-
-
-void EvalState::resetFileCache()
-{
- fileEvalCache.clear();
- fileParseCache.clear();
+ fileEvalCache[resolvedPath] = v;
+ if (path != resolvedPath) fileEvalCache[path] = v;
}
@@ -1027,7 +1055,7 @@ void ExprAttrs::eval(EvalState & state, Env & env, Value & v)
} else
vAttr = i.second.e->maybeThunk(state, i.second.inherited ? env : env2);
env2.values[displ++] = vAttr;
- v.attrs->push_back(Attr(i.first, vAttr, &i.second.pos));
+ v.attrs->push_back(Attr(i.first, vAttr, ptr(&i.second.pos)));
}
/* If the rec contains an attribute called `__overrides', then
@@ -1059,7 +1087,7 @@ void ExprAttrs::eval(EvalState & state, Env & env, Value & v)
else
for (auto & i : attrs)
- v.attrs->push_back(Attr(i.first, i.second.e->maybeThunk(state, env), &i.second.pos));
+ v.attrs->push_back(Attr(i.first, i.second.e->maybeThunk(state, env), ptr(&i.second.pos)));
/* Dynamic attrs apply *after* rec and __overrides. */
for (auto & i : dynamicAttrs) {
@@ -1076,11 +1104,11 @@ void ExprAttrs::eval(EvalState & state, Env & env, Value & v)
i.valueExpr->setName(nameSym);
/* Keep sorted order so find can catch duplicates */
- v.attrs->push_back(Attr(nameSym, i.valueExpr->maybeThunk(state, *dynamicEnv), &i.pos));
+ v.attrs->push_back(Attr(nameSym, i.valueExpr->maybeThunk(state, *dynamicEnv), ptr(&i.pos)));
v.attrs->sort(); // FIXME: inefficient
}
- v.attrs->pos = &pos;
+ v.attrs->pos = ptr(&pos);
}
@@ -1138,7 +1166,7 @@ static string showAttrPath(EvalState & state, Env & env, const AttrPath & attrPa
void ExprSelect::eval(EvalState & state, Env & env, Value & v)
{
Value vTmp;
- Pos * pos2 = 0;
+ ptr<Pos> pos2(&noPos);
Value * vAttrs = &vTmp;
e->eval(state, env, vTmp);
@@ -1164,13 +1192,13 @@ void ExprSelect::eval(EvalState & state, Env & env, Value & v)
}
vAttrs = j->value;
pos2 = j->pos;
- if (state.countCalls && pos2) state.attrSelects[*pos2]++;
+ if (state.countCalls) state.attrSelects[*pos2]++;
}
- state.forceValue(*vAttrs, ( pos2 != NULL ? *pos2 : this->pos ) );
+ state.forceValue(*vAttrs, (*pos2 != noPos ? *pos2 : this->pos ) );
} catch (Error & e) {
- if (pos2 && pos2->file != state.sDerivationNix)
+ if (*pos2 != noPos && pos2->file != state.sDerivationNix)
addErrorTrace(e, *pos2, "while evaluating the attribute '%1%'",
showAttrPath(state, env, attrPath));
throw;
@@ -1576,7 +1604,6 @@ void ExprConcatStrings::eval(EvalState & state, Env & env, Value & v)
and none of the strings are allowed to have contexts. */
if (first) {
firstType = vTmp.type();
- first = false;
}
if (firstType == nInt) {
@@ -1597,7 +1624,12 @@ void ExprConcatStrings::eval(EvalState & state, Env & env, Value & v)
} else
throwEvalError(pos, "cannot add %1% to a float", showType(vTmp));
} else
- s << state.coerceToString(pos, vTmp, context, false, firstType == nString);
+ /* skip canonization of first path, which would only be not
+ canonized in the first place if it's coming from a ./${foo} type
+ path */
+ s << state.coerceToString(pos, vTmp, context, false, firstType == nString, !first);
+
+ first = false;
}
if (firstType == nInt)
@@ -1616,7 +1648,7 @@ void ExprConcatStrings::eval(EvalState & state, Env & env, Value & v)
void ExprPos::eval(EvalState & state, Env & env, Value & v)
{
- state.mkPos(v, &pos);
+ state.mkPos(v, ptr(&pos));
}
@@ -1786,7 +1818,7 @@ std::optional<string> EvalState::tryAttrsToString(const Pos & pos, Value & v,
}
string EvalState::coerceToString(const Pos & pos, Value & v, PathSet & context,
- bool coerceMore, bool copyToStore)
+ bool coerceMore, bool copyToStore, bool canonicalizePath)
{
forceValue(v, pos);
@@ -1798,7 +1830,7 @@ string EvalState::coerceToString(const Pos & pos, Value & v, PathSet & context,
}
if (v.type() == nPath) {
- Path path(canonPath(v.path));
+ Path path(canonicalizePath ? canonPath(v.path) : v.path);
return copyToStore ? copyPathToStore(context, path) : path;
}
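The new `canonicalizePath` flag on `coerceToString` supports antiquoted paths: the parser (further down in this patch) deliberately keeps the trailing slash on the first path segment, and that segment must not be canonicalized before the remaining pieces are appended. A minimal Nix sketch of why (the binding `dir` is hypothetical):

    let dir = "scripts"; in
    ./src/${dir}
    # The first piece is kept as ".../src/" with its trailing slash, so the
    # concatenation yields .../src/scripts.  Canonicalizing it to ".../src"
    # beforehand would instead produce .../srcscripts.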
diff --git a/src/libexpr/eval.hh b/src/libexpr/eval.hh
index 6f3474854..9df6150c6 100644
--- a/src/libexpr/eval.hh
+++ b/src/libexpr/eval.hh
@@ -100,6 +100,8 @@ public:
/* Store used to build stuff. */
const ref<Store> buildStore;
+ RootValue vCallFlake = nullptr;
+ RootValue vImportedDrvToDerivation = nullptr;
private:
SrcToStore srcToStore;
@@ -138,6 +140,12 @@ public:
std::shared_ptr<Store> buildStore = nullptr);
~EvalState();
+ void requireExperimentalFeatureOnEvaluation(
+ const std::string & feature,
+ const std::string_view fName,
+ const Pos & pos
+ );
+
void addToSearchPath(const string & s);
SearchPath getSearchPath() { return searchPath; }
@@ -170,6 +178,14 @@ public:
trivial (i.e. doesn't require arbitrary computation). */
void evalFile(const Path & path, Value & v, bool mustBeTrivial = false);
+ /* Like `cacheFile`, but with an already parsed expression. */
+ void cacheFile(
+ const Path & path,
+ const Path & resolvedPath,
+ Expr * e,
+ Value & v,
+ bool mustBeTrivial = false);
+
void resetFileCache();
/* Look up a file in the search path. */
@@ -224,7 +240,8 @@ public:
booleans and lists to a string. If `copyToStore' is set,
referenced paths are copied to the Nix store as a side effect. */
string coerceToString(const Pos & pos, Value & v, PathSet & context,
- bool coerceMore = false, bool copyToStore = true);
+ bool coerceMore = false, bool copyToStore = true,
+ bool canonicalizePath = true);
string copyPathToStore(PathSet & context, const Path & path);
@@ -308,7 +325,7 @@ public:
void mkList(Value & v, size_t length);
void mkAttrs(Value & v, size_t capacity);
void mkThunk_(Value & v, Expr * expr);
- void mkPos(Value & v, Pos * pos);
+ void mkPos(Value & v, ptr<Pos> pos);
void concatLists(Value & v, size_t nrLists, Value * * lists, const Pos & pos);
diff --git a/src/libexpr/flake/flake.cc b/src/libexpr/flake/flake.cc
index 9e00ff188..1a1fa6938 100644
--- a/src/libexpr/flake/flake.cc
+++ b/src/libexpr/flake/flake.cc
@@ -89,10 +89,12 @@ static void expectType(EvalState & state, ValueType type,
}
static std::map<FlakeId, FlakeInput> parseFlakeInputs(
- EvalState & state, Value * value, const Pos & pos);
+ EvalState & state, Value * value, const Pos & pos,
+ const std::optional<Path> & baseDir);
static FlakeInput parseFlakeInput(EvalState & state,
- const std::string & inputName, Value * value, const Pos & pos)
+ const std::string & inputName, Value * value, const Pos & pos,
+ const std::optional<Path> & baseDir)
{
expectType(state, nAttrs, *value, pos);
@@ -116,7 +118,7 @@ static FlakeInput parseFlakeInput(EvalState & state,
expectType(state, nBool, *attr.value, *attr.pos);
input.isFlake = attr.value->boolean;
} else if (attr.name == sInputs) {
- input.overrides = parseFlakeInputs(state, attr.value, *attr.pos);
+ input.overrides = parseFlakeInputs(state, attr.value, *attr.pos, baseDir);
} else if (attr.name == sFollows) {
expectType(state, nString, *attr.value, *attr.pos);
input.follows = parseInputPath(attr.value->string.s);
@@ -154,7 +156,7 @@ static FlakeInput parseFlakeInput(EvalState & state,
if (!attrs.empty())
throw Error("unexpected flake input attribute '%s', at %s", attrs.begin()->first, pos);
if (url)
- input.ref = parseFlakeRef(*url, {}, true);
+ input.ref = parseFlakeRef(*url, baseDir, true);
}
if (!input.follows && !input.ref)
@@ -164,7 +166,8 @@ static FlakeInput parseFlakeInput(EvalState & state,
}
static std::map<FlakeId, FlakeInput> parseFlakeInputs(
- EvalState & state, Value * value, const Pos & pos)
+ EvalState & state, Value * value, const Pos & pos,
+ const std::optional<Path> & baseDir)
{
std::map<FlakeId, FlakeInput> inputs;
@@ -175,7 +178,8 @@ static std::map<FlakeId, FlakeInput> parseFlakeInputs(
parseFlakeInput(state,
inputAttr.name,
inputAttr.value,
- *inputAttr.pos));
+ *inputAttr.pos,
+ baseDir));
}
return inputs;
@@ -191,7 +195,8 @@ static Flake getFlake(
state, originalRef, allowLookup, flakeCache);
// Guard against symlink attacks.
- auto flakeFile = canonPath(sourceInfo.actualPath + "/" + lockedRef.subdir + "/flake.nix");
+ auto flakeDir = canonPath(sourceInfo.actualPath + "/" + lockedRef.subdir);
+ auto flakeFile = canonPath(flakeDir + "/flake.nix");
if (!isInDir(flakeFile, sourceInfo.actualPath))
throw Error("'flake.nix' file of flake '%s' escapes from '%s'",
lockedRef, state.store->printStorePath(sourceInfo.storePath));
@@ -219,7 +224,7 @@ static Flake getFlake(
auto sInputs = state.symbols.create("inputs");
if (auto inputs = vInfo.attrs->get(sInputs))
- flake.inputs = parseFlakeInputs(state, inputs->value, *inputs->pos);
+ flake.inputs = parseFlakeInputs(state, inputs->value, *inputs->pos, flakeDir);
auto sOutputs = state.symbols.create("outputs");
@@ -488,10 +493,8 @@ LockedFlake lockFlake(
// If this input is a path, recurse it down.
// This allows us to resolve path inputs relative to the current flake.
- if (localRef.input.getType() == "path") {
- localRef.input.parent = parentPath;
- localPath = canonPath(parentPath + "/" + *input.ref->input.getSourcePath());
- }
+ if (localRef.input.getType() == "path")
+ localPath = absPath(*input.ref->input.getSourcePath(), parentPath);
auto inputFlake = getFlake(state, localRef, useRegistries, flakeCache);
@@ -635,8 +638,10 @@ LockedFlake lockFlake(
}
} else
throw Error("cannot write modified lock file of flake '%s' (use '--no-write-lock-file' to ignore)", topRef);
- } else
+ } else {
warn("not writing modified lock file of flake '%s':\n%s", topRef, chomp(diff));
+ flake.forceDirty = true;
+ }
}
return LockedFlake { .flake = std::move(flake), .lockFile = std::move(newLockFile) };
@@ -659,26 +664,32 @@ void callFlake(EvalState & state,
mkString(*vLocks, lockedFlake.lockFile.to_string());
- emitTreeAttrs(state, *lockedFlake.flake.sourceInfo, lockedFlake.flake.lockedRef.input, *vRootSrc);
+ emitTreeAttrs(
+ state,
+ *lockedFlake.flake.sourceInfo,
+ lockedFlake.flake.lockedRef.input,
+ *vRootSrc,
+ false,
+ lockedFlake.flake.forceDirty);
mkString(*vRootSubdir, lockedFlake.flake.lockedRef.subdir);
- static RootValue vCallFlake = nullptr;
-
- if (!vCallFlake) {
- vCallFlake = allocRootValue(state.allocValue());
+ if (!state.vCallFlake) {
+ state.vCallFlake = allocRootValue(state.allocValue());
state.eval(state.parseExprFromString(
#include "call-flake.nix.gen.hh"
- , "/"), **vCallFlake);
+ , "/"), **state.vCallFlake);
}
- state.callFunction(**vCallFlake, *vLocks, *vTmp1, noPos);
+ state.callFunction(**state.vCallFlake, *vLocks, *vTmp1, noPos);
state.callFunction(*vTmp1, *vRootSrc, *vTmp2, noPos);
state.callFunction(*vTmp2, *vRootSubdir, vRes, noPos);
}
static void prim_getFlake(EvalState & state, const Pos & pos, Value * * args, Value & v)
{
+ state.requireExperimentalFeatureOnEvaluation("flakes", "builtins.getFlake", pos);
+
auto flakeRefS = state.forceStringNoCtx(*args[0], pos);
auto flakeRef = parseFlakeRef(flakeRefS, {}, true);
if (evalSettings.pureEval && !flakeRef.input.isImmutable())
@@ -688,13 +699,13 @@ static void prim_getFlake(EvalState & state, const Pos & pos, Value * * args, Va
lockFlake(state, flakeRef,
LockFlags {
.updateLockFile = false,
- .useRegistries = !evalSettings.pureEval && !settings.useRegistries,
+ .useRegistries = !evalSettings.pureEval && settings.useRegistries,
.allowMutable = !evalSettings.pureEval,
}),
v);
}
-static RegisterPrimOp r2("__getFlake", 1, prim_getFlake, "flakes");
+static RegisterPrimOp r2("__getFlake", 1, prim_getFlake);
}
@@ -704,8 +715,9 @@ Fingerprint LockedFlake::getFingerprint() const
// and we haven't changed it, then it's sufficient to use
// flake.sourceInfo.storePath for the fingerprint.
return hashString(htSHA256,
- fmt("%s;%d;%d;%s",
+ fmt("%s;%s;%d;%d;%s",
flake.sourceInfo->storePath.to_string(),
+ flake.lockedRef.subdir,
flake.lockedRef.input.getRevCount().value_or(0),
flake.lockedRef.input.getLastModified().value_or(0),
lockFile));
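With the change above, `builtins.getFlake` is registered unconditionally but refuses to evaluate unless the 'flakes' experimental feature is enabled, via `requireExperimentalFeatureOnEvaluation`. A usage sketch (the flake reference is only an example):

    # Fails with "experimental Nix feature 'flakes' is disabled" unless
    # evaluated with: --extra-experimental-features flakes
    builtins.getFlake "github:NixOS/nixpkgs/nixos-21.05"
    # (a mutable reference like this one additionally requires impure
    # evaluation, as checked in prim_getFlake above)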
diff --git a/src/libexpr/flake/flake.hh b/src/libexpr/flake/flake.hh
index d46da9d68..524b18af1 100644
--- a/src/libexpr/flake/flake.hh
+++ b/src/libexpr/flake/flake.hh
@@ -58,9 +58,10 @@ struct ConfigFile
/* The contents of a flake.nix file. */
struct Flake
{
- FlakeRef originalRef; // the original flake specification (by the user)
- FlakeRef resolvedRef; // registry references and caching resolved to the specific underlying flake
- FlakeRef lockedRef; // the specific local store result of invoking the fetcher
+ FlakeRef originalRef; // the original flake specification (by the user)
+ FlakeRef resolvedRef; // registry references and caching resolved to the specific underlying flake
+ FlakeRef lockedRef; // the specific local store result of invoking the fetcher
+ bool forceDirty = false; // pretend that 'lockedRef' is dirty
std::optional<std::string> description;
std::shared_ptr<const fetchers::Tree> sourceInfo;
FlakeInputs inputs;
@@ -140,6 +141,8 @@ void emitTreeAttrs(
EvalState & state,
const fetchers::Tree & tree,
const fetchers::Input & input,
- Value & v, bool emptyRevFallback = false);
+ Value & v,
+ bool emptyRevFallback = false,
+ bool forceDirty = false);
}
diff --git a/src/libexpr/flake/flakeref.cc b/src/libexpr/flake/flakeref.cc
index 833e8a776..29128d789 100644
--- a/src/libexpr/flake/flakeref.cc
+++ b/src/libexpr/flake/flakeref.cc
@@ -172,8 +172,12 @@ std::pair<FlakeRef, std::string> parseFlakeRefWithFragment(
auto parsedURL = parseURL(url);
std::string fragment;
std::swap(fragment, parsedURL.fragment);
+
+ auto input = Input::fromURL(parsedURL);
+ input.parent = baseDir;
+
return std::make_pair(
- FlakeRef(Input::fromURL(parsedURL), get(parsedURL.query, "dir").value_or("")),
+ FlakeRef(std::move(input), get(parsedURL.query, "dir").value_or("")),
fragment);
}
}
diff --git a/src/libexpr/lexer.l b/src/libexpr/lexer.l
index 27975dc9e..51593eccd 100644
--- a/src/libexpr/lexer.l
+++ b/src/libexpr/lexer.l
@@ -9,6 +9,9 @@
%s DEFAULT
%x STRING
%x IND_STRING
+%x INPATH
+%x INPATH_SLASH
+%x PATH_START
%{
@@ -25,6 +28,8 @@ using namespace nix;
namespace nix {
+// backup to recover from yyless(0)
+YYLTYPE prev_yylloc;
static void initLoc(YYLTYPE * loc)
{
@@ -35,6 +40,8 @@ static void initLoc(YYLTYPE * loc)
static void adjustLoc(YYLTYPE * loc, const char * s, size_t len)
{
+ prev_yylloc = *loc;
+
loc->first_line = loc->last_line;
loc->first_column = loc->last_column;
@@ -97,9 +104,12 @@ ANY .|\n
ID [a-zA-Z\_][a-zA-Z0-9\_\'\-]*
INT [0-9]+
FLOAT (([1-9][0-9]*\.[0-9]*)|(0?\.[0-9]+))([Ee][+-]?[0-9]+)?
-PATH [a-zA-Z0-9\.\_\-\+]*(\/[a-zA-Z0-9\.\_\-\+]+)+\/?
-HPATH \~(\/[a-zA-Z0-9\.\_\-\+]+)+\/?
-SPATH \<[a-zA-Z0-9\.\_\-\+]+(\/[a-zA-Z0-9\.\_\-\+]+)*\>
+PATH_CHAR [a-zA-Z0-9\.\_\-\+]
+PATH {PATH_CHAR}*(\/{PATH_CHAR}+)+\/?
+PATH_SEG {PATH_CHAR}*\/
+HPATH \~(\/{PATH_CHAR}+)+\/?
+HPATH_START \~\/
+SPATH \<{PATH_CHAR}+(\/{PATH_CHAR}+)*\>
URI [a-zA-Z][a-zA-Z0-9\+\-\.]*\:[a-zA-Z0-9\%\/\?\:\@\&\=\+\$\,\-\_\.\!\~\*\']+
@@ -200,17 +210,75 @@ or { return OR_KW; }
return IND_STR;
}
+{PATH_SEG}\$\{ |
+{HPATH_START}\$\{ {
+ PUSH_STATE(PATH_START);
+ yyless(0);
+ *yylloc = prev_yylloc;
+}
+
+<PATH_START>{PATH_SEG} {
+ POP_STATE();
+ PUSH_STATE(INPATH_SLASH);
+ yylval->path = strdup(yytext);
+ return PATH;
+}
+
+<PATH_START>{HPATH_START} {
+ POP_STATE();
+ PUSH_STATE(INPATH_SLASH);
+ yylval->path = strdup(yytext);
+ return HPATH;
+}
+
+{PATH} {
+ if (yytext[yyleng-1] == '/')
+ PUSH_STATE(INPATH_SLASH);
+ else
+ PUSH_STATE(INPATH);
+ yylval->path = strdup(yytext);
+ return PATH;
+}
+{HPATH} {
+ if (yytext[yyleng-1] == '/')
+ PUSH_STATE(INPATH_SLASH);
+ else
+ PUSH_STATE(INPATH);
+ yylval->path = strdup(yytext);
+ return HPATH;
+}
+
+<INPATH,INPATH_SLASH>\$\{ {
+ POP_STATE();
+ PUSH_STATE(INPATH);
+ PUSH_STATE(DEFAULT);
+ return DOLLAR_CURLY;
+}
+<INPATH,INPATH_SLASH>{PATH}|{PATH_SEG}|{PATH_CHAR}+ {
+ POP_STATE();
+ if (yytext[yyleng-1] == '/')
+ PUSH_STATE(INPATH_SLASH);
+ else
+ PUSH_STATE(INPATH);
+ yylval->e = new ExprString(data->symbols.create(string(yytext)));
+ return STR;
+}
+<INPATH>{ANY} |
+<INPATH><<EOF>> {
+ /* if we encounter a non-path character we inform the parser that the path has
+ ended with a PATH_END token and re-parse this character in the default
+ context (it may be ')', ';', or something of that sort) */
+ POP_STATE();
+ yyless(0);
+ *yylloc = prev_yylloc;
+ return PATH_END;
+}
+
+<INPATH_SLASH>{ANY} |
+<INPATH_SLASH><<EOF>> {
+ throw ParseError("path has a trailing slash");
+}
-{PATH} { if (yytext[yyleng-1] == '/')
- throw ParseError("path '%s' has a trailing slash", yytext);
- yylval->path = strdup(yytext);
- return PATH;
- }
-{HPATH} { if (yytext[yyleng-1] == '/')
- throw ParseError("path '%s' has a trailing slash", yytext);
- yylval->path = strdup(yytext);
- return HPATH;
- }
{SPATH} { yylval->path = strdup(yytext); return SPATH; }
{URI} { yylval->uri = strdup(yytext); return URI; }
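A few Nix inputs that exercise the new lexer states above (illustrative only; `a`, `b`, `x`, and `y` are hypothetical bindings):

    ./foo/${x}/bar      # a path; the antiquotation is handled in the INPATH states
    a.${x}/b.${y}       # no slash before the antiquotation: an ordinary division
    ./foo/              # rejected by INPATH_SLASH: "path has a trailing slash"

The PATH_END token emitted by the INPATH rules is consumed by the new `path_start ... PATH_END` productions added to parser.y below.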
diff --git a/src/libexpr/local.mk b/src/libexpr/local.mk
index 5ba11c619..016631647 100644
--- a/src/libexpr/local.mk
+++ b/src/libexpr/local.mk
@@ -15,7 +15,7 @@ libexpr_CXXFLAGS += -I src/libutil -I src/libstore -I src/libfetchers -I src/lib
libexpr_LIBS = libutil libstore libfetchers
-libexpr_LDFLAGS = -lboost_context -pthread
+libexpr_LDFLAGS += -lboost_context -pthread
ifdef HOST_LINUX
libexpr_LDFLAGS += -ldl
endif
diff --git a/src/libexpr/parser.y b/src/libexpr/parser.y
index f948dde47..e3749783a 100644
--- a/src/libexpr/parser.y
+++ b/src/libexpr/parser.y
@@ -290,13 +290,13 @@ void yyerror(YYLTYPE * loc, yyscan_t scanner, ParseData * data, const char * err
%type <formal> formal
%type <attrNames> attrs attrpath
%type <string_parts> string_parts_interpolated ind_string_parts
-%type <e> string_parts string_attr
+%type <e> path_start string_parts string_attr
%type <id> attr
%token <id> ID ATTRPATH
%token <e> STR IND_STR
%token <n> INT
%token <nf> FLOAT
-%token <path> PATH HPATH SPATH
+%token <path> PATH HPATH SPATH PATH_END
%token <uri> URI
%token IF THEN ELSE ASSERT WITH LET IN REC INHERIT EQ NEQ AND OR IMPL OR_KW
%token DOLLAR_CURLY /* == ${ */
@@ -405,8 +405,11 @@ expr_simple
| IND_STRING_OPEN ind_string_parts IND_STRING_CLOSE {
$$ = stripIndentation(CUR_POS, data->symbols, *$2);
}
- | PATH { $$ = new ExprPath(absPath($1, data->basePath)); }
- | HPATH { $$ = new ExprPath(getHome() + string{$1 + 1}); }
+ | path_start PATH_END { $$ = $1; }
+ | path_start string_parts_interpolated PATH_END {
+ $2->insert($2->begin(), $1);
+ $$ = new ExprConcatStrings(CUR_POS, false, $2);
+ }
| SPATH {
string path($1 + 1, strlen($1) - 2);
$$ = new ExprApp(CUR_POS,
@@ -452,6 +455,20 @@ string_parts_interpolated
}
;
+path_start
+ : PATH {
+ Path path(absPath($1, data->basePath));
+ /* add back in the trailing '/' to the first segment */
+ if ($1[strlen($1)-1] == '/' && strlen($1) > 1)
+ path += "/";
+ $$ = new ExprPath(path);
+ }
+ | HPATH {
+ Path path(getHome() + string($1 + 1));
+ $$ = new ExprPath(path);
+ }
+ ;
+
ind_string_parts
: ind_string_parts IND_STR { $$ = $1; $1->push_back($2); }
| ind_string_parts DOLLAR_CURLY expr '}' { $$ = $1; $1->push_back($3); }
diff --git a/src/libexpr/primops.cc b/src/libexpr/primops.cc
index bfe41c9fa..3bf091438 100644
--- a/src/libexpr/primops.cc
+++ b/src/libexpr/primops.cc
@@ -52,7 +52,8 @@ void EvalState::realiseContext(const PathSet & context)
if (drvs.empty()) return;
if (!evalSettings.enableImportFromDerivation)
- throw EvalError("attempted to realize '%1%' during evaluation but 'allow-import-from-derivation' is false",
+ throw Error(
+ "cannot build '%1%' during evaluation because the option 'allow-import-from-derivation' is disabled",
store->printStorePath(drvs.begin()->drvPath));
/* For performance, prefetch all substitute info. */
@@ -124,7 +125,7 @@ static void import(EvalState & state, const Pos & pos, Value & vPath, Value * vS
});
} catch (Error & e) {
e.addTrace(pos, "while importing '%s'", path);
- throw e;
+ throw;
}
Path realPath = state.checkSourcePath(state.toRealPath(path, context));
@@ -160,16 +161,15 @@ static void import(EvalState & state, const Pos & pos, Value & vPath, Value * vS
}
w.attrs->sort();
- static RootValue fun;
- if (!fun) {
- fun = allocRootValue(state.allocValue());
+ if (!state.vImportedDrvToDerivation) {
+ state.vImportedDrvToDerivation = allocRootValue(state.allocValue());
state.eval(state.parseExprFromString(
#include "imported-drv-to-derivation.nix.gen.hh"
- , "/"), **fun);
+ , "/"), **state.vImportedDrvToDerivation);
}
- state.forceFunction(**fun, pos);
- mkApp(v, **fun, w);
+ state.forceFunction(**state.vImportedDrvToDerivation, pos);
+ mkApp(v, **state.vImportedDrvToDerivation, w);
state.forceAttrs(v, pos);
}
@@ -579,7 +579,7 @@ static Bindings::iterator getAttr(
// Adding another trace for the function name to make it clear
// which call received wrong arguments.
e.addTrace(pos, hintfmt("while invoking '%s'", funcName));
- throw e;
+ throw;
}
}
@@ -1174,7 +1174,7 @@ static void prim_derivationStrict(EvalState & state, const Pos & pos, Value * *
// hash per output.
auto hashModulo = hashDerivationModulo(*state.store, Derivation(drv), true);
std::visit(overloaded {
- [&](Hash h) {
+ [&](Hash & h) {
for (auto & i : outputs) {
auto outPath = state.store->makeOutputPath(i, h, drvName);
drv.env[i] = state.store->printStorePath(outPath);
@@ -1186,11 +1186,11 @@ static void prim_derivationStrict(EvalState & state, const Pos & pos, Value * *
});
}
},
- [&](CaOutputHashes) {
+ [&](CaOutputHashes &) {
// Shouldn't happen as the toplevel derivation is not CA.
assert(false);
},
- [&](DeferredHash _) {
+ [&](DeferredHash &) {
for (auto & i : outputs) {
drv.outputs.insert_or_assign(i,
DerivationOutput {
@@ -1493,15 +1493,20 @@ static void prim_hashFile(EvalState & state, const Pos & pos, Value * * args, Va
string type = state.forceStringNoCtx(*args[0], pos);
std::optional<HashType> ht = parseHashType(type);
if (!ht)
- throw Error({
- .msg = hintfmt("unknown hash type '%1%'", type),
- .errPos = pos
- });
+ throw Error({
+ .msg = hintfmt("unknown hash type '%1%'", type),
+ .errPos = pos
+ });
- PathSet context; // discarded
- Path p = state.coerceToPath(pos, *args[1], context);
+ PathSet context;
+ Path path = state.coerceToPath(pos, *args[1], context);
+ try {
+ state.realiseContext(context);
+ } catch (InvalidPathError & e) {
+ throw EvalError("cannot read '%s' since path '%s' is not valid, at %s", path, e.path, pos);
+ }
- mkString(v, hashFile(*ht, state.checkSourcePath(p)).to_string(Base16, false), context);
+ mkString(v, hashFile(*ht, state.checkSourcePath(state.toRealPath(path, context))).to_string(Base16, false));
}
static RegisterPrimOp primop_hashFile({
@@ -1712,7 +1717,7 @@ static void prim_fromJSON(EvalState & state, const Pos & pos, Value * * args, Va
parseJSON(state, s, v);
} catch (JSONParseError &e) {
e.addTrace(pos, "while decoding a JSON string");
- throw e;
+ throw;
}
}
@@ -2109,7 +2114,7 @@ void prim_getAttr(EvalState & state, const Pos & pos, Value * * args, Value & v)
pos
);
// !!! add to stack trace?
- if (state.countCalls && i->pos) state.attrSelects[*i->pos]++;
+ if (state.countCalls && *i->pos != noPos) state.attrSelects[*i->pos]++;
state.forceValue(*i->value, pos);
v = *i->value;
}
@@ -2369,7 +2374,7 @@ static void prim_functionArgs(EvalState & state, const Pos & pos, Value * * args
for (auto & i : args[0]->lambda.fun->formals->formals) {
// !!! should optimise booleans (allocate only once)
Value * value = state.allocValue();
- v.attrs->push_back(Attr(i.name, value, &i.pos));
+ v.attrs->push_back(Attr(i.name, value, ptr(&i.pos)));
mkBool(*value, i.def);
}
v.attrs->sort();
@@ -2897,7 +2902,7 @@ static void prim_concatMap(EvalState & state, const Pos & pos, Value * * args, V
state.forceList(lists[n], lists[n].determinePos(args[0]->determinePos(pos)));
} catch (TypeError &e) {
e.addTrace(pos, hintfmt("while invoking '%s'", "concatMap"));
- throw e;
+ throw;
}
len += lists[n].listSize();
}
@@ -3194,7 +3199,7 @@ static void prim_hashString(EvalState & state, const Pos & pos, Value * * args,
PathSet context; // discarded
string s = state.forceString(*args[1], context, pos);
- mkString(v, hashString(*ht, s).to_string(Base16, false), context);
+ mkString(v, hashString(*ht, s).to_string(Base16, false));
}
static RegisterPrimOp primop_hashString({
@@ -3601,15 +3606,13 @@ static RegisterPrimOp primop_splitVersion({
RegisterPrimOp::PrimOps * RegisterPrimOp::primOps;
-RegisterPrimOp::RegisterPrimOp(std::string name, size_t arity, PrimOpFun fun,
- std::optional<std::string> requiredFeature)
+RegisterPrimOp::RegisterPrimOp(std::string name, size_t arity, PrimOpFun fun)
{
if (!primOps) primOps = new PrimOps;
primOps->push_back({
.name = name,
.args = {},
.arity = arity,
- .requiredFeature = std::move(requiredFeature),
.fun = fun
});
}
@@ -3683,14 +3686,13 @@ void EvalState::createBaseEnv()
if (RegisterPrimOp::primOps)
for (auto & primOp : *RegisterPrimOp::primOps)
- if (!primOp.requiredFeature || settings.isExperimentalFeatureEnabled(*primOp.requiredFeature))
- addPrimOp({
- .fun = primOp.fun,
- .arity = std::max(primOp.args.size(), primOp.arity),
- .name = symbols.create(primOp.name),
- .args = std::move(primOp.args),
- .doc = primOp.doc,
- });
+ addPrimOp({
+ .fun = primOp.fun,
+ .arity = std::max(primOp.args.size(), primOp.arity),
+ .name = symbols.create(primOp.name),
+ .args = std::move(primOp.args),
+ .doc = primOp.doc,
+ });
/* Add a wrapper around the derivation primop that computes the
`drvPath' and `outPath' attributes lazily. */
diff --git a/src/libexpr/primops.hh b/src/libexpr/primops.hh
index 9d42d6539..5b16e075f 100644
--- a/src/libexpr/primops.hh
+++ b/src/libexpr/primops.hh
@@ -15,7 +15,6 @@ struct RegisterPrimOp
std::vector<std::string> args;
size_t arity = 0;
const char * doc;
- std::optional<std::string> requiredFeature;
PrimOpFun fun;
};
@@ -28,8 +27,7 @@ struct RegisterPrimOp
RegisterPrimOp(
std::string name,
size_t arity,
- PrimOpFun fun,
- std::optional<std::string> requiredFeature = {});
+ PrimOpFun fun);
RegisterPrimOp(Info && info);
};
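
The simplified `RegisterPrimOp` constructor above keeps relying on the usual static-registration idiom: each global's constructor appends to a lazily allocated static list that `createBaseEnv()` later walks. A minimal, self-contained sketch of that idiom, assuming nothing about Nix's real types (`Registry` and `demoOp` are illustrative names only):

```cpp
#include <functional>
#include <iostream>
#include <string>
#include <vector>

// Minimal analogue of the RegisterPrimOp pattern: globals register
// themselves into a static list that is consumed later at startup.
struct Registry
{
    struct Info {
        std::string name;
        std::function<int(int)> fun;
    };

    // Allocated on first use, so the static-initialization order of this
    // container relative to the registering globals never matters.
    static std::vector<Info> * entries;

    Registry(std::string name, std::function<int(int)> fun)
    {
        if (!entries) entries = new std::vector<Info>;
        entries->push_back({std::move(name), std::move(fun)});
    }
};

std::vector<Registry::Info> * Registry::entries = nullptr;

// In practice each registered operation lives in its own translation unit.
static Registry demoOp("double", [](int x) { return 2 * x; });

int main()
{
    if (Registry::entries)
        for (auto & e : *Registry::entries)
            std::cout << e.name << "(21) = " << e.fun(21) << "\n";
}
```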
diff --git a/src/libexpr/primops/fetchTree.cc b/src/libexpr/primops/fetchTree.cc
index 730db84ed..06bdec003 100644
--- a/src/libexpr/primops/fetchTree.cc
+++ b/src/libexpr/primops/fetchTree.cc
@@ -16,7 +16,8 @@ void emitTreeAttrs(
const fetchers::Tree & tree,
const fetchers::Input & input,
Value & v,
- bool emptyRevFallback)
+ bool emptyRevFallback,
+ bool forceDirty)
{
assert(input.isImmutable());
@@ -33,24 +34,28 @@ void emitTreeAttrs(
mkString(*state.allocAttr(v, state.symbols.create("narHash")),
narHash->to_string(SRI, true));
- if (auto rev = input.getRev()) {
- mkString(*state.allocAttr(v, state.symbols.create("rev")), rev->gitRev());
- mkString(*state.allocAttr(v, state.symbols.create("shortRev")), rev->gitShortRev());
- } else if (emptyRevFallback) {
- // Backwards compat for `builtins.fetchGit`: dirty repos return an empty sha1 as rev
- auto emptyHash = Hash(htSHA1);
- mkString(*state.allocAttr(v, state.symbols.create("rev")), emptyHash.gitRev());
- mkString(*state.allocAttr(v, state.symbols.create("shortRev")), emptyHash.gitShortRev());
- }
-
if (input.getType() == "git")
mkBool(*state.allocAttr(v, state.symbols.create("submodules")),
fetchers::maybeGetBoolAttr(input.attrs, "submodules").value_or(false));
- if (auto revCount = input.getRevCount())
- mkInt(*state.allocAttr(v, state.symbols.create("revCount")), *revCount);
- else if (emptyRevFallback)
- mkInt(*state.allocAttr(v, state.symbols.create("revCount")), 0);
+ if (!forceDirty) {
+
+ if (auto rev = input.getRev()) {
+ mkString(*state.allocAttr(v, state.symbols.create("rev")), rev->gitRev());
+ mkString(*state.allocAttr(v, state.symbols.create("shortRev")), rev->gitShortRev());
+ } else if (emptyRevFallback) {
+ // Backwards compat for `builtins.fetchGit`: dirty repos return an empty sha1 as rev
+ auto emptyHash = Hash(htSHA1);
+ mkString(*state.allocAttr(v, state.symbols.create("rev")), emptyHash.gitRev());
+ mkString(*state.allocAttr(v, state.symbols.create("shortRev")), emptyHash.gitShortRev());
+ }
+
+ if (auto revCount = input.getRevCount())
+ mkInt(*state.allocAttr(v, state.symbols.create("revCount")), *revCount);
+ else if (emptyRevFallback)
+ mkInt(*state.allocAttr(v, state.symbols.create("revCount")), 0);
+
+ }
if (auto lastModified = input.getLastModified()) {
mkInt(*state.allocAttr(v, state.symbols.create("lastModified")), *lastModified);
@@ -167,7 +172,7 @@ static void fetchTree(
if (state.allowedPaths)
state.allowedPaths->insert(tree.actualPath);
- emitTreeAttrs(state, tree, input2, v, params.emptyRevFallback);
+ emitTreeAttrs(state, tree, input2, v, params.emptyRevFallback, false);
}
static void prim_fetchTree(EvalState & state, const Pos & pos, Value * * args, Value & v)
@@ -229,20 +234,21 @@ static void fetch(EvalState & state, const Pos & pos, Value * * args, Value & v,
? fetchers::downloadTarball(state.store, *url, name, (bool) expectedHash).first.storePath
: fetchers::downloadFile(state.store, *url, name, (bool) expectedHash).storePath;
- auto path = state.store->toRealPath(storePath);
+ auto realPath = state.store->toRealPath(storePath);
if (expectedHash) {
auto hash = unpack
? state.store->queryPathInfo(storePath)->narHash
- : hashFile(htSHA256, path);
+ : hashFile(htSHA256, realPath);
if (hash != *expectedHash)
throw Error((unsigned int) 102, "hash mismatch in file downloaded from '%s':\n specified: %s\n got: %s",
*url, expectedHash->to_string(Base32, true), hash.to_string(Base32, true));
}
if (state.allowedPaths)
- state.allowedPaths->insert(path);
+ state.allowedPaths->insert(realPath);
+ auto path = state.store->printStorePath(storePath);
mkString(v, path, PathSet({path}));
}
diff --git a/src/libexpr/value-to-xml.cc b/src/libexpr/value-to-xml.cc
index 7464455d8..2ddc5f751 100644
--- a/src/libexpr/value-to-xml.cc
+++ b/src/libexpr/value-to-xml.cc
@@ -42,7 +42,7 @@ static void showAttrs(EvalState & state, bool strict, bool location,
XMLAttrs xmlAttrs;
xmlAttrs["name"] = i;
- if (location && a.pos != &noPos) posToXML(xmlAttrs, *a.pos);
+ if (location && a.pos != ptr(&noPos)) posToXML(xmlAttrs, *a.pos);
XMLOpenElement _(doc, "attr", xmlAttrs);
printValueAsXML(state, strict, location,
diff --git a/src/libfetchers/git.cc b/src/libfetchers/git.cc
index d89763ccd..8468d2afc 100644
--- a/src/libfetchers/git.cc
+++ b/src/libfetchers/git.cc
@@ -13,6 +13,12 @@ using namespace std::string_literals;
namespace nix::fetchers {
+// Explicit initial branch of our bare repo to suppress warnings from newer versions of git.
+// The value itself does not matter, since we always fetch a specific revision or branch.
+// It is set with `-c init.defaultBranch=` instead of `--initial-branch=` to stay compatible with
+// old versions of git, which ignore unrecognized `-c` options.
+const std::string gitInitialBranch = "__nix_dummy_branch";
+
static std::string readHead(const Path & path)
{
return chomp(runProgram("git", true, { "-C", path, "rev-parse", "--abbrev-ref", "HEAD" }));
@@ -324,7 +330,7 @@ struct GitInputScheme : InputScheme
lockFile(lock.get(), ltWrite, true);
if (!pathExists(cacheDir)) {
- runProgram("git", true, { "init", "--bare", repoDir });
+ runProgram("git", true, { "-c", "init.defaultBranch=" + gitInitialBranch, "init", "--bare", repoDir });
}
deleteLockFile(cacheDirLock, lock.get());
@@ -413,17 +419,14 @@ struct GitInputScheme : InputScheme
AutoDelete delTmpDir(tmpDir, true);
PathFilter filter = defaultPathFilter;
- RunOptions checkCommitOpts(
- "git",
- { "-C", repoDir, "cat-file", "commit", input.getRev()->gitRev() }
- );
- checkCommitOpts.searchPath = true;
- checkCommitOpts.mergeStderrToStdout = true;
-
- auto result = runProgram(checkCommitOpts);
+ auto result = runProgram(RunOptions {
+ .program = "git",
+ .args = { "-C", repoDir, "cat-file", "commit", input.getRev()->gitRev() },
+ .mergeStderrToStdout = true
+ });
if (WEXITSTATUS(result.first) == 128
- && result.second.find("bad file") != std::string::npos
- ) {
+ && result.second.find("bad file") != std::string::npos)
+ {
throw Error(
"Cannot find Git revision '%s' in ref '%s' of repository '%s'! "
"Please make sure that the " ANSI_BOLD "rev" ANSI_NORMAL " exists on the "
@@ -439,7 +442,7 @@ struct GitInputScheme : InputScheme
Path tmpGitDir = createTempDir();
AutoDelete delTmpGitDir(tmpGitDir, true);
- runProgram("git", true, { "init", tmpDir, "--separate-git-dir", tmpGitDir });
+ runProgram("git", true, { "-c", "init.defaultBranch=" + gitInitialBranch, "init", tmpDir, "--separate-git-dir", tmpGitDir });
// TODO: repoDir might lack the ref (it only checks if rev
// exists, see FIXME above) so use a big hammer and fetch
// everything to ensure we get the rev.
@@ -455,9 +458,11 @@ struct GitInputScheme : InputScheme
// FIXME: should pipe this, or find some better way to extract a
// revision.
auto source = sinkToSource([&](Sink & sink) {
- RunOptions gitOptions("git", { "-C", repoDir, "archive", input.getRev()->gitRev() });
- gitOptions.standardOut = &sink;
- runProgram2(gitOptions);
+ runProgram2({
+ .program = "git",
+ .args = { "-C", repoDir, "archive", input.getRev()->gitRev() },
+ .standardOut = &sink
+ });
});
unpackTarfile(*source, tmpDir);
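
The hunks above replace setter-style `RunOptions` construction with aggregate initialization. A small sketch of the same call style, assuming nothing about Nix's real `RunOptions` beyond the field names visible in the diff (C++20 designated initializers; fields must appear in declaration order):

```cpp
#include <iostream>
#include <optional>
#include <string>
#include <vector>

// Shape of an options struct suited to designated initializers: every
// field has a sensible default, so callers name only what they need.
struct RunOptionsSketch
{
    std::string program;
    bool searchPath = false;
    std::vector<std::string> args;
    std::optional<std::string> input;
    bool mergeStderrToStdout = false;
};

// Stand-in for runProgram(): just echoes the command it would run.
static void runSketch(const RunOptionsSketch & opts)
{
    std::cout << opts.program;
    for (auto & a : opts.args) std::cout << ' ' << a;
    std::cout << (opts.mergeStderrToStdout ? "  (2>&1)" : "") << '\n';
}

int main()
{
    runSketch({
        .program = "git",
        .searchPath = true,
        .args = { "-C", ".", "rev-parse", "HEAD" },
        .mergeStderrToStdout = true,
    });
}
```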
diff --git a/src/libfetchers/github.cc b/src/libfetchers/github.cc
index 298c05f9a..ffc44e9e2 100644
--- a/src/libfetchers/github.cc
+++ b/src/libfetchers/github.cc
@@ -273,9 +273,9 @@ struct GitHubInputScheme : GitArchiveInputScheme
void clone(const Input & input, const Path & destDir) override
{
auto host = maybeGetStrAttr(input.attrs, "host").value_or("github.com");
- Input::fromURL(fmt("git+ssh://git@%s/%s/%s.git",
+ Input::fromURL(fmt("git+https://%s/%s/%s.git",
host, getStrAttr(input.attrs, "owner"), getStrAttr(input.attrs, "repo")))
- .applyOverrides(input.getRef().value_or("HEAD"), input.getRev())
+ .applyOverrides(input.getRef(), input.getRev())
.clone(destDir);
}
};
@@ -341,9 +341,9 @@ struct GitLabInputScheme : GitArchiveInputScheme
{
auto host = maybeGetStrAttr(input.attrs, "host").value_or("gitlab.com");
// FIXME: get username somewhere
- Input::fromURL(fmt("git+ssh://git@%s/%s/%s.git",
+ Input::fromURL(fmt("git+https://%s/%s/%s.git",
host, getStrAttr(input.attrs, "owner"), getStrAttr(input.attrs, "repo")))
- .applyOverrides(input.getRef().value_or("HEAD"), input.getRev())
+ .applyOverrides(input.getRef(), input.getRev())
.clone(destDir);
}
};
diff --git a/src/libfetchers/local.mk b/src/libfetchers/local.mk
index bbef03afe..2e8869d83 100644
--- a/src/libfetchers/local.mk
+++ b/src/libfetchers/local.mk
@@ -8,6 +8,6 @@ libfetchers_SOURCES := $(wildcard $(d)/*.cc)
libfetchers_CXXFLAGS += -I src/libutil -I src/libstore
-libfetchers_LDFLAGS = -pthread
+libfetchers_LDFLAGS += -pthread
libfetchers_LIBS = libutil libstore
diff --git a/src/libfetchers/mercurial.cc b/src/libfetchers/mercurial.cc
index efb4ee8db..d52d4641b 100644
--- a/src/libfetchers/mercurial.cc
+++ b/src/libfetchers/mercurial.cc
@@ -11,34 +11,32 @@ using namespace std::string_literals;
namespace nix::fetchers {
-namespace {
-
-RunOptions hgOptions(const Strings & args) {
- RunOptions opts("hg", args);
- opts.searchPath = true;
-
- auto env = getEnv();
- // Set HGPLAIN: this means we get consistent output from hg and avoids leakage from a user or system .hgrc.
- env["HGPLAIN"] = "";
- opts.environment = env;
-
- return opts;
+static RunOptions hgOptions(const Strings & args)
+{
+ auto env = getEnv();
+ // Set HGPLAIN: this means we get consistent output from hg and avoids leakage from a user or system .hgrc.
+ env["HGPLAIN"] = "";
+
+ return {
+ .program = "hg",
+ .searchPath = true,
+ .args = args,
+ .environment = env
+ };
}
// runProgram wrapper that uses hgOptions instead of stock RunOptions.
-string runHg(const Strings & args, const std::optional<std::string> & input = {})
+static string runHg(const Strings & args, const std::optional<std::string> & input = {})
{
- RunOptions opts = hgOptions(args);
- opts.input = input;
-
- auto res = runProgram(opts);
+ RunOptions opts = hgOptions(args);
+ opts.input = input;
- if (!statusOk(res.first))
- throw ExecError(res.first, fmt("hg %1%", statusToString(res.first)));
+ auto res = runProgram(std::move(opts));
- return res.second;
-}
+ if (!statusOk(res.first))
+ throw ExecError(res.first, fmt("hg %1%", statusToString(res.first)));
+ return res.second;
}
struct MercurialInputScheme : InputScheme
@@ -253,9 +251,7 @@ struct MercurialInputScheme : InputScheme
have to pull again. */
if (!(input.getRev()
&& pathExists(cacheDir)
- && runProgram(
- hgOptions({ "log", "-R", cacheDir, "-r", input.getRev()->gitRev(), "--template", "1" })
- .killStderr(true)).second == "1"))
+ && runProgram(hgOptions({ "log", "-R", cacheDir, "-r", input.getRev()->gitRev(), "--template", "1" })).second == "1"))
{
Activity act(*logger, lvlTalkative, actUnknown, fmt("fetching Mercurial repository '%s'", actualUrl));
diff --git a/src/libfetchers/path.cc b/src/libfetchers/path.cc
index b6fcdac9e..fb5702c4c 100644
--- a/src/libfetchers/path.cc
+++ b/src/libfetchers/path.cc
@@ -85,18 +85,26 @@ struct PathInputScheme : InputScheme
std::string absPath;
auto path = getStrAttr(input.attrs, "path");
- if (path[0] != '/' && input.parent) {
+ if (path[0] != '/') {
+ if (!input.parent)
+ throw Error("cannot fetch input '%s' because it uses a relative path", input.to_string());
+
auto parent = canonPath(*input.parent);
// the path isn't relative, prefix it
- absPath = canonPath(parent + "/" + path);
+ absPath = nix::absPath(path, parent);
// for security, ensure that if the parent is a store path, it's inside it
- if (!parent.rfind(store->storeDir, 0) && absPath.rfind(store->storeDir, 0))
- throw BadStorePath("relative path '%s' points outside of its parent's store path %s, this is a security violation", path, parent);
+ if (store->isInStore(parent)) {
+ auto storePath = store->printStorePath(store->toStorePath(parent).first);
+ if (!isInDir(absPath, storePath))
+ throw BadStorePath("relative path '%s' points outside of its parent's store path '%s'", path, storePath);
+ }
} else
absPath = path;
+ Activity act(*logger, lvlTalkative, actUnknown, fmt("copying '%s'", absPath));
+
// FIXME: check whether access to 'path' is allowed.
auto storePath = store->maybeParseStorePath(absPath);
diff --git a/src/libmain/local.mk b/src/libmain/local.mk
index f45d6e3ff..99da95e27 100644
--- a/src/libmain/local.mk
+++ b/src/libmain/local.mk
@@ -8,7 +8,7 @@ libmain_SOURCES := $(wildcard $(d)/*.cc)
libmain_CXXFLAGS += -I src/libutil -I src/libstore
-libmain_LDFLAGS = $(OPENSSL_LIBS)
+libmain_LDFLAGS += $(OPENSSL_LIBS)
libmain_LIBS = libstore libutil
diff --git a/src/libmain/shared.cc b/src/libmain/shared.cc
index 3a9529c4d..85f9f0d58 100644
--- a/src/libmain/shared.cc
+++ b/src/libmain/shared.cc
@@ -15,6 +15,9 @@
#include <sys/stat.h>
#include <unistd.h>
#include <signal.h>
+#include <sys/types.h>
+#include <sys/socket.h>
+#include <netdb.h>
#include <openssl/crypto.h>
@@ -110,6 +113,31 @@ static void opensslLockCallback(int mode, int type, const char * file, int line)
}
#endif
+static std::once_flag dns_resolve_flag;
+
+static void preloadNSS() {
+ /* builtin:fetchurl can trigger a DNS lookup, which with glibc can trigger a dynamic library load of
+ one of the glibc NSS libraries in a sandboxed child, which will fail unless the library's already
+ been loaded in the parent. So we force a lookup of an invalid domain to force the NSS machinery to
+ load its lookup libraries in the parent before any child gets a chance to. */
+ std::call_once(dns_resolve_flag, []() {
+ struct addrinfo *res = NULL;
+
+        /* nss will only force the "local" (not through nscd) dns resolution if it's on the LOCALDOMAIN.
+           We need the resolution to be done locally, as the nscd socket will not be accessible in the
+ sandbox. */
+ char * previous_env = getenv("LOCALDOMAIN");
+ setenv("LOCALDOMAIN", "invalid", 1);
+ if (getaddrinfo("this.pre-initializes.the.dns.resolvers.invalid.", "http", NULL, &res) == 0) {
+ if (res) freeaddrinfo(res);
+ }
+ if (previous_env) {
+ setenv("LOCALDOMAIN", previous_env, 1);
+ } else {
+ unsetenv("LOCALDOMAIN");
+ }
+ });
+}
static void sigHandler(int signo) { }
@@ -176,6 +204,8 @@ void initNix()
if (hasPrefix(getEnv("TMPDIR").value_or("/tmp"), "/var/folders/"))
unsetenv("TMPDIR");
#endif
+
+ preloadNSS();
}
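
For reference, the NSS preload trick moved into `initNix()` above can be exercised as a standalone program. A sketch under the same assumptions as the hunk (glibc NSS, `LOCALDOMAIN` forcing local resolution, an intentionally invalid domain); the only deviation is copying the previous `LOCALDOMAIN` value before overwriting it, since `setenv()` may invalidate the pointer returned by `getenv()`:

```cpp
#include <cstdlib>
#include <mutex>
#include <netdb.h>
#include <optional>
#include <string>
#include <sys/socket.h>
#include <sys/types.h>

static std::once_flag dnsResolveFlag;

// Force glibc to load its NSS lookup libraries now, so a later sandboxed
// child (which cannot load them) inherits them already mapped.
static void preloadNSSSketch()
{
    std::call_once(dnsResolveFlag, [] {
        // Copy the old value first: setenv() may invalidate getenv()'s result.
        std::optional<std::string> previous;
        if (const char * p = getenv("LOCALDOMAIN")) previous = p;

        // LOCALDOMAIN makes the lookup bypass nscd, whose socket would not
        // be reachable from inside the sandbox anyway.
        setenv("LOCALDOMAIN", "invalid", 1);

        addrinfo * res = nullptr;
        if (getaddrinfo("this.pre-initializes.the.dns.resolvers.invalid.",
                        "http", nullptr, &res) == 0)
            if (res) freeaddrinfo(res);

        if (previous)
            setenv("LOCALDOMAIN", previous->c_str(), 1);
        else
            unsetenv("LOCALDOMAIN");
    });
}

int main()
{
    preloadNSSSketch();
}
```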
diff --git a/src/libstore/binary-cache-store.cc b/src/libstore/binary-cache-store.cc
index 74eb0a9ab..3a6be541f 100644
--- a/src/libstore/binary-cache-store.cc
+++ b/src/libstore/binary-cache-store.cc
@@ -52,9 +52,9 @@ void BinaryCacheStore::init()
throw Error("binary cache '%s' is for Nix stores with prefix '%s', not '%s'",
getUri(), value, storeDir);
} else if (name == "WantMassQuery") {
- wantMassQuery.setDefault(value == "1" ? "true" : "false");
+ wantMassQuery.setDefault(value == "1");
} else if (name == "Priority") {
- priority.setDefault(fmt("%d", std::stoi(value)));
+ priority.setDefault(std::stoi(value));
}
}
}
diff --git a/src/libstore/build/derivation-goal.cc b/src/libstore/build/derivation-goal.cc
index 876b8def0..0907120db 100644
--- a/src/libstore/build/derivation-goal.cc
+++ b/src/libstore/build/derivation-goal.cc
@@ -566,7 +566,7 @@ void DerivationGoal::tryToBuild()
lockFiles.insert(worker.store.Store::toRealPath(*i.second.second));
else
lockFiles.insert(
- worker.store.Store::toRealPath(drvPath) + "!" + i.first
+ worker.store.Store::toRealPath(drvPath) + "." + i.first
);
}
}
@@ -774,9 +774,6 @@ void runPostBuildHook(
hookEnvironment.emplace("OUT_PATHS", chomp(concatStringsSep(" ", store.printStorePathSet(outputPaths))));
hookEnvironment.emplace("NIX_CONFIG", globalConfig.toKeyValue());
- RunOptions opts(settings.postBuildHook, {});
- opts.environment = hookEnvironment;
-
struct LogSink : Sink {
Activity & act;
std::string currentLine;
@@ -807,9 +804,12 @@ void runPostBuildHook(
};
LogSink sink(act);
- opts.standardOut = &sink;
- opts.mergeStderrToStdout = true;
- runProgram2(opts);
+ runProgram2({
+ .program = settings.postBuildHook,
+ .environment = hookEnvironment,
+ .standardOut = &sink,
+ .mergeStderrToStdout = true,
+ });
}
void DerivationGoal::buildDone()
@@ -1009,7 +1009,7 @@ HookReply DerivationGoal::tryBuildHook()
return readLine(worker.hook->fromHook.readSide.get());
} catch (Error & e) {
e.addTrace({}, "while reading the response from the build hook");
- throw e;
+ throw;
}
}();
if (handleJSONLogMessage(s, worker.act, worker.hook->activities, true))
@@ -1055,7 +1055,7 @@ HookReply DerivationGoal::tryBuildHook()
machineName = readLine(hook->fromHook.readSide.get());
} catch (Error & e) {
e.addTrace({}, "while reading the machine name from the build hook");
- throw e;
+ throw;
}
/* Tell the hook all the inputs that have to be copied to the
diff --git a/src/libstore/build/entry-points.cc b/src/libstore/build/entry-points.cc
index 96deb81d1..2b77e4354 100644
--- a/src/libstore/build/entry-points.cc
+++ b/src/libstore/build/entry-points.cc
@@ -11,12 +11,12 @@ void Store::buildPaths(const std::vector<DerivedPath> & reqs, BuildMode buildMod
Worker worker(*this, evalStore ? *evalStore : *this);
Goals goals;
- for (auto & br : reqs) {
+ for (const auto & br : reqs) {
std::visit(overloaded {
- [&](DerivedPath::Built bfd) {
+ [&](const DerivedPath::Built & bfd) {
goals.insert(worker.makeDerivationGoal(bfd.drvPath, bfd.outputs, buildMode));
},
- [&](DerivedPath::Opaque bo) {
+ [&](const DerivedPath::Opaque & bo) {
goals.insert(worker.makePathSubstitutionGoal(bo.path, buildMode == bmRepair ? Repair : NoRepair));
},
}, br.raw());
diff --git a/src/libstore/build/local-derivation-goal.cc b/src/libstore/build/local-derivation-goal.cc
index 990ff60b7..e91e35851 100644
--- a/src/libstore/build/local-derivation-goal.cc
+++ b/src/libstore/build/local-derivation-goal.cc
@@ -17,16 +17,14 @@
#include <regex>
#include <queue>
-#include <sys/types.h>
-#include <sys/socket.h>
#include <sys/un.h>
-#include <netdb.h>
#include <fcntl.h>
#include <termios.h>
#include <unistd.h>
#include <sys/mman.h>
#include <sys/utsname.h>
#include <sys/resource.h>
+#include <sys/socket.h>
#if HAVE_STATVFS
#include <sys/statvfs.h>
@@ -34,7 +32,6 @@
/* Includes required for chroot support. */
#if __linux__
-#include <sys/socket.h>
#include <sys/ioctl.h>
#include <net/if.h>
#include <netinet/ip.h>
@@ -70,12 +67,14 @@ void handleDiffHook(
auto diffHook = settings.diffHook;
if (diffHook != "" && settings.runDiffHook) {
try {
- RunOptions diffHookOptions(diffHook,{tryA, tryB, drvPath, tmpDir});
- diffHookOptions.searchPath = true;
- diffHookOptions.uid = uid;
- diffHookOptions.gid = gid;
- diffHookOptions.chdir = "/";
- auto diffRes = runProgram(diffHookOptions);
+ auto diffRes = runProgram(RunOptions {
+ .program = diffHook,
+ .searchPath = true,
+ .args = {tryA, tryB, drvPath, tmpDir},
+ .uid = uid,
+ .gid = gid,
+ .chdir = "/"
+ });
if (!statusOk(diffRes.first))
throw ExecError(diffRes.first,
"diff-hook program '%1%' %2%",
@@ -344,23 +343,6 @@ int childEntry(void * arg)
}
-static std::once_flag dns_resolve_flag;
-
-static void preloadNSS() {
- /* builtin:fetchurl can trigger a DNS lookup, which with glibc can trigger a dynamic library load of
- one of the glibc NSS libraries in a sandboxed child, which will fail unless the library's already
- been loaded in the parent. So we force a lookup of an invalid domain to force the NSS machinery to
- load its lookup libraries in the parent before any child gets a chance to. */
- std::call_once(dns_resolve_flag, []() {
- struct addrinfo *res = NULL;
-
- if (getaddrinfo("this.pre-initializes.the.dns.resolvers.invalid.", "http", NULL, &res) != 0) {
- if (res) freeaddrinfo(res);
- }
- });
-}
-
-
static void linkOrCopy(const Path & from, const Path & to)
{
if (link(from.c_str(), to.c_str()) == -1) {
@@ -389,9 +371,6 @@ void LocalDerivationGoal::startBuilder()
settings.thisSystem,
concatStringsSep<StringSet>(", ", worker.store.systemFeatures));
- if (drv->isBuiltin())
- preloadNSS();
-
#if __APPLE__
additionalSandboxProfile = parsedDrv->getStringAttr("__sandboxProfile").value_or("");
#endif
@@ -959,9 +938,12 @@ void LocalDerivationGoal::startBuilder()
try {
return readLine(builderOut.readSide.get());
} catch (Error & e) {
- e.addTrace({}, "while waiting for the build environment to initialize (previous messages: %s)",
+ auto status = pid.wait();
+ e.addTrace({}, "while waiting for the build environment for '%s' to initialize (%s, previous messages: %s)",
+ worker.store.printStorePath(drvPath),
+ statusToString(status),
concatStringsSep("|", msgs));
- throw e;
+ throw;
}
}();
if (string(msg, 0, 1) == "\2") break;
@@ -969,7 +951,7 @@ void LocalDerivationGoal::startBuilder()
FdSource source(builderOut.readSide.get());
auto ex = readError(source);
ex.addTrace({}, "while setting up the build environment");
- throw ex;
+ throw;
}
debug("sandbox setup: " + msg);
msgs.push_back(std::move(msg));
@@ -1112,10 +1094,10 @@ void LocalDerivationGoal::writeStructuredAttrs()
static StorePath pathPartOfReq(const DerivedPath & req)
{
return std::visit(overloaded {
- [&](DerivedPath::Opaque bo) {
+ [&](const DerivedPath::Opaque & bo) {
return bo.path;
},
- [&](DerivedPath::Built bfd) {
+ [&](const DerivedPath::Built & bfd) {
return bfd.drvPath;
},
}, req.raw());
@@ -1853,7 +1835,7 @@ void LocalDerivationGoal::runChild()
/* Fill in the arguments. */
Strings args;
- const char *builder = "invalid";
+ std::string builder = "invalid";
if (drv->isBuiltin()) {
;
@@ -1979,13 +1961,13 @@ void LocalDerivationGoal::runChild()
}
args.push_back(drv->builder);
} else {
- builder = drv->builder.c_str();
+ builder = drv->builder;
args.push_back(std::string(baseNameOf(drv->builder)));
}
}
#else
else {
- builder = drv->builder.c_str();
+ builder = drv->builder;
args.push_back(std::string(baseNameOf(drv->builder)));
}
#endif
@@ -2041,9 +2023,9 @@ void LocalDerivationGoal::runChild()
posix_spawnattr_setbinpref_np(&attrp, 1, &cpu, NULL);
}
- posix_spawn(NULL, builder, NULL, &attrp, stringsToCharPtrs(args).data(), stringsToCharPtrs(envStrs).data());
+ posix_spawn(NULL, builder.c_str(), NULL, &attrp, stringsToCharPtrs(args).data(), stringsToCharPtrs(envStrs).data());
#else
- execve(builder, stringsToCharPtrs(args).data(), stringsToCharPtrs(envStrs).data());
+ execve(builder.c_str(), stringsToCharPtrs(args).data(), stringsToCharPtrs(envStrs).data());
#endif
throw SysError("executing '%1%'", drv->builder);
@@ -2173,8 +2155,8 @@ void LocalDerivationGoal::registerOutputs()
/* Since we'll use the already installed versions of these, we
can treat them as leaves and ignore any references they
have. */
- [&](AlreadyRegistered _) { return StringSet {}; },
- [&](PerhapsNeedToRegister refs) {
+ [&](const AlreadyRegistered &) { return StringSet {}; },
+ [&](const PerhapsNeedToRegister & refs) {
StringSet referencedOutputs;
/* FIXME build inverted map up front so no quadratic waste here */
for (auto & r : refs.refs)
@@ -2210,11 +2192,11 @@ void LocalDerivationGoal::registerOutputs()
};
std::optional<StorePathSet> referencesOpt = std::visit(overloaded {
- [&](AlreadyRegistered skippedFinalPath) -> std::optional<StorePathSet> {
+ [&](const AlreadyRegistered & skippedFinalPath) -> std::optional<StorePathSet> {
finish(skippedFinalPath.path);
return std::nullopt;
},
- [&](PerhapsNeedToRegister r) -> std::optional<StorePathSet> {
+ [&](const PerhapsNeedToRegister & r) -> std::optional<StorePathSet> {
return r.refs;
},
}, outputReferencesIfUnregistered.at(outputName));
@@ -2330,7 +2312,7 @@ void LocalDerivationGoal::registerOutputs()
};
ValidPathInfo newInfo = std::visit(overloaded {
- [&](DerivationOutputInputAddressed output) {
+ [&](const DerivationOutputInputAddressed & output) {
/* input-addressed case */
auto requiredFinalPath = output.path;
/* Preemptively add rewrite rule for final hash, as that is
@@ -2349,14 +2331,14 @@ void LocalDerivationGoal::registerOutputs()
newInfo0.references.insert(newInfo0.path);
return newInfo0;
},
- [&](DerivationOutputCAFixed dof) {
+ [&](const DerivationOutputCAFixed & dof) {
auto newInfo0 = newInfoFromCA(DerivationOutputCAFloating {
.method = dof.hash.method,
.hashType = dof.hash.hash.type,
});
/* Check wanted hash */
- Hash & wanted = dof.hash.hash;
+ const Hash & wanted = dof.hash.hash;
assert(newInfo0.ca);
auto got = getContentAddressHash(*newInfo0.ca);
if (wanted != got) {
@@ -2492,7 +2474,13 @@ void LocalDerivationGoal::registerOutputs()
infos.emplace(outputName, std::move(newInfo));
}
- if (buildMode == bmCheck) return;
+ if (buildMode == bmCheck) {
+        // In case of FOD mismatches on `--check`, an error must be thrown, as this is also
+        // a source of non-determinism.
+ if (delayedException)
+ std::rethrow_exception(delayedException);
+ return;
+ }
/* Apply output checks. */
checkOutputs(infos);
diff --git a/src/libstore/content-address.cc b/src/libstore/content-address.cc
index 90a3ad1f5..974d1c471 100644
--- a/src/libstore/content-address.cc
+++ b/src/libstore/content-address.cc
@@ -31,10 +31,10 @@ std::string makeFixedOutputCA(FileIngestionMethod method, const Hash & hash)
std::string renderContentAddress(ContentAddress ca)
{
return std::visit(overloaded {
- [](TextHash th) {
+ [](TextHash & th) {
return "text:" + th.hash.to_string(Base32, true);
},
- [](FixedOutputHash fsh) {
+ [](FixedOutputHash & fsh) {
return makeFixedOutputCA(fsh.method, fsh.hash);
}
}, ca);
@@ -43,10 +43,10 @@ std::string renderContentAddress(ContentAddress ca)
std::string renderContentAddressMethod(ContentAddressMethod cam)
{
return std::visit(overloaded {
- [](TextHashMethod &th) {
+ [](TextHashMethod & th) {
return std::string{"text:"} + printHashType(htSHA256);
},
- [](FixedOutputHashMethod &fshm) {
+ [](FixedOutputHashMethod & fshm) {
return "fixed:" + makeFileIngestionPrefix(fshm.fileIngestionMethod) + printHashType(fshm.hashType);
}
}, cam);
@@ -104,12 +104,12 @@ ContentAddress parseContentAddress(std::string_view rawCa) {
return std::visit(
overloaded {
- [&](TextHashMethod thm) {
+ [&](TextHashMethod & thm) {
return ContentAddress(TextHash {
.hash = Hash::parseNonSRIUnprefixed(rest, htSHA256)
});
},
- [&](FixedOutputHashMethod fohMethod) {
+ [&](FixedOutputHashMethod & fohMethod) {
return ContentAddress(FixedOutputHash {
.method = fohMethod.fileIngestionMethod,
.hash = Hash::parseNonSRIUnprefixed(rest, std::move(fohMethod.hashType)),
@@ -137,10 +137,10 @@ std::string renderContentAddress(std::optional<ContentAddress> ca)
Hash getContentAddressHash(const ContentAddress & ca)
{
return std::visit(overloaded {
- [](TextHash th) {
+ [](const TextHash & th) {
return th.hash;
},
- [](FixedOutputHash fsh) {
+ [](const FixedOutputHash & fsh) {
return fsh.hash;
}
}, ca);
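
A large share of this diff switches `std::visit` lambdas from taking variant alternatives by value to taking them by (const) reference, which avoids one copy per visit. A self-contained sketch of the `overloaded` helper and that calling style; the variant types here are placeholders, not Nix's:

```cpp
#include <iostream>
#include <string>
#include <variant>

// The usual 'overloaded' helper: combine several lambdas into one visitor.
template<class... Ts> struct overloaded : Ts... { using Ts::operator()...; };
template<class... Ts> overloaded(Ts...) -> overloaded<Ts...>;

struct TextHashLike { std::string hash; };
struct FixedOutputLike { std::string method; std::string hash; };

using ContentAddressLike = std::variant<TextHashLike, FixedOutputLike>;

static std::string render(const ContentAddressLike & ca)
{
    // Taking the alternatives by const reference avoids copying them out
    // of the variant every time the visitor runs.
    return std::visit(overloaded {
        [](const TextHashLike & th) { return "text:" + th.hash; },
        [](const FixedOutputLike & fo) { return "fixed:" + fo.method + ":" + fo.hash; },
    }, ca);
}

int main()
{
    std::cout << render(TextHashLike{"abc"}) << "\n";
    std::cout << render(FixedOutputLike{"r", "def"}) << "\n";
}
```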
diff --git a/src/libstore/daemon.cc b/src/libstore/daemon.cc
index d68ff64d7..164a9b2be 100644
--- a/src/libstore/daemon.cc
+++ b/src/libstore/daemon.cc
@@ -227,6 +227,12 @@ struct ClientSettings
try {
if (name == "ssh-auth-sock") // obsolete
;
+ else if (name == settings.experimentalFeatures.name) {
+ // We don’t want to forward the experimental features to
+ // the daemon, as that could cause some pretty weird stuff
+ if (tokenizeString<Strings>(value) != settings.experimentalFeatures.get())
+ debug("Ignoring the client-specified experimental features");
+ }
else if (trusted
|| name == settings.buildTimeout.name
|| name == "connect-timeout"
@@ -389,13 +395,13 @@ static void performOp(TunnelLogger * logger, ref<Store> store,
FramedSource source(from);
// TODO this is essentially RemoteStore::addCAToStore. Move it up to Store.
return std::visit(overloaded {
- [&](TextHashMethod &_) {
+ [&](TextHashMethod &) {
// We could stream this by changing Store
std::string contents = source.drain();
auto path = store->addTextToStore(name, contents, refs, repair);
return store->queryPathInfo(path);
},
- [&](FixedOutputHashMethod &fohm) {
+ [&](FixedOutputHashMethod & fohm) {
if (!refs.empty())
throw UnimplementedError("cannot yet have refs with flat or nar-hashed data");
auto path = store->addToStoreFromDump(source, name, fohm.fileIngestionMethod, fohm.hashType, repair);
diff --git a/src/libstore/derivations.cc b/src/libstore/derivations.cc
index 899475860..ef8765841 100644
--- a/src/libstore/derivations.cc
+++ b/src/libstore/derivations.cc
@@ -10,18 +10,18 @@ namespace nix {
std::optional<StorePath> DerivationOutput::path(const Store & store, std::string_view drvName, std::string_view outputName) const
{
return std::visit(overloaded {
- [](DerivationOutputInputAddressed doi) -> std::optional<StorePath> {
+ [](const DerivationOutputInputAddressed & doi) -> std::optional<StorePath> {
return { doi.path };
},
- [&](DerivationOutputCAFixed dof) -> std::optional<StorePath> {
+ [&](const DerivationOutputCAFixed & dof) -> std::optional<StorePath> {
return {
dof.path(store, drvName, outputName)
};
},
- [](DerivationOutputCAFloating dof) -> std::optional<StorePath> {
+ [](const DerivationOutputCAFloating & dof) -> std::optional<StorePath> {
return std::nullopt;
},
- [](DerivationOutputDeferred) -> std::optional<StorePath> {
+ [](const DerivationOutputDeferred &) -> std::optional<StorePath> {
return std::nullopt;
},
}, output);
@@ -332,22 +332,22 @@ string Derivation::unparse(const Store & store, bool maskOutputs,
if (first) first = false; else s += ',';
s += '('; printUnquotedString(s, i.first);
std::visit(overloaded {
- [&](DerivationOutputInputAddressed doi) {
+ [&](const DerivationOutputInputAddressed & doi) {
s += ','; printUnquotedString(s, maskOutputs ? "" : store.printStorePath(doi.path));
s += ','; printUnquotedString(s, "");
s += ','; printUnquotedString(s, "");
},
- [&](DerivationOutputCAFixed dof) {
+ [&](const DerivationOutputCAFixed & dof) {
s += ','; printUnquotedString(s, maskOutputs ? "" : store.printStorePath(dof.path(store, name, i.first)));
s += ','; printUnquotedString(s, dof.hash.printMethodAlgo());
s += ','; printUnquotedString(s, dof.hash.hash.to_string(Base16, false));
},
- [&](DerivationOutputCAFloating dof) {
+ [&](const DerivationOutputCAFloating & dof) {
s += ','; printUnquotedString(s, "");
s += ','; printUnquotedString(s, makeFileIngestionPrefix(dof.method) + printHashType(dof.hashType));
s += ','; printUnquotedString(s, "");
},
- [&](DerivationOutputDeferred) {
+ [&](const DerivationOutputDeferred &) {
s += ','; printUnquotedString(s, "");
s += ','; printUnquotedString(s, "");
s += ','; printUnquotedString(s, "");
@@ -420,13 +420,13 @@ DerivationType BasicDerivation::type() const
std::optional<HashType> floatingHashType;
for (auto & i : outputs) {
std::visit(overloaded {
- [&](DerivationOutputInputAddressed _) {
+ [&](const DerivationOutputInputAddressed &) {
inputAddressedOutputs.insert(i.first);
},
- [&](DerivationOutputCAFixed _) {
+ [&](const DerivationOutputCAFixed &) {
fixedCAOutputs.insert(i.first);
},
- [&](DerivationOutputCAFloating dof) {
+ [&](const DerivationOutputCAFloating & dof) {
floatingCAOutputs.insert(i.first);
if (!floatingHashType) {
floatingHashType = dof.hashType;
@@ -435,7 +435,7 @@ DerivationType BasicDerivation::type() const
throw Error("All floating outputs must use the same hash type");
}
},
- [&](DerivationOutputDeferred _) {
+ [&](const DerivationOutputDeferred &) {
deferredIAOutputs.insert(i.first);
},
}, i.second.output);
@@ -538,15 +538,15 @@ DrvHashModulo hashDerivationModulo(Store & store, const Derivation & drv, bool m
const auto & res = pathDerivationModulo(store, i.first);
std::visit(overloaded {
// Regular non-CA derivation, replace derivation
- [&](Hash drvHash) {
+ [&](const Hash & drvHash) {
inputs2.insert_or_assign(drvHash.to_string(Base16, false), i.second);
},
- [&](DeferredHash deferredHash) {
+ [&](const DeferredHash & deferredHash) {
isDeferred = true;
inputs2.insert_or_assign(deferredHash.hash.to_string(Base16, false), i.second);
},
// CA derivation's output hashes
- [&](CaOutputHashes outputHashes) {
+ [&](const CaOutputHashes & outputHashes) {
std::set<std::string> justOut = { "out" };
for (auto & output : i.second) {
/* Put each one in with a single "out" output.. */
@@ -572,17 +572,17 @@ std::map<std::string, Hash> staticOutputHashes(Store & store, const Derivation &
{
std::map<std::string, Hash> res;
std::visit(overloaded {
- [&](Hash drvHash) {
+ [&](const Hash & drvHash) {
for (auto & outputName : drv.outputNames()) {
res.insert({outputName, drvHash});
}
},
- [&](DeferredHash deferredHash) {
+ [&](const DeferredHash & deferredHash) {
for (auto & outputName : drv.outputNames()) {
res.insert({outputName, deferredHash.hash});
}
},
- [&](CaOutputHashes outputHashes) {
+ [&](const CaOutputHashes & outputHashes) {
res = outputHashes;
},
}, hashDerivationModulo(store, drv, true));
@@ -666,22 +666,22 @@ void writeDerivation(Sink & out, const Store & store, const BasicDerivation & dr
for (auto & i : drv.outputs) {
out << i.first;
std::visit(overloaded {
- [&](DerivationOutputInputAddressed doi) {
+ [&](const DerivationOutputInputAddressed & doi) {
out << store.printStorePath(doi.path)
<< ""
<< "";
},
- [&](DerivationOutputCAFixed dof) {
+ [&](const DerivationOutputCAFixed & dof) {
out << store.printStorePath(dof.path(store, drv.name, i.first))
<< dof.hash.printMethodAlgo()
<< dof.hash.hash.to_string(Base16, false);
},
- [&](DerivationOutputCAFloating dof) {
+ [&](const DerivationOutputCAFloating & dof) {
out << ""
<< (makeFileIngestionPrefix(dof.method) + printHashType(dof.hashType))
<< "";
},
- [&](DerivationOutputDeferred) {
+ [&](const DerivationOutputDeferred &) {
out << ""
<< ""
<< "";
diff --git a/src/libstore/derivations.hh b/src/libstore/derivations.hh
index 2df440536..b1cb68194 100644
--- a/src/libstore/derivations.hh
+++ b/src/libstore/derivations.hh
@@ -138,8 +138,8 @@ struct Derivation : BasicDerivation
/* Return the underlying basic derivation but with these changes:
- 1. Input drvs are emptied, but the outputs of them that were used are
- added directly to input sources.
+ 1. Input drvs are emptied, but the outputs of them that were used are
+ added directly to input sources.
2. Input placeholders are replaced with realized input store paths. */
std::optional<BasicDerivation> tryResolve(Store & store);
diff --git a/src/libstore/derived-path.cc b/src/libstore/derived-path.cc
index 8da81d0ac..e55af21e9 100644
--- a/src/libstore/derived-path.cc
+++ b/src/libstore/derived-path.cc
@@ -24,8 +24,8 @@ StorePathSet BuiltPath::outPaths() const
{
return std::visit(
overloaded{
- [](BuiltPath::Opaque p) { return StorePathSet{p.path}; },
- [](BuiltPath::Built b) {
+ [](const BuiltPath::Opaque & p) { return StorePathSet{p.path}; },
+ [](const BuiltPath::Built & b) {
StorePathSet res;
for (auto & [_, path] : b.outputs)
res.insert(path);
@@ -94,8 +94,8 @@ RealisedPath::Set BuiltPath::toRealisedPaths(Store & store) const
RealisedPath::Set res;
std::visit(
overloaded{
- [&](BuiltPath::Opaque p) { res.insert(p.path); },
- [&](BuiltPath::Built p) {
+ [&](const BuiltPath::Opaque & p) { res.insert(p.path); },
+ [&](const BuiltPath::Built & p) {
auto drvHashes =
staticOutputHashes(store, store.readDerivation(p.drvPath));
for (auto& [outputName, outputPath] : p.outputs) {
diff --git a/src/libstore/globals.cc b/src/libstore/globals.cc
index d3b27d7be..6934801e3 100644
--- a/src/libstore/globals.cc
+++ b/src/libstore/globals.cc
@@ -166,7 +166,7 @@ bool Settings::isExperimentalFeatureEnabled(const std::string & name)
}
MissingExperimentalFeature::MissingExperimentalFeature(std::string feature)
- : Error("experimental Nix feature '%1%' is disabled; use '--experimental-features %1%' to override", feature)
+ : Error("experimental Nix feature '%1%' is disabled; use '--extra-experimental-features %1%' to override", feature)
, missingFeature(feature)
{}
diff --git a/src/libstore/http-binary-cache-store.cc b/src/libstore/http-binary-cache-store.cc
index 0a3afcd51..605ec4b28 100644
--- a/src/libstore/http-binary-cache-store.cc
+++ b/src/libstore/http-binary-cache-store.cc
@@ -57,8 +57,8 @@ public:
{
// FIXME: do this lazily?
if (auto cacheInfo = diskCache->cacheExists(cacheUri)) {
- wantMassQuery.setDefault(cacheInfo->wantMassQuery ? "true" : "false");
- priority.setDefault(fmt("%d", cacheInfo->priority));
+ wantMassQuery.setDefault(cacheInfo->wantMassQuery);
+ priority.setDefault(cacheInfo->priority);
} else {
try {
BinaryCacheStore::init();
diff --git a/src/libstore/legacy-ssh-store.cc b/src/libstore/legacy-ssh-store.cc
index 7da3b36b5..814960bb5 100644
--- a/src/libstore/legacy-ssh-store.cc
+++ b/src/libstore/legacy-ssh-store.cc
@@ -82,9 +82,20 @@ struct LegacySSHStore : public virtual LegacySSHStoreConfig, public virtual Stor
conn->to << SERVE_MAGIC_1 << SERVE_PROTOCOL_VERSION;
conn->to.flush();
- unsigned int magic = readInt(conn->from);
- if (magic != SERVE_MAGIC_2)
- throw Error("protocol mismatch with 'nix-store --serve' on '%s'", host);
+ StringSink saved;
+ try {
+ TeeSource tee(conn->from, saved);
+ unsigned int magic = readInt(tee);
+ if (magic != SERVE_MAGIC_2)
+ throw Error("'nix-store --serve' protocol mismatch from '%s'", host);
+ } catch (SerialisationError & e) {
+ /* In case the other side is waiting for our input,
+ close it. */
+ conn->sshConn->in.close();
+ auto msg = conn->from.drain();
+ throw Error("'nix-store --serve' protocol mismatch from '%s', got '%s'",
+ host, chomp(*saved.s + msg));
+ }
conn->remoteVersion = readInt(conn->from);
if (GET_PROTOCOL_MAJOR(conn->remoteVersion) != 0x200)
throw Error("unsupported 'nix-store --serve' protocol version on '%s'", host);
@@ -283,10 +294,10 @@ public:
for (auto & p : drvPaths) {
auto sOrDrvPath = StorePathWithOutputs::tryFromDerivedPath(p);
std::visit(overloaded {
- [&](StorePathWithOutputs s) {
+ [&](const StorePathWithOutputs & s) {
ss.push_back(s.to_string(*this));
},
- [&](StorePath drvPath) {
+ [&](const StorePath & drvPath) {
throw Error("wanted to fetch '%s' but the legacy ssh protocol doesn't support merely substituting drv files via the build paths command. It would build them instead. Try using ssh-ng://", printStorePath(drvPath));
},
}, sOrDrvPath);
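
The handshake change above (and the matching one in remote-store.cc further down) tees everything read while checking the protocol magic, so a mismatch error can quote what the peer actually sent, after closing our write side so the peer is not left blocked. A toy sketch of that error-reporting idea using plain iostreams rather than Nix's `Source`/`TeeSource` classes; the magic value and banner are made up:

```cpp
#include <cstdint>
#include <iostream>
#include <iterator>
#include <sstream>
#include <stdexcept>
#include <string>

// Read a little-endian 64-bit integer while keeping a copy of the raw
// bytes, so a failed handshake can report what was actually received.
static uint64_t readU64Teed(std::istream & in, std::string & saved)
{
    char buf[8];
    in.read(buf, sizeof(buf));
    if (in.gcount() != sizeof(buf))
        throw std::runtime_error("unexpected end of stream");
    saved.append(buf, sizeof(buf));
    uint64_t n = 0;
    for (int i = 0; i < 8; i++)
        n |= uint64_t(uint8_t(buf[i])) << (8 * i);
    return n;
}

int main()
{
    // The "peer" responds with an error banner instead of the magic number.
    std::istringstream conn("Permission denied, please try again.\n");

    const uint64_t expectedMagic = 0x6478696f;   // placeholder value
    std::string saved;
    try {
        if (readU64Teed(conn, saved) != expectedMagic)
            throw std::runtime_error("protocol mismatch");
    } catch (std::exception & e) {
        // Drain the rest and include it in the message, mirroring what the
        // hunks do with TeeSource + drain().
        std::string rest((std::istreambuf_iterator<char>(conn)),
                         std::istreambuf_iterator<char>());
        std::cerr << e.what() << ", got '" << saved + rest << "'\n";
        return 1;
    }
}
```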
diff --git a/src/libstore/local-store.cc b/src/libstore/local-store.cc
index 747eb205e..5b2490472 100644
--- a/src/libstore/local-store.cc
+++ b/src/libstore/local-store.cc
@@ -681,7 +681,7 @@ void LocalStore::checkDerivationOutputs(const StorePath & drvPath, const Derivat
std::optional<Hash> h;
for (auto & i : drv.outputs) {
std::visit(overloaded {
- [&](DerivationOutputInputAddressed doia) {
+ [&](const DerivationOutputInputAddressed & doia) {
if (!h) {
// somewhat expensive so we do lazily
auto temp = hashDerivationModulo(*this, drv, true);
@@ -693,14 +693,14 @@ void LocalStore::checkDerivationOutputs(const StorePath & drvPath, const Derivat
printStorePath(drvPath), printStorePath(doia.path), printStorePath(recomputed));
envHasRightPath(doia.path, i.first);
},
- [&](DerivationOutputCAFixed dof) {
+ [&](const DerivationOutputCAFixed & dof) {
StorePath path = makeFixedOutputPath(dof.hash.method, dof.hash.hash, drvName);
envHasRightPath(path, i.first);
},
- [&](DerivationOutputCAFloating _) {
+ [&](const DerivationOutputCAFloating &) {
/* Nothing to check */
},
- [&](DerivationOutputDeferred) {
+ [&](const DerivationOutputDeferred &) {
},
}, i.second.output);
}
@@ -1071,14 +1071,19 @@ StorePathSet LocalStore::querySubstitutablePaths(const StorePathSet & paths)
}
+// FIXME: move this, it's not specific to LocalStore.
void LocalStore::querySubstitutablePathInfos(const StorePathCAMap & paths, SubstitutablePathInfos & infos)
{
if (!settings.useSubstitutes) return;
for (auto & sub : getDefaultSubstituters()) {
for (auto & path : paths) {
+ if (infos.count(path.first))
+ // Choose first succeeding substituter.
+ continue;
+
auto subPath(path.first);
- // recompute store path so that we can use a different store root
+ // Recompute store path so that we can use a different store root.
if (path.second) {
subPath = makeFixedOutputPathFromCA(path.first.name(), *path.second);
if (sub->storeDir == storeDir)
@@ -1239,11 +1244,6 @@ void LocalStore::addToStore(const ValidPathInfo & info, Source & source,
deletePath(realPath);
- // text hashing has long been allowed to have non-self-references because it is used for drv files.
- bool refersToSelf = info.references.count(info.path) > 0;
- if (info.ca.has_value() && !info.references.empty() && !(std::holds_alternative<TextHash>(*info.ca) && !refersToSelf))
- settings.requireExperimentalFeature("ca-references");
-
/* While restoring the path from the NAR, compute the hash
of the NAR. */
HashSink hashSink(htSHA256);
diff --git a/src/libstore/local.mk b/src/libstore/local.mk
index b87cee8d5..b992bcbc0 100644
--- a/src/libstore/local.mk
+++ b/src/libstore/local.mk
@@ -8,7 +8,7 @@ libstore_SOURCES := $(wildcard $(d)/*.cc $(d)/builtins/*.cc $(d)/build/*.cc)
libstore_LIBS = libutil
-libstore_LDFLAGS = $(SQLITE3_LIBS) $(LIBCURL_LIBS) $(SODIUM_LIBS) -pthread
+libstore_LDFLAGS += $(SQLITE3_LIBS) $(LIBCURL_LIBS) $(SODIUM_LIBS) -pthread
ifdef HOST_LINUX
libstore_LDFLAGS += -ldl
endif
diff --git a/src/libstore/misc.cc b/src/libstore/misc.cc
index b4929b445..f184dd857 100644
--- a/src/libstore/misc.cc
+++ b/src/libstore/misc.cc
@@ -166,7 +166,7 @@ void Store::queryMissing(const std::vector<DerivedPath> & targets,
}
std::visit(overloaded {
- [&](DerivedPath::Built bfd) {
+ [&](const DerivedPath::Built & bfd) {
if (!isValidPath(bfd.drvPath)) {
// FIXME: we could try to substitute the derivation.
auto state(state_.lock());
@@ -199,7 +199,7 @@ void Store::queryMissing(const std::vector<DerivedPath> & targets,
mustBuildDrv(bfd.drvPath, *drv);
},
- [&](DerivedPath::Opaque bo) {
+ [&](const DerivedPath::Opaque & bo) {
if (isValidPath(bo.path)) return;
diff --git a/src/libstore/path-with-outputs.cc b/src/libstore/path-with-outputs.cc
index 865d64cf2..e5a121e00 100644
--- a/src/libstore/path-with-outputs.cc
+++ b/src/libstore/path-with-outputs.cc
@@ -31,14 +31,14 @@ std::vector<DerivedPath> toDerivedPaths(const std::vector<StorePathWithOutputs>
std::variant<StorePathWithOutputs, StorePath> StorePathWithOutputs::tryFromDerivedPath(const DerivedPath & p)
{
return std::visit(overloaded {
- [&](DerivedPath::Opaque bo) -> std::variant<StorePathWithOutputs, StorePath> {
+ [&](const DerivedPath::Opaque & bo) -> std::variant<StorePathWithOutputs, StorePath> {
if (bo.path.isDerivation()) {
// drv path gets interpreted as "build", not "get drv file itself"
return bo.path;
}
return StorePathWithOutputs { bo.path };
},
- [&](DerivedPath::Built bfd) -> std::variant<StorePathWithOutputs, StorePath> {
+ [&](const DerivedPath::Built & bfd) -> std::variant<StorePathWithOutputs, StorePath> {
return StorePathWithOutputs { bfd.drvPath, bfd.outputs };
},
}, p.raw());
diff --git a/src/libstore/profiles.cc b/src/libstore/profiles.cc
index 84a21c0ba..73163424c 100644
--- a/src/libstore/profiles.cc
+++ b/src/libstore/profiles.cc
@@ -126,9 +126,9 @@ void deleteGeneration(const Path & profile, GenerationNumber gen)
static void deleteGeneration2(const Path & profile, GenerationNumber gen, bool dryRun)
{
if (dryRun)
- printInfo(format("would remove generation %1%") % gen);
+ notice("would remove profile version %1%", gen);
else {
- printInfo(format("removing generation %1%") % gen);
+ notice("removing profile version %1%", gen);
deleteGeneration(profile, gen);
}
}
@@ -142,7 +142,7 @@ void deleteGenerations(const Path & profile, const std::set<GenerationNumber> &
auto [gens, curGen] = findGenerations(profile);
if (gensToDelete.count(*curGen))
- throw Error("cannot delete current generation of profile %1%'", profile);
+ throw Error("cannot delete current version of profile %1%'", profile);
for (auto & i : gens) {
if (!gensToDelete.count(i.number)) continue;
@@ -236,6 +236,37 @@ void switchLink(Path link, Path target)
}
+void switchGeneration(
+ const Path & profile,
+ std::optional<GenerationNumber> dstGen,
+ bool dryRun)
+{
+ PathLocks lock;
+ lockProfile(lock, profile);
+
+ auto [gens, curGen] = findGenerations(profile);
+
+ std::optional<Generation> dst;
+ for (auto & i : gens)
+ if ((!dstGen && i.number < curGen) ||
+ (dstGen && i.number == *dstGen))
+ dst = i;
+
+ if (!dst) {
+ if (dstGen)
+ throw Error("profile version %1% does not exist", *dstGen);
+ else
+ throw Error("no profile version older than the current (%1%) exists", curGen.value_or(0));
+ }
+
+ notice("switching profile from version %d to %d", curGen.value_or(0), dst->number);
+
+ if (dryRun) return;
+
+ switchLink(profile, dst->path);
+}
+
+
void lockProfile(PathLocks & lock, const Path & profile)
{
lock.lockPaths({profile}, (format("waiting for lock on profile '%1%'") % profile).str());
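
The selection rule inside the new `switchGeneration()` is: take the explicitly requested version if given, otherwise (for a rollback) the newest version strictly older than the current one. A sketch of just that rule over plain numbers, with names of my own choosing:

```cpp
#include <cstdint>
#include <iostream>
#include <optional>
#include <vector>

// Pick either the requested version, or the newest version strictly older
// than the current one when no explicit version was requested.
static std::optional<uint64_t> pickGeneration(
    const std::vector<uint64_t> & gens,   // sorted ascending
    uint64_t curGen,
    std::optional<uint64_t> dstGen)
{
    std::optional<uint64_t> dst;
    for (auto g : gens)
        if ((!dstGen && g < curGen) || (dstGen && g == *dstGen))
            dst = g;
    return dst;
}

int main()
{
    std::vector<uint64_t> gens{3, 5, 8, 9};

    // Roll back from 9: the newest version older than 9 is 8.
    std::cout << pickGeneration(gens, 9, std::nullopt).value() << "\n";

    // Switch to an explicitly requested version.
    std::cout << pickGeneration(gens, 9, 5).value() << "\n";
}
```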
diff --git a/src/libstore/profiles.hh b/src/libstore/profiles.hh
index be55a65d4..d100c970c 100644
--- a/src/libstore/profiles.hh
+++ b/src/libstore/profiles.hh
@@ -11,7 +11,7 @@ namespace nix {
class StorePath;
-typedef unsigned int GenerationNumber;
+typedef uint64_t GenerationNumber;
struct Generation
{
@@ -46,6 +46,13 @@ void deleteGenerationsOlderThan(const Path & profile, const string & timeSpec, b
void switchLink(Path link, Path target);
+/* Roll back a profile to the specified generation, or to the most
+ recent one older than the current. */
+void switchGeneration(
+ const Path & profile,
+ std::optional<GenerationNumber> dstGen,
+ bool dryRun);
+
/* Ensure exclusive access to a profile. Any command that modifies
the profile first acquires this lock. */
void lockProfile(PathLocks & lock, const Path & profile);
diff --git a/src/libstore/remote-store.cc b/src/libstore/remote-store.cc
index 140f39120..fa5ea8af7 100644
--- a/src/libstore/remote-store.cc
+++ b/src/libstore/remote-store.cc
@@ -162,8 +162,19 @@ void RemoteStore::initConnection(Connection & conn)
try {
conn.to << WORKER_MAGIC_1;
conn.to.flush();
- unsigned int magic = readInt(conn.from);
- if (magic != WORKER_MAGIC_2) throw Error("protocol mismatch");
+ StringSink saved;
+ try {
+ TeeSource tee(conn.from, saved);
+ unsigned int magic = readInt(tee);
+ if (magic != WORKER_MAGIC_2)
+ throw Error("protocol mismatch");
+ } catch (SerialisationError & e) {
+ /* In case the other side is waiting for our input, close
+ it. */
+ conn.closeWrite();
+ auto msg = conn.from.drain();
+ throw Error("protocol mismatch, got '%s'", chomp(*saved.s + msg));
+ }
conn.from >> conn.daemonVersion;
if (GET_PROTOCOL_MAJOR(conn.daemonVersion) != GET_PROTOCOL_MAJOR(PROTOCOL_VERSION))
@@ -222,6 +233,7 @@ void RemoteStore::setOptions(Connection & conn)
overrides.erase(settings.buildCores.name);
overrides.erase(settings.useSubstitutes.name);
overrides.erase(loggerSettings.showTrace.name);
+ overrides.erase(settings.experimentalFeatures.name);
conn.to << overrides.size();
for (auto & i : overrides)
conn.to << i.first << i.second.value;
@@ -516,13 +528,13 @@ ref<const ValidPathInfo> RemoteStore::addCAToStore(
if (repair) throw Error("repairing is not supported when building through the Nix daemon protocol < 1.25");
std::visit(overloaded {
- [&](TextHashMethod thm) -> void {
+ [&](const TextHashMethod & thm) -> void {
std::string s = dump.drain();
conn->to << wopAddTextToStore << name << s;
worker_proto::write(*this, conn->to, references);
conn.processStderr();
},
- [&](FixedOutputHashMethod fohm) -> void {
+ [&](const FixedOutputHashMethod & fohm) -> void {
conn->to
<< wopAddToStore
<< name
@@ -693,10 +705,10 @@ static void writeDerivedPaths(RemoteStore & store, ConnectionHandle & conn, cons
for (auto & p : reqs) {
auto sOrDrvPath = StorePathWithOutputs::tryFromDerivedPath(p);
std::visit(overloaded {
- [&](StorePathWithOutputs s) {
+ [&](const StorePathWithOutputs & s) {
ss.push_back(s.to_string(store));
},
- [&](StorePath drvPath) {
+ [&](const StorePath & drvPath) {
throw Error("trying to request '%s', but daemon protocol %d.%d is too old (< 1.29) to request a derivation file",
store.printStorePath(drvPath),
GET_PROTOCOL_MAJOR(conn->daemonVersion),
diff --git a/src/libstore/remote-store.hh b/src/libstore/remote-store.hh
index 8901c79fc..ac1eaa19e 100644
--- a/src/libstore/remote-store.hh
+++ b/src/libstore/remote-store.hh
@@ -125,7 +125,6 @@ public:
struct Connection
{
- AutoCloseFD fd;
FdSink to;
FdSource from;
unsigned int daemonVersion;
@@ -133,6 +132,8 @@ public:
virtual ~Connection();
+ virtual void closeWrite() = 0;
+
std::exception_ptr processStderr(Sink * sink = 0, Source * source = 0, bool flush = true);
};
diff --git a/src/libstore/s3-binary-cache-store.cc b/src/libstore/s3-binary-cache-store.cc
index 6bfbee044..7accad7f4 100644
--- a/src/libstore/s3-binary-cache-store.cc
+++ b/src/libstore/s3-binary-cache-store.cc
@@ -209,7 +209,7 @@ struct S3BinaryCacheStoreImpl : virtual S3BinaryCacheStoreConfig, public virtual
S3Helper s3Helper;
S3BinaryCacheStoreImpl(
- const std::string & scheme,
+ const std::string & uriScheme,
const std::string & bucketName,
const Params & params)
: StoreConfig(params)
@@ -232,8 +232,8 @@ struct S3BinaryCacheStoreImpl : virtual S3BinaryCacheStoreConfig, public virtual
void init() override
{
if (auto cacheInfo = diskCache->cacheExists(getUri())) {
- wantMassQuery.setDefault(cacheInfo->wantMassQuery ? "true" : "false");
- priority.setDefault(fmt("%d", cacheInfo->priority));
+ wantMassQuery.setDefault(cacheInfo->wantMassQuery);
+ priority.setDefault(cacheInfo->priority);
} else {
BinaryCacheStore::init();
diskCache->createCache(getUri(), storeDir, wantMassQuery, priority);
diff --git a/src/libstore/sandbox-defaults.sb b/src/libstore/sandbox-defaults.sb
index 2bb1ea130..41893e6dd 100644
--- a/src/libstore/sandbox-defaults.sb
+++ b/src/libstore/sandbox-defaults.sb
@@ -97,3 +97,7 @@
; This is used by /bin/sh on macOS 10.15 and later.
(allow file*
(literal "/private/var/select/sh"))
+
+; Allow Rosetta 2 to run x86_64 binaries on aarch64-darwin.
+(allow file-read*
+ (subpath "/Library/Apple/usr/libexec/oah"))
diff --git a/src/libstore/ssh-store.cc b/src/libstore/ssh-store.cc
index f2caf2aeb..bb03daef4 100644
--- a/src/libstore/ssh-store.cc
+++ b/src/libstore/ssh-store.cc
@@ -57,6 +57,11 @@ private:
struct Connection : RemoteStore::Connection
{
std::unique_ptr<SSHMaster::Connection> sshConn;
+
+ void closeWrite() override
+ {
+ sshConn->in.close();
+ }
};
ref<RemoteStore::Connection> openConnection() override;
diff --git a/src/libstore/store-api.cc b/src/libstore/store-api.cc
index 970bafd88..b5ff3dccf 100644
--- a/src/libstore/store-api.cc
+++ b/src/libstore/store-api.cc
@@ -199,10 +199,10 @@ StorePath Store::makeFixedOutputPathFromCA(std::string_view name, ContentAddress
{
// New template
return std::visit(overloaded {
- [&](TextHash th) {
+ [&](const TextHash & th) {
return makeTextPath(name, th.hash, references);
},
- [&](FixedOutputHash fsh) {
+ [&](const FixedOutputHash & fsh) {
return makeFixedOutputPath(fsh.method, fsh.hash, name, references, hasSelfReference);
}
}, ca);
@@ -1114,10 +1114,10 @@ bool ValidPathInfo::isContentAddressed(const Store & store) const
if (! ca) return false;
auto caPath = std::visit(overloaded {
- [&](TextHash th) {
+ [&](const TextHash & th) {
return store.makeTextPath(path.name(), th.hash, references);
},
- [&](FixedOutputHash fsh) {
+ [&](const FixedOutputHash & fsh) {
auto refs = references;
bool hasSelfReference = false;
if (refs.count(path)) {
diff --git a/src/libstore/store-api.hh b/src/libstore/store-api.hh
index 4fb6c40c7..54471bdf2 100644
--- a/src/libstore/store-api.hh
+++ b/src/libstore/store-api.hh
@@ -430,9 +430,10 @@ public:
virtual StorePathSet querySubstitutablePaths(const StorePathSet & paths) { return {}; };
/* Query substitute info (i.e. references, derivers and download
- sizes) of a map of paths to their optional ca values. If a path
- does not have substitute info, it's omitted from the resulting
- ‘infos’ map. */
+ sizes) of a map of paths to their optional ca values. The info
+ of the first succeeding substituter for each path will be
+ returned. If a path does not have substitute info, it's omitted
+ from the resulting ‘infos’ map. */
virtual void querySubstitutablePathInfos(const StorePathCAMap & paths,
SubstitutablePathInfos & infos) { return; };
diff --git a/src/libstore/uds-remote-store.cc b/src/libstore/uds-remote-store.cc
index cac4fa036..cfadccf68 100644
--- a/src/libstore/uds-remote-store.cc
+++ b/src/libstore/uds-remote-store.cc
@@ -45,6 +45,12 @@ std::string UDSRemoteStore::getUri()
}
+void UDSRemoteStore::Connection::closeWrite()
+{
+ shutdown(fd.get(), SHUT_WR);
+}
+
+
ref<RemoteStore::Connection> UDSRemoteStore::openConnection()
{
auto conn = make_ref<Connection>();
diff --git a/src/libstore/uds-remote-store.hh b/src/libstore/uds-remote-store.hh
index ddc7716cd..f8dfcca70 100644
--- a/src/libstore/uds-remote-store.hh
+++ b/src/libstore/uds-remote-store.hh
@@ -40,6 +40,12 @@ public:
private:
+ struct Connection : RemoteStore::Connection
+ {
+ AutoCloseFD fd;
+ void closeWrite() override;
+ };
+
ref<RemoteStore::Connection> openConnection() override;
std::optional<std::string> path;
};
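
Note: the two `closeWrite()` overrides above (SSH: closing the master's stdin pipe; Unix domain socket: `shutdown(fd, SHUT_WR)`) half-close the daemon connection so the peer sees end-of-file while the reply can still be read. A minimal standalone sketch of that half-close behaviour, using a plain POSIX `socketpair` rather than Nix's connection classes:

```cpp
// Sketch only (not Nix code): shutdown(SHUT_WR) signals EOF to the peer
// while keeping the read side open, which is what closeWrite() relies on.
#include <sys/socket.h>
#include <unistd.h>
#include <cstdio>

int main()
{
    int fds[2];
    if (socketpair(AF_UNIX, SOCK_STREAM, 0, fds) == -1) return 1;

    write(fds[0], "request", 7);
    shutdown(fds[0], SHUT_WR);                 // peer sees EOF after "request"

    char buf[16];
    ssize_t n = read(fds[1], buf, sizeof buf); // reads "request"
    printf("got %zd bytes\n", n);
    n = read(fds[1], buf, sizeof buf);         // returns 0: EOF
    printf("then EOF: %zd\n", n);

    write(fds[1], "reply", 5);                 // other direction still works
    n = read(fds[0], buf, sizeof buf);
    printf("reply of %zd bytes\n", n);

    close(fds[0]); close(fds[1]);
    return 0;
}
```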
diff --git a/src/libutil/ansicolor.hh b/src/libutil/ansicolor.hh
index ae741f867..38305e71c 100644
--- a/src/libutil/ansicolor.hh
+++ b/src/libutil/ansicolor.hh
@@ -9,7 +9,7 @@ namespace nix {
#define ANSI_ITALIC "\e[3m"
#define ANSI_RED "\e[31;1m"
#define ANSI_GREEN "\e[32;1m"
-#define ANSI_YELLOW "\e[33;1m"
+#define ANSI_WARNING "\e[35;1m"
#define ANSI_BLUE "\e[34;1m"
#define ANSI_MAGENTA "\e[35;1m"
#define ANSI_CYAN "\e[36;1m"
diff --git a/src/libutil/args.cc b/src/libutil/args.cc
index afed0670f..9df279faf 100644
--- a/src/libutil/args.cc
+++ b/src/libutil/args.cc
@@ -331,6 +331,7 @@ MultiCommand::MultiCommand(const Commands & commands_)
if (i == commands.end())
throw UsageError("'%s' is not a recognised command", s);
command = {s, i->second()};
+ command->second->parent = this;
}}
});
diff --git a/src/libutil/args.hh b/src/libutil/args.hh
index c08ba8abd..7521b3065 100644
--- a/src/libutil/args.hh
+++ b/src/libutil/args.hh
@@ -12,6 +12,8 @@ namespace nix {
enum HashType : char;
+class MultiCommand;
+
class Args
{
public:
@@ -89,6 +91,14 @@ protected:
})
, arity(1)
{ }
+
+ template<class I>
+ Handler(std::optional<I> * dest)
+ : fun([=](std::vector<std::string> ss) {
+ *dest = string2IntWithUnitPrefix<I>(ss[0]);
+ })
+ , arity(1)
+ { }
};
/* Options. */
@@ -169,11 +179,13 @@ public:
virtual nlohmann::json toJSON();
friend class MultiCommand;
+
+ MultiCommand * parent = nullptr;
};
/* A command is an argument parser that can be executed by calling its
run() method. */
-struct Command : virtual Args
+struct Command : virtual public Args
{
friend class MultiCommand;
@@ -193,7 +205,7 @@ typedef std::map<std::string, std::function<ref<Command>()>> Commands;
/* An argument parser that supports multiple subcommands,
i.e. ‘<command> <subcommand>’. */
-class MultiCommand : virtual Args
+class MultiCommand : virtual public Args
{
public:
Commands commands;
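
Note: the new `Handler(std::optional<I> *)` overload above lets a flag such as `--to` parse its single argument straight into an optional integer member, leaving it unset when the flag is absent. A standalone sketch of that pattern with simplified types (not the actual `Args` machinery; `std::stoull` stands in for Nix's unit-prefix parser):

```cpp
// Illustrative only: a flag handler writing into std::optional<I>.
#include <cstdint>
#include <functional>
#include <iostream>
#include <optional>
#include <string>
#include <vector>

template<class I>
std::function<void(std::vector<std::string>)> optionalIntHandler(std::optional<I> * dest)
{
    return [=](std::vector<std::string> ss) {
        *dest = static_cast<I>(std::stoull(ss[0]));  // Nix uses string2IntWithUnitPrefix here
    };
}

int main()
{
    std::optional<uint64_t> version;               // e.g. the --to value of a rollback command
    auto handler = optionalIntHandler(&version);
    handler({"510"});                              // as if "--to 510" had been parsed
    std::cout << (version ? *version : 0) << "\n"; // prints 510
}
```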
diff --git a/src/libutil/config.cc b/src/libutil/config.cc
index 2a5f913e6..c247c7dae 100644
--- a/src/libutil/config.cc
+++ b/src/libutil/config.cc
@@ -177,11 +177,6 @@ AbstractSetting::AbstractSetting(
{
}
-void AbstractSetting::setDefault(const std::string & str)
-{
- if (!overridden) set(str);
-}
-
nlohmann::json AbstractSetting::toJSON()
{
return nlohmann::json(toJSONObject());
diff --git a/src/libutil/config.hh b/src/libutil/config.hh
index df5c2226f..736810bf3 100644
--- a/src/libutil/config.hh
+++ b/src/libutil/config.hh
@@ -194,8 +194,6 @@ public:
bool overridden = false;
- void setDefault(const std::string & str);
-
protected:
AbstractSetting(
@@ -253,6 +251,7 @@ public:
bool operator !=(const T & v2) const { return value != v2; }
void operator =(const T & v) { assign(v); }
virtual void assign(const T & v) { value = v; }
+ void setDefault(const T & v) { if (!overridden) value = v; }
void set(const std::string & str, bool append = false) override;
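
Note: the string-based `AbstractSetting::setDefault()` is replaced here by a typed `Setting<T>::setDefault()`, which is what the S3 binary cache store hunk at the top of this patch now calls (`wantMassQuery.setDefault(cacheInfo->wantMassQuery)`). A minimal sketch of the semantics, with a stand-in class rather than the real `Setting<T>`: a default only takes effect as long as the user has not overridden the setting.

```cpp
// Sketch of the typed setDefault() semantics (not the real Setting<T>).
#include <iostream>

template<typename T>
struct SettingSketch
{
    T value;
    bool overridden = false;

    void userSet(const T & v) { value = v; overridden = true; } // e.g. via nix.conf / CLI
    void setDefault(const T & v) { if (!overridden) value = v; }
};

int main()
{
    SettingSketch<int> priority{50};
    priority.setDefault(40);             // applied: not overridden yet
    std::cout << priority.value << "\n"; // 40

    priority.userSet(10);                // explicit user override
    priority.setDefault(40);             // ignored: the override wins
    std::cout << priority.value << "\n"; // 10
}
```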
diff --git a/src/libutil/error.cc b/src/libutil/error.cc
index 0eea3455d..203d79087 100644
--- a/src/libutil/error.cc
+++ b/src/libutil/error.cc
@@ -185,15 +185,15 @@ void printAtPos(const ErrPos & pos, std::ostream & out)
if (pos) {
switch (pos.origin) {
case foFile: {
- out << fmt(ANSI_BLUE "at " ANSI_YELLOW "%s:%s" ANSI_NORMAL ":", pos.file, showErrPos(pos));
+ out << fmt(ANSI_BLUE "at " ANSI_WARNING "%s:%s" ANSI_NORMAL ":", pos.file, showErrPos(pos));
break;
}
case foString: {
- out << fmt(ANSI_BLUE "at " ANSI_YELLOW "«string»:%s" ANSI_NORMAL ":", showErrPos(pos));
+ out << fmt(ANSI_BLUE "at " ANSI_WARNING "«string»:%s" ANSI_NORMAL ":", showErrPos(pos));
break;
}
case foStdin: {
- out << fmt(ANSI_BLUE "at " ANSI_YELLOW "«stdin»:%s" ANSI_NORMAL ":", showErrPos(pos));
+ out << fmt(ANSI_BLUE "at " ANSI_WARNING "«stdin»:%s" ANSI_NORMAL ":", showErrPos(pos));
break;
}
default:
@@ -232,7 +232,7 @@ std::ostream & showErrorInfo(std::ostream & out, const ErrorInfo & einfo, bool s
break;
}
case Verbosity::lvlWarn: {
- prefix = ANSI_YELLOW "warning";
+ prefix = ANSI_WARNING "warning";
break;
}
case Verbosity::lvlInfo: {
@@ -252,7 +252,7 @@ std::ostream & showErrorInfo(std::ostream & out, const ErrorInfo & einfo, bool s
break;
}
case Verbosity::lvlDebug: {
- prefix = ANSI_YELLOW "debug";
+ prefix = ANSI_WARNING "debug";
break;
}
default:
diff --git a/src/libutil/fmt.hh b/src/libutil/fmt.hh
index 85c0e9429..fd335b811 100644
--- a/src/libutil/fmt.hh
+++ b/src/libutil/fmt.hh
@@ -82,7 +82,7 @@ struct yellowtxt
template <class T>
std::ostream & operator<<(std::ostream & out, const yellowtxt<T> & y)
{
- return out << ANSI_YELLOW << y.value << ANSI_NORMAL;
+ return out << ANSI_WARNING << y.value << ANSI_NORMAL;
}
template <class T>
diff --git a/src/libutil/local.mk b/src/libutil/local.mk
index 3a6415ee3..f880c0fc5 100644
--- a/src/libutil/local.mk
+++ b/src/libutil/local.mk
@@ -6,7 +6,7 @@ libutil_DIR := $(d)
libutil_SOURCES := $(wildcard $(d)/*.cc)
-libutil_LDFLAGS = -pthread $(OPENSSL_LIBS) $(LIBBROTLI_LIBS) $(LIBARCHIVE_LIBS) $(BOOST_LDFLAGS) -lboost_context
+libutil_LDFLAGS += -pthread $(OPENSSL_LIBS) $(LIBBROTLI_LIBS) $(LIBARCHIVE_LIBS) $(BOOST_LDFLAGS) -lboost_context
ifeq ($(HAVE_LIBCPUID), 1)
libutil_LDFLAGS += -lcpuid
diff --git a/src/libutil/logging.cc b/src/libutil/logging.cc
index 7d6090e0a..f8a121ed1 100644
--- a/src/libutil/logging.cc
+++ b/src/libutil/logging.cc
@@ -27,7 +27,7 @@ Logger * logger = makeSimpleLogger(true);
void Logger::warn(const std::string & msg)
{
- log(lvlWarn, ANSI_YELLOW "warning:" ANSI_NORMAL " " + msg);
+ log(lvlWarn, ANSI_WARNING "warning:" ANSI_NORMAL " " + msg);
}
void Logger::writeToStdout(std::string_view s)
diff --git a/src/libutil/ref.hh b/src/libutil/ref.hh
index 2549ef496..d6bf53bb8 100644
--- a/src/libutil/ref.hh
+++ b/src/libutil/ref.hh
@@ -99,4 +99,47 @@ make_ref(Args&&... args)
return ref<T>(p);
}
+
+/* A non-nullable pointer.
+ This is similar to a C++ "& reference", but mutable.
+ This is similar to ref<T> but backed by a regular pointer instead of a smart pointer.
+ */
+template<typename T>
+class ptr {
+private:
+ T * p;
+
+public:
+ ptr<T>(const ptr<T> & r)
+ : p(r.p)
+ { }
+
+ explicit ptr<T>(T * p)
+ : p(p)
+ {
+ if (!p)
+ throw std::invalid_argument("null pointer cast to ptr");
+ }
+
+ T* operator ->() const
+ {
+ return &*p;
+ }
+
+ T& operator *() const
+ {
+ return *p;
+ }
+
+ bool operator == (const ptr<T> & other) const
+ {
+ return p == other.p;
+ }
+
+ bool operator != (const ptr<T> & other) const
+ {
+ return p != other.p;
+ }
+};
+
}
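
Note: a short usage sketch for the `ptr<T>` helper added above, assuming Nix's `ref.hh` (where `ptr<T>` lives in namespace `nix`) is on the include path. The point of the class is that a callee taking `ptr<T>` never needs a null check, and a null raw pointer is rejected at construction time.

```cpp
// Usage sketch for nix::ptr<T> (assumes the ref.hh shown above).
#include "ref.hh"
#include <iostream>
#include <stdexcept>
#include <string>

struct Greeter { void greet(const std::string & s) { std::cout << s << "\n"; } };

void useGreeter(nix::ptr<Greeter> g)   // callee needs no null check
{
    g->greet("hello");
}

int main()
{
    Greeter greeter;
    useGreeter(nix::ptr<Greeter>(&greeter));   // fine

    try {
        nix::ptr<Greeter> bad(nullptr);        // throws std::invalid_argument
    } catch (const std::invalid_argument & e) {
        std::cout << "rejected: " << e.what() << "\n";
    }
}
```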
diff --git a/src/libutil/serialise.cc b/src/libutil/serialise.cc
index 374b48d79..16f3476c2 100644
--- a/src/libutil/serialise.cc
+++ b/src/libutil/serialise.cc
@@ -244,7 +244,8 @@ std::unique_ptr<FinishSink> sourceToSink(std::function<void(Source &)> fun)
if (!cur.empty()) (*coro)(false);
}
- void finish() {
+ void finish() override
+ {
if (!coro) return;
if (!*coro) abort();
(*coro)(true);
diff --git a/src/libutil/tarfile.cc b/src/libutil/tarfile.cc
index 24905130d..50e691a3d 100644
--- a/src/libutil/tarfile.cc
+++ b/src/libutil/tarfile.cc
@@ -39,32 +39,30 @@ void TarArchive::check(int err, const std::string & reason)
throw Error(reason, archive_error_string(this->archive));
}
-TarArchive::TarArchive(Source & source, bool raw) : buffer(4096)
+TarArchive::TarArchive(Source & source, bool raw)
+ : source(&source), buffer(4096)
{
- this->archive = archive_read_new();
- this->source = &source;
-
- if (!raw) {
- archive_read_support_filter_all(archive);
+ init();
+ if (!raw)
archive_read_support_format_all(archive);
- } else {
- archive_read_support_filter_all(archive);
+ else
archive_read_support_format_raw(archive);
- archive_read_support_format_empty(archive);
- }
check(archive_read_open(archive, (void *)this, callback_open, callback_read, callback_close), "Failed to open archive (%s)");
}
-
TarArchive::TarArchive(const Path & path)
{
- this->archive = archive_read_new();
-
- archive_read_support_filter_all(archive);
+ init();
archive_read_support_format_all(archive);
check(archive_read_open_filename(archive, path.c_str(), 16384), "failed to open archive: %s");
}
+void TarArchive::init()
+{
+ archive = archive_read_new();
+ archive_read_support_filter_all(archive);
+}
+
void TarArchive::close()
{
check(archive_read_close(this->archive), "Failed to close archive (%s)");
@@ -87,13 +85,16 @@ static void extract_archive(TarArchive & archive, const Path & destDir)
struct archive_entry * entry;
int r = archive_read_next_header(archive.archive, &entry);
if (r == ARCHIVE_EOF) break;
- else if (r == ARCHIVE_WARN)
+ auto name = archive_entry_pathname(entry);
+ if (!name)
+ throw Error("cannot get archive member name: %s", archive_error_string(archive.archive));
+ if (r == ARCHIVE_WARN)
warn(archive_error_string(archive.archive));
else
archive.check(r);
archive_entry_set_pathname(entry,
- (destDir + "/" + archive_entry_pathname(entry)).c_str());
+ (destDir + "/" + name).c_str());
archive.check(archive_read_extract(archive.archive, entry, flags));
}
diff --git a/src/libutil/tarfile.hh b/src/libutil/tarfile.hh
index 4d9141fd4..f107a7e2e 100644
--- a/src/libutil/tarfile.hh
+++ b/src/libutil/tarfile.hh
@@ -17,10 +17,13 @@ struct TarArchive {
// disable copy constructor
TarArchive(const TarArchive &) = delete;
+ void init();
+
void close();
~TarArchive();
};
+
void unpackTarfile(Source & source, const Path & destDir);
void unpackTarfile(const Path & tarFile, const Path & destDir);
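
Note: the `extract_archive` hunk above now checks `archive_entry_pathname()` for NULL before prepending `destDir`, since libarchive can return a nameless entry for malformed archives. A minimal plain-libarchive sketch (not Nix code) of the same pattern, listing the entries of a tar file and guarding the pathname:

```cpp
// Sketch: list tar entries with libarchive, checking for NULL pathnames.
#include <archive.h>
#include <archive_entry.h>
#include <cstdio>

int main(int argc, char ** argv)
{
    if (argc != 2) { fprintf(stderr, "usage: %s FILE.tar\n", argv[0]); return 1; }

    struct archive * a = archive_read_new();
    archive_read_support_filter_all(a);
    archive_read_support_format_all(a);
    if (archive_read_open_filename(a, argv[1], 16384) != ARCHIVE_OK) {
        fprintf(stderr, "open failed: %s\n", archive_error_string(a));
        return 1;
    }

    struct archive_entry * entry;
    int r;
    while ((r = archive_read_next_header(a, &entry)) != ARCHIVE_EOF) {
        if (r < ARCHIVE_OK) { fprintf(stderr, "%s\n", archive_error_string(a)); break; }
        const char * name = archive_entry_pathname(entry);
        if (!name) { fprintf(stderr, "entry without a name, skipping\n"); continue; }
        printf("%s\n", name);
    }

    archive_read_free(a);
    return 0;
}
```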
diff --git a/src/libutil/tests/logging.cc b/src/libutil/tests/logging.cc
index d990e5499..cef3bd481 100644
--- a/src/libutil/tests/logging.cc
+++ b/src/libutil/tests/logging.cc
@@ -336,7 +336,7 @@ namespace nix {
ASSERT_STREQ(
hintfmt("only one arg %1% %2%", "fulfilled").str().c_str(),
- "only one arg " ANSI_YELLOW "fulfilled" ANSI_NORMAL " ");
+ "only one arg " ANSI_WARNING "fulfilled" ANSI_NORMAL " ");
}
@@ -344,7 +344,7 @@ namespace nix {
ASSERT_STREQ(
hintfmt("what about this %1% %2%", "%3%", "one", "two").str().c_str(),
- "what about this " ANSI_YELLOW "%3%" ANSI_NORMAL " " ANSI_YELLOW "one" ANSI_NORMAL);
+ "what about this " ANSI_WARNING "%3%" ANSI_NORMAL " " ANSI_YELLOW "one" ANSI_NORMAL);
}
diff --git a/src/libutil/util.cc b/src/libutil/util.cc
index d1270cd31..bc841f425 100644
--- a/src/libutil/util.cc
+++ b/src/libutil/util.cc
@@ -4,6 +4,7 @@
#include "finally.hh"
#include "serialise.hh"
+#include <array>
#include <cctype>
#include <cerrno>
#include <climits>
@@ -1033,17 +1034,10 @@ std::vector<char *> stringsToCharPtrs(const Strings & ss)
return res;
}
-// Output = "standard out" output stream
string runProgram(Path program, bool searchPath, const Strings & args,
const std::optional<std::string> & input)
{
- RunOptions opts(program, args);
- opts.searchPath = searchPath;
- // This allows you to refer to a program with a pathname relative to the
- // PATH variable.
- opts.input = input;
-
- auto res = runProgram(opts);
+ auto res = runProgram(RunOptions {.program = program, .searchPath = searchPath, .args = args, .input = input});
if (!statusOk(res.first))
throw ExecError(res.first, fmt("program '%1%' %2%", program, statusToString(res.first)));
@@ -1052,9 +1046,8 @@ string runProgram(Path program, bool searchPath, const Strings & args,
}
// Output = error code + "standard out" output stream
-std::pair<int, std::string> runProgram(const RunOptions & options_)
+std::pair<int, std::string> runProgram(RunOptions && options)
{
- RunOptions options(options_);
StringSink sink;
options.standardOut = &sink;
@@ -1723,6 +1716,8 @@ string showBytes(uint64_t bytes)
// FIXME: move to libstore/build
void commonChildInit(Pipe & logPipe)
{
+ logger = makeSimpleLogger();
+
const static string pathNullDevice = "/dev/null";
restoreProcessContext();
diff --git a/src/libutil/util.hh b/src/libutil/util.hh
index a8dd4bd47..bee77b53f 100644
--- a/src/libutil/util.hh
+++ b/src/libutil/util.hh
@@ -276,26 +276,20 @@ string runProgram(Path program, bool searchPath = false,
struct RunOptions
{
+ Path program;
+ bool searchPath = true;
+ Strings args;
std::optional<uid_t> uid;
std::optional<uid_t> gid;
std::optional<Path> chdir;
std::optional<std::map<std::string, std::string>> environment;
- Path program;
- bool searchPath = true;
- Strings args;
std::optional<std::string> input;
Source * standardIn = nullptr;
Sink * standardOut = nullptr;
bool mergeStderrToStdout = false;
- bool _killStderr = false;
-
- RunOptions(const Path & program, const Strings & args)
- : program(program), args(args) { };
-
- RunOptions & killStderr(bool v) { _killStderr = true; return *this; }
};
-std::pair<int, std::string> runProgram(const RunOptions & options);
+std::pair<int, std::string> runProgram(RunOptions && options);
void runProgram2(const RunOptions & options);
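
Note: `RunOptions` above loses its constructor and becomes a plain aggregate, so call sites (see the `repl.cc` hunk later in this patch) can use C++20 designated initializers instead of constructing and then mutating an options object. A standalone sketch of that call style with simplified types, not Nix's actual `runProgram`:

```cpp
// Sketch (simplified types): aggregate options + designated initializers.
#include <iostream>
#include <list>
#include <optional>
#include <string>

struct RunOptionsSketch
{
    std::string program;
    bool searchPath = true;
    std::list<std::string> args;
    std::optional<std::string> input;
};

static void run(RunOptionsSketch && opts)
{
    std::cout << "would run: " << opts.program;
    for (auto & a : opts.args) std::cout << " " << a;
    std::cout << (opts.input ? " (with stdin)" : "") << "\n";
}

int main()
{
    // Unmentioned fields keep their defaults; initializers must follow declaration order.
    run({ .program = "git", .args = {"rev-parse", "HEAD"} });
    run({ .program = "cat", .searchPath = true, .input = "hello" });
}
```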
diff --git a/src/nix-env/nix-env.cc b/src/nix-env/nix-env.cc
index e04954d45..a86f55f84 100644
--- a/src/nix-env/nix-env.cc
+++ b/src/nix-env/nix-env.cc
@@ -1204,37 +1204,6 @@ static void opSwitchProfile(Globals & globals, Strings opFlags, Strings opArgs)
}
-static constexpr GenerationNumber prevGen = std::numeric_limits<GenerationNumber>::max();
-
-
-static void switchGeneration(Globals & globals, GenerationNumber dstGen)
-{
- PathLocks lock;
- lockProfile(lock, globals.profile);
-
- auto [gens, curGen] = findGenerations(globals.profile);
-
- std::optional<Generation> dst;
- for (auto & i : gens)
- if ((dstGen == prevGen && i.number < curGen) ||
- (dstGen >= 0 && i.number == dstGen))
- dst = i;
-
- if (!dst) {
- if (dstGen == prevGen)
- throw Error("no generation older than the current (%1%) exists", curGen.value_or(0));
- else
- throw Error("generation %1% does not exist", dstGen);
- }
-
- printInfo("switching from generation %1% to %2%", curGen.value_or(0), dst->number);
-
- if (globals.dryRun) return;
-
- switchLink(globals.profile, dst->path);
-}
-
-
static void opSwitchGeneration(Globals & globals, Strings opFlags, Strings opArgs)
{
if (opFlags.size() > 0)
@@ -1243,7 +1212,7 @@ static void opSwitchGeneration(Globals & globals, Strings opFlags, Strings opArg
throw UsageError("exactly one argument expected");
if (auto dstGen = string2Int<GenerationNumber>(opArgs.front()))
- switchGeneration(globals, *dstGen);
+ switchGeneration(globals.profile, *dstGen, globals.dryRun);
else
throw UsageError("expected a generation number");
}
@@ -1256,7 +1225,7 @@ static void opRollback(Globals & globals, Strings opFlags, Strings opArgs)
if (opArgs.size() != 0)
throw UsageError("no arguments expected");
- switchGeneration(globals, prevGen);
+ switchGeneration(globals.profile, {}, globals.dryRun);
}
@@ -1296,12 +1265,12 @@ static void opDeleteGenerations(Globals & globals, Strings opFlags, Strings opAr
} else if (opArgs.size() == 1 && opArgs.front().find('d') != string::npos) {
deleteGenerationsOlderThan(globals.profile, opArgs.front(), globals.dryRun);
} else if (opArgs.size() == 1 && opArgs.front().find('+') != string::npos) {
- if(opArgs.front().size() < 2)
- throw Error("invalid number of generations ‘%1%’", opArgs.front());
+ if (opArgs.front().size() < 2)
+ throw Error("invalid number of generations '%1%'", opArgs.front());
string str_max = string(opArgs.front(), 1, opArgs.front().size());
auto max = string2Int<GenerationNumber>(str_max);
if (!max || *max == 0)
- throw Error("invalid number of generations to keep ‘%1%’", opArgs.front());
+ throw Error("invalid number of generations to keep '%1%'", opArgs.front());
deleteGenerationsGreaterThan(globals.profile, *max, globals.dryRun);
} else {
std::set<GenerationNumber> gens;
diff --git a/src/nix-env/user-env.cc b/src/nix-env/user-env.cc
index 5ceb2ae67..1fd4bcbd3 100644
--- a/src/nix-env/user-env.cc
+++ b/src/nix-env/user-env.cc
@@ -131,9 +131,9 @@ bool createUserEnv(EvalState & state, DrvInfos & elems,
state.forceValue(topLevel);
PathSet context;
Attr & aDrvPath(*topLevel.attrs->find(state.sDrvPath));
- auto topLevelDrv = state.store->parseStorePath(state.coerceToPath(aDrvPath.pos ? *(aDrvPath.pos) : noPos, *(aDrvPath.value), context));
+ auto topLevelDrv = state.store->parseStorePath(state.coerceToPath(*aDrvPath.pos, *aDrvPath.value, context));
Attr & aOutPath(*topLevel.attrs->find(state.sOutPath));
- Path topLevelOut = state.coerceToPath(aOutPath.pos ? *(aOutPath.pos) : noPos, *(aOutPath.value), context);
+ Path topLevelOut = state.coerceToPath(*aOutPath.pos, *aOutPath.value, context);
/* Realise the resulting store expression. */
debug("building user environment");
diff --git a/src/nix/build.cc b/src/nix/build.cc
index 13eb66ac6..6e31757a2 100644
--- a/src/nix/build.cc
+++ b/src/nix/build.cc
@@ -54,7 +54,7 @@ struct CmdBuild : InstallablesCommand, MixDryRun, MixJSON, MixProfile
{
auto buildables = build(
getEvalStore(), store,
- dryRun ? Realise::Nothing : Realise::Outputs,
+ dryRun ? Realise::Derivation : Realise::Outputs,
installables, buildMode);
if (json) logger->cout("%s", derivedPathsWithHintsToJSON(buildables, store).dump());
@@ -66,12 +66,12 @@ struct CmdBuild : InstallablesCommand, MixDryRun, MixJSON, MixProfile
for (const auto & [_i, buildable] : enumerate(buildables)) {
auto i = _i;
std::visit(overloaded {
- [&](BuiltPath::Opaque bo) {
+ [&](const BuiltPath::Opaque & bo) {
std::string symlink = outLink;
if (i) symlink += fmt("-%d", i);
store2->addPermRoot(bo.path, absPath(symlink));
},
- [&](BuiltPath::Built bfd) {
+ [&](const BuiltPath::Built & bfd) {
for (auto & output : bfd.outputs) {
std::string symlink = outLink;
if (i) symlink += fmt("-%d", i);
diff --git a/src/nix/copy.cc b/src/nix/copy.cc
index 0489dfe06..197c85316 100644
--- a/src/nix/copy.cc
+++ b/src/nix/copy.cc
@@ -78,7 +78,7 @@ struct CmdCopy : BuiltPathsCommand
BuiltPathsCommand::run(store);
}
- void run(ref<Store> srcStore, BuiltPaths paths) override
+ void run(ref<Store> srcStore, BuiltPaths && paths) override
{
ref<Store> dstStore = dstUri.empty() ? openStore() : openStore(dstUri);
diff --git a/src/nix/develop.cc b/src/nix/develop.cc
index c823f16c8..c20b9f272 100644
--- a/src/nix/develop.cc
+++ b/src/nix/develop.cc
@@ -9,6 +9,7 @@
#include "progress-bar.hh"
#include "run.hh"
+#include <memory>
#include <nlohmann/json.hpp>
using namespace nix;
@@ -505,6 +506,20 @@ struct CmdDevelop : Common, MixEnvironment
auto args = phase || !command.empty() ? Strings{std::string(baseNameOf(shell)), rcFilePath}
: Strings{std::string(baseNameOf(shell)), "--rcfile", rcFilePath};
+ // Need to chdir since phases assume in flake directory
+ if (phase) {
+ // chdir if installable is a flake of type git+file or path
+ auto installableFlake = std::dynamic_pointer_cast<InstallableFlake>(installable);
+ if (installableFlake) {
+ auto sourcePath = installableFlake->getLockedFlake()->flake.resolvedRef.input.getSourcePath();
+ if (sourcePath) {
+ if (chdir(sourcePath->c_str()) == -1) {
+ throw SysError("chdir to '%s' failed", *sourcePath);
+ }
+ }
+ }
+ }
+
runProgramInStore(store, shell, args);
}
};
diff --git a/src/nix/flake-show.md b/src/nix/flake-show.md
index 1a42c44a0..e484cf47e 100644
--- a/src/nix/flake-show.md
+++ b/src/nix/flake-show.md
@@ -35,4 +35,7 @@ specified by flake reference *flake-url*. These are the top-level
attributes in the `outputs` of the flake, as well as lower-level
attributes for some standard outputs (e.g. `packages` or `checks`).
+With `--json`, the output is in a JSON representation suitable for automatic
+processing by other tools.
+
)""
diff --git a/src/nix/flake.cc b/src/nix/flake.cc
index abb0fd3b4..7d7ada707 100644
--- a/src/nix/flake.cc
+++ b/src/nix/flake.cc
@@ -131,8 +131,18 @@ static void enumerateOutputs(EvalState & state, Value & vFlake,
state.forceAttrs(*aOutputs->value);
- for (auto & attr : *aOutputs->value->attrs)
- callback(attr.name, *attr.value, *attr.pos);
+ auto sHydraJobs = state.symbols.create("hydraJobs");
+
+ /* Hack: ensure that hydraJobs is evaluated before anything
+ else. This way we can disable IFD for hydraJobs and then enable
+ it for other outputs. */
+ if (auto attr = aOutputs->value->attrs->get(sHydraJobs))
+ callback(attr->name, *attr->value, *attr->pos);
+
+ for (auto & attr : *aOutputs->value->attrs) {
+ if (attr.name != sHydraJobs)
+ callback(attr.name, *attr.value, *attr.pos);
+ }
}
struct CmdFlakeMetadata : FlakeCommand, MixJSON
@@ -269,7 +279,10 @@ struct CmdFlakeCheck : FlakeCommand
void run(nix::ref<nix::Store> store) override
{
- settings.readOnlyMode = !build;
+ if (!build) {
+ settings.readOnlyMode = true;
+ evalSettings.enableImportFromDerivation.setDefault(false);
+ }
auto state = getEvalState();
@@ -381,9 +394,13 @@ struct CmdFlakeCheck : FlakeCommand
for (auto & attr : *v.attrs) {
state->forceAttrs(*attr.value, *attr.pos);
- if (!state->isDerivation(*attr.value))
- checkHydraJobs(attrPath + "." + (std::string) attr.name,
- *attr.value, *attr.pos);
+ auto attrPath2 = attrPath + "." + (std::string) attr.name;
+ if (state->isDerivation(*attr.value)) {
+ Activity act(*logger, lvlChatty, actUnknown,
+ fmt("checking Hydra job '%s'", attrPath2));
+ checkDerivation(attrPath2, *attr.value, *attr.pos);
+ } else
+ checkHydraJobs(attrPath2, *attr.value, *attr.pos);
}
} catch (Error & e) {
@@ -447,8 +464,8 @@ struct CmdFlakeCheck : FlakeCommand
if (!v.isLambda())
throw Error("bundler must be a function");
if (!v.lambda.fun->formals ||
- v.lambda.fun->formals->argNames.find(state->symbols.create("program")) == v.lambda.fun->formals->argNames.end() ||
- v.lambda.fun->formals->argNames.find(state->symbols.create("system")) == v.lambda.fun->formals->argNames.end())
+ !v.lambda.fun->formals->argNames.count(state->symbols.create("program")) ||
+ !v.lambda.fun->formals->argNames.count(state->symbols.create("system")))
throw Error("bundler must take formal arguments 'program' and 'system'");
} catch (Error & e) {
e.addTrace(pos, hintfmt("while checking the template '%s'", attrPath));
@@ -469,6 +486,8 @@ struct CmdFlakeCheck : FlakeCommand
fmt("checking flake output '%s'", name));
try {
+ evalSettings.enableImportFromDerivation.setDefault(name != "hydraJobs");
+
state->forceValue(vOutput, pos);
if (name == "checks") {
@@ -603,7 +622,7 @@ struct CmdFlakeCheck : FlakeCommand
store->buildPaths(drvPaths);
}
if (hasErrors)
- throw Error("Some errors were encountered during the evaluation");
+ throw Error("some errors were encountered during the evaluation");
}
};
@@ -846,7 +865,7 @@ struct CmdFlakeArchive : FlakeCommand, MixJSON, MixDryRun
}
};
-struct CmdFlakeShow : FlakeCommand
+struct CmdFlakeShow : FlakeCommand, MixJSON
{
bool showLegacy = false;
@@ -873,52 +892,69 @@ struct CmdFlakeShow : FlakeCommand
void run(nix::ref<nix::Store> store) override
{
+ evalSettings.enableImportFromDerivation.setDefault(false);
+
auto state = getEvalState();
auto flake = std::make_shared<LockedFlake>(lockFlake());
- std::function<void(eval_cache::AttrCursor & visitor, const std::vector<Symbol> & attrPath, const std::string & headerPrefix, const std::string & nextPrefix)> visit;
-
- visit = [&](eval_cache::AttrCursor & visitor, const std::vector<Symbol> & attrPath, const std::string & headerPrefix, const std::string & nextPrefix)
+ std::function<nlohmann::json(
+ eval_cache::AttrCursor & visitor,
+ const std::vector<Symbol> & attrPath,
+ const std::string & headerPrefix,
+ const std::string & nextPrefix)> visit;
+
+ visit = [&](
+ eval_cache::AttrCursor & visitor,
+ const std::vector<Symbol> & attrPath,
+ const std::string & headerPrefix,
+ const std::string & nextPrefix)
+ -> nlohmann::json
{
+ auto j = nlohmann::json::object();
+
Activity act(*logger, lvlInfo, actUnknown,
fmt("evaluating '%s'", concatStringsSep(".", attrPath)));
try {
auto recurse = [&]()
{
- logger->cout("%s", headerPrefix);
+ if (!json)
+ logger->cout("%s", headerPrefix);
auto attrs = visitor.getAttrs();
for (const auto & [i, attr] : enumerate(attrs)) {
bool last = i + 1 == attrs.size();
auto visitor2 = visitor.getAttr(attr);
auto attrPath2(attrPath);
attrPath2.push_back(attr);
- visit(*visitor2, attrPath2,
+ auto j2 = visit(*visitor2, attrPath2,
fmt(ANSI_GREEN "%s%s" ANSI_NORMAL ANSI_BOLD "%s" ANSI_NORMAL, nextPrefix, last ? treeLast : treeConn, attr),
nextPrefix + (last ? treeNull : treeLine));
+ if (json) j.emplace(attr, std::move(j2));
}
};
auto showDerivation = [&]()
{
auto name = visitor.getAttr(state->sName)->getString();
-
- /*
- std::string description;
-
- if (auto aMeta = visitor.maybeGetAttr("meta")) {
- if (auto aDescription = aMeta->maybeGetAttr("description"))
- description = aDescription->getString();
+ if (json) {
+ std::optional<std::string> description;
+ if (auto aMeta = visitor.maybeGetAttr("meta")) {
+ if (auto aDescription = aMeta->maybeGetAttr("description"))
+ description = aDescription->getString();
+ }
+ j.emplace("type", "derivation");
+ j.emplace("name", name);
+ if (description)
+ j.emplace("description", *description);
+ } else {
+ logger->cout("%s: %s '%s'",
+ headerPrefix,
+ attrPath.size() == 2 && attrPath[0] == "devShell" ? "development environment" :
+ attrPath.size() >= 2 && attrPath[0] == "devShells" ? "development environment" :
+ attrPath.size() == 3 && attrPath[0] == "checks" ? "derivation" :
+ attrPath.size() >= 1 && attrPath[0] == "hydraJobs" ? "derivation" :
+ "package",
+ name);
}
- */
-
- logger->cout("%s: %s '%s'",
- headerPrefix,
- attrPath.size() == 2 && attrPath[0] == "devShell" ? "development environment" :
- attrPath.size() >= 2 && attrPath[0] == "devShells" ? "development environment" :
- attrPath.size() == 3 && attrPath[0] == "checks" ? "derivation" :
- attrPath.size() >= 1 && attrPath[0] == "hydraJobs" ? "derivation" :
- "package",
- name);
};
if (attrPath.size() == 0
@@ -962,7 +998,7 @@ struct CmdFlakeShow : FlakeCommand
if (attrPath.size() == 1)
recurse();
else if (!showLegacy)
- logger->cout("%s: " ANSI_YELLOW "omitted" ANSI_NORMAL " (use '--legacy' to show)", headerPrefix);
+ logger->warn(fmt("%s: " ANSI_WARNING "omitted" ANSI_NORMAL " (use '--legacy' to show)", headerPrefix));
else {
if (visitor.isDerivation())
showDerivation();
@@ -979,7 +1015,11 @@ struct CmdFlakeShow : FlakeCommand
auto aType = visitor.maybeGetAttr("type");
if (!aType || aType->getString() != "app")
throw EvalError("not an app definition");
- logger->cout("%s: app", headerPrefix);
+ if (json) {
+ j.emplace("type", "app");
+ } else {
+ logger->cout("%s: app", headerPrefix);
+ }
}
else if (
@@ -987,27 +1027,40 @@ struct CmdFlakeShow : FlakeCommand
(attrPath.size() == 2 && attrPath[0] == "templates"))
{
auto description = visitor.getAttr("description")->getString();
- logger->cout("%s: template: " ANSI_BOLD "%s" ANSI_NORMAL, headerPrefix, description);
+ if (json) {
+ j.emplace("type", "template");
+ j.emplace("description", description);
+ } else {
+ logger->cout("%s: template: " ANSI_BOLD "%s" ANSI_NORMAL, headerPrefix, description);
+ }
}
else {
- logger->cout("%s: %s",
- headerPrefix,
+ auto [type, description] =
(attrPath.size() == 1 && attrPath[0] == "overlay")
- || (attrPath.size() == 2 && attrPath[0] == "overlays") ? "Nixpkgs overlay" :
- attrPath.size() == 2 && attrPath[0] == "nixosConfigurations" ? "NixOS configuration" :
- attrPath.size() == 2 && attrPath[0] == "nixosModules" ? "NixOS module" :
- ANSI_YELLOW "unknown" ANSI_NORMAL);
+ || (attrPath.size() == 2 && attrPath[0] == "overlays") ? std::make_pair("nixpkgs-overlay", "Nixpkgs overlay") :
+ attrPath.size() == 2 && attrPath[0] == "nixosConfigurations" ? std::make_pair("nixos-configuration", "NixOS configuration") :
+ attrPath.size() == 2 && attrPath[0] == "nixosModules" ? std::make_pair("nixos-module", "NixOS module") :
+ std::make_pair("unknown", "unknown");
+ if (json) {
+ j.emplace("type", type);
+ } else {
+ logger->cout("%s: " ANSI_WARNING "%s" ANSI_NORMAL, headerPrefix, description);
+ }
}
} catch (EvalError & e) {
if (!(attrPath.size() > 0 && attrPath[0] == "legacyPackages"))
throw;
}
+
+ return j;
};
auto cache = openEvalCache(*state, flake);
- visit(*cache->getRoot(), {}, fmt(ANSI_BOLD "%s" ANSI_NORMAL, flake->flake.lockedRef), "");
+ auto j = visit(*cache->getRoot(), {}, fmt(ANSI_BOLD "%s" ANSI_NORMAL, flake->flake.lockedRef), "");
+ if (json)
+ logger->cout("%s", j.dump());
}
};
diff --git a/src/nix/local.mk b/src/nix/local.mk
index 83b6dd08b..e4ec7634d 100644
--- a/src/nix/local.mk
+++ b/src/nix/local.mk
@@ -14,7 +14,7 @@ nix_SOURCES := \
$(wildcard src/nix-instantiate/*.cc) \
$(wildcard src/nix-store/*.cc) \
-nix_CXXFLAGS += -I src/libutil -I src/libstore -I src/libfetchers -I src/libexpr -I src/libmain -I src/libcmd
+nix_CXXFLAGS += -I src/libutil -I src/libstore -I src/libfetchers -I src/libexpr -I src/libmain -I src/libcmd -I doc/manual
nix_LIBS = libexpr libmain libfetchers libstore libutil libcmd
@@ -30,3 +30,5 @@ src/nix-env/user-env.cc: src/nix-env/buildenv.nix.gen.hh
src/nix/develop.cc: src/nix/get-env.sh.gen.hh
src/nix-channel/nix-channel.cc: src/nix-channel/unpack-channel.nix.gen.hh
+
+src/nix/main.cc: doc/manual/generate-manpage.nix.gen.hh doc/manual/utils.nix.gen.hh
diff --git a/src/nix/log.cc b/src/nix/log.cc
index 962c47525..fd3c1d787 100644
--- a/src/nix/log.cc
+++ b/src/nix/log.cc
@@ -35,10 +35,10 @@ struct CmdLog : InstallableCommand
RunPager pager;
for (auto & sub : subs) {
auto log = std::visit(overloaded {
- [&](DerivedPath::Opaque bo) {
+ [&](const DerivedPath::Opaque & bo) {
return sub->getBuildLog(bo.path);
},
- [&](DerivedPath::Built bfd) {
+ [&](const DerivedPath::Built & bfd) {
return sub->getBuildLog(bfd.drvPath);
},
}, b.raw());
diff --git a/src/nix/main.cc b/src/nix/main.cc
index 008482be3..8aaf08813 100644
--- a/src/nix/main.cc
+++ b/src/nix/main.cc
@@ -10,6 +10,7 @@
#include "filetransfer.hh"
#include "finally.hh"
#include "loggers.hh"
+#include "markdown.hh"
#include <sys/types.h>
#include <sys/socket.h>
@@ -163,9 +164,43 @@ struct NixArgs : virtual MultiCommand, virtual MixCommonArgs
}
};
-static void showHelp(std::vector<std::string> subcommand)
+/* Render the help for the specified subcommand to stdout using
+ lowdown. */
+static void showHelp(std::vector<std::string> subcommand, MultiCommand & toplevel)
{
- showManPage(subcommand.empty() ? "nix" : fmt("nix3-%s", concatStringsSep("-", subcommand)));
+ auto mdName = subcommand.empty() ? "nix" : fmt("nix3-%s", concatStringsSep("-", subcommand));
+
+ evalSettings.restrictEval = false;
+ evalSettings.pureEval = false;
+ EvalState state({}, openStore("dummy://"));
+
+ auto vGenerateManpage = state.allocValue();
+ state.eval(state.parseExprFromString(
+ #include "generate-manpage.nix.gen.hh"
+ , "/"), *vGenerateManpage);
+
+ auto vUtils = state.allocValue();
+ state.cacheFile(
+ "/utils.nix", "/utils.nix",
+ state.parseExprFromString(
+ #include "utils.nix.gen.hh"
+ , "/"),
+ *vUtils);
+
+ auto vJson = state.allocValue();
+ mkString(*vJson, toplevel.toJSON().dump());
+
+ auto vRes = state.allocValue();
+ state.callFunction(*vGenerateManpage, *vJson, *vRes, noPos);
+
+ auto attr = vRes->attrs->get(state.symbols.create(mdName + ".md"));
+ if (!attr)
+ throw UsageError("Nix has no subcommand '%s'", concatStringsSep("", subcommand));
+
+ auto markdown = state.forceString(*attr->value);
+
+ RunPager pager;
+ std::cout << renderMarkdownToTerminal(markdown) << "\n";
}
struct CmdHelp : Command
@@ -194,7 +229,10 @@ struct CmdHelp : Command
void run() override
{
- showHelp(subcommand);
+ assert(parent);
+ MultiCommand * toplevel = parent;
+ while (toplevel->parent) toplevel = toplevel->parent;
+ showHelp(subcommand, *toplevel);
}
};
@@ -277,7 +315,7 @@ void mainWrapped(int argc, char * * argv)
} else
break;
}
- showHelp(subcommand);
+ showHelp(subcommand, args);
return;
} catch (UsageError &) {
if (!completions) throw;
diff --git a/src/nix/make-content-addressable.cc b/src/nix/make-content-addressable.cc
index f5bdc7e65..12f303a10 100644
--- a/src/nix/make-content-addressable.cc
+++ b/src/nix/make-content-addressable.cc
@@ -25,7 +25,7 @@ struct CmdMakeContentAddressable : StorePathsCommand, MixJSON
;
}
- void run(ref<Store> store, StorePaths storePaths) override
+ void run(ref<Store> store, StorePaths && storePaths) override
{
auto paths = store->topoSortPaths(StorePathSet(storePaths.begin(), storePaths.end()));
diff --git a/src/nix/path-info.cc b/src/nix/path-info.cc
index 518cd5568..3743d7504 100644
--- a/src/nix/path-info.cc
+++ b/src/nix/path-info.cc
@@ -79,7 +79,7 @@ struct CmdPathInfo : StorePathsCommand, MixJSON
std::cout << fmt("\t%6.1f%c", res, idents.at(power));
}
- void run(ref<Store> store, StorePaths storePaths) override
+ void run(ref<Store> store, StorePaths && storePaths) override
{
size_t pathLen = 0;
for (auto & storePath : storePaths)
diff --git a/src/nix/path-info.md b/src/nix/path-info.md
index 76a83e39d..7a1714ba4 100644
--- a/src/nix/path-info.md
+++ b/src/nix/path-info.md
@@ -82,7 +82,7 @@ This command shows information about the store paths produced by
By default, this command only prints the store paths. You can get
additional information by passing flags such as `--closure-size`,
---size`, `--sigs` or `--json`.
+`--size`, `--sigs` or `--json`.
> **Warning**
>
diff --git a/src/nix/prefetch.cc b/src/nix/prefetch.cc
index 9c2309a5f..768d37595 100644
--- a/src/nix/prefetch.cc
+++ b/src/nix/prefetch.cc
@@ -199,26 +199,24 @@ static int main_nix_prefetch_url(int argc, char * * argv)
state->forceAttrs(v);
/* Extract the URL. */
- auto attr = v.attrs->find(state->symbols.create("urls"));
- if (attr == v.attrs->end())
- throw Error("attribute set does not contain a 'urls' attribute");
- state->forceList(*attr->value);
- if (attr->value->listSize() < 1)
+ auto & attr = v.attrs->need(state->symbols.create("urls"));
+ state->forceList(*attr.value);
+ if (attr.value->listSize() < 1)
throw Error("'urls' list is empty");
- url = state->forceString(*attr->value->listElems()[0]);
+ url = state->forceString(*attr.value->listElems()[0]);
/* Extract the hash mode. */
- attr = v.attrs->find(state->symbols.create("outputHashMode"));
- if (attr == v.attrs->end())
+ auto attr2 = v.attrs->get(state->symbols.create("outputHashMode"));
+ if (!attr2)
printInfo("warning: this does not look like a fetchurl call");
else
- unpack = state->forceString(*attr->value) == "recursive";
+ unpack = state->forceString(*attr2->value) == "recursive";
/* Extract the name. */
if (!name) {
- attr = v.attrs->find(state->symbols.create("name"));
- if (attr != v.attrs->end())
- name = state->forceString(*attr->value);
+ auto attr3 = v.attrs->get(state->symbols.create("name"));
+            if (attr3)
+                name = state->forceString(*attr3->value);
}
}
diff --git a/src/nix/profile-history.md b/src/nix/profile-history.md
index d0fe40c82..f0bfe5037 100644
--- a/src/nix/profile-history.md
+++ b/src/nix/profile-history.md
@@ -6,10 +6,10 @@ R""(
```console
# nix profile history
- Version 508 -> 509:
+ Version 508 (2020-04-10):
flake:nixpkgs#legacyPackages.x86_64-linux.awscli: ∅ -> 1.17.13
- Version 509 -> 510:
+ Version 509 (2020-05-16) <- 508:
flake:nixpkgs#legacyPackages.x86_64-linux.awscli: 1.17.13 -> 1.18.211
```
diff --git a/src/nix/profile-rollback.md b/src/nix/profile-rollback.md
new file mode 100644
index 000000000..6bb75aa5e
--- /dev/null
+++ b/src/nix/profile-rollback.md
@@ -0,0 +1,26 @@
+R""(
+
+# Examples
+
+* Roll back your default profile to the previous version:
+
+ ```console
+ # nix profile rollback
+ switching profile from version 519 to 518
+ ```
+
+* Switch your default profile to version 510:
+
+ ```console
+ # nix profile rollback --to 510
+ switching profile from version 518 to 510
+ ```
+
+# Description
+
+This command switches a profile to the most recent version older
+than the currently active version, or if `--to` *N* is given, to
+version *N* of the profile. To see the available versions of a
+profile, use `nix profile history`.
+
+)""
diff --git a/src/nix/profile-wipe-history.md b/src/nix/profile-wipe-history.md
new file mode 100644
index 000000000..b4b262864
--- /dev/null
+++ b/src/nix/profile-wipe-history.md
@@ -0,0 +1,20 @@
+R""(
+
+# Examples
+
+* Delete all versions of the default profile older than 100 days:
+
+ ```console
+ # nix profile wipe-history --profile /tmp/profile --older-than 100d
+ removing profile version 515
+ removing profile version 514
+ ```
+
+# Description
+
+This command deletes non-current versions of a profile, making it
+impossible to roll back to these versions. By default, all non-current
+versions are deleted. With `--older-than` *N*`d`, all non-current
+versions older than *N* days are deleted.
+
+)""
diff --git a/src/nix/profile.cc b/src/nix/profile.cc
index 8cef6d0b6..c63ed9c88 100644
--- a/src/nix/profile.cc
+++ b/src/nix/profile.cc
@@ -12,6 +12,7 @@
#include <nlohmann/json.hpp>
#include <regex>
+#include <iomanip>
using namespace nix;
@@ -259,11 +260,11 @@ struct CmdProfileInstall : InstallablesCommand, MixDefaultProfile
ProfileElement element;
std::visit(overloaded {
- [&](BuiltPath::Opaque bo) {
+ [&](const BuiltPath::Opaque & bo) {
pathsToBuild.push_back(bo);
element.storePaths.insert(bo.path);
},
- [&](BuiltPath::Built bfd) {
+ [&](const BuiltPath::Built & bfd) {
// TODO: Why are we querying if we know the output
// names already? Is it just to figure out what the
// default one is?
@@ -528,10 +529,11 @@ struct CmdProfileHistory : virtual StoreCommand, EvalCommand, MixDefaultProfile
if (!first) std::cout << "\n";
first = false;
- if (prevGen)
- std::cout << fmt("Version %d -> %d:\n", prevGen->first.number, gen.number);
- else
- std::cout << fmt("Version %d:\n", gen.number);
+ std::cout << fmt("Version %s%d" ANSI_NORMAL " (%s)%s:\n",
+ gen.number == curGen ? ANSI_GREEN : ANSI_BOLD,
+ gen.number,
+ std::put_time(std::gmtime(&gen.creationTime), "%Y-%m-%d"),
+ prevGen ? fmt(" <- %d", prevGen->first.number) : "");
ProfileManifest::printDiff(
prevGen ? prevGen->second : ProfileManifest(),
@@ -543,6 +545,76 @@ struct CmdProfileHistory : virtual StoreCommand, EvalCommand, MixDefaultProfile
}
};
+struct CmdProfileRollback : virtual StoreCommand, MixDefaultProfile, MixDryRun
+{
+ std::optional<GenerationNumber> version;
+
+ CmdProfileRollback()
+ {
+ addFlag({
+ .longName = "to",
+ .description = "The profile version to roll back to.",
+ .labels = {"version"},
+ .handler = {&version},
+ });
+ }
+
+ std::string description() override
+ {
+ return "roll back to the previous version or a specified version of a profile";
+ }
+
+ std::string doc() override
+ {
+ return
+ #include "profile-rollback.md"
+ ;
+ }
+
+ void run(ref<Store> store) override
+ {
+ switchGeneration(*profile, version, dryRun);
+ }
+};
+
+struct CmdProfileWipeHistory : virtual StoreCommand, MixDefaultProfile, MixDryRun
+{
+ std::optional<std::string> minAge;
+
+ CmdProfileWipeHistory()
+ {
+ addFlag({
+ .longName = "older-than",
+ .description =
+ "Delete versions older than the specified age. *age* "
+ "must be in the format *N*`d`, where *N* denotes a number "
+ "of days.",
+ .labels = {"age"},
+ .handler = {&minAge},
+ });
+ }
+
+ std::string description() override
+ {
+ return "delete non-current versions of a profile";
+ }
+
+ std::string doc() override
+ {
+ return
+ #include "profile-wipe-history.md"
+ ;
+ }
+
+ void run(ref<Store> store) override
+ {
+ if (minAge)
+ deleteGenerationsOlderThan(*profile, *minAge, dryRun);
+ else
+ deleteOldGenerations(*profile, dryRun);
+ }
+};
+
struct CmdProfile : NixMultiCommand
{
CmdProfile()
@@ -553,6 +625,8 @@ struct CmdProfile : NixMultiCommand
{"list", []() { return make_ref<CmdProfileList>(); }},
{"diff-closures", []() { return make_ref<CmdProfileDiffClosures>(); }},
{"history", []() { return make_ref<CmdProfileHistory>(); }},
+ {"rollback", []() { return make_ref<CmdProfileRollback>(); }},
+ {"wipe-history", []() { return make_ref<CmdProfileWipeHistory>(); }},
})
{ }
diff --git a/src/nix/realisation.cc b/src/nix/realisation.cc
index d59e594df..dfa8ff449 100644
--- a/src/nix/realisation.cc
+++ b/src/nix/realisation.cc
@@ -44,7 +44,7 @@ struct CmdRealisationInfo : BuiltPathsCommand, MixJSON
Category category() override { return catSecondary; }
- void run(ref<Store> store, BuiltPaths paths) override
+ void run(ref<Store> store, BuiltPaths && paths) override
{
settings.requireExperimentalFeature("ca-derivations");
RealisedPath::Set realisations;
diff --git a/src/nix/registry.md b/src/nix/registry.md
index 557e5795b..a1674bd2e 100644
--- a/src/nix/registry.md
+++ b/src/nix/registry.md
@@ -41,7 +41,7 @@ A registry is a JSON file with the following format:
```json
{
"version": 2,
- [
+ "flakes": [
{
"from": {
"type": "indirect",
diff --git a/src/nix/repl.cc b/src/nix/repl.cc
index b711f4163..c1233ab46 100644
--- a/src/nix/repl.cc
+++ b/src/nix/repl.cc
@@ -110,11 +110,13 @@ string runNix(Path program, const Strings & args,
{
auto subprocessEnv = getEnv();
subprocessEnv["NIX_CONFIG"] = globalConfig.toKeyValue();
- RunOptions opts(settings.nixBinDir+ "/" + program, args);
- opts.input = input;
- opts.environment = subprocessEnv;
- auto res = runProgram(opts);
+ auto res = runProgram(RunOptions {
+ .program = settings.nixBinDir+ "/" + program,
+ .args = args,
+ .environment = subprocessEnv,
+ .input = input,
+ });
if (!statusOk(res.first))
throw ExecError(res.first, fmt("program '%1%' %2%", program, statusToString(res.first)));
@@ -705,7 +707,7 @@ std::ostream & NixRepl::printValue(std::ostream & str, Value & v, unsigned int m
break;
case nString:
- str << ANSI_YELLOW;
+ str << ANSI_WARNING;
printStringValue(str, v.string.s);
str << ANSI_NORMAL;
break;
diff --git a/src/nix/search.cc b/src/nix/search.cc
index c52a48d4e..0d8fdd5c2 100644
--- a/src/nix/search.cc
+++ b/src/nix/search.cc
@@ -62,6 +62,7 @@ struct CmdSearch : InstallableCommand, MixJSON
void run(ref<Store> store) override
{
settings.readOnlyMode = true;
+ evalSettings.enableImportFromDerivation.setDefault(false);
// Empty search string should match all packages
// Use "^" here instead of ".*" due to differences in resulting highlighting
diff --git a/src/nix/show-derivation.cc b/src/nix/show-derivation.cc
index 2588a011d..c614be68d 100644
--- a/src/nix/show-derivation.cc
+++ b/src/nix/show-derivation.cc
@@ -65,18 +65,18 @@ struct CmdShowDerivation : InstallablesCommand
auto & outputName = _outputName; // work around clang bug
auto outputObj { outputsObj.object(outputName) };
std::visit(overloaded {
- [&](DerivationOutputInputAddressed doi) {
+ [&](const DerivationOutputInputAddressed & doi) {
outputObj.attr("path", store->printStorePath(doi.path));
},
- [&](DerivationOutputCAFixed dof) {
+ [&](const DerivationOutputCAFixed & dof) {
outputObj.attr("path", store->printStorePath(dof.path(*store, drv.name, outputName)));
outputObj.attr("hashAlgo", dof.hash.printMethodAlgo());
outputObj.attr("hash", dof.hash.hash.to_string(Base16, false));
},
- [&](DerivationOutputCAFloating dof) {
+ [&](const DerivationOutputCAFloating & dof) {
outputObj.attr("hashAlgo", makeFileIngestionPrefix(dof.method) + printHashType(dof.hashType));
},
- [&](DerivationOutputDeferred) {},
+ [&](const DerivationOutputDeferred &) {},
}, output.output);
}
}
diff --git a/src/nix/sigs.cc b/src/nix/sigs.cc
index c64b472b6..43e0d9148 100644
--- a/src/nix/sigs.cc
+++ b/src/nix/sigs.cc
@@ -27,7 +27,7 @@ struct CmdCopySigs : StorePathsCommand
return "copy store path signatures from substituters";
}
- void run(ref<Store> store, StorePaths storePaths) override
+ void run(ref<Store> store, StorePaths && storePaths) override
{
if (substituterUris.empty())
throw UsageError("you must specify at least one substituter using '-s'");
@@ -113,7 +113,7 @@ struct CmdSign : StorePathsCommand
return "sign store paths";
}
- void run(ref<Store> store, StorePaths storePaths) override
+ void run(ref<Store> store, StorePaths && storePaths) override
{
if (secretKeyFile.empty())
throw UsageError("you must specify a secret key file using '-k'");
diff --git a/src/nix/store-delete.cc b/src/nix/store-delete.cc
index 10245978e..e4a3cb554 100644
--- a/src/nix/store-delete.cc
+++ b/src/nix/store-delete.cc
@@ -30,7 +30,7 @@ struct CmdStoreDelete : StorePathsCommand
;
}
- void run(ref<Store> store, std::vector<StorePath> storePaths) override
+ void run(ref<Store> store, std::vector<StorePath> && storePaths) override
{
for (auto & path : storePaths)
options.pathsToDelete.insert(path);
diff --git a/src/nix/store-repair.cc b/src/nix/store-repair.cc
index 1c7a4392e..8fcb3639a 100644
--- a/src/nix/store-repair.cc
+++ b/src/nix/store-repair.cc
@@ -17,7 +17,7 @@ struct CmdStoreRepair : StorePathsCommand
;
}
- void run(ref<Store> store, std::vector<StorePath> storePaths) override
+ void run(ref<Store> store, std::vector<StorePath> && storePaths) override
{
for (auto & path : storePaths)
store->repairPath(path);
diff --git a/src/nix/verify.cc b/src/nix/verify.cc
index f5a576064..e92df1303 100644
--- a/src/nix/verify.cc
+++ b/src/nix/verify.cc
@@ -59,7 +59,7 @@ struct CmdVerify : StorePathsCommand
;
}
- void run(ref<Store> store, StorePaths storePaths) override
+ void run(ref<Store> store, StorePaths && storePaths) override
{
std::vector<ref<Store>> substituters;
for (auto & s : substituterUris)
diff --git a/tests/ca/signatures.sh b/tests/ca/signatures.sh
index 4b4e468f7..0c7d974ea 100644
--- a/tests/ca/signatures.sh
+++ b/tests/ca/signatures.sh
@@ -22,8 +22,8 @@ testOneCopy () {
rm -rf "$REMOTE_STORE_DIR"
attrPath="$1"
- nix copy --to $REMOTE_STORE "$attrPath" --file ./content-addressed.nix \
- --secret-key-files "$TEST_ROOT/sk1"
+ nix copy -vvvv --to $REMOTE_STORE "$attrPath" --file ./content-addressed.nix \
+ --secret-key-files "$TEST_ROOT/sk1" --show-trace
ensureCorrectlyCopied "$attrPath"
diff --git a/tests/fetchurl.sh b/tests/fetchurl.sh
index cd84e9a4c..3d1685f43 100644
--- a/tests/fetchurl.sh
+++ b/tests/fetchurl.sh
@@ -5,7 +5,7 @@ clearStore
# Test fetching a flat file.
hash=$(nix-hash --flat --type sha256 ./fetchurl.sh)
-outPath=$(nix-build --expr 'import <nix/fetchurl.nix>' --argstr url file://$(pwd)/fetchurl.sh --argstr sha256 $hash --no-out-link)
+outPath=$(nix-build -vvvvv --expr 'import <nix/fetchurl.nix>' --argstr url file://$(pwd)/fetchurl.sh --argstr sha256 $hash --no-out-link)
cmp $outPath fetchurl.sh
@@ -14,7 +14,7 @@ clearStore
hash=$(nix hash file --type sha512 --base64 ./fetchurl.sh)
-outPath=$(nix-build --expr 'import <nix/fetchurl.nix>' --argstr url file://$(pwd)/fetchurl.sh --argstr sha512 $hash --no-out-link)
+outPath=$(nix-build -vvvvv --expr 'import <nix/fetchurl.nix>' --argstr url file://$(pwd)/fetchurl.sh --argstr sha512 $hash --no-out-link)
cmp $outPath fetchurl.sh
@@ -25,7 +25,7 @@ hash=$(nix hash file ./fetchurl.sh)
[[ $hash =~ ^sha256- ]]
-outPath=$(nix-build --expr 'import <nix/fetchurl.nix>' --argstr url file://$(pwd)/fetchurl.sh --argstr hash $hash --no-out-link)
+outPath=$(nix-build -vvvvv --expr 'import <nix/fetchurl.nix>' --argstr url file://$(pwd)/fetchurl.sh --argstr hash $hash --no-out-link)
cmp $outPath fetchurl.sh
@@ -38,10 +38,10 @@ hash=$(nix hash file --type sha256 --base16 ./fetchurl.sh)
storePath=$(nix --store $other_store store add-file ./fetchurl.sh)
-outPath=$(nix-build --expr 'import <nix/fetchurl.nix>' --argstr url file:///no-such-dir/fetchurl.sh --argstr sha256 $hash --no-out-link --substituters $other_store)
+outPath=$(nix-build -vvvvv --expr 'import <nix/fetchurl.nix>' --argstr url file:///no-such-dir/fetchurl.sh --argstr sha256 $hash --no-out-link --substituters $other_store)
# Test hashed mirrors with an SRI hash.
-nix-build --expr 'import <nix/fetchurl.nix>' --argstr url file:///no-such-dir/fetchurl.sh --argstr hash $(nix hash to-sri --type sha256 $hash) \
+nix-build -vvvvv --expr 'import <nix/fetchurl.nix>' --argstr url file:///no-such-dir/fetchurl.sh --argstr hash $(nix hash to-sri --type sha256 $hash) \
--no-out-link --substituters $other_store
# Test unpacking a NAR.
@@ -55,7 +55,7 @@ nix-store --dump $TEST_ROOT/archive > $nar
hash=$(nix-hash --flat --type sha256 $nar)
-outPath=$(nix-build --expr 'import <nix/fetchurl.nix>' --argstr url file://$nar --argstr sha256 $hash \
+outPath=$(nix-build -vvvvv --expr 'import <nix/fetchurl.nix>' --argstr url file://$nar --argstr sha256 $hash \
--arg unpack true --argstr name xyzzy --no-out-link)
echo $outPath | grep -q 'xyzzy'
@@ -69,7 +69,7 @@ nix-store --delete $outPath
narxz=$TEST_ROOT/archive.nar.xz
rm -f $narxz
xz --keep $nar
-outPath=$(nix-build --expr 'import <nix/fetchurl.nix>' --argstr url file://$narxz --argstr sha256 $hash \
+outPath=$(nix-build -vvvvv --expr 'import <nix/fetchurl.nix>' --argstr url file://$narxz --argstr sha256 $hash \
--arg unpack true --argstr name xyzzy --no-out-link)
test -x $outPath/fetchurl.sh
diff --git a/tests/fixed.nix b/tests/fixed.nix
index 76580ffa1..babe71504 100644
--- a/tests/fixed.nix
+++ b/tests/fixed.nix
@@ -21,6 +21,14 @@ rec {
(f ./fixed.builder2.sh "recursive" "sha1" "vw46m23bizj4n8afrc0fj19wrp7mj3c0")
];
+  # Expression to test that `nix-build --check` also throws an error if the hash of a
+  # fixed-output derivation has changed even if the hash exists in the store (in this
+ # case the hash exists because of `fixed.builder2.sh`, but building a derivation
+ # with the same hash and a different result must throw an error).
+ check = [
+ (f ./fixed.builder1.sh "recursive" "md5" "3670af73070fa14077ad74e0f5ea4e42")
+ ];
+
good2 = [
# Yes, this looks fscked up: builder2 doesn't have that result.
# But Nix sees that an output with the desired hash already
diff --git a/tests/fixed.sh b/tests/fixed.sh
index 90c4c8c32..f1e1ce420 100644
--- a/tests/fixed.sh
+++ b/tests/fixed.sh
@@ -15,6 +15,11 @@ nix path-info --json $path | grep fixed:md5:2qk15sxzzjlnpjk9brn7j8ppcd
echo 'testing good...'
nix-build fixed.nix -A good --no-out-link
+if isDaemonNewer "2.4pre20210927"; then
+ echo 'testing --check...'
+ nix-build fixed.nix -A check --check && fail "should fail"
+fi
+
echo 'testing good2...'
nix-build fixed.nix -A good2 --no-out-link
diff --git a/tests/flakes.sh b/tests/flakes.sh
index f5c7b6804..26cdf27b7 100644
--- a/tests/flakes.sh
+++ b/tests/flakes.sh
@@ -23,6 +23,7 @@ flake6Dir=$TEST_ROOT/flake6
flake7Dir=$TEST_ROOT/flake7
templatesDir=$TEST_ROOT/templates
nonFlakeDir=$TEST_ROOT/nonFlake
+badFlakeDir=$TEST_ROOT/badFlake
flakeA=$TEST_ROOT/flakeA
flakeB=$TEST_ROOT/flakeB
flakeGitBare=$TEST_ROOT/flakeGitBare
@@ -391,12 +392,14 @@ git -C $templatesDir commit -m 'Initial'
nix flake check templates
nix flake show templates
+nix flake show templates --json | jq
(cd $flake7Dir && nix flake init)
(cd $flake7Dir && nix flake init) # check idempotence
git -C $flake7Dir add flake.nix
nix flake check $flake7Dir
nix flake show $flake7Dir
+nix flake show $flake7Dir --json | jq
git -C $flake7Dir commit -a -m 'Initial'
# Test 'nix flake new'.
@@ -763,7 +766,7 @@ cat > $flakeFollowsA/flake.nix <<EOF
{
description = "Flake A";
inputs = {
- B.url = "path:./../../flakeB";
+ B.url = "path:../flakeB";
};
outputs = { ... }: {};
}
@@ -771,4 +774,13 @@ EOF
git -C $flakeFollowsA add flake.nix
-nix flake lock $flakeFollowsA 2>&1 | grep 'this is a security violation'
+nix flake lock $flakeFollowsA 2>&1 | grep 'points outside'
+
+# Test that a flake in the store does not evaluate
+rm -rf $badFlakeDir
+mkdir $badFlakeDir
+echo INVALID > $badFlakeDir/flake.nix
+nix store delete $(nix store add-path $badFlakeDir)
+
+[[ $(nix path-info $(nix store add-path $flake1Dir)) =~ flake1 ]]
+[[ $(nix path-info path:$(nix store add-path $flake1Dir)) =~ simple ]]
diff --git a/tests/lang/eval-fail-antiquoted-path.nix b/tests/lang/eval-fail-nonexist-path.nix
index f2f08107b..f2f08107b 100644
--- a/tests/lang/eval-fail-antiquoted-path.nix
+++ b/tests/lang/eval-fail-nonexist-path.nix
diff --git a/tests/lang/eval-okay-path-antiquotation.nix b/tests/lang/eval-okay-path-antiquotation.nix
new file mode 100644
index 000000000..497d7c1c7
--- /dev/null
+++ b/tests/lang/eval-okay-path-antiquotation.nix
@@ -0,0 +1,12 @@
+let
+ foo = "foo";
+in
+{
+ simple = ./${foo};
+ surrounded = ./a-${foo}-b;
+ absolute = /${foo};
+ expr = ./${foo + "/bar"};
+ home = ~/${foo};
+ notfirst = ./bar/${foo};
+ slashes = /${foo}/${"bar"};
+}
diff --git a/tests/local-store.sh b/tests/local-store.sh
index 4ec3d64b0..0247346f1 100644
--- a/tests/local-store.sh
+++ b/tests/local-store.sh
@@ -15,6 +15,5 @@ PATH1=$(nix path-info --store ./x $CORRECT_PATH)
PATH2=$(nix path-info --store "$PWD/x" $CORRECT_PATH)
[ $CORRECT_PATH == $PATH2 ]
-# FIXME we could also test the query parameter version:
-# PATH3=$(nix path-info --store "local?store=$PWD/x" $CORRECT_PATH)
-# [ $CORRECT_PATH == $PATH3 ]
+PATH3=$(nix path-info --store "local?root=$PWD/x" $CORRECT_PATH)
+[ $CORRECT_PATH == $PATH3 ]
diff --git a/tests/recursive.sh b/tests/recursive.sh
index b6740877d..91518d67d 100644
--- a/tests/recursive.sh
+++ b/tests/recursive.sh
@@ -1,5 +1,8 @@
source common.sh
+sed -i 's/experimental-features .*/& recursive-nix/' "$NIX_CONF_DIR"/nix.conf
+restartDaemon
+
# FIXME
if [[ $(uname) != Linux ]]; then exit 99; fi