-rw-r--r--  .github/workflows/test.yml | 44
-rw-r--r--  Makefile.config.in | 1
-rw-r--r--  doc/manual/src/advanced-topics/distributed-builds.md | 10
-rw-r--r--  doc/manual/src/command-ref/nix-store.md | 2
-rw-r--r--  doc/manual/src/command-ref/opt-common.md | 9
-rw-r--r--  doc/manual/src/installation/prerequisites-source.md | 3
-rw-r--r--  flake.nix | 45
-rw-r--r--  mk/jars.mk | 36
-rw-r--r--  mk/lib.mk | 13
-rw-r--r--  mk/patterns.mk | 6
-rwxr-xr-x  mk/run_test.sh | 2
-rw-r--r--  mk/tests.mk | 2
-rw-r--r--  mk/tracing.mk | 2
-rwxr-xr-x  scripts/bigsur-nixbld-user-migration.sh | 46
-rw-r--r--  scripts/install-darwin-multi-user.sh | 2
-rw-r--r--  scripts/install-multi-user.sh | 8
-rw-r--r--  src/libcmd/command.hh | 2
-rw-r--r--  src/libcmd/installables.cc | 9
-rw-r--r--  src/libexpr/eval.cc | 4
-rw-r--r--  src/libexpr/flake/flake.hh | 2
-rw-r--r--  src/libfetchers/fetchers.hh | 8
-rw-r--r--  src/libfetchers/git.cc | 14
-rw-r--r--  src/libfetchers/github.cc | 6
-rw-r--r--  src/libfetchers/registry.cc | 2
-rw-r--r--  src/libfetchers/tarball.cc | 19
-rw-r--r--  src/libmain/progress-bar.cc | 1
-rw-r--r--  src/libstore/build/derivation-goal.cc | 43
-rw-r--r--  src/libstore/build/derivation-goal.hh | 3
-rw-r--r--  src/libstore/build/drv-output-substitution-goal.cc | 95
-rw-r--r--  src/libstore/build/drv-output-substitution-goal.hh | 50
-rw-r--r--  src/libstore/build/entry-points.cc | 8
-rw-r--r--  src/libstore/build/local-derivation-goal.cc | 33
-rw-r--r--  src/libstore/build/local-derivation-goal.hh | 2
-rw-r--r--  src/libstore/build/substitution-goal.cc | 34
-rw-r--r--  src/libstore/build/substitution-goal.hh | 9
-rw-r--r--  src/libstore/build/worker.cc | 34
-rw-r--r--  src/libstore/build/worker.hh | 17
-rw-r--r--  src/libstore/ca-specific-schema.sql | 1
-rw-r--r--  src/libstore/daemon.cc | 5
-rw-r--r--  src/libstore/derivations.hh | 2
-rw-r--r--  src/libstore/globals.cc | 2
-rw-r--r--  src/libstore/globals.hh | 5
-rw-r--r--  src/libstore/legacy-ssh-store.cc | 2
-rw-r--r--  src/libstore/local-binary-cache-store.cc | 5
-rw-r--r--  src/libstore/local-store.cc | 41
-rw-r--r--  src/libstore/local-store.hh | 9
-rw-r--r--  src/libstore/local.mk | 2
-rw-r--r--  src/libstore/machines.cc | 6
-rw-r--r--  src/libstore/misc.cc | 35
-rw-r--r--  src/libstore/realisation.cc | 46
-rw-r--r--  src/libstore/realisation.hh | 8
-rw-r--r--  src/libstore/remote-store.cc | 20
-rw-r--r--  src/libstore/serve-protocol.hh | 2
-rw-r--r--  src/libstore/ssh-store.cc | 2
-rw-r--r--  src/libstore/ssh.cc | 16
-rw-r--r--  src/libstore/ssh.hh | 3
-rw-r--r--  src/libstore/store-api.cc | 2
-rw-r--r--  src/libstore/store-api.hh | 9
-rw-r--r--  src/libstore/worker-protocol.hh | 2
-rw-r--r--  src/libutil/config.cc | 28
-rw-r--r--  src/libutil/config.hh | 22
-rw-r--r--  src/libutil/tests/config.cc | 32
-rw-r--r--  src/libutil/tests/url.cc | 18
-rw-r--r--  src/libutil/url-parts.hh | 2
-rw-r--r--  src/libutil/util.cc | 2
-rwxr-xr-x  src/nix-build/nix-build.cc | 1
-rw-r--r--  src/nix-store/nix-store.cc | 2
-rw-r--r--  src/nix/build.md | 2
-rw-r--r--  src/nix/flake-init.md | 2
-rw-r--r--  src/nix/flake-list-inputs.md | 23
-rw-r--r--  src/nix/flake-metadata.md (renamed from src/nix/flake-info.md) | 27
-rw-r--r--  src/nix/flake.cc | 154
-rw-r--r--  src/nix/flake.md | 4
-rw-r--r--  src/nix/main.cc | 8
-rw-r--r--  src/nix/realisation.cc | 78
-rw-r--r--  src/nix/realisation/info.md | 15
-rw-r--r--  src/nix/store-prefetch-file.md | 2
-rw-r--r--  tests/build-hook-ca-fixed.nix | 56
-rw-r--r--  tests/build-hook-ca-floating.nix (renamed from tests/build-hook-ca.nix) | 0
-rw-r--r--  tests/build-remote-content-addressed-fixed.sh | 5
-rw-r--r--  tests/build-remote-content-addressed-floating.sh | 2
-rw-r--r--  tests/ca/build.sh (renamed from tests/content-addressed.sh) | 2
-rw-r--r--  tests/ca/common.sh | 1
-rw-r--r--  tests/ca/content-addressed.nix (renamed from tests/content-addressed.nix) | 2
-rwxr-xr-x  tests/ca/nix-copy.sh (renamed from tests/nix-copy-content-addressed.sh) | 0
-rw-r--r--  tests/ca/signatures.sh | 39
-rw-r--r--  tests/ca/substitute.sh | 24
-rw-r--r--  tests/common.sh.in | 11
-rw-r--r--  tests/config.sh | 38
-rw-r--r--  tests/db-migration.sh | 26
-rw-r--r--  tests/fetchGit.sh | 10
-rw-r--r--  tests/flakes.sh | 44
-rw-r--r--  tests/local.mk | 10
-rwxr-xr-x  tests/push-to-store.sh | 6
-rw-r--r--  tests/remote-store.sh | 4
95 files changed, 1130 insertions, 423 deletions
diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml
index bde6106e0..33035ca1e 100644
--- a/.github/workflows/test.yml
+++ b/.github/workflows/test.yml
@@ -8,52 +8,62 @@ jobs:
matrix:
os: [ubuntu-latest, macos-latest]
runs-on: ${{ matrix.os }}
- env:
- CACHIX_NAME: nix-ci
+
steps:
- uses: actions/checkout@v2.3.4
with:
fetch-depth: 0
- - uses: cachix/install-nix-action@v12
- - uses: cachix/cachix-action@v8
+ - uses: cachix/install-nix-action@v13
+ - run: echo CACHIX_NAME="$(echo $GITHUB_REPOSITORY-install-tests | tr "[A-Z]/" "[a-z]-")" >> $GITHUB_ENV
+ - uses: cachix/cachix-action@v9
with:
name: '${{ env.CACHIX_NAME }}'
signingKey: '${{ secrets.CACHIX_SIGNING_KEY }}'
+ authToken: '${{ secrets.CACHIX_AUTH_TOKEN }}'
#- run: nix flake check
- run: nix-build -A checks.$(if [[ `uname` = Linux ]]; then echo x86_64-linux; else echo x86_64-darwin; fi)
+ check_cachix:
+ name: Cachix secret present for installer tests
+ runs-on: ubuntu-latest
+ outputs:
+ secret: ${{ steps.secret.outputs.secret }}
+ steps:
+ - name: Check for Cachix secret
+ id: secret
+ env:
+ _CACHIX_SECRETS: ${{ secrets.CACHIX_SIGNING_KEY }}${{ secrets.CACHIX_AUTH_TOKEN }}
+ run: echo "::set-output name=secret::${{ env._CACHIX_SECRETS != '' }}"
installer:
- if: github.event_name == 'push'
- needs: tests
+ needs: [tests, check_cachix]
+ if: github.event_name == 'push' && needs.check_cachix.outputs.secret == 'true'
runs-on: ubuntu-latest
- env:
- CACHIX_NAME: nix-ci
outputs:
installerURL: ${{ steps.prepare-installer.outputs.installerURL }}
steps:
- uses: actions/checkout@v2.3.4
with:
fetch-depth: 0
- - uses: cachix/install-nix-action@v12
- - uses: cachix/cachix-action@v8
+ - run: echo CACHIX_NAME="$(echo $GITHUB_REPOSITORY-install-tests | tr "[A-Z]/" "[a-z]-")" >> $GITHUB_ENV
+ - uses: cachix/install-nix-action@v13
+ - uses: cachix/cachix-action@v9
with:
name: '${{ env.CACHIX_NAME }}'
signingKey: '${{ secrets.CACHIX_SIGNING_KEY }}'
+ authToken: '${{ secrets.CACHIX_AUTH_TOKEN }}'
- id: prepare-installer
run: scripts/prepare-installer-for-github-actions
installer_test:
- if: github.event_name == 'push'
- needs: installer
+ needs: [installer, check_cachix]
+ if: github.event_name == 'push' && needs.check_cachix.outputs.secret == 'true'
strategy:
matrix:
os: [ubuntu-latest, macos-latest]
runs-on: ${{ matrix.os }}
- env:
- CACHIX_NAME: nix-ci
steps:
- uses: actions/checkout@v2.3.4
- - uses: cachix/install-nix-action@master
+ - run: echo CACHIX_NAME="$(echo $GITHUB_REPOSITORY-install-tests | tr "[A-Z]/" "[a-z]-")" >> $GITHUB_ENV
+ - uses: cachix/install-nix-action@v13
with:
install_url: '${{needs.installer.outputs.installerURL}}'
- install_options: '--tarball-url-prefix https://${{ env.CACHIX_NAME }}.cachix.org/serve'
+ install_options: "--tarball-url-prefix https://${{ env.CACHIX_NAME }}.cachix.org/serve"
- run: nix-instantiate -E 'builtins.currentTime' --eval
- \ No newline at end of file
diff --git a/Makefile.config.in b/Makefile.config.in
index 9d0500e48..3c1f01d1e 100644
--- a/Makefile.config.in
+++ b/Makefile.config.in
@@ -17,6 +17,7 @@ LIBBROTLI_LIBS = @LIBBROTLI_LIBS@
LIBCURL_LIBS = @LIBCURL_LIBS@
LIBLZMA_LIBS = @LIBLZMA_LIBS@
OPENSSL_LIBS = @OPENSSL_LIBS@
+LIBSECCOMP_LIBS = @LIBSECCOMP_LIBS@
PACKAGE_NAME = @PACKAGE_NAME@
PACKAGE_VERSION = @PACKAGE_VERSION@
SHELL = @bash@
diff --git a/doc/manual/src/advanced-topics/distributed-builds.md b/doc/manual/src/advanced-topics/distributed-builds.md
index c6966a50b..580b36736 100644
--- a/doc/manual/src/advanced-topics/distributed-builds.md
+++ b/doc/manual/src/advanced-topics/distributed-builds.md
@@ -37,7 +37,7 @@ then you need to ensure that the `PATH` of non-interactive login shells
contains Nix.
> **Warning**
->
+>
> If you are building via the Nix daemon, it is the Nix daemon user
> account (that is, `root`) that should have SSH access to the remote
> machine. If you can’t or don’t want to configure `root` to be able to
@@ -52,7 +52,7 @@ example, the following command allows you to build a derivation for
```console
$ uname
Linux
-
+
$ nix build \
'(with import <nixpkgs> { system = "x86_64-darwin"; }; runCommand "foo" {} "uname > $out")' \
--builders 'ssh://mac x86_64-darwin'
@@ -103,7 +103,7 @@ default, set it to `-`.
```nix
requiredSystemFeatures = [ "kvm" ];
```
-
+
will cause the build to be performed on a machine that has the `kvm`
feature.
@@ -112,6 +112,10 @@ default, set it to `-`.
features appear in the derivation’s `requiredSystemFeatures`
attribute.
+8. The (base64-encoded) public host key of the remote machine. If omitted, SSH
+ will use its regular known-hosts file. Specifically, the field is calculated
+ via `base64 -w0 /etc/ssh/ssh_host_ed25519_key.pub`.
+
For example, the machine specification
nix@scratchy.labs.cs.uu.nl i686-linux /home/nix/.ssh/id_scratchy_auto 8 1 kvm
diff --git a/doc/manual/src/command-ref/nix-store.md b/doc/manual/src/command-ref/nix-store.md
index 361c20cc9..49d06f31e 100644
--- a/doc/manual/src/command-ref/nix-store.md
+++ b/doc/manual/src/command-ref/nix-store.md
@@ -79,7 +79,7 @@ paths. Realisation is a somewhat overloaded term:
system). If the path is already valid, we are done immediately.
Otherwise, the path and any missing paths in its closure may be
produced through substitutes. If there are no (successful)
- subsitutes, realisation fails.
+ substitutes, realisation fails.
The output path of each derivation is printed on standard output. (For
non-derivations argument, the argument itself is printed.)
diff --git a/doc/manual/src/command-ref/opt-common.md b/doc/manual/src/command-ref/opt-common.md
index 9650f53f8..bc8eb6796 100644
--- a/doc/manual/src/command-ref/opt-common.md
+++ b/doc/manual/src/command-ref/opt-common.md
@@ -134,15 +134,6 @@ Most Nix commands accept the following command-line options:
failure in obtaining the substitutes to lead to a full build from
source (with the related consumption of resources).
- - `--no-build-hook`
- Disables the build hook mechanism. This allows to ignore remote
- builders if they are setup on the machine.
-
- It's useful in cases where the bandwidth between the client and the
- remote builder is too low. In that case it can take more time to
- upload the sources to the remote builder and fetch back the result
- than to do the computation locally.
-
- `--readonly-mode`
When this option is used, no attempt is made to open the Nix
database. Most Nix operations do need database access, so those
diff --git a/doc/manual/src/installation/prerequisites-source.md b/doc/manual/src/installation/prerequisites-source.md
index 6825af707..12758c5e1 100644
--- a/doc/manual/src/installation/prerequisites-source.md
+++ b/doc/manual/src/installation/prerequisites-source.md
@@ -69,3 +69,6 @@
`--disable-seccomp-sandboxing` option to the `configure` script (Not
recommended unless your system doesn't support `libseccomp`). To get
the library, visit <https://github.com/seccomp/libseccomp>.
+
+ - Niels Lohmann's [JSON library](https://github.com/nlohmann/json).
+
diff --git a/flake.nix b/flake.nix
index e59ec9a35..58dc5019d 100644
--- a/flake.nix
+++ b/flake.nix
@@ -90,7 +90,7 @@
lowdown
gmock
]
- ++ lib.optionals stdenv.isLinux [libseccomp utillinuxMinimal]
+ ++ lib.optionals stdenv.isLinux [libseccomp (pkgs.util-linuxMinimal or pkgs.utillinuxMinimal)]
++ lib.optional (stdenv.isLinux || stdenv.isDarwin) libsodium
++ lib.optional stdenv.isx86_64 libcpuid;
@@ -144,12 +144,46 @@
echo "file installer $out/install" >> $out/nix-support/hydra-build-products
'';
+ testNixVersions = pkgs: client: daemon: with commonDeps pkgs; pkgs.stdenv.mkDerivation {
+ NIX_DAEMON_PACKAGE = daemon;
+ NIX_CLIENT_PACKAGE = client;
+ # Must keep this name short as OSX has a rather strict limit on the
+ # socket path length, and this name appears in the path of the
+ # nix-daemon socket used in the tests
+ name = "nix-tests";
+ inherit version;
+
+ src = self;
+
+ VERSION_SUFFIX = versionSuffix;
+
+ nativeBuildInputs = nativeBuildDeps;
+ buildInputs = buildDeps ++ awsDeps;
+ propagatedBuildInputs = propagatedDeps;
+
+ enableParallelBuilding = true;
+
+ dontBuild = true;
+ doInstallCheck = true;
+
+ installPhase = ''
+ mkdir -p $out
+ '';
+ installCheckPhase = "make installcheck";
+
+ };
+
in {
# A Nixpkgs overlay that overrides the 'nix' and
# 'nix.perl-bindings' packages.
overlay = final: prev: {
+ # An older version of Nix to test against when using the daemon.
+ # Currently using `nixUnstable` as the stable one doesn't respect
+ # `NIX_DAEMON_SOCKET_PATH` which is needed for the tests.
+ nixStable = prev.nix;
+
nix = with final; with commonDeps pkgs; stdenv.mkDerivation {
name = "nix-${version}";
inherit version;
@@ -434,6 +468,15 @@
checks = forAllSystems (system: {
binaryTarball = self.hydraJobs.binaryTarball.${system};
perlBindings = self.hydraJobs.perlBindings.${system};
+ installTests =
+ let pkgs = nixpkgsFor.${system}; in
+ pkgs.runCommand "install-tests" {
+ againstSelf = testNixVersions pkgs pkgs.nix pkgs.pkgs.nix;
+ againstCurrentUnstable = testNixVersions pkgs pkgs.nix pkgs.nixUnstable;
+ # Disabled because the latest stable version doesn't handle
+ # `NIX_DAEMON_SOCKET_PATH` which is required for the tests to work
+ # againstLatestStable = testNixVersions pkgs pkgs.nix pkgs.nixStable;
+ } "touch $out";
});
packages = forAllSystems (system: {
diff --git a/mk/jars.mk b/mk/jars.mk
deleted file mode 100644
index c8513e664..000000000
--- a/mk/jars.mk
+++ /dev/null
@@ -1,36 +0,0 @@
-define build-jar
-
- $(1)_NAME ?= $(1)
-
- _d := $$(strip $$($(1)_DIR))
-
- $(1)_PATH := $$(_d)/$$($(1)_NAME).jar
-
- $(1)_TMPDIR := $$(_d)/.$$($(1)_NAME).jar.tmp
-
- _jars := $$(foreach jar, $$($(1)_JARS), $$($$(jar)_PATH))
-
- $$($(1)_PATH): $$($(1)_SOURCES) $$(_jars) $$($(1)_EXTRA_DEPS)| $$($(1)_ORDER_AFTER)
- @rm -rf $$($(1)_TMPDIR)
- @mkdir -p $$($(1)_TMPDIR)
- $$(trace-javac) javac $(GLOBAL_JAVACFLAGS) $$($(1)_JAVACFLAGS) -d $$($(1)_TMPDIR) \
- $$(foreach fn, $$($(1)_SOURCES), '$$(fn)') \
- -cp "$$(subst $$(space),,$$(foreach jar,$$($(1)_JARS),$$($$(jar)_PATH):))$$$$CLASSPATH"
- @echo -e '$$(subst $$(newline),\n,$$($(1)_MANIFEST))' > $$($(1)_PATH).manifest
- $$(trace-jar) jar cfm $$($(1)_PATH) $$($(1)_PATH).manifest -C $$($(1)_TMPDIR) .
- @rm $$($(1)_PATH).manifest
- @rm -rf $$($(1)_TMPDIR)
-
- $(1)_INSTALL_DIR ?= $$(jardir)
-
- $(1)_INSTALL_PATH := $$($(1)_INSTALL_DIR)/$$($(1)_NAME).jar
-
- $$(eval $$(call install-file-as, $$($(1)_PATH), $$($(1)_INSTALL_PATH), 0644))
-
- install: $$($(1)_INSTALL_PATH)
-
- jars-list += $$($(1)_PATH)
-
- clean-files += $$($(1)_PATH)
-
-endef
diff --git a/mk/lib.mk b/mk/lib.mk
index a09ebaa97..975102531 100644
--- a/mk/lib.mk
+++ b/mk/lib.mk
@@ -31,7 +31,6 @@ libdir ?= $(prefix)/lib
bindir ?= $(prefix)/bin
libexecdir ?= $(prefix)/libexec
datadir ?= $(prefix)/share
-jardir ?= $(datadir)/java
localstatedir ?= $(prefix)/var
sysconfdir ?= $(prefix)/etc
mandir ?= $(prefix)/share/man
@@ -74,7 +73,6 @@ BUILD_DEBUG ?= 1
ifeq ($(BUILD_DEBUG), 1)
GLOBAL_CFLAGS += -g
GLOBAL_CXXFLAGS += -g
- GLOBAL_JAVACFLAGS += -g
endif
@@ -84,7 +82,6 @@ include mk/clean.mk
include mk/install.mk
include mk/libraries.mk
include mk/programs.mk
-include mk/jars.mk
include mk/patterns.mk
include mk/templates.mk
include mk/tests.mk
@@ -102,7 +99,6 @@ $(foreach mf, $(makefiles), $(eval $(call include-sub-makefile, $(mf))))
# Instantiate stuff.
$(foreach lib, $(libraries), $(eval $(call build-library,$(lib))))
$(foreach prog, $(programs), $(eval $(call build-program,$(prog))))
-$(foreach jar, $(jars), $(eval $(call build-jar,$(jar))))
$(foreach script, $(bin-scripts), $(eval $(call install-program-in,$(script),$(bindir))))
$(foreach script, $(bin-scripts), $(eval programs-list += $(script)))
$(foreach script, $(noinst-scripts), $(eval programs-list += $(script)))
@@ -113,7 +109,7 @@ $(foreach file, $(man-pages), $(eval $(call install-data-in, $(file), $(mandir)/
.PHONY: default all man help
-all: $(programs-list) $(libs-list) $(jars-list) $(man-pages)
+all: $(programs-list) $(libs-list) $(man-pages)
man: $(man-pages)
@@ -138,12 +134,6 @@ ifdef libs-list
@echo ""
@for i in $(libs-list); do echo " $$i"; done
endif
-ifdef jars-list
- @echo ""
- @echo "The following JARs can be built:"
- @echo ""
- @for i in $(jars-list); do echo " $$i"; done
-endif
@echo ""
@echo "The following variables control the build:"
@echo ""
@@ -153,4 +143,5 @@ endif
@echo " CFLAGS: Flags for the C compiler"
@echo " CXX ($(CXX)): C++ compiler to be used"
@echo " CXXFLAGS: Flags for the C++ compiler"
+ @echo " CPPFLAGS: C preprocessor flags, used for both CC and CXX"
@$(print-var-help)
diff --git a/mk/patterns.mk b/mk/patterns.mk
index 7319f4cdd..86a724806 100644
--- a/mk/patterns.mk
+++ b/mk/patterns.mk
@@ -1,11 +1,11 @@
$(buildprefix)%.o: %.cc
@mkdir -p "$(dir $@)"
- $(trace-cxx) $(CXX) -o $@ -c $< $(GLOBAL_CXXFLAGS_PCH) $(GLOBAL_CXXFLAGS) $(CXXFLAGS) $($@_CXXFLAGS) -MMD -MF $(call filename-to-dep, $@) -MP
+ $(trace-cxx) $(CXX) -o $@ -c $< $(CPPFLAGS) $(GLOBAL_CXXFLAGS_PCH) $(GLOBAL_CXXFLAGS) $(CXXFLAGS) $($@_CXXFLAGS) -MMD -MF $(call filename-to-dep, $@) -MP
$(buildprefix)%.o: %.cpp
@mkdir -p "$(dir $@)"
- $(trace-cxx) $(CXX) -o $@ -c $< $(GLOBAL_CXXFLAGS_PCH) $(GLOBAL_CXXFLAGS) $(CXXFLAGS) $($@_CXXFLAGS) -MMD -MF $(call filename-to-dep, $@) -MP
+ $(trace-cxx) $(CXX) -o $@ -c $< $(CPPFLAGS) $(GLOBAL_CXXFLAGS_PCH) $(GLOBAL_CXXFLAGS) $(CXXFLAGS) $($@_CXXFLAGS) -MMD -MF $(call filename-to-dep, $@) -MP
$(buildprefix)%.o: %.c
@mkdir -p "$(dir $@)"
- $(trace-cc) $(CC) -o $@ -c $< $(GLOBAL_CFLAGS) $(CFLAGS) $($@_CFLAGS) -MMD -MF $(call filename-to-dep, $@) -MP
+ $(trace-cc) $(CC) -o $@ -c $< $(CPPFLAGS) $(GLOBAL_CFLAGS) $(CFLAGS) $($@_CFLAGS) -MMD -MF $(call filename-to-dep, $@) -MP
diff --git a/mk/run_test.sh b/mk/run_test.sh
index 6af5b070a..3783d3bf7 100755
--- a/mk/run_test.sh
+++ b/mk/run_test.sh
@@ -14,7 +14,7 @@ if [ -t 1 ]; then
yellow=""
normal=""
fi
-(cd $(dirname $1) && env ${TESTS_ENVIRONMENT} init.sh 2>/dev/null > /dev/null)
+(cd tests && env ${TESTS_ENVIRONMENT} init.sh 2>/dev/null > /dev/null)
log="$(cd $(dirname $1) && env ${TESTS_ENVIRONMENT} $(basename $1) 2>&1)"
status=$?
if [ $status -eq 0 ]; then
diff --git a/mk/tests.mk b/mk/tests.mk
index c1e140bac..21bdc5748 100644
--- a/mk/tests.mk
+++ b/mk/tests.mk
@@ -8,7 +8,7 @@ define run-install-test
.PHONY: $1.test
$1.test: $1 $(test-deps)
- @env TEST_NAME=$(notdir $(basename $1)) TESTS_ENVIRONMENT="$(tests-environment)" mk/run_test.sh $1 < /dev/null
+ @env TEST_NAME=$(basename $1) TESTS_ENVIRONMENT="$(tests-environment)" mk/run_test.sh $1 < /dev/null
endef
diff --git a/mk/tracing.mk b/mk/tracing.mk
index 54c77ab60..1fc5573d7 100644
--- a/mk/tracing.mk
+++ b/mk/tracing.mk
@@ -8,8 +8,6 @@ ifeq ($(V), 0)
trace-ld = @echo " LD " $@;
trace-ar = @echo " AR " $@;
trace-install = @echo " INST " $@;
- trace-javac = @echo " JAVAC " $@;
- trace-jar = @echo " JAR " $@;
trace-mkdir = @echo " MKDIR " $@;
trace-test = @echo " TEST " $@;
diff --git a/scripts/bigsur-nixbld-user-migration.sh b/scripts/bigsur-nixbld-user-migration.sh
new file mode 100755
index 000000000..f1619fd56
--- /dev/null
+++ b/scripts/bigsur-nixbld-user-migration.sh
@@ -0,0 +1,46 @@
+#!/usr/bin/env bash
+
+((NEW_NIX_FIRST_BUILD_UID=301))
+
+id_available(){
+ dscl . list /Users UniqueID | grep -E '\b'$1'\b' >/dev/null
+}
+
+change_nixbld_names_and_ids(){
+ local name uid next_id
+ ((next_id=NEW_NIX_FIRST_BUILD_UID))
+ echo "Attempting to migrate nixbld users."
+ echo "Each user should change from nixbld# to _nixbld#"
+ echo "and their IDs relocated to $next_id+"
+ while read -r name uid; do
+ echo " Checking $name (uid: $uid)"
+ # iterate for a clean ID
+ while id_available "$next_id"; do
+ ((next_id++))
+ if ((next_id >= 400)); then
+ echo "We've hit UID 400 without placing all of your users :("
+ echo "You should use the commands in this script as a starting"
+ echo "point to review your UID-space and manually move the"
+ echo "remaining users (or delete them, if you don't need them)."
+ exit 1
+ fi
+ done
+
+ if [[ $name == _* ]]; then
+ echo " It looks like $name has already been renamed--skipping."
+ else
+ # first 3 are cleanup, it's OK if they aren't here
+ sudo dscl . delete /Users/$name dsAttrTypeNative:_writers_passwd &>/dev/null || true
+ sudo dscl . change /Users/$name NFSHomeDirectory "/private/var/empty 1" "/var/empty" &>/dev/null || true
+ # remove existing user from group
+ sudo dseditgroup -o edit -t user -d $name nixbld || true
+ sudo dscl . change /Users/$name UniqueID $uid $next_id
+ sudo dscl . change /Users/$name RecordName $name _$name
+ # add renamed user to group
+ sudo dseditgroup -o edit -t user -a _$name nixbld
+ echo " $name migrated to _$name (uid: $next_id)"
+ fi
+ done < <(dscl . list /Users UniqueID | grep nixbld | sort -n -k2)
+}
+
+change_nixbld_names_and_ids
diff --git a/scripts/install-darwin-multi-user.sh b/scripts/install-darwin-multi-user.sh
index a27be2a43..f6575ae2f 100644
--- a/scripts/install-darwin-multi-user.sh
+++ b/scripts/install-darwin-multi-user.sh
@@ -4,6 +4,8 @@ set -eu
set -o pipefail
readonly PLIST_DEST=/Library/LaunchDaemons/org.nixos.nix-daemon.plist
+NIX_FIRST_BUILD_UID="301"
+NIX_BUILD_USER_NAME_TEMPLATE="_nixbld%d"
dsclattr() {
/usr/bin/dscl . -read "$1" \
diff --git a/scripts/install-multi-user.sh b/scripts/install-multi-user.sh
index 5e8b4ac18..66825f9de 100644
--- a/scripts/install-multi-user.sh
+++ b/scripts/install-multi-user.sh
@@ -25,13 +25,15 @@ readonly RED='\033[31m'
readonly NIX_USER_COUNT=${NIX_USER_COUNT:-32}
readonly NIX_BUILD_GROUP_ID="30000"
readonly NIX_BUILD_GROUP_NAME="nixbld"
-readonly NIX_FIRST_BUILD_UID="30001"
+# darwin installer needs to override these
+NIX_FIRST_BUILD_UID="30001"
+NIX_BUILD_USER_NAME_TEMPLATE="nixbld%d"
# Please don't change this. We don't support it, because the
# default shell profile that comes with Nix doesn't support it.
readonly NIX_ROOT="/nix"
readonly NIX_EXTRA_CONF=${NIX_EXTRA_CONF:-}
-readonly PROFILE_TARGETS=("/etc/bashrc" "/etc/profile.d/nix.sh" "/etc/zshenv")
+readonly PROFILE_TARGETS=("/etc/bashrc" "/etc/profile.d/nix.sh" "/etc/zshenv" "/etc/bash.bashrc" "/etc/zsh/zshenv")
readonly PROFILE_BACKUP_SUFFIX=".backup-before-nix"
readonly PROFILE_NIX_FILE="$NIX_ROOT/var/nix/profiles/default/etc/profile.d/nix-daemon.sh"
@@ -104,7 +106,7 @@ EOF
}
nix_user_for_core() {
- printf "nixbld%d" "$1"
+ printf "$NIX_BUILD_USER_NAME_TEMPLATE" "$1"
}
nix_uid_for_core() {
diff --git a/src/libcmd/command.hh b/src/libcmd/command.hh
index c02193924..e66c697eb 100644
--- a/src/libcmd/command.hh
+++ b/src/libcmd/command.hh
@@ -48,6 +48,8 @@ struct EvalCommand : virtual StoreCommand, MixEvalArgs
ref<EvalState> getEvalState();
std::shared_ptr<EvalState> evalState;
+
+ ~EvalCommand();
};
struct MixFlakeOptions : virtual Args, EvalCommand
diff --git a/src/libcmd/installables.cc b/src/libcmd/installables.cc
index 4739dc974..898e642a5 100644
--- a/src/libcmd/installables.cc
+++ b/src/libcmd/installables.cc
@@ -111,10 +111,11 @@ MixFlakeOptions::MixFlakeOptions()
addFlag({
.longName = "override-input",
- .description = "Override a specific flake input (e.g. `dwarffs/nixpkgs`).",
+ .description = "Override a specific flake input (e.g. `dwarffs/nixpkgs`). This implies `--no-write-lock-file`.",
.category = category,
.labels = {"input-path", "flake-url"},
.handler = {[&](std::string inputPath, std::string flakeRef) {
+ lockFlags.writeLockFile = false;
lockFlags.inputOverrides.insert_or_assign(
flake::parseInputPath(inputPath),
parseFlakeRef(flakeRef, absPath(".")));
@@ -280,6 +281,12 @@ ref<EvalState> EvalCommand::getEvalState()
return ref<EvalState>(evalState);
}
+EvalCommand::~EvalCommand()
+{
+ if (evalState)
+ evalState->printStats();
+}
+
void completeFlakeRef(ref<Store> store, std::string_view prefix)
{
if (prefix == "")
diff --git a/src/libexpr/eval.cc b/src/libexpr/eval.cc
index e2f2308aa..3afe2e47b 100644
--- a/src/libexpr/eval.cc
+++ b/src/libexpr/eval.cc
@@ -592,10 +592,8 @@ Value & EvalState::getBuiltin(const string & name)
std::optional<EvalState::Doc> EvalState::getDoc(Value & v)
{
- if (v.isPrimOp() || v.isPrimOpApp()) {
+ if (v.isPrimOp()) {
auto v2 = &v;
- while (v2->isPrimOpApp())
- v2 = v2->primOpApp.left;
if (v2->primOp->doc)
return Doc {
.pos = noPos,
diff --git a/src/libexpr/flake/flake.hh b/src/libexpr/flake/flake.hh
index 65ed1ad0a..d17d5e183 100644
--- a/src/libexpr/flake/flake.hh
+++ b/src/libexpr/flake/flake.hh
@@ -113,7 +113,7 @@ struct LockFlags
/* Whether to commit changes to flake.lock. */
bool commitLockFile = false;
- /* Flake inputs to be overriden. */
+ /* Flake inputs to be overridden. */
std::map<InputPath, FlakeRef> inputOverrides;
/* Flake inputs to be updated. This means that any existing lock
diff --git a/src/libfetchers/fetchers.hh b/src/libfetchers/fetchers.hh
index a72cfafa4..c6b219c02 100644
--- a/src/libfetchers/fetchers.hh
+++ b/src/libfetchers/fetchers.hh
@@ -145,7 +145,13 @@ DownloadFileResult downloadFile(
bool immutable,
const Headers & headers = {});
-std::pair<Tree, time_t> downloadTarball(
+struct DownloadTarballMeta
+{
+ time_t lastModified;
+ std::string effectiveUrl;
+};
+
+std::pair<Tree, DownloadTarballMeta> downloadTarball(
ref<Store> store,
const std::string & url,
const std::string & name,
diff --git a/src/libfetchers/git.cc b/src/libfetchers/git.cc
index 81c647f89..b9a240b13 100644
--- a/src/libfetchers/git.cc
+++ b/src/libfetchers/git.cc
@@ -153,12 +153,14 @@ struct GitInputScheme : InputScheme
std::pair<bool, std::string> getActualUrl(const Input & input) const
{
- // Don't clone file:// URIs (but otherwise treat them the
- // same as remote URIs, i.e. don't use the working tree or
- // HEAD).
+ // file:// URIs are normally not cloned (but otherwise treated the
+ // same as remote URIs, i.e. we don't use the working tree or
+ // HEAD). Exception: If _NIX_FORCE_HTTP is set, or the repo is a bare git
+ // repo, treat as a remote URI to force a clone.
static bool forceHttp = getEnv("_NIX_FORCE_HTTP") == "1"; // for testing
auto url = parseURL(getStrAttr(input.attrs, "url"));
- bool isLocal = url.scheme == "file" && !forceHttp;
+ bool isBareRepository = url.scheme == "file" && !pathExists(url.path + "/.git");
+ bool isLocal = url.scheme == "file" && !forceHttp && !isBareRepository;
return {isLocal, isLocal ? url.path : url.base};
}
@@ -363,7 +365,9 @@ struct GitInputScheme : InputScheme
? "refs/*"
: ref->compare(0, 5, "refs/") == 0
? *ref
- : "refs/heads/" + *ref;
+ : ref == "HEAD"
+ ? *ref
+ : "refs/heads/" + *ref;
runProgram("git", true, { "-C", repoDir, "fetch", "--quiet", "--force", "--", actualUrl, fmt("%s:%s", fetchRef, fetchRef) });
} catch (Error & e) {
if (!pathExists(localRefFile)) throw;
diff --git a/src/libfetchers/github.cc b/src/libfetchers/github.cc
index 8352ef02d..3e5ad75a8 100644
--- a/src/libfetchers/github.cc
+++ b/src/libfetchers/github.cc
@@ -207,16 +207,16 @@ struct GitArchiveInputScheme : InputScheme
auto url = getDownloadUrl(input);
- auto [tree, lastModified] = downloadTarball(store, url.url, "source", true, url.headers);
+ auto [tree, meta] = downloadTarball(store, url.url, "source", true, url.headers);
- input.attrs.insert_or_assign("lastModified", uint64_t(lastModified));
+ input.attrs.insert_or_assign("lastModified", uint64_t(meta.lastModified));
getCache()->add(
store,
immutableAttrs,
{
{"rev", rev->gitRev()},
- {"lastModified", uint64_t(lastModified)}
+ {"lastModified", uint64_t(meta.lastModified)}
},
tree.storePath,
true);
diff --git a/src/libfetchers/registry.cc b/src/libfetchers/registry.cc
index 81b2227de..74376adc0 100644
--- a/src/libfetchers/registry.cc
+++ b/src/libfetchers/registry.cc
@@ -114,7 +114,7 @@ static std::shared_ptr<Registry> getSystemRegistry()
Path getUserRegistryPath()
{
- return getHome() + "/.config/nix/registry.json";
+ return getConfigDir() + "/nix/registry.json";
}
std::shared_ptr<Registry> getUserRegistry()
diff --git a/src/libfetchers/tarball.cc b/src/libfetchers/tarball.cc
index f467a3c49..eb2422dac 100644
--- a/src/libfetchers/tarball.cc
+++ b/src/libfetchers/tarball.cc
@@ -115,7 +115,7 @@ DownloadFileResult downloadFile(
};
}
-std::pair<Tree, time_t> downloadTarball(
+std::pair<Tree, DownloadTarballMeta> downloadTarball(
ref<Store> store,
const std::string & url,
const std::string & name,
@@ -133,7 +133,10 @@ std::pair<Tree, time_t> downloadTarball(
if (cached && !cached->expired)
return {
Tree(store->toRealPath(cached->storePath), std::move(cached->storePath)),
- getIntAttr(cached->infoAttrs, "lastModified")
+ {
+ .lastModified = time_t(getIntAttr(cached->infoAttrs, "lastModified")),
+ .effectiveUrl = maybeGetStrAttr(cached->infoAttrs, "effectiveUrl").value_or(url),
+ },
};
auto res = downloadFile(store, url, name, immutable, headers);
@@ -158,6 +161,7 @@ std::pair<Tree, time_t> downloadTarball(
Attrs infoAttrs({
{"lastModified", uint64_t(lastModified)},
+ {"effectiveUrl", res.effectiveUrl},
{"etag", res.etag},
});
@@ -170,7 +174,10 @@ std::pair<Tree, time_t> downloadTarball(
return {
Tree(store->toRealPath(*unpackedStorePath), std::move(*unpackedStorePath)),
- lastModified,
+ {
+ .lastModified = lastModified,
+ .effectiveUrl = res.effectiveUrl,
+ },
};
}
@@ -229,9 +236,11 @@ struct TarballInputScheme : InputScheme
return true;
}
- std::pair<Tree, Input> fetch(ref<Store> store, const Input & input) override
+ std::pair<Tree, Input> fetch(ref<Store> store, const Input & _input) override
{
- auto tree = downloadTarball(store, getStrAttr(input.attrs, "url"), "source", false).first;
+ Input input(_input);
+ auto [tree, meta] = downloadTarball(store, getStrAttr(input.attrs, "url"), "source", false);
+ input.attrs.insert_or_assign("url", meta.effectiveUrl);
return {std::move(tree), input};
}
};
diff --git a/src/libmain/progress-bar.cc b/src/libmain/progress-bar.cc
index 0e5432fca..15354549a 100644
--- a/src/libmain/progress-bar.cc
+++ b/src/libmain/progress-bar.cc
@@ -122,6 +122,7 @@ public:
void log(Verbosity lvl, const FormatOrString & fs) override
{
+ if (lvl > verbosity) return;
auto state(state_.lock());
log(*state, lvl, fs.s);
}
diff --git a/src/libstore/build/derivation-goal.cc b/src/libstore/build/derivation-goal.cc
index c29237f5c..2e7be517e 100644
--- a/src/libstore/build/derivation-goal.cc
+++ b/src/libstore/build/derivation-goal.cc
@@ -170,7 +170,7 @@ void DerivationGoal::getDerivation()
return;
}
- addWaitee(upcast_goal(worker.makeSubstitutionGoal(drvPath)));
+ addWaitee(upcast_goal(worker.makePathSubstitutionGoal(drvPath)));
state = &DerivationGoal::loadDerivation;
}
@@ -246,17 +246,22 @@ void DerivationGoal::haveDerivation()
through substitutes. If that doesn't work, we'll build
them. */
if (settings.useSubstitutes && parsedDrv->substitutesAllowed())
- for (auto & [_, status] : initialOutputs) {
+ for (auto & [outputName, status] : initialOutputs) {
if (!status.wanted) continue;
- if (!status.known) {
- warn("do not know how to query for unknown floating content-addressed derivation output yet");
- /* Nothing to wait for; tail call */
- return DerivationGoal::gaveUpOnSubstitution();
- }
- addWaitee(upcast_goal(worker.makeSubstitutionGoal(
- status.known->path,
- buildMode == bmRepair ? Repair : NoRepair,
- getDerivationCA(*drv))));
+ if (!status.known)
+ addWaitee(
+ upcast_goal(
+ worker.makeDrvOutputSubstitutionGoal(
+ DrvOutput{status.outputHash, outputName},
+ buildMode == bmRepair ? Repair : NoRepair
+ )
+ )
+ );
+ else
+ addWaitee(upcast_goal(worker.makePathSubstitutionGoal(
+ status.known->path,
+ buildMode == bmRepair ? Repair : NoRepair,
+ getDerivationCA(*drv))));
}
if (waitees.empty()) /* to prevent hang (no wake-up event) */
@@ -337,7 +342,7 @@ void DerivationGoal::gaveUpOnSubstitution()
if (!settings.useSubstitutes)
throw Error("dependency '%s' of '%s' does not exist, and substitution is disabled",
worker.store.printStorePath(i), worker.store.printStorePath(drvPath));
- addWaitee(upcast_goal(worker.makeSubstitutionGoal(i)));
+ addWaitee(upcast_goal(worker.makePathSubstitutionGoal(i)));
}
if (waitees.empty()) /* to prevent hang (no wake-up event) */
@@ -388,7 +393,7 @@ void DerivationGoal::repairClosure()
worker.store.printStorePath(i), worker.store.printStorePath(drvPath));
auto drvPath2 = outputsToDrv.find(i);
if (drvPath2 == outputsToDrv.end())
- addWaitee(upcast_goal(worker.makeSubstitutionGoal(i, Repair)));
+ addWaitee(upcast_goal(worker.makePathSubstitutionGoal(i, Repair)));
else
addWaitee(worker.makeDerivationGoal(drvPath2->second, StringSet(), bmRepair));
}
@@ -920,6 +925,8 @@ void DerivationGoal::resolvedFinished() {
if (realisation) {
auto newRealisation = *realisation;
newRealisation.id = DrvOutput{initialOutputs.at(wantedOutput).outputHash, wantedOutput};
+ newRealisation.signatures.clear();
+ signRealisation(newRealisation);
worker.store.registerDrvOutput(newRealisation);
} else {
// If we don't have a realisation, then it must mean that something
@@ -1243,9 +1250,12 @@ OutputPathMap DerivationGoal::queryDerivationOutputMap()
void DerivationGoal::checkPathValidity()
{
bool checkHash = buildMode == bmRepair;
+ auto wantedOutputsLeft = wantedOutputs;
for (auto & i : queryPartialDerivationOutputMap()) {
InitialOutput & info = initialOutputs.at(i.first);
info.wanted = wantOutput(i.first, wantedOutputs);
+ if (info.wanted)
+ wantedOutputsLeft.erase(i.first);
if (i.second) {
auto outputPath = *i.second;
info.known = {
@@ -1267,6 +1277,13 @@ void DerivationGoal::checkPathValidity()
}
}
}
+ // If we requested all the outputs via the empty set, we are always fine.
+ // If we requested specific elements, the loop above removes all the valid
+ // ones, so any that are left must be invalid.
+ if (!wantedOutputsLeft.empty())
+ throw Error("derivation '%s' does not have wanted outputs %s",
+ worker.store.printStorePath(drvPath),
+ concatStringsSep(", ", quoteStrings(wantedOutputsLeft)));
}
diff --git a/src/libstore/build/derivation-goal.hh b/src/libstore/build/derivation-goal.hh
index c85bcd84f..704b77caf 100644
--- a/src/libstore/build/derivation-goal.hh
+++ b/src/libstore/build/derivation-goal.hh
@@ -180,6 +180,9 @@ struct DerivationGoal : public Goal
/* Open a log file and a pipe to it. */
Path openLogFile();
+ /* Sign the newly built realisation if the store allows it */
+ virtual void signRealisation(Realisation&) {}
+
/* Close the log file. */
void closeLogFile();
diff --git a/src/libstore/build/drv-output-substitution-goal.cc b/src/libstore/build/drv-output-substitution-goal.cc
new file mode 100644
index 000000000..a5ac4c49d
--- /dev/null
+++ b/src/libstore/build/drv-output-substitution-goal.cc
@@ -0,0 +1,95 @@
+#include "drv-output-substitution-goal.hh"
+#include "worker.hh"
+#include "substitution-goal.hh"
+
+namespace nix {
+
+DrvOutputSubstitutionGoal::DrvOutputSubstitutionGoal(const DrvOutput& id, Worker & worker, RepairFlag repair, std::optional<ContentAddress> ca)
+ : Goal(worker)
+ , id(id)
+{
+ state = &DrvOutputSubstitutionGoal::init;
+ name = fmt("substitution of '%s'", id.to_string());
+ trace("created");
+}
+
+
+void DrvOutputSubstitutionGoal::init()
+{
+ trace("init");
+ subs = settings.useSubstitutes ? getDefaultSubstituters() : std::list<ref<Store>>();
+ tryNext();
+}
+
+void DrvOutputSubstitutionGoal::tryNext()
+{
+ trace("Trying next substituter");
+
+ if (subs.size() == 0) {
+ /* None left. Terminate this goal and let someone else deal
+ with it. */
+ debug("drv output '%s' is required, but there is no substituter that can provide it", id.to_string());
+
+ /* Hack: don't indicate failure if there were no substituters.
+ In that case the calling derivation should just do a
+ build. */
+ amDone(substituterFailed ? ecFailed : ecNoSubstituters);
+
+ if (substituterFailed) {
+ worker.failedSubstitutions++;
+ worker.updateProgress();
+ }
+
+ return;
+ }
+
+ auto sub = subs.front();
+ subs.pop_front();
+
+ // FIXME: Make async
+ outputInfo = sub->queryRealisation(id);
+ if (!outputInfo) {
+ tryNext();
+ return;
+ }
+
+ addWaitee(worker.makePathSubstitutionGoal(outputInfo->outPath));
+
+ if (waitees.empty()) outPathValid();
+ else state = &DrvOutputSubstitutionGoal::outPathValid;
+}
+
+void DrvOutputSubstitutionGoal::outPathValid()
+{
+ assert(outputInfo);
+ trace("Output path substituted");
+
+ if (nrFailed > 0) {
+ debug("The output path of the derivation output '%s' could not be substituted", id.to_string());
+ amDone(nrNoSubstituters > 0 || nrIncompleteClosure > 0 ? ecIncompleteClosure : ecFailed);
+ return;
+ }
+
+ worker.store.registerDrvOutput(*outputInfo);
+ finished();
+}
+
+void DrvOutputSubstitutionGoal::finished()
+{
+ trace("finished");
+ amDone(ecSuccess);
+}
+
+string DrvOutputSubstitutionGoal::key()
+{
+ /* "a$" ensures substitution goals happen before derivation
+ goals. */
+ return "a$" + std::string(id.to_string());
+}
+
+void DrvOutputSubstitutionGoal::work()
+{
+ (this->*state)();
+}
+
+}
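
The new goal above drives its three steps with the same pointer-to-member-function state machine the other goal classes use: `state` holds the member function to run next and `work()` dispatches through it. For readers unfamiliar with that idiom, here is a minimal, self-contained sketch of just the dispatch mechanism; `MiniGoal` and its state names are illustrative, not types from the Nix sources.

// Minimal sketch (illustrative, not the Nix sources) of the
// pointer-to-member-function state machine used by the goal classes:
// `state` points at the member function to run next, and work()
// dispatches through it with (this->*state)().
#include <iostream>

class MiniGoal
{
    typedef void (MiniGoal::*GoalState)();
    GoalState state = &MiniGoal::init;

    void init()     { std::cout << "init\n";     state = &MiniGoal::tryNext;  }
    void tryNext()  { std::cout << "tryNext\n";  state = &MiniGoal::finished; }
    void finished() { std::cout << "finished\n"; done = true; }

public:
    bool done = false;
    void work() { (this->*state)(); }   // run the current state once
};

int main()
{
    MiniGoal g;
    while (!g.done) g.work();           // init -> tryNext -> finished
}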
diff --git a/src/libstore/build/drv-output-substitution-goal.hh b/src/libstore/build/drv-output-substitution-goal.hh
new file mode 100644
index 000000000..63ab53d89
--- /dev/null
+++ b/src/libstore/build/drv-output-substitution-goal.hh
@@ -0,0 +1,50 @@
+#pragma once
+
+#include "store-api.hh"
+#include "goal.hh"
+#include "realisation.hh"
+
+namespace nix {
+
+class Worker;
+
+// Substitution of a derivation output.
+// This is done in three steps:
+// 1. Fetch the output info from a substituter
+// 2. Substitute the corresponding output path
+// 3. Register the output info
+class DrvOutputSubstitutionGoal : public Goal {
+private:
+ // The drv output we're trying to substitute
+ DrvOutput id;
+
+ // The realisation corresponding to the given output id.
+ // Will be filled once we can get it.
+ std::optional<Realisation> outputInfo;
+
+ /* The remaining substituters. */
+ std::list<ref<Store>> subs;
+
+ /* Whether a substituter failed. */
+ bool substituterFailed = false;
+
+public:
+ DrvOutputSubstitutionGoal(const DrvOutput& id, Worker & worker, RepairFlag repair = NoRepair, std::optional<ContentAddress> ca = std::nullopt);
+
+ typedef void (DrvOutputSubstitutionGoal::*GoalState)();
+ GoalState state;
+
+ void init();
+ void tryNext();
+ void outPathValid();
+ void finished();
+
+ void timedOut(Error && ex) override { abort(); };
+
+ string key() override;
+
+ void work() override;
+
+};
+
+}
diff --git a/src/libstore/build/entry-points.cc b/src/libstore/build/entry-points.cc
index 01a564aba..686364440 100644
--- a/src/libstore/build/entry-points.cc
+++ b/src/libstore/build/entry-points.cc
@@ -15,7 +15,7 @@ void Store::buildPaths(const std::vector<StorePathWithOutputs> & drvPaths, Build
if (path.path.isDerivation())
goals.insert(worker.makeDerivationGoal(path.path, path.outputs, buildMode));
else
- goals.insert(worker.makeSubstitutionGoal(path.path, buildMode == bmRepair ? Repair : NoRepair));
+ goals.insert(worker.makePathSubstitutionGoal(path.path, buildMode == bmRepair ? Repair : NoRepair));
}
worker.run(goals);
@@ -31,7 +31,7 @@ void Store::buildPaths(const std::vector<StorePathWithOutputs> & drvPaths, Build
}
if (i->exitCode != Goal::ecSuccess) {
if (auto i2 = dynamic_cast<DerivationGoal *>(i.get())) failed.insert(i2->drvPath);
- else if (auto i2 = dynamic_cast<SubstitutionGoal *>(i.get())) failed.insert(i2->storePath);
+ else if (auto i2 = dynamic_cast<PathSubstitutionGoal *>(i.get())) failed.insert(i2->storePath);
}
}
@@ -90,7 +90,7 @@ void Store::ensurePath(const StorePath & path)
if (isValidPath(path)) return;
Worker worker(*this);
- GoalPtr goal = worker.makeSubstitutionGoal(path);
+ GoalPtr goal = worker.makePathSubstitutionGoal(path);
Goals goals = {goal};
worker.run(goals);
@@ -108,7 +108,7 @@ void Store::ensurePath(const StorePath & path)
void LocalStore::repairPath(const StorePath & path)
{
Worker worker(*this);
- GoalPtr goal = worker.makeSubstitutionGoal(path, Repair);
+ GoalPtr goal = worker.makePathSubstitutionGoal(path, Repair);
Goals goals = {goal};
worker.run(goals);
diff --git a/src/libstore/build/local-derivation-goal.cc b/src/libstore/build/local-derivation-goal.cc
index e4e84308d..9b6645222 100644
--- a/src/libstore/build/local-derivation-goal.cc
+++ b/src/libstore/build/local-derivation-goal.cc
@@ -287,7 +287,7 @@ bool LocalDerivationGoal::cleanupDecideWhetherDiskFull()
So instead, check if the disk is (nearly) full now. If
so, we don't mark this build as a permanent failure. */
#if HAVE_STATVFS
- {
+ {
auto & localStore = getLocalStore();
uint64_t required = 8ULL * 1024 * 1024; // FIXME: make configurable
struct statvfs st;
@@ -297,7 +297,7 @@ bool LocalDerivationGoal::cleanupDecideWhetherDiskFull()
if (statvfs(tmpDir.c_str(), &st) == 0 &&
(uint64_t) st.f_bavail * st.f_bsize < required)
diskFull = true;
- }
+ }
#endif
deleteTmpDir(false);
@@ -1703,18 +1703,18 @@ void LocalDerivationGoal::runChild()
network, so give them access to /etc/resolv.conf and so
on. */
if (derivationIsImpure(derivationType)) {
- ss.push_back("/etc/resolv.conf");
-
// Only use nss functions to resolve hosts and
// services. Don’t use it for anything else that may
// be configured for this system. This limits the
// potential impurities introduced in fixed-outputs.
writeFile(chrootRootDir + "/etc/nsswitch.conf", "hosts: files dns\nservices: files\n");
- ss.push_back("/etc/services");
- ss.push_back("/etc/hosts");
- if (pathExists("/var/run/nscd/socket"))
- ss.push_back("/var/run/nscd/socket");
+ /* N.B. it is realistic that these paths might not exist. It
+ happens when testing Nix building fixed-output derivations
+ within a pure derivation. */
+ for (auto & path : { "/etc/resolv.conf", "/etc/services", "/etc/hosts", "/var/run/nscd/socket" })
+ if (pathExists(path))
+ ss.push_back(path);
}
for (auto & i : ss) dirsInChroot.emplace(i, i);
@@ -2620,13 +2620,22 @@ void LocalDerivationGoal::registerOutputs()
but it's fine to do in all cases. */
if (settings.isExperimentalFeatureEnabled("ca-derivations")) {
- for (auto& [outputName, newInfo] : infos)
- worker.store.registerDrvOutput(Realisation{
- .id = DrvOutput{initialOutputs.at(outputName).outputHash, outputName},
- .outPath = newInfo.path});
+ for (auto& [outputName, newInfo] : infos) {
+ auto thisRealisation = Realisation{
+ .id = DrvOutput{initialOutputs.at(outputName).outputHash,
+ outputName},
+ .outPath = newInfo.path};
+ signRealisation(thisRealisation);
+ worker.store.registerDrvOutput(thisRealisation);
+ }
}
}
+void LocalDerivationGoal::signRealisation(Realisation & realisation)
+{
+ getLocalStore().signRealisation(realisation);
+}
+
void LocalDerivationGoal::checkOutputs(const std::map<Path, ValidPathInfo> & outputs)
{
diff --git a/src/libstore/build/local-derivation-goal.hh b/src/libstore/build/local-derivation-goal.hh
index 4bbf27a1b..47b818a8b 100644
--- a/src/libstore/build/local-derivation-goal.hh
+++ b/src/libstore/build/local-derivation-goal.hh
@@ -161,6 +161,8 @@ struct LocalDerivationGoal : public DerivationGoal
as valid. */
void registerOutputs() override;
+ void signRealisation(Realisation &) override;
+
/* Check that an output meets the requirements specified by the
'outputChecks' attribute (or the legacy
'{allowed,disallowed}{References,Requisites}' attributes). */
diff --git a/src/libstore/build/substitution-goal.cc b/src/libstore/build/substitution-goal.cc
index ac646c3a5..adb9880be 100644
--- a/src/libstore/build/substitution-goal.cc
+++ b/src/libstore/build/substitution-goal.cc
@@ -5,20 +5,20 @@
namespace nix {
-SubstitutionGoal::SubstitutionGoal(const StorePath & storePath, Worker & worker, RepairFlag repair, std::optional<ContentAddress> ca)
+PathSubstitutionGoal::PathSubstitutionGoal(const StorePath & storePath, Worker & worker, RepairFlag repair, std::optional<ContentAddress> ca)
: Goal(worker)
, storePath(storePath)
, repair(repair)
, ca(ca)
{
- state = &SubstitutionGoal::init;
+ state = &PathSubstitutionGoal::init;
name = fmt("substitution of '%s'", worker.store.printStorePath(this->storePath));
trace("created");
maintainExpectedSubstitutions = std::make_unique<MaintainCount<uint64_t>>(worker.expectedSubstitutions);
}
-SubstitutionGoal::~SubstitutionGoal()
+PathSubstitutionGoal::~PathSubstitutionGoal()
{
try {
if (thr.joinable()) {
@@ -32,13 +32,13 @@ SubstitutionGoal::~SubstitutionGoal()
}
-void SubstitutionGoal::work()
+void PathSubstitutionGoal::work()
{
(this->*state)();
}
-void SubstitutionGoal::init()
+void PathSubstitutionGoal::init()
{
trace("init");
@@ -59,7 +59,7 @@ void SubstitutionGoal::init()
}
-void SubstitutionGoal::tryNext()
+void PathSubstitutionGoal::tryNext()
{
trace("trying next substituter");
@@ -145,7 +145,7 @@ void SubstitutionGoal::tryNext()
/* Bail out early if this substituter lacks a valid
signature. LocalStore::addToStore() also checks for this, but
only after we've downloaded the path. */
- if (!sub->isTrusted && worker.store.pathInfoIsTrusted(*info))
+ if (!sub->isTrusted && worker.store.pathInfoIsUntrusted(*info))
{
warn("substituter '%s' does not have a valid signature for path '%s'",
sub->getUri(), worker.store.printStorePath(storePath));
@@ -156,16 +156,16 @@ void SubstitutionGoal::tryNext()
/* To maintain the closure invariant, we first have to realise the
paths referenced by this one. */
for (auto & i : info->references)
- addWaitee(worker.makeSubstitutionGoal(i));
+ addWaitee(worker.makePathSubstitutionGoal(i));
if (waitees.empty()) /* to prevent hang (no wake-up event) */
referencesValid();
else
- state = &SubstitutionGoal::referencesValid;
+ state = &PathSubstitutionGoal::referencesValid;
}
-void SubstitutionGoal::referencesValid()
+void PathSubstitutionGoal::referencesValid()
{
trace("all references realised");
@@ -178,12 +178,12 @@ void SubstitutionGoal::referencesValid()
for (auto & i : info->references)
assert(worker.store.isValidPath(i));
- state = &SubstitutionGoal::tryToRun;
+ state = &PathSubstitutionGoal::tryToRun;
worker.wakeUp(shared_from_this());
}
-void SubstitutionGoal::tryToRun()
+void PathSubstitutionGoal::tryToRun()
{
trace("trying to run");
@@ -222,11 +222,11 @@ void SubstitutionGoal::tryToRun()
worker.childStarted(shared_from_this(), {outPipe.readSide.get()}, true, false);
- state = &SubstitutionGoal::finished;
+ state = &PathSubstitutionGoal::finished;
}
-void SubstitutionGoal::finished()
+void PathSubstitutionGoal::finished()
{
trace("substitute finished");
@@ -250,7 +250,7 @@ void SubstitutionGoal::finished()
}
/* Try the next substitute. */
- state = &SubstitutionGoal::tryNext;
+ state = &PathSubstitutionGoal::tryNext;
worker.wakeUp(shared_from_this());
return;
}
@@ -279,12 +279,12 @@ void SubstitutionGoal::finished()
}
-void SubstitutionGoal::handleChildOutput(int fd, const string & data)
+void PathSubstitutionGoal::handleChildOutput(int fd, const string & data)
{
}
-void SubstitutionGoal::handleEOF(int fd)
+void PathSubstitutionGoal::handleEOF(int fd)
{
if (fd == outPipe.readSide.get()) worker.wakeUp(shared_from_this());
}
diff --git a/src/libstore/build/substitution-goal.hh b/src/libstore/build/substitution-goal.hh
index dee2cecbf..3b3cb7e32 100644
--- a/src/libstore/build/substitution-goal.hh
+++ b/src/libstore/build/substitution-goal.hh
@@ -8,7 +8,7 @@ namespace nix {
class Worker;
-struct SubstitutionGoal : public Goal
+struct PathSubstitutionGoal : public Goal
{
/* The store path that should be realised through a substitute. */
StorePath storePath;
@@ -47,14 +47,15 @@ struct SubstitutionGoal : public Goal
std::unique_ptr<MaintainCount<uint64_t>> maintainExpectedSubstitutions,
maintainRunningSubstitutions, maintainExpectedNar, maintainExpectedDownload;
- typedef void (SubstitutionGoal::*GoalState)();
+ typedef void (PathSubstitutionGoal::*GoalState)();
GoalState state;
/* Content address for recomputing store path */
std::optional<ContentAddress> ca;
- SubstitutionGoal(const StorePath & storePath, Worker & worker, RepairFlag repair = NoRepair, std::optional<ContentAddress> ca = std::nullopt);
- ~SubstitutionGoal();
+public:
+ PathSubstitutionGoal(const StorePath & storePath, Worker & worker, RepairFlag repair = NoRepair, std::optional<ContentAddress> ca = std::nullopt);
+ ~PathSubstitutionGoal();
void timedOut(Error && ex) override { abort(); };
diff --git a/src/libstore/build/worker.cc b/src/libstore/build/worker.cc
index b2223c3b6..616b17e61 100644
--- a/src/libstore/build/worker.cc
+++ b/src/libstore/build/worker.cc
@@ -1,6 +1,7 @@
#include "machines.hh"
#include "worker.hh"
#include "substitution-goal.hh"
+#include "drv-output-substitution-goal.hh"
#include "local-derivation-goal.hh"
#include "hook-instance.hh"
@@ -78,20 +79,32 @@ std::shared_ptr<DerivationGoal> Worker::makeBasicDerivationGoal(const StorePath
}
-std::shared_ptr<SubstitutionGoal> Worker::makeSubstitutionGoal(const StorePath & path, RepairFlag repair, std::optional<ContentAddress> ca)
+std::shared_ptr<PathSubstitutionGoal> Worker::makePathSubstitutionGoal(const StorePath & path, RepairFlag repair, std::optional<ContentAddress> ca)
{
- std::weak_ptr<SubstitutionGoal> & goal_weak = substitutionGoals[path];
+ std::weak_ptr<PathSubstitutionGoal> & goal_weak = substitutionGoals[path];
auto goal = goal_weak.lock(); // FIXME
if (!goal) {
- goal = std::make_shared<SubstitutionGoal>(path, *this, repair, ca);
+ goal = std::make_shared<PathSubstitutionGoal>(path, *this, repair, ca);
goal_weak = goal;
wakeUp(goal);
}
return goal;
}
-template<typename G>
-static void removeGoal(std::shared_ptr<G> goal, std::map<StorePath, std::weak_ptr<G>> & goalMap)
+std::shared_ptr<DrvOutputSubstitutionGoal> Worker::makeDrvOutputSubstitutionGoal(const DrvOutput& id, RepairFlag repair, std::optional<ContentAddress> ca)
+{
+ std::weak_ptr<DrvOutputSubstitutionGoal> & goal_weak = drvOutputSubstitutionGoals[id];
+ auto goal = goal_weak.lock(); // FIXME
+ if (!goal) {
+ goal = std::make_shared<DrvOutputSubstitutionGoal>(id, *this, repair, ca);
+ goal_weak = goal;
+ wakeUp(goal);
+ }
+ return goal;
+}
+
+template<typename K, typename G>
+static void removeGoal(std::shared_ptr<G> goal, std::map<K, std::weak_ptr<G>> & goalMap)
{
/* !!! inefficient */
for (auto i = goalMap.begin();
@@ -109,8 +122,10 @@ void Worker::removeGoal(GoalPtr goal)
{
if (auto drvGoal = std::dynamic_pointer_cast<DerivationGoal>(goal))
nix::removeGoal(drvGoal, derivationGoals);
- else if (auto subGoal = std::dynamic_pointer_cast<SubstitutionGoal>(goal))
+ else if (auto subGoal = std::dynamic_pointer_cast<PathSubstitutionGoal>(goal))
nix::removeGoal(subGoal, substitutionGoals);
+ else if (auto subGoal = std::dynamic_pointer_cast<DrvOutputSubstitutionGoal>(goal))
+ nix::removeGoal(subGoal, drvOutputSubstitutionGoals);
else
assert(false);
if (topGoals.find(goal) != topGoals.end()) {
@@ -217,7 +232,7 @@ void Worker::run(const Goals & _topGoals)
topGoals.insert(i);
if (auto goal = dynamic_cast<DerivationGoal *>(i.get())) {
topPaths.push_back({goal->drvPath, goal->wantedOutputs});
- } else if (auto goal = dynamic_cast<SubstitutionGoal *>(i.get())) {
+ } else if (auto goal = dynamic_cast<PathSubstitutionGoal *>(i.get())) {
topPaths.push_back({goal->storePath});
}
}
@@ -471,7 +486,10 @@ void Worker::markContentsGood(const StorePath & path)
}
-GoalPtr upcast_goal(std::shared_ptr<SubstitutionGoal> subGoal) {
+GoalPtr upcast_goal(std::shared_ptr<PathSubstitutionGoal> subGoal) {
+ return subGoal;
+}
+GoalPtr upcast_goal(std::shared_ptr<DrvOutputSubstitutionGoal> subGoal) {
return subGoal;
}
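
Both `makePathSubstitutionGoal` and the new `makeDrvOutputSubstitutionGoal` above rely on the same memoization idiom: a map of `std::weak_ptr`s that hands back the existing goal while something still holds it and recreates it once it has expired. The following is a minimal, self-contained sketch of that idiom only; `GoalCache` and `DummyGoal` are illustrative names, not part of the Nix code.

// Minimal sketch of the weak_ptr-based goal cache used by the
// make*SubstitutionGoal factories: the map only keeps weak references,
// so a goal is shared while alive and recreated after it expires.
#include <cassert>
#include <map>
#include <memory>
#include <string>

struct DummyGoal
{
    std::string key;
    explicit DummyGoal(std::string k) : key(std::move(k)) {}
};

class GoalCache
{
    std::map<std::string, std::weak_ptr<DummyGoal>> goals;

public:
    std::shared_ptr<DummyGoal> make(const std::string & key)
    {
        std::weak_ptr<DummyGoal> & slot = goals[key];
        auto goal = slot.lock();          // still referenced elsewhere?
        if (!goal) {
            goal = std::make_shared<DummyGoal>(key);
            slot = goal;                  // cache a non-owning reference
        }
        return goal;
    }
};

int main()
{
    GoalCache cache;
    auto a = cache.make("/nix/store/example");
    auto b = cache.make("/nix/store/example");
    assert(a == b);                       // same goal while it is alive
    a.reset(); b.reset();
    auto c = cache.make("/nix/store/example");
    assert(c != nullptr);                 // recreated after expiry
}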
diff --git a/src/libstore/build/worker.hh b/src/libstore/build/worker.hh
index 82e711191..918de35f6 100644
--- a/src/libstore/build/worker.hh
+++ b/src/libstore/build/worker.hh
@@ -4,6 +4,7 @@
#include "lock.hh"
#include "store-api.hh"
#include "goal.hh"
+#include "realisation.hh"
#include <future>
#include <thread>
@@ -12,18 +13,20 @@ namespace nix {
/* Forward definition. */
struct DerivationGoal;
-struct SubstitutionGoal;
+struct PathSubstitutionGoal;
+class DrvOutputSubstitutionGoal;
/* Workaround for not being able to declare a something like
- class SubstitutionGoal : public Goal;
+ class PathSubstitutionGoal : public Goal;
even when Goal is a complete type.
This is still a static cast. The purpose of exporting it is to define it in
- a place where `SubstitutionGoal` is concrete, and use it in a place where it
+ a place where `PathSubstitutionGoal` is concrete, and use it in a place where it
is opaque. */
-GoalPtr upcast_goal(std::shared_ptr<SubstitutionGoal> subGoal);
+GoalPtr upcast_goal(std::shared_ptr<PathSubstitutionGoal> subGoal);
+GoalPtr upcast_goal(std::shared_ptr<DrvOutputSubstitutionGoal> subGoal);
typedef std::chrono::time_point<std::chrono::steady_clock> steady_time_point;
@@ -72,7 +75,8 @@ private:
/* Maps used to prevent multiple instantiations of a goal for the
same derivation / path. */
std::map<StorePath, std::weak_ptr<DerivationGoal>> derivationGoals;
- std::map<StorePath, std::weak_ptr<SubstitutionGoal>> substitutionGoals;
+ std::map<StorePath, std::weak_ptr<PathSubstitutionGoal>> substitutionGoals;
+ std::map<DrvOutput, std::weak_ptr<DrvOutputSubstitutionGoal>> drvOutputSubstitutionGoals;
/* Goals waiting for busy paths to be unlocked. */
WeakGoals waitingForAnyGoal;
@@ -146,7 +150,8 @@ public:
const StringSet & wantedOutputs, BuildMode buildMode = bmNormal);
/* substitution goal */
- std::shared_ptr<SubstitutionGoal> makeSubstitutionGoal(const StorePath & storePath, RepairFlag repair = NoRepair, std::optional<ContentAddress> ca = std::nullopt);
+ std::shared_ptr<PathSubstitutionGoal> makePathSubstitutionGoal(const StorePath & storePath, RepairFlag repair = NoRepair, std::optional<ContentAddress> ca = std::nullopt);
+ std::shared_ptr<DrvOutputSubstitutionGoal> makeDrvOutputSubstitutionGoal(const DrvOutput & id, RepairFlag repair = NoRepair, std::optional<ContentAddress> ca = std::nullopt);
/* Remove a dead goal. */
void removeGoal(GoalPtr goal);
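
The `upcast_goal` comment above describes a forward-declaration workaround: the conversion function is declared where the goal type is still incomplete and defined in the translation unit where the type is complete. A minimal sketch of that pattern, using illustrative names (`Base`, `Derived`, `upcast`) rather than the actual Nix types, might look like this (in Nix the pieces live in separate files; here they share one translation unit for brevity):

// Sketch of the forward-declaration workaround: callers only need the
// declarations below, so they never have to include the full Derived type.
#include <memory>

struct Base { virtual ~Base() = default; };
struct Derived;                                    // incomplete here
std::shared_ptr<Base> upcast(std::shared_ptr<Derived> p);

// Where the full definition is visible, the conversion is trivial.
struct Derived : Base { };
std::shared_ptr<Base> upcast(std::shared_ptr<Derived> p) { return p; }

int main()
{
    auto d = std::make_shared<Derived>();
    std::shared_ptr<Base> b = upcast(d);           // implicit upcast
    return b ? 0 : 1;
}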
diff --git a/src/libstore/ca-specific-schema.sql b/src/libstore/ca-specific-schema.sql
index 93c442826..20ee046a1 100644
--- a/src/libstore/ca-specific-schema.sql
+++ b/src/libstore/ca-specific-schema.sql
@@ -6,6 +6,7 @@ create table if not exists Realisations (
drvPath text not null,
outputName text not null, -- symbolic output id, usually "out"
outputPath integer not null,
+ signatures text, -- space-separated list
primary key (drvPath, outputName),
foreign key (outputPath) references ValidPaths(id) on delete cascade
);
diff --git a/src/libstore/daemon.cc b/src/libstore/daemon.cc
index a29c3e476..1f1baaff1 100644
--- a/src/libstore/daemon.cc
+++ b/src/libstore/daemon.cc
@@ -579,7 +579,10 @@ static void performOp(TunnelLogger * logger, ref<Store> store,
auto res = store->buildDerivation(drvPath, drv, buildMode);
logger->stopWork();
to << res.status << res.errorMsg;
- if (GET_PROTOCOL_MINOR(clientVersion) >= 0xc) {
+ if (GET_PROTOCOL_MINOR(clientVersion) >= 29) {
+ to << res.timesBuilt << res.isNonDeterministic << res.startTime << res.stopTime;
+ }
+ if (GET_PROTOCOL_MINOR(clientVersion) >= 28) {
worker_proto::write(*store, to, res.builtOutputs);
}
break;
diff --git a/src/libstore/derivations.hh b/src/libstore/derivations.hh
index 92e62ee14..caf6062cd 100644
--- a/src/libstore/derivations.hh
+++ b/src/libstore/derivations.hh
@@ -52,7 +52,7 @@ struct DerivationOutput
DerivationOutputCAFloating,
DerivationOutputDeferred
> output;
- std::optional<HashType> hashAlgoOpt(const Store & store) const;
+
/* Note, when you use this function you should make sure that you're passing
the right derivation name. When in doubt, you should use the safer
interface provided by BasicDerivation::outputsAndOptPaths */
diff --git a/src/libstore/globals.cc b/src/libstore/globals.cc
index 8d44003f4..d3b27d7be 100644
--- a/src/libstore/globals.cc
+++ b/src/libstore/globals.cc
@@ -81,7 +81,7 @@ void loadConfFile()
/* We only want to send overrides to the daemon, i.e. stuff from
~/.nix/nix.conf or the command line. */
- globalConfig.resetOverriden();
+ globalConfig.resetOverridden();
auto files = settings.nixUserConfFiles;
for (auto file = files.rbegin(); file != files.rend(); file++) {
diff --git a/src/libstore/globals.hh b/src/libstore/globals.hh
index a51d9c2f1..3e4ead76c 100644
--- a/src/libstore/globals.hh
+++ b/src/libstore/globals.hh
@@ -206,7 +206,10 @@ public:
Setting<std::string> builders{
this, "@" + nixConfDir + "/machines", "builders",
- "A semicolon-separated list of build machines, in the format of `nix.machines`."};
+ R"(
+ A semicolon-separated list of build machines.
+ For the exact format and examples, see [the manual chapter on remote builds](../advanced-topics/distributed-builds.md)
+ )"};
Setting<bool> buildersUseSubstitutes{
this, false, "builders-use-substitutes",
diff --git a/src/libstore/legacy-ssh-store.cc b/src/libstore/legacy-ssh-store.cc
index 49218c4b7..2c97a620a 100644
--- a/src/libstore/legacy-ssh-store.cc
+++ b/src/libstore/legacy-ssh-store.cc
@@ -15,6 +15,7 @@ struct LegacySSHStoreConfig : virtual StoreConfig
using StoreConfig::StoreConfig;
const Setting<int> maxConnections{(StoreConfig*) this, 1, "max-connections", "maximum number of concurrent SSH connections"};
const Setting<Path> sshKey{(StoreConfig*) this, "", "ssh-key", "path to an SSH private key"};
+ const Setting<std::string> sshPublicHostKey{(StoreConfig*) this, "", "base64-ssh-public-host-key", "The public half of the host's SSH key"};
const Setting<bool> compress{(StoreConfig*) this, false, "compress", "whether to compress the connection"};
const Setting<Path> remoteProgram{(StoreConfig*) this, "nix-store", "remote-program", "path to the nix-store executable on the remote system"};
const Setting<std::string> remoteStore{(StoreConfig*) this, "", "remote-store", "URI of the store on the remote system"};
@@ -59,6 +60,7 @@ struct LegacySSHStore : public virtual LegacySSHStoreConfig, public virtual Stor
, master(
host,
sshKey,
+ sshPublicHostKey,
// Use SSH master only if using more than 1 connection.
connections->capacity() > 1,
compress,
diff --git a/src/libstore/local-binary-cache-store.cc b/src/libstore/local-binary-cache-store.cc
index a58b7733f..964c4017e 100644
--- a/src/libstore/local-binary-cache-store.cc
+++ b/src/libstore/local-binary-cache-store.cc
@@ -2,6 +2,8 @@
#include "globals.hh"
#include "nar-info-disk-cache.hh"
+#include <atomic>
+
namespace nix {
struct LocalBinaryCacheStoreConfig : virtual BinaryCacheStoreConfig
@@ -50,7 +52,8 @@ protected:
const std::string & mimeType) override
{
auto path2 = binaryCacheDir + "/" + path;
- Path tmp = path2 + ".tmp." + std::to_string(getpid());
+ static std::atomic<int> counter{0};
+ Path tmp = fmt("%s.tmp.%d.%d", path2, getpid(), ++counter);
AutoDelete del(tmp, false);
StreamToSourceAdapter source(istream);
writeFile(tmp, source);
diff --git a/src/libstore/local-store.cc b/src/libstore/local-store.cc
index 8c36c0635..f5b6362a8 100644
--- a/src/libstore/local-store.cc
+++ b/src/libstore/local-store.cc
@@ -310,13 +310,13 @@ LocalStore::LocalStore(const Params & params)
if (settings.isExperimentalFeatureEnabled("ca-derivations")) {
state->stmts->RegisterRealisedOutput.create(state->db,
R"(
- insert or replace into Realisations (drvPath, outputName, outputPath)
- values (?, ?, (select id from ValidPaths where path = ?))
+ insert or replace into Realisations (drvPath, outputName, outputPath, signatures)
+ values (?, ?, (select id from ValidPaths where path = ?), ?)
;
)");
state->stmts->QueryRealisedOutput.create(state->db,
R"(
- select Output.path from Realisations
+ select Output.path, Realisations.signatures from Realisations
inner join ValidPaths as Output on Output.id = Realisations.outputPath
where drvPath = ? and outputName = ?
;
@@ -652,6 +652,14 @@ void LocalStore::checkDerivationOutputs(const StorePath & drvPath, const Derivat
}
}
+void LocalStore::registerDrvOutput(const Realisation & info, CheckSigsFlag checkSigs)
+{
+ settings.requireExperimentalFeature("ca-derivations");
+ if (checkSigs == NoCheckSigs || !realisationIsUntrusted(info))
+ registerDrvOutput(info);
+ else
+ throw Error("cannot register realisation '%s' because it lacks a valid signature", info.outPath.to_string());
+}
void LocalStore::registerDrvOutput(const Realisation & info)
{
@@ -662,6 +670,7 @@ void LocalStore::registerDrvOutput(const Realisation & info)
(info.id.strHash())
(info.id.outputName)
(printStorePath(info.outPath))
+ (concatStringsSep(" ", info.signatures))
.exec();
});
}
@@ -1108,15 +1117,20 @@ const PublicKeys & LocalStore::getPublicKeys()
return *state->publicKeys;
}
-bool LocalStore::pathInfoIsTrusted(const ValidPathInfo & info)
+bool LocalStore::pathInfoIsUntrusted(const ValidPathInfo & info)
{
return requireSigs && !info.checkSignatures(*this, getPublicKeys());
}
+bool LocalStore::realisationIsUntrusted(const Realisation & realisation)
+{
+ return requireSigs && !realisation.checkSignatures(getPublicKeys());
+}
+
void LocalStore::addToStore(const ValidPathInfo & info, Source & source,
RepairFlag repair, CheckSigsFlag checkSigs)
{
- if (checkSigs && pathInfoIsTrusted(info))
+ if (checkSigs && pathInfoIsUntrusted(info))
throw Error("cannot add path '%s' because it lacks a valid signature", printStorePath(info.path));
addTempRoot(info.path);
@@ -1630,6 +1644,18 @@ void LocalStore::addSignatures(const StorePath & storePath, const StringSet & si
}
+void LocalStore::signRealisation(Realisation & realisation)
+{
+ // FIXME: keep secret keys in memory.
+
+ auto secretKeyFiles = settings.secretKeyFiles;
+
+ for (auto & secretKeyFile : secretKeyFiles.get()) {
+ SecretKey secretKey(readFile(secretKeyFile));
+ realisation.sign(secretKey);
+ }
+}
+
void LocalStore::signPathInfo(ValidPathInfo & info)
{
// FIXME: keep secret keys in memory.
@@ -1667,8 +1693,9 @@ std::optional<const Realisation> LocalStore::queryRealisation(
if (!use.next())
return std::nullopt;
auto outputPath = parseStorePath(use.getStr(0));
- return Ret{
- Realisation{.id = id, .outPath = outputPath}};
+ auto signatures = tokenizeString<StringSet>(use.getStr(1));
+ return Ret{Realisation{
+ .id = id, .outPath = outputPath, .signatures = signatures}};
});
}
} // namespace nix
diff --git a/src/libstore/local-store.hh b/src/libstore/local-store.hh
index 03bb0218d..26e034a82 100644
--- a/src/libstore/local-store.hh
+++ b/src/libstore/local-store.hh
@@ -136,7 +136,8 @@ public:
void querySubstitutablePathInfos(const StorePathCAMap & paths,
SubstitutablePathInfos & infos) override;
- bool pathInfoIsTrusted(const ValidPathInfo &) override;
+ bool pathInfoIsUntrusted(const ValidPathInfo &) override;
+ bool realisationIsUntrusted(const Realisation &) override;
void addToStore(const ValidPathInfo & info, Source & source,
RepairFlag repair, CheckSigsFlag checkSigs) override;
@@ -202,6 +203,7 @@ public:
/* Register the store path 'output' as the output named 'outputName' of
derivation 'deriver'. */
void registerDrvOutput(const Realisation & info) override;
+ void registerDrvOutput(const Realisation & info, CheckSigsFlag checkSigs) override;
void cacheDrvOutputMapping(State & state, const uint64_t deriver, const string & outputName, const StorePath & output);
std::optional<const Realisation> queryRealisation(const DrvOutput&) override;
@@ -272,16 +274,19 @@ private:
bool isValidPath_(State & state, const StorePath & path);
void queryReferrers(State & state, const StorePath & path, StorePathSet & referrers);
- /* Add signatures to a ValidPathInfo using the secret keys
+ /* Add signatures to a ValidPathInfo or Realisation using the secret keys
specified by the ‘secret-key-files’ option. */
void signPathInfo(ValidPathInfo & info);
+ void signRealisation(Realisation &);
Path getRealStoreDir() override { return realStoreDir; }
void createUser(const std::string & userName, uid_t userId) override;
friend struct LocalDerivationGoal;
+ friend struct PathSubstitutionGoal;
friend struct SubstitutionGoal;
+ friend struct DerivationGoal;
};
diff --git a/src/libstore/local.mk b/src/libstore/local.mk
index 03c4351ac..cf0933705 100644
--- a/src/libstore/local.mk
+++ b/src/libstore/local.mk
@@ -28,7 +28,7 @@ ifeq ($(OS), SunOS)
endif
ifeq ($(HAVE_SECCOMP), 1)
- libstore_LDFLAGS += -lseccomp
+ libstore_LDFLAGS += $(LIBSECCOMP_LIBS)
endif
libstore_CXXFLAGS += \
diff --git a/src/libstore/machines.cc b/src/libstore/machines.cc
index 7db2556f4..b42e5e434 100644
--- a/src/libstore/machines.cc
+++ b/src/libstore/machines.cc
@@ -54,9 +54,15 @@ ref<Store> Machine::openStore() const {
if (hasPrefix(storeUri, "ssh://")) {
storeParams["max-connections"] = "1";
storeParams["log-fd"] = "4";
+ }
+
+ if (hasPrefix(storeUri, "ssh://") || hasPrefix(storeUri, "ssh-ng://")) {
if (sshKey != "")
storeParams["ssh-key"] = sshKey;
+ if (sshPublicHostKey != "")
+ storeParams["base64-ssh-public-host-key"] = sshPublicHostKey;
}
+
{
auto & fs = storeParams["system-features"];
auto append = [&](auto feats) {
diff --git a/src/libstore/misc.cc b/src/libstore/misc.cc
index 778ee4709..aed5f2842 100644
--- a/src/libstore/misc.cc
+++ b/src/libstore/misc.cc
@@ -22,54 +22,52 @@ void Store::computeFSClosure(const StorePathSet & startPaths,
Sync<State> state_(State{0, paths_, 0});
- std::function<void(const Path &)> enqueue;
+ std::function<void(const StorePath &)> enqueue;
std::condition_variable done;
- enqueue = [&](const Path & path) -> void {
+ enqueue = [&](const StorePath & path) -> void {
{
auto state(state_.lock());
if (state->exc) return;
- if (!state->paths.insert(parseStorePath(path)).second) return;
+ if (!state->paths.insert(path).second) return;
state->pending++;
}
- queryPathInfo(parseStorePath(path), {[&, pathS(path)](std::future<ref<const ValidPathInfo>> fut) {
+ queryPathInfo(path, {[&](std::future<ref<const ValidPathInfo>> fut) {
// FIXME: calls to isValidPath() should be async
try {
auto info = fut.get();
- auto path = parseStorePath(pathS);
-
if (flipDirection) {
StorePathSet referrers;
queryReferrers(path, referrers);
for (auto & ref : referrers)
if (ref != path)
- enqueue(printStorePath(ref));
+ enqueue(ref);
if (includeOutputs)
for (auto & i : queryValidDerivers(path))
- enqueue(printStorePath(i));
+ enqueue(i);
if (includeDerivers && path.isDerivation())
for (auto & i : queryDerivationOutputs(path))
if (isValidPath(i) && queryPathInfo(i)->deriver == path)
- enqueue(printStorePath(i));
+ enqueue(i);
} else {
for (auto & ref : info->references)
- enqueue(printStorePath(ref));
+ enqueue(ref);
if (includeOutputs && path.isDerivation())
for (auto & i : queryDerivationOutputs(path))
- if (isValidPath(i)) enqueue(printStorePath(i));
+ if (isValidPath(i)) enqueue(i);
if (includeDerivers && info->deriver && isValidPath(*info->deriver))
- enqueue(printStorePath(*info->deriver));
+ enqueue(*info->deriver);
}
@@ -89,7 +87,7 @@ void Store::computeFSClosure(const StorePathSet & startPaths,
};
for (auto & startPath : startPaths)
- enqueue(printStorePath(startPath));
+ enqueue(startPath);
{
auto state(state_.lock());
@@ -171,13 +169,10 @@ void Store::queryMissing(const std::vector<StorePathWithOutputs> & targets,
};
auto checkOutput = [&](
- const Path & drvPathS, ref<Derivation> drv, const Path & outPathS, ref<Sync<DrvState>> drvState_)
+ const StorePath & drvPath, ref<Derivation> drv, const StorePath & outPath, ref<Sync<DrvState>> drvState_)
{
if (drvState_->lock()->done) return;
- auto drvPath = parseStorePath(drvPathS);
- auto outPath = parseStorePath(outPathS);
-
SubstitutablePathInfos infos;
querySubstitutablePathInfos({{outPath, getDerivationCA(*drv)}}, infos);
@@ -214,7 +209,7 @@ void Store::queryMissing(const std::vector<StorePathWithOutputs> & targets,
return;
}
- PathSet invalid;
+ StorePathSet invalid;
/* true for regular derivations, and CA derivations for which we
have a trust mapping for all wanted outputs. */
auto knownOutputPaths = true;
@@ -224,7 +219,7 @@ void Store::queryMissing(const std::vector<StorePathWithOutputs> & targets,
break;
}
if (wantOutput(outputName, path.outputs) && !isValidPath(*pathOpt))
- invalid.insert(printStorePath(*pathOpt));
+ invalid.insert(*pathOpt);
}
if (knownOutputPaths && invalid.empty()) return;
@@ -234,7 +229,7 @@ void Store::queryMissing(const std::vector<StorePathWithOutputs> & targets,
if (knownOutputPaths && settings.useSubstitutes && parsedDrv.substitutesAllowed()) {
auto drvState = make_ref<Sync<DrvState>>(DrvState(invalid.size()));
for (auto & output : invalid)
- pool.enqueue(std::bind(checkOutput, printStorePath(path.path), drv, output, drvState));
+ pool.enqueue(std::bind(checkOutput, path.path, drv, output, drvState));
} else
mustBuildDrv(path.path, *drv);
diff --git a/src/libstore/realisation.cc b/src/libstore/realisation.cc
index cd74af4ee..638065547 100644
--- a/src/libstore/realisation.cc
+++ b/src/libstore/realisation.cc
@@ -25,27 +25,69 @@ nlohmann::json Realisation::toJSON() const {
return nlohmann::json{
{"id", id.to_string()},
{"outPath", outPath.to_string()},
+ {"signatures", signatures},
};
}
Realisation Realisation::fromJSON(
const nlohmann::json& json,
const std::string& whence) {
- auto getField = [&](std::string fieldName) -> std::string {
+ auto getOptionalField = [&](std::string fieldName) -> std::optional<std::string> {
auto fieldIterator = json.find(fieldName);
if (fieldIterator == json.end())
+ return std::nullopt;
+ return *fieldIterator;
+ };
+ auto getField = [&](std::string fieldName) -> std::string {
+ if (auto field = getOptionalField(fieldName))
+ return *field;
+ else
throw Error(
"Drv output info file '%1%' is corrupt, missing field %2%",
whence, fieldName);
- return *fieldIterator;
};
+ StringSet signatures;
+ if (auto signaturesIterator = json.find("signatures"); signaturesIterator != json.end())
+ signatures.insert(signaturesIterator->begin(), signaturesIterator->end());
+
return Realisation{
.id = DrvOutput::parse(getField("id")),
.outPath = StorePath(getField("outPath")),
+ .signatures = signatures,
};
}
+std::string Realisation::fingerprint() const
+{
+ auto serialized = toJSON();
+ serialized.erase("signatures");
+ return serialized.dump();
+}
+
+void Realisation::sign(const SecretKey & secretKey)
+{
+ signatures.insert(secretKey.signDetached(fingerprint()));
+}
+
+bool Realisation::checkSignature(const PublicKeys & publicKeys, const std::string & sig) const
+{
+ return verifyDetached(fingerprint(), sig, publicKeys);
+}
+
+size_t Realisation::checkSignatures(const PublicKeys & publicKeys) const
+{
+ // FIXME: Maybe we should return `maxSigs` if the realisation corresponds to
+ // an input-addressed one − because in that case the drv is enough to check
+ // it − but we can't know that here.
+
+ size_t good = 0;
+ for (auto & sig : signatures)
+ if (checkSignature(publicKeys, sig))
+ good++;
+ return good;
+}
+
StorePath RealisedPath::path() const {
return std::visit([](auto && arg) { return arg.getPath(); }, raw);
}
diff --git a/src/libstore/realisation.hh b/src/libstore/realisation.hh
index e1ab77c5b..adf1cf77a 100644
--- a/src/libstore/realisation.hh
+++ b/src/libstore/realisation.hh
@@ -6,6 +6,7 @@
#include "path.hh"
#include <nlohmann/json_fwd.hpp>
#include "comparator.hh"
+#include "crypto.hh"
namespace nix {
@@ -28,9 +29,16 @@ struct Realisation {
DrvOutput id;
StorePath outPath;
+ StringSet signatures;
+
nlohmann::json toJSON() const;
static Realisation fromJSON(const nlohmann::json& json, const std::string& whence);
+ std::string fingerprint() const;
+ void sign(const SecretKey &);
+ bool checkSignature(const PublicKeys & publicKeys, const std::string & sig) const;
+ size_t checkSignatures(const PublicKeys & publicKeys) const;
+
StorePath getPath() const { return outPath; }
GENERATE_CMP(Realisation, me->id, me->outPath);
diff --git a/src/libstore/remote-store.cc b/src/libstore/remote-store.cc
index e9df7440b..8ebe8a015 100644
--- a/src/libstore/remote-store.cc
+++ b/src/libstore/remote-store.cc
@@ -62,9 +62,15 @@ void write(const Store & store, Sink & out, const Realisation & realisation)
{ out << realisation.toJSON().dump(); }
DrvOutput read(const Store & store, Source & from, Phantom<DrvOutput> _)
-{ return DrvOutput::parse(readString(from)); }
+{
+ return DrvOutput::parse(readString(from));
+}
+
void write(const Store & store, Sink & out, const DrvOutput & drvOutput)
-{ out << drvOutput.to_string(); }
+{
+ out << drvOutput.to_string();
+}
+
std::optional<StorePath> read(const Store & store, Source & from, Phantom<std::optional<StorePath>> _)
{
@@ -682,10 +688,12 @@ BuildResult RemoteStore::buildDerivation(const StorePath & drvPath, const BasicD
conn->to << buildMode;
conn.processStderr();
BuildResult res;
- unsigned int status;
- conn->from >> status >> res.errorMsg;
- res.status = (BuildResult::Status) status;
- if (GET_PROTOCOL_MINOR(conn->daemonVersion) >= 0xc) {
+ res.status = (BuildResult::Status) readInt(conn->from);
+ conn->from >> res.errorMsg;
+ if (GET_PROTOCOL_MINOR(conn->daemonVersion) >= 29) {
+ conn->from >> res.timesBuilt >> res.isNonDeterministic >> res.startTime >> res.stopTime;
+ }
+ if (GET_PROTOCOL_MINOR(conn->daemonVersion) >= 28) {
auto builtOutputs = worker_proto::read(*this, conn->from, Phantom<DrvOutputs> {});
res.builtOutputs = builtOutputs;
}
diff --git a/src/libstore/serve-protocol.hh b/src/libstore/serve-protocol.hh
index 0a17387cb..02d0810cc 100644
--- a/src/libstore/serve-protocol.hh
+++ b/src/libstore/serve-protocol.hh
@@ -5,7 +5,7 @@ namespace nix {
#define SERVE_MAGIC_1 0x390c9deb
#define SERVE_MAGIC_2 0x5452eecb
-#define SERVE_PROTOCOL_VERSION 0x206
+#define SERVE_PROTOCOL_VERSION (2 << 8 | 6)
#define GET_PROTOCOL_MAJOR(x) ((x) & 0xff00)
#define GET_PROTOCOL_MINOR(x) ((x) & 0x00ff)
diff --git a/src/libstore/ssh-store.cc b/src/libstore/ssh-store.cc
index 17c258201..f2caf2aeb 100644
--- a/src/libstore/ssh-store.cc
+++ b/src/libstore/ssh-store.cc
@@ -13,6 +13,7 @@ struct SSHStoreConfig : virtual RemoteStoreConfig
using RemoteStoreConfig::RemoteStoreConfig;
const Setting<Path> sshKey{(StoreConfig*) this, "", "ssh-key", "path to an SSH private key"};
+ const Setting<std::string> sshPublicHostKey{(StoreConfig*) this, "", "base64-ssh-public-host-key", "The public half of the host's SSH key"};
const Setting<bool> compress{(StoreConfig*) this, false, "compress", "whether to compress the connection"};
const Setting<Path> remoteProgram{(StoreConfig*) this, "nix-daemon", "remote-program", "path to the nix-daemon executable on the remote system"};
const Setting<std::string> remoteStore{(StoreConfig*) this, "", "remote-store", "URI of the store on the remote system"};
@@ -34,6 +35,7 @@ public:
, master(
host,
sshKey,
+ sshPublicHostKey,
// Use SSH master only if using more than 1 connection.
connections->capacity() > 1,
compress)
diff --git a/src/libstore/ssh.cc b/src/libstore/ssh.cc
index 84548a6e4..235eed37a 100644
--- a/src/libstore/ssh.cc
+++ b/src/libstore/ssh.cc
@@ -2,24 +2,37 @@
namespace nix {
-SSHMaster::SSHMaster(const std::string & host, const std::string & keyFile, bool useMaster, bool compress, int logFD)
+SSHMaster::SSHMaster(const std::string & host, const std::string & keyFile, const std::string & sshPublicHostKey, bool useMaster, bool compress, int logFD)
: host(host)
, fakeSSH(host == "localhost")
, keyFile(keyFile)
+ , sshPublicHostKey(sshPublicHostKey)
, useMaster(useMaster && !fakeSSH)
, compress(compress)
, logFD(logFD)
{
if (host == "" || hasPrefix(host, "-"))
throw Error("invalid SSH host name '%s'", host);
+
+ auto state(state_.lock());
+ state->tmpDir = std::make_unique<AutoDelete>(createTempDir("", "nix", true, true, 0700));
}
void SSHMaster::addCommonSSHOpts(Strings & args)
{
+ auto state(state_.lock());
+
for (auto & i : tokenizeString<Strings>(getEnv("NIX_SSHOPTS").value_or("")))
args.push_back(i);
if (!keyFile.empty())
args.insert(args.end(), {"-i", keyFile});
+ if (!sshPublicHostKey.empty()) {
+ Path fileName = (Path) *state->tmpDir + "/host-key";
+ auto p = host.rfind("@");
+ string thost = p != string::npos ? string(host, p + 1) : host;
+ writeFile(fileName, thost + " " + base64Decode(sshPublicHostKey) + "\n");
+ args.insert(args.end(), {"-oUserKnownHostsFile=" + fileName});
+ }
if (compress)
args.push_back("-C");
}
@@ -87,7 +100,6 @@ Path SSHMaster::startMaster()
if (state->sshMaster != -1) return state->socketPath;
- state->tmpDir = std::make_unique<AutoDelete>(createTempDir("", "nix", true, true, 0700));
state->socketPath = (Path) *state->tmpDir + "/ssh.sock";
diff --git a/src/libstore/ssh.hh b/src/libstore/ssh.hh
index 4f0f0bd29..dabbcedda 100644
--- a/src/libstore/ssh.hh
+++ b/src/libstore/ssh.hh
@@ -12,6 +12,7 @@ private:
const std::string host;
bool fakeSSH;
const std::string keyFile;
+ const std::string sshPublicHostKey;
const bool useMaster;
const bool compress;
const int logFD;
@@ -29,7 +30,7 @@ private:
public:
- SSHMaster(const std::string & host, const std::string & keyFile, bool useMaster, bool compress, int logFD = -1);
+ SSHMaster(const std::string & host, const std::string & keyFile, const std::string & sshPublicHostKey, bool useMaster, bool compress, int logFD = -1);
struct Connection
{
diff --git a/src/libstore/store-api.cc b/src/libstore/store-api.cc
index 5a6fd33d1..e79fa6217 100644
--- a/src/libstore/store-api.cc
+++ b/src/libstore/store-api.cc
@@ -815,7 +815,7 @@ std::map<StorePath, StorePath> copyPaths(ref<Store> srcStore, ref<Store> dstStor
auto pathsMap = copyPaths(srcStore, dstStore, storePaths, repair, checkSigs, substitute);
try {
for (auto & realisation : realisations) {
- dstStore->registerDrvOutput(realisation);
+ dstStore->registerDrvOutput(realisation, checkSigs);
}
} catch (MissingExperimentalFeature & e) {
// Don't fail if the remote doesn't support CA derivations as it might
diff --git a/src/libstore/store-api.hh b/src/libstore/store-api.hh
index 6dca8d7af..034a39f15 100644
--- a/src/libstore/store-api.hh
+++ b/src/libstore/store-api.hh
@@ -380,7 +380,12 @@ public:
we don't really want to add the dependencies listed in a nar info we
don't trust anyway.
*/
- virtual bool pathInfoIsTrusted(const ValidPathInfo &)
+ virtual bool pathInfoIsUntrusted(const ValidPathInfo &)
+ {
+ return true;
+ }
+
+ virtual bool realisationIsUntrusted(const Realisation &)
{
return true;
}
@@ -476,6 +481,8 @@ public:
*/
virtual void registerDrvOutput(const Realisation & output)
{ unsupported("registerDrvOutput"); }
+ virtual void registerDrvOutput(const Realisation & output, CheckSigsFlag checkSigs)
+ { return registerDrvOutput(output); }
/* Write a NAR dump of a store path. */
virtual void narFromPath(const StorePath & path, Sink & sink) = 0;
diff --git a/src/libstore/worker-protocol.hh b/src/libstore/worker-protocol.hh
index 95f08bc9a..be071dd78 100644
--- a/src/libstore/worker-protocol.hh
+++ b/src/libstore/worker-protocol.hh
@@ -9,7 +9,7 @@ namespace nix {
#define WORKER_MAGIC_1 0x6e697863
#define WORKER_MAGIC_2 0x6478696f
-#define PROTOCOL_VERSION 0x11c
+#define PROTOCOL_VERSION (1 << 8 | 29)
#define GET_PROTOCOL_MAJOR(x) ((x) & 0xff00)
#define GET_PROTOCOL_MINOR(x) ((x) & 0x00ff)
diff --git a/src/libutil/config.cc b/src/libutil/config.cc
index 7467e5ac0..bda07cd55 100644
--- a/src/libutil/config.cc
+++ b/src/libutil/config.cc
@@ -20,7 +20,7 @@ bool Config::set(const std::string & name, const std::string & value)
return false;
}
i->second.setting->set(value, append);
- i->second.setting->overriden = true;
+ i->second.setting->overridden = true;
return true;
}
@@ -35,7 +35,7 @@ void Config::addSetting(AbstractSetting * setting)
auto i = unknownSettings.find(setting->name);
if (i != unknownSettings.end()) {
setting->set(i->second);
- setting->overriden = true;
+ setting->overridden = true;
unknownSettings.erase(i);
set = true;
}
@@ -48,7 +48,7 @@ void Config::addSetting(AbstractSetting * setting)
alias, setting->name);
else {
setting->set(i->second);
- setting->overriden = true;
+ setting->overridden = true;
unknownSettings.erase(i);
set = true;
}
@@ -69,10 +69,10 @@ void AbstractConfig::reapplyUnknownSettings()
set(s.first, s.second);
}
-void Config::getSettings(std::map<std::string, SettingInfo> & res, bool overridenOnly)
+void Config::getSettings(std::map<std::string, SettingInfo> & res, bool overriddenOnly)
{
for (auto & opt : _settings)
- if (!opt.second.isAlias && (!overridenOnly || opt.second.setting->overriden))
+ if (!opt.second.isAlias && (!overriddenOnly || opt.second.setting->overridden))
res.emplace(opt.first, SettingInfo{opt.second.setting->to_string(), opt.second.setting->description});
}
@@ -136,10 +136,10 @@ void AbstractConfig::applyConfigFile(const Path & path)
} catch (SysError &) { }
}
-void Config::resetOverriden()
+void Config::resetOverridden()
{
for (auto & s : _settings)
- s.second.setting->overriden = false;
+ s.second.setting->overridden = false;
}
nlohmann::json Config::toJSON()
@@ -169,7 +169,7 @@ AbstractSetting::AbstractSetting(
void AbstractSetting::setDefault(const std::string & str)
{
- if (!overriden) set(str);
+ if (!overridden) set(str);
}
nlohmann::json AbstractSetting::toJSON()
@@ -203,7 +203,7 @@ void BaseSetting<T>::convertToArg(Args & args, const std::string & category)
.description = fmt("Set the `%s` setting.", name),
.category = category,
.labels = {"value"},
- .handler = {[=](std::string s) { overriden = true; set(s); }},
+ .handler = {[=](std::string s) { overridden = true; set(s); }},
});
if (isAppendable())
@@ -212,7 +212,7 @@ void BaseSetting<T>::convertToArg(Args & args, const std::string & category)
.description = fmt("Append to the `%s` setting.", name),
.category = category,
.labels = {"value"},
- .handler = {[=](std::string s) { overriden = true; set(s, true); }},
+ .handler = {[=](std::string s) { overridden = true; set(s, true); }},
});
}
@@ -365,16 +365,16 @@ bool GlobalConfig::set(const std::string & name, const std::string & value)
return false;
}
-void GlobalConfig::getSettings(std::map<std::string, SettingInfo> & res, bool overridenOnly)
+void GlobalConfig::getSettings(std::map<std::string, SettingInfo> & res, bool overriddenOnly)
{
for (auto & config : *configRegistrations)
- config->getSettings(res, overridenOnly);
+ config->getSettings(res, overriddenOnly);
}
-void GlobalConfig::resetOverriden()
+void GlobalConfig::resetOverridden()
{
for (auto & config : *configRegistrations)
- config->resetOverriden();
+ config->resetOverridden();
}
nlohmann::json GlobalConfig::toJSON()
diff --git a/src/libutil/config.hh b/src/libutil/config.hh
index 71e31656d..bf81b4892 100644
--- a/src/libutil/config.hh
+++ b/src/libutil/config.hh
@@ -71,9 +71,9 @@ public:
/**
* Adds the currently known settings to the given result map `res`.
* - res: map to store settings in
- * - overridenOnly: when set to true only overridden settings will be added to `res`
+ * - overriddenOnly: when set to true only overridden settings will be added to `res`
*/
- virtual void getSettings(std::map<std::string, SettingInfo> & res, bool overridenOnly = false) = 0;
+ virtual void getSettings(std::map<std::string, SettingInfo> & res, bool overriddenOnly = false) = 0;
/**
* Parses the configuration in `contents` and applies it
@@ -91,7 +91,7 @@ public:
/**
* Resets the `overridden` flag of all Settings
*/
- virtual void resetOverriden() = 0;
+ virtual void resetOverridden() = 0;
/**
* Outputs all settings to JSON
@@ -127,7 +127,7 @@ public:
MyClass() : Config(readConfigFile("/etc/my-app.conf"))
{
- std::cout << foo << "\n"; // will print 123 unless overriden
+ std::cout << foo << "\n"; // will print 123 unless overridden
}
};
*/
@@ -163,9 +163,9 @@ public:
void addSetting(AbstractSetting * setting);
- void getSettings(std::map<std::string, SettingInfo> & res, bool overridenOnly = false) override;
+ void getSettings(std::map<std::string, SettingInfo> & res, bool overriddenOnly = false) override;
- void resetOverriden() override;
+ void resetOverridden() override;
nlohmann::json toJSON() override;
@@ -184,7 +184,7 @@ public:
int created = 123;
- bool overriden = false;
+ bool overridden = false;
void setDefault(const std::string & str);
@@ -215,7 +215,7 @@ protected:
virtual void convertToArg(Args & args, const std::string & category);
- bool isOverriden() const { return overriden; }
+ bool isOverridden() const { return overridden; }
};
/* A setting of type T. */
@@ -252,7 +252,7 @@ public:
virtual void override(const T & v)
{
- overriden = true;
+ overridden = true;
value = v;
}
@@ -324,9 +324,9 @@ struct GlobalConfig : public AbstractConfig
bool set(const std::string & name, const std::string & value) override;
- void getSettings(std::map<std::string, SettingInfo> & res, bool overridenOnly = false) override;
+ void getSettings(std::map<std::string, SettingInfo> & res, bool overriddenOnly = false) override;
- void resetOverriden() override;
+ void resetOverridden() override;
nlohmann::json toJSON() override;
diff --git a/src/libutil/tests/config.cc b/src/libutil/tests/config.cc
index c305af9f5..0ebdaf3db 100644
--- a/src/libutil/tests/config.cc
+++ b/src/libutil/tests/config.cc
@@ -29,20 +29,20 @@ namespace nix {
std::map<std::string, Config::SettingInfo> settings;
Setting<std::string> foo{&config, value, "name-of-the-setting", "description"};
- config.getSettings(settings, /* overridenOnly = */ false);
+ config.getSettings(settings, /* overriddenOnly = */ false);
const auto iter = settings.find("name-of-the-setting");
ASSERT_NE(iter, settings.end());
ASSERT_EQ(iter->second.value, "");
ASSERT_EQ(iter->second.description, "description\n");
}
- TEST(Config, getDefinedOverridenSettingNotSet) {
+ TEST(Config, getDefinedOverriddenSettingNotSet) {
Config config;
std::string value;
std::map<std::string, Config::SettingInfo> settings;
Setting<std::string> foo{&config, value, "name-of-the-setting", "description"};
- config.getSettings(settings, /* overridenOnly = */ true);
+ config.getSettings(settings, /* overriddenOnly = */ true);
const auto e = settings.find("name-of-the-setting");
ASSERT_EQ(e, settings.end());
}
@@ -55,7 +55,7 @@ namespace nix {
setting.assign("value");
- config.getSettings(settings, /* overridenOnly = */ false);
+ config.getSettings(settings, /* overriddenOnly = */ false);
const auto iter = settings.find("name-of-the-setting");
ASSERT_NE(iter, settings.end());
ASSERT_EQ(iter->second.value, "value");
@@ -69,7 +69,7 @@ namespace nix {
ASSERT_TRUE(config.set("name-of-the-setting", "value"));
- config.getSettings(settings, /* overridenOnly = */ false);
+ config.getSettings(settings, /* overriddenOnly = */ false);
const auto e = settings.find("name-of-the-setting");
ASSERT_NE(e, settings.end());
ASSERT_EQ(e->second.value, "value");
@@ -100,7 +100,7 @@ namespace nix {
{
std::map<std::string, Config::SettingInfo> settings;
- config.getSettings(settings, /* overridenOnly = */ false);
+ config.getSettings(settings, /* overriddenOnly = */ false);
ASSERT_EQ(settings.find("key"), settings.end());
}
@@ -108,17 +108,17 @@ namespace nix {
{
std::map<std::string, Config::SettingInfo> settings;
- config.getSettings(settings, /* overridenOnly = */ false);
+ config.getSettings(settings, /* overriddenOnly = */ false);
ASSERT_EQ(settings["key"].value, "value");
}
}
- TEST(Config, resetOverriden) {
+ TEST(Config, resetOverridden) {
Config config;
- config.resetOverriden();
+ config.resetOverridden();
}
- TEST(Config, resetOverridenWithSetting) {
+ TEST(Config, resetOverriddenWithSetting) {
Config config;
Setting<std::string> setting{&config, "", "name-of-the-setting", "description"};
@@ -127,7 +127,7 @@ namespace nix {
setting.set("foo");
ASSERT_EQ(setting.get(), "foo");
- config.getSettings(settings, /* overridenOnly = */ true);
+ config.getSettings(settings, /* overriddenOnly = */ true);
ASSERT_TRUE(settings.empty());
}
@@ -135,18 +135,18 @@ namespace nix {
std::map<std::string, Config::SettingInfo> settings;
setting.override("bar");
- ASSERT_TRUE(setting.overriden);
+ ASSERT_TRUE(setting.overridden);
ASSERT_EQ(setting.get(), "bar");
- config.getSettings(settings, /* overridenOnly = */ true);
+ config.getSettings(settings, /* overriddenOnly = */ true);
ASSERT_FALSE(settings.empty());
}
{
std::map<std::string, Config::SettingInfo> settings;
- config.resetOverriden();
- ASSERT_FALSE(setting.overriden);
- config.getSettings(settings, /* overridenOnly = */ true);
+ config.resetOverridden();
+ ASSERT_FALSE(setting.overridden);
+ config.getSettings(settings, /* overriddenOnly = */ true);
ASSERT_TRUE(settings.empty());
}
}
diff --git a/src/libutil/tests/url.cc b/src/libutil/tests/url.cc
index 80646ad3e..aff58e9ee 100644
--- a/src/libutil/tests/url.cc
+++ b/src/libutil/tests/url.cc
@@ -117,6 +117,24 @@ namespace nix {
ASSERT_EQ(parsed, expected);
}
+ TEST(parseURL, parseScopedRFC4007IPv6Address) {
+ auto s = "http://[fe80::818c:da4d:8975:415c\%enp0s25]:8080";
+ auto parsed = parseURL(s);
+
+ ParsedURL expected {
+ .url = "http://[fe80::818c:da4d:8975:415c\%enp0s25]:8080",
+ .base = "http://[fe80::818c:da4d:8975:415c\%enp0s25]:8080",
+ .scheme = "http",
+ .authority = "[fe80::818c:da4d:8975:415c\%enp0s25]:8080",
+ .path = "",
+ .query = (StringMap) { },
+ .fragment = "",
+ };
+
+ ASSERT_EQ(parsed, expected);
+
+ }
+
TEST(parseURL, parseIPv6Address) {
auto s = "http://[2a02:8071:8192:c100:311d:192d:81ac:11ea]:8080";
auto parsed = parseURL(s);
diff --git a/src/libutil/url-parts.hh b/src/libutil/url-parts.hh
index 862d9fa6e..da10a6bbc 100644
--- a/src/libutil/url-parts.hh
+++ b/src/libutil/url-parts.hh
@@ -8,7 +8,7 @@ namespace nix {
// URI stuff.
const static std::string pctEncoded = "(?:%[0-9a-fA-F][0-9a-fA-F])";
const static std::string schemeRegex = "(?:[a-z][a-z0-9+.-]*)";
-const static std::string ipv6AddressSegmentRegex = "[0-9a-fA-F:]+";
+const static std::string ipv6AddressSegmentRegex = "[0-9a-fA-F:]+(?:%\\w+)?";
const static std::string ipv6AddressRegex = "(?:\\[" + ipv6AddressSegmentRegex + "\\]|" + ipv6AddressSegmentRegex + ")";
const static std::string unreservedRegex = "(?:[a-zA-Z0-9-._~])";
const static std::string subdelimsRegex = "(?:[!$&'\"()*+,;=])";
diff --git a/src/libutil/util.cc b/src/libutil/util.cc
index ef37275ac..dea9c74b7 100644
--- a/src/libutil/util.cc
+++ b/src/libutil/util.cc
@@ -1590,7 +1590,7 @@ void startSignalHandlerThread()
updateWindowSize();
if (sigprocmask(SIG_BLOCK, nullptr, &savedSignalMask))
- throw SysError("quering signal mask");
+ throw SysError("querying signal mask");
sigset_t set;
sigemptyset(&set);
diff --git a/src/nix-build/nix-build.cc b/src/nix-build/nix-build.cc
index 7b4a53919..65b85b304 100755
--- a/src/nix-build/nix-build.cc
+++ b/src/nix-build/nix-build.cc
@@ -447,6 +447,7 @@ static void main_nix_build(int argc, char * * argv)
"unset NIX_ENFORCE_PURITY; "
"shopt -u nullglob; "
"unset TZ; %6%"
+ "shopt -s execfail;"
"%7%",
shellEscape(tmpDir),
(pure ? "" : "p=$PATH; "),
diff --git a/src/nix-store/nix-store.cc b/src/nix-store/nix-store.cc
index dfa5de62f..b2b003a8b 100644
--- a/src/nix-store/nix-store.cc
+++ b/src/nix-store/nix-store.cc
@@ -911,7 +911,7 @@ static void opServe(Strings opFlags, Strings opArgs)
if (GET_PROTOCOL_MINOR(clientVersion) >= 3)
out << status.timesBuilt << status.isNonDeterministic << status.startTime << status.stopTime;
- if (GET_PROTOCOL_MINOR(clientVersion >= 5)) {
+ if (GET_PROTOCOL_MINOR(clientVersion) >= 6) {
worker_proto::write(*store, out, status.builtOutputs);
}
diff --git a/src/nix/build.md b/src/nix/build.md
index c2f3e387a..20138b7e0 100644
--- a/src/nix/build.md
+++ b/src/nix/build.md
@@ -81,7 +81,7 @@ path installables are substituted.
Unless `--no-link` is specified, after a successful build, it creates
symlinks to the store paths of the installables. These symlinks have
-the prefix `./result` by default; this can be overriden using the
+the prefix `./result` by default; this can be overridden using the
`--out-link` option. Each symlink has a suffix `-<N>-<outname>`, where
*N* is the index of the installable (with the left-most installable
having index 0), and *outname* is the symbolic derivation output name
diff --git a/src/nix/flake-init.md b/src/nix/flake-init.md
index c66154ad5..890038016 100644
--- a/src/nix/flake-init.md
+++ b/src/nix/flake-init.md
@@ -24,7 +24,7 @@ R""(
This command creates a flake in the current directory by copying the
files of a template. It will not overwrite existing files. The default
-template is `templates#defaultTemplate`, but this can be overriden
+template is `templates#defaultTemplate`, but this can be overridden
using `-t`.
# Template definitions
diff --git a/src/nix/flake-list-inputs.md b/src/nix/flake-list-inputs.md
deleted file mode 100644
index 250e13be0..000000000
--- a/src/nix/flake-list-inputs.md
+++ /dev/null
@@ -1,23 +0,0 @@
-R""(
-
-# Examples
-
-* Show the inputs of the `hydra` flake:
-
- ```console
- # nix flake list-inputs github:NixOS/hydra
- github:NixOS/hydra/bde8d81876dfc02143e5070e42c78d8f0d83d6f7
- ├───nix: github:NixOS/nix/79aa7d95183cbe6c0d786965f0dbff414fd1aa67
- │ ├───lowdown-src: github:kristapsdz/lowdown/1705b4a26fbf065d9574dce47a94e8c7c79e052f
- │ └───nixpkgs: github:NixOS/nixpkgs/ad0d20345219790533ebe06571f82ed6b034db31
- └───nixpkgs follows input 'nix/nixpkgs'
- ```
-
-# Description
-
-This command shows the inputs of the flake specified by the flake
-referenced *flake-url*. Since it prints the locked inputs that result
-from generating or updating the lock file, this command essentially
-displays the contents of the flake's lock file in human-readable form.
-
-)""
diff --git a/src/nix/flake-info.md b/src/nix/flake-metadata.md
index fda3171db..5a009409b 100644
--- a/src/nix/flake-info.md
+++ b/src/nix/flake-metadata.md
@@ -5,19 +5,24 @@ R""(
* Show what `dwarffs` resolves to:
```console
- # nix flake info nixpkgs
- Resolved URL: github:NixOS/nixpkgs
- Locked URL: github:NixOS/nixpkgs/b67ba0bfcc714453cdeb8d713e35751eb8b4c8f4
- Description: A collection of packages for the Nix package manager
- Path: /nix/store/23qapccs6cfmwwrlq8kr41vz5vdmns3r-source
- Revision: b67ba0bfcc714453cdeb8d713e35751eb8b4c8f4
- Last modified: 2020-12-23 12:36:12
+ # nix flake metadata dwarffs
+ Resolved URL: github:edolstra/dwarffs
+ Locked URL: github:edolstra/dwarffs/f691e2c991e75edb22836f1dbe632c40324215c5
+ Description: A filesystem that fetches DWARF debug info from the Internet on demand
+ Path: /nix/store/769s05vjydmc2lcf6b02az28wsa9ixh1-source
+ Revision: f691e2c991e75edb22836f1dbe632c40324215c5
+ Last modified: 2021-01-21 15:41:26
+ Inputs:
+ ├───nix: github:NixOS/nix/6254b1f5d298ff73127d7b0f0da48f142bdc753c
+ │ ├───lowdown-src: github:kristapsdz/lowdown/1705b4a26fbf065d9574dce47a94e8c7c79e052f
+ │ └───nixpkgs: github:NixOS/nixpkgs/ad0d20345219790533ebe06571f82ed6b034db31
+ └───nixpkgs follows input 'nix/nixpkgs'
```
* Show information about `dwarffs` in JSON format:
```console
- # nix flake info dwarffs --json | jq .
+ # nix flake metadata dwarffs --json | jq .
{
"description": "A filesystem that fetches DWARF debug info from the Internet on demand",
"lastModified": 1597153508,
@@ -29,6 +34,7 @@ R""(
"rev": "d181d714fd36eb06f4992a1997cd5601e26db8f5",
"type": "github"
},
+ "locks": { ... },
"original": {
"id": "dwarffs",
"type": "indirect"
@@ -75,6 +81,9 @@ data. This includes:
time of the commit of the locked flake; for tarball flakes, it's the
most recent timestamp of any file inside the tarball.
+* `Inputs`: The flake inputs with their corresponding lock file
+ entries.
+
With `--json`, the output is a JSON object with the following fields:
* `original` and `originalUrl`: The flake reference specified by the
@@ -96,4 +105,6 @@ With `--json`, the output is a JSON object with the following fields:
* `lastModified`: See `Last modified` above.
+* `locks`: The contents of `flake.lock`.
+
)""
diff --git a/src/nix/flake.cc b/src/nix/flake.cc
index 2f0c468a8..a2b6c0303 100644
--- a/src/nix/flake.cc
+++ b/src/nix/flake.cc
@@ -43,12 +43,6 @@ public:
return parseFlakeRef(flakeUrl, absPath(".")); //FIXME
}
- Flake getFlake()
- {
- auto evalState = getEvalState();
- return flake::getFlake(*evalState, getFlakeRef(), lockFlags.useRegistries);
- }
-
LockedFlake lockFlake()
{
return flake::lockFlake(*getEvalState(), getFlakeRef(), lockFlags);
@@ -60,43 +54,6 @@ public:
}
};
-static void printFlakeInfo(const Store & store, const Flake & flake)
-{
- logger->cout("Resolved URL: %s", flake.resolvedRef.to_string());
- logger->cout("Locked URL: %s", flake.lockedRef.to_string());
- if (flake.description)
- logger->cout("Description: %s", *flake.description);
- logger->cout("Path: %s", store.printStorePath(flake.sourceInfo->storePath));
- if (auto rev = flake.lockedRef.input.getRev())
- logger->cout("Revision: %s", rev->to_string(Base16, false));
- if (auto revCount = flake.lockedRef.input.getRevCount())
- logger->cout("Revisions: %s", *revCount);
- if (auto lastModified = flake.lockedRef.input.getLastModified())
- logger->cout("Last modified: %s",
- std::put_time(std::localtime(&*lastModified), "%F %T"));
-}
-
-static nlohmann::json flakeToJSON(const Store & store, const Flake & flake)
-{
- nlohmann::json j;
- if (flake.description)
- j["description"] = *flake.description;
- j["originalUrl"] = flake.originalRef.to_string();
- j["original"] = fetchers::attrsToJSON(flake.originalRef.toAttrs());
- j["resolvedUrl"] = flake.resolvedRef.to_string();
- j["resolved"] = fetchers::attrsToJSON(flake.resolvedRef.toAttrs());
- j["url"] = flake.lockedRef.to_string(); // FIXME: rename to lockedUrl
- j["locked"] = fetchers::attrsToJSON(flake.lockedRef.toAttrs());
- if (auto rev = flake.lockedRef.input.getRev())
- j["revision"] = rev->to_string(Base16, false);
- if (auto revCount = flake.lockedRef.input.getRevCount())
- j["revCount"] = *revCount;
- if (auto lastModified = flake.lockedRef.input.getLastModified())
- j["lastModified"] = *lastModified;
- j["path"] = store.printStorePath(flake.sourceInfo->storePath);
- return j;
-}
-
struct CmdFlakeUpdate : FlakeCommand
{
std::string description() override
@@ -110,6 +67,7 @@ struct CmdFlakeUpdate : FlakeCommand
removeFlag("recreate-lock-file");
removeFlag("update-input");
removeFlag("no-update-lock-file");
+ removeFlag("no-write-lock-file");
}
std::string doc() override
@@ -124,6 +82,7 @@ struct CmdFlakeUpdate : FlakeCommand
settings.tarballTtl = 0;
lockFlags.recreateLockFile = true;
+ lockFlags.writeLockFile = true;
lockFlake();
}
@@ -136,6 +95,12 @@ struct CmdFlakeLock : FlakeCommand
return "create missing lock file entries";
}
+ CmdFlakeLock()
+ {
+ /* Remove flags that don't make sense. */
+ removeFlag("no-write-lock-file");
+ }
+
std::string doc() override
{
return
@@ -147,6 +112,8 @@ struct CmdFlakeLock : FlakeCommand
{
settings.tarballTtl = 0;
+ lockFlags.writeLockFile = true;
+
lockFlake();
}
};
@@ -165,54 +132,72 @@ static void enumerateOutputs(EvalState & state, Value & vFlake,
callback(attr.name, *attr.value, *attr.pos);
}
-struct CmdFlakeInfo : FlakeCommand, MixJSON
+struct CmdFlakeMetadata : FlakeCommand, MixJSON
{
std::string description() override
{
- return "list info about a given flake";
+ return "show flake metadata";
}
std::string doc() override
{
return
- #include "flake-info.md"
+ #include "flake-metadata.md"
;
}
void run(nix::ref<nix::Store> store) override
{
- auto flake = getFlake();
+ auto lockedFlake = lockFlake();
+ auto & flake = lockedFlake.flake;
if (json) {
- auto json = flakeToJSON(*store, flake);
- logger->cout("%s", json.dump());
- } else
- printFlakeInfo(*store, flake);
- }
-};
-
-struct CmdFlakeListInputs : FlakeCommand, MixJSON
-{
- std::string description() override
- {
- return "list flake inputs";
- }
-
- std::string doc() override
- {
- return
- #include "flake-list-inputs.md"
- ;
- }
-
- void run(nix::ref<nix::Store> store) override
- {
- auto flake = lockFlake();
-
- if (json)
- logger->cout("%s", flake.lockFile.toJSON());
- else {
- logger->cout("%s", flake.flake.lockedRef);
+ nlohmann::json j;
+ if (flake.description)
+ j["description"] = *flake.description;
+ j["originalUrl"] = flake.originalRef.to_string();
+ j["original"] = fetchers::attrsToJSON(flake.originalRef.toAttrs());
+ j["resolvedUrl"] = flake.resolvedRef.to_string();
+ j["resolved"] = fetchers::attrsToJSON(flake.resolvedRef.toAttrs());
+ j["url"] = flake.lockedRef.to_string(); // FIXME: rename to lockedUrl
+ j["locked"] = fetchers::attrsToJSON(flake.lockedRef.toAttrs());
+ if (auto rev = flake.lockedRef.input.getRev())
+ j["revision"] = rev->to_string(Base16, false);
+ if (auto revCount = flake.lockedRef.input.getRevCount())
+ j["revCount"] = *revCount;
+ if (auto lastModified = flake.lockedRef.input.getLastModified())
+ j["lastModified"] = *lastModified;
+ j["path"] = store->printStorePath(flake.sourceInfo->storePath);
+ j["locks"] = lockedFlake.lockFile.toJSON();
+ logger->cout("%s", j.dump());
+ } else {
+ logger->cout(
+ ANSI_BOLD "Resolved URL:" ANSI_NORMAL " %s",
+ flake.resolvedRef.to_string());
+ logger->cout(
+ ANSI_BOLD "Locked URL:" ANSI_NORMAL " %s",
+ flake.lockedRef.to_string());
+ if (flake.description)
+ logger->cout(
+ ANSI_BOLD "Description:" ANSI_NORMAL " %s",
+ *flake.description);
+ logger->cout(
+ ANSI_BOLD "Path:" ANSI_NORMAL " %s",
+ store->printStorePath(flake.sourceInfo->storePath));
+ if (auto rev = flake.lockedRef.input.getRev())
+ logger->cout(
+ ANSI_BOLD "Revision:" ANSI_NORMAL " %s",
+ rev->to_string(Base16, false));
+ if (auto revCount = flake.lockedRef.input.getRevCount())
+ logger->cout(
+ ANSI_BOLD "Revisions:" ANSI_NORMAL " %s",
+ *revCount);
+ if (auto lastModified = flake.lockedRef.input.getLastModified())
+ logger->cout(
+ ANSI_BOLD "Last modified:" ANSI_NORMAL " %s",
+ std::put_time(std::localtime(&*lastModified), "%F %T"));
+
+ logger->cout(ANSI_BOLD "Inputs:" ANSI_NORMAL);
std::unordered_set<std::shared_ptr<Node>> visited;
@@ -226,7 +211,7 @@ struct CmdFlakeListInputs : FlakeCommand, MixJSON
if (auto lockedNode = std::get_if<0>(&input.second)) {
logger->cout("%s" ANSI_BOLD "%s" ANSI_NORMAL ": %s",
prefix + (last ? treeLast : treeConn), input.first,
- *lockedNode ? (*lockedNode)->lockedRef : flake.flake.lockedRef);
+ *lockedNode ? (*lockedNode)->lockedRef : flake.lockedRef);
bool firstVisit = visited.insert(*lockedNode).second;
@@ -239,12 +224,21 @@ struct CmdFlakeListInputs : FlakeCommand, MixJSON
}
};
- visited.insert(flake.lockFile.root);
- recurse(*flake.lockFile.root, "");
+ visited.insert(lockedFlake.lockFile.root);
+ recurse(*lockedFlake.lockFile.root, "");
}
}
};
+struct CmdFlakeInfo : CmdFlakeMetadata
+{
+ void run(nix::ref<nix::Store> store) override
+ {
+ warn("'nix flake info' is a deprecated alias for 'nix flake metadata'");
+ CmdFlakeMetadata::run(store);
+ }
+};
+
struct CmdFlakeCheck : FlakeCommand
{
bool build = true;
@@ -1038,8 +1032,8 @@ struct CmdFlake : NixMultiCommand
: MultiCommand({
{"update", []() { return make_ref<CmdFlakeUpdate>(); }},
{"lock", []() { return make_ref<CmdFlakeLock>(); }},
+ {"metadata", []() { return make_ref<CmdFlakeMetadata>(); }},
{"info", []() { return make_ref<CmdFlakeInfo>(); }},
- {"list-inputs", []() { return make_ref<CmdFlakeListInputs>(); }},
{"check", []() { return make_ref<CmdFlakeCheck>(); }},
{"init", []() { return make_ref<CmdFlakeInit>(); }},
{"new", []() { return make_ref<CmdFlakeNew>(); }},
diff --git a/src/nix/flake.md b/src/nix/flake.md
index 440c45dd1..0035195e5 100644
--- a/src/nix/flake.md
+++ b/src/nix/flake.md
@@ -70,7 +70,7 @@ Here are some examples of flake references in their URL-like representation:
* `/home/alice/src/patchelf`: A flake in some other directory.
* `nixpkgs`: The `nixpkgs` entry in the flake registry.
* `nixpkgs/a3a3dda3bacf61e8a39258a0ed9c924eeca8e293`: The `nixpkgs`
- entry in the flake registry, with its Git revision overriden to a
+ entry in the flake registry, with its Git revision overridden to a
specific value.
* `github:NixOS/nixpkgs`: The `master` branch of the `NixOS/nixpkgs`
repository on GitHub.
@@ -377,7 +377,7 @@ outputs = { self, nixpkgs, grcov }: {
};
```
-Transitive inputs can be overriden from a `flake.nix` file. For
+Transitive inputs can be overridden from a `flake.nix` file. For
example, the following overrides the `nixpkgs` input of the `nixops`
input:
diff --git a/src/nix/main.cc b/src/nix/main.cc
index 06e221682..f8701ee56 100644
--- a/src/nix/main.cc
+++ b/src/nix/main.cc
@@ -309,13 +309,13 @@ void mainWrapped(int argc, char * * argv)
if (!args.useNet) {
// FIXME: should check for command line overrides only.
- if (!settings.useSubstitutes.overriden)
+ if (!settings.useSubstitutes.overridden)
settings.useSubstitutes = false;
- if (!settings.tarballTtl.overriden)
+ if (!settings.tarballTtl.overridden)
settings.tarballTtl = std::numeric_limits<unsigned int>::max();
- if (!fileTransferSettings.tries.overriden)
+ if (!fileTransferSettings.tries.overridden)
fileTransferSettings.tries = 0;
- if (!fileTransferSettings.connectTimeout.overriden)
+ if (!fileTransferSettings.connectTimeout.overridden)
fileTransferSettings.connectTimeout = 1;
}
diff --git a/src/nix/realisation.cc b/src/nix/realisation.cc
new file mode 100644
index 000000000..9ee9ccb91
--- /dev/null
+++ b/src/nix/realisation.cc
@@ -0,0 +1,78 @@
+#include "command.hh"
+#include "common-args.hh"
+
+#include <nlohmann/json.hpp>
+
+using namespace nix;
+
+struct CmdRealisation : virtual NixMultiCommand
+{
+ CmdRealisation() : MultiCommand(RegisterCommand::getCommandsFor({"realisation"}))
+ { }
+
+ std::string description() override
+ {
+ return "manipulate a Nix realisation";
+ }
+
+ Category category() override { return catUtility; }
+
+ void run() override
+ {
+ if (!command)
+ throw UsageError("'nix realisation' requires a sub-command.");
+ command->second->prepare();
+ command->second->run();
+ }
+};
+
+static auto rCmdRealisation = registerCommand<CmdRealisation>("realisation");
+
+struct CmdRealisationInfo : RealisedPathsCommand, MixJSON
+{
+ std::string description() override
+ {
+ return "query information about one or several realisations";
+ }
+
+ std::string doc() override
+ {
+ return
+ #include "realisation/info.md"
+ ;
+ }
+
+ Category category() override { return catSecondary; }
+
+ void run(ref<Store> store, std::vector<RealisedPath> paths) override
+ {
+ settings.requireExperimentalFeature("ca-derivations");
+ if (json) {
+ nlohmann::json res = nlohmann::json::array();
+ for (auto & path : paths) {
+ nlohmann::json currentPath;
+ if (auto realisation = std::get_if<Realisation>(&path.raw))
+ currentPath = realisation->toJSON();
+ else
+ currentPath["opaquePath"] = store->printStorePath(path.path());
+
+ res.push_back(currentPath);
+ }
+ std::cout << res.dump();
+ }
+ else {
+ for (auto & path : paths) {
+ if (auto realisation = std::get_if<Realisation>(&path.raw)) {
+ std::cout <<
+ realisation->id.to_string() << " " <<
+ store->printStorePath(realisation->outPath);
+ } else
+ std::cout << store->printStorePath(path.path());
+
+ std::cout << std::endl;
+ }
+ }
+ }
+};
+
+static auto rCmdRealisationInfo = registerCommand2<CmdRealisationInfo>({"realisation", "info"});
diff --git a/src/nix/realisation/info.md b/src/nix/realisation/info.md
new file mode 100644
index 000000000..852240f44
--- /dev/null
+++ b/src/nix/realisation/info.md
@@ -0,0 +1,15 @@
+R"MdBoundary(
+# Description
+
+Display some information about the given realisation
+
+# Examples
+
+Show some information about the realisation of the `hello` package:
+
+```console
+$ nix realisation info nixpkgs#hello --json
+[{"id":"sha256:3d382378a00588e064ee30be96dd0fa7e7df7cf3fbcace85a0e7b7dada1eef25!out","outPath":"fd3m7xawvrqcg98kgz5hc2vk3x9q0lh7-hello"}]
+```
+
+)MdBoundary"
diff --git a/src/nix/store-prefetch-file.md b/src/nix/store-prefetch-file.md
index 1663b847b..f9fdcbc57 100644
--- a/src/nix/store-prefetch-file.md
+++ b/src/nix/store-prefetch-file.md
@@ -27,6 +27,6 @@ the resulting store path and the cryptographic hash of the contents of
the file.
The name component of the store path defaults to the last component of
-*url*, but this can be overriden using `--name`.
+*url*, but this can be overridden using `--name`.
)""
diff --git a/tests/build-hook-ca-fixed.nix b/tests/build-hook-ca-fixed.nix
new file mode 100644
index 000000000..ec7171ac9
--- /dev/null
+++ b/tests/build-hook-ca-fixed.nix
@@ -0,0 +1,56 @@
+{ busybox }:
+
+with import ./config.nix;
+
+let
+
+ mkDerivation = args:
+ derivation ({
+ inherit system;
+ builder = busybox;
+ args = ["sh" "-e" args.builder or (builtins.toFile "builder-${args.name}.sh" "if [ -e .attrs.sh ]; then source .attrs.sh; fi; eval \"$buildCommand\"")];
+ outputHashMode = "recursive";
+ outputHashAlgo = "sha256";
+ } // removeAttrs args ["builder" "meta"])
+ // { meta = args.meta or {}; };
+
+ input1 = mkDerivation {
+ shell = busybox;
+ name = "build-remote-input-1";
+ buildCommand = "echo FOO > $out";
+ requiredSystemFeatures = ["foo"];
+ outputHash = "sha256-FePFYIlMuycIXPZbWi7LGEiMmZSX9FMbaQenWBzm1Sc=";
+ };
+
+ input2 = mkDerivation {
+ shell = busybox;
+ name = "build-remote-input-2";
+ buildCommand = "echo BAR > $out";
+ requiredSystemFeatures = ["bar"];
+ outputHash = "sha256-XArauVH91AVwP9hBBQNlkX9ccuPpSYx9o0zeIHb6e+Q=";
+ };
+
+ input3 = mkDerivation {
+ shell = busybox;
+ name = "build-remote-input-3";
+ buildCommand = ''
+ read x < ${input2}
+ echo $x BAZ > $out
+ '';
+ requiredSystemFeatures = ["baz"];
+ outputHash = "sha256-daKAcPp/+BYMQsVi/YYMlCKoNAxCNDsaivwSHgQqD2s=";
+ };
+
+in
+
+ mkDerivation {
+ shell = busybox;
+ name = "build-remote";
+ buildCommand =
+ ''
+ read x < ${input1}
+ read y < ${input3}
+ echo "$x $y" > $out
+ '';
+ outputHash = "sha256-5SxbkUw6xe2l9TE1uwCvTtTDysD1vhRor38OtDF0LqQ=";
+ }
diff --git a/tests/build-hook-ca.nix b/tests/build-hook-ca-floating.nix
index 67295985f..67295985f 100644
--- a/tests/build-hook-ca.nix
+++ b/tests/build-hook-ca-floating.nix
diff --git a/tests/build-remote-content-addressed-fixed.sh b/tests/build-remote-content-addressed-fixed.sh
new file mode 100644
index 000000000..ae7441591
--- /dev/null
+++ b/tests/build-remote-content-addressed-fixed.sh
@@ -0,0 +1,5 @@
+source common.sh
+
+file=build-hook-ca-fixed.nix
+
+source build-remote.sh
diff --git a/tests/build-remote-content-addressed-floating.sh b/tests/build-remote-content-addressed-floating.sh
index cbb75729b..7447d92bd 100644
--- a/tests/build-remote-content-addressed-floating.sh
+++ b/tests/build-remote-content-addressed-floating.sh
@@ -1,6 +1,6 @@
source common.sh
-file=build-hook-ca.nix
+file=build-hook-ca-floating.nix
sed -i 's/experimental-features .*/& ca-derivations/' "$NIX_CONF_DIR"/nix.conf
diff --git a/tests/content-addressed.sh b/tests/ca/build.sh
index 7e32e1f28..35bf1dcf7 100644
--- a/tests/content-addressed.sh
+++ b/tests/ca/build.sh
@@ -61,7 +61,9 @@ testNixCommand () {
# Disabled until we have it properly working
# testRemoteCache
+clearStore
testDeterministicCA
+clearStore
testCutoff
testGC
testNixCommand
diff --git a/tests/ca/common.sh b/tests/ca/common.sh
new file mode 100644
index 000000000..e083d873c
--- /dev/null
+++ b/tests/ca/common.sh
@@ -0,0 +1 @@
+source ../common.sh
diff --git a/tests/content-addressed.nix b/tests/ca/content-addressed.nix
index 61079176f..e5b1c4de3 100644
--- a/tests/content-addressed.nix
+++ b/tests/ca/content-addressed.nix
@@ -1,4 +1,4 @@
-with import ./config.nix;
+with import ../config.nix;
{ seed ? 0 }:
# A simple content-addressed derivation.
diff --git a/tests/nix-copy-content-addressed.sh b/tests/ca/nix-copy.sh
index 2e0dea2d2..2e0dea2d2 100755
--- a/tests/nix-copy-content-addressed.sh
+++ b/tests/ca/nix-copy.sh
diff --git a/tests/ca/signatures.sh b/tests/ca/signatures.sh
new file mode 100644
index 000000000..4b4e468f7
--- /dev/null
+++ b/tests/ca/signatures.sh
@@ -0,0 +1,39 @@
+source common.sh
+
+# Globally enable the ca derivations experimental flag
+sed -i 's/experimental-features = .*/& ca-derivations ca-references/' "$NIX_CONF_DIR/nix.conf"
+
+clearStore
+clearCache
+
+nix-store --generate-binary-cache-key cache1.example.org $TEST_ROOT/sk1 $TEST_ROOT/pk1
+pk1=$(cat $TEST_ROOT/pk1)
+
+export REMOTE_STORE_DIR="$TEST_ROOT/remote_store"
+export REMOTE_STORE="file://$REMOTE_STORE_DIR"
+
+ensureCorrectlyCopied () {
+ attrPath="$1"
+ nix build --store "$REMOTE_STORE" --file ./content-addressed.nix "$attrPath"
+}
+
+testOneCopy () {
+ clearStore
+ rm -rf "$REMOTE_STORE_DIR"
+
+ attrPath="$1"
+ nix copy --to $REMOTE_STORE "$attrPath" --file ./content-addressed.nix \
+ --secret-key-files "$TEST_ROOT/sk1"
+
+ ensureCorrectlyCopied "$attrPath"
+
+ # Ensure that we can copy back what we put in the store
+ clearStore
+ nix copy --from $REMOTE_STORE \
+ --file ./content-addressed.nix "$attrPath" \
+ --trusted-public-keys $pk1
+}
+
+for attrPath in rootCA dependentCA transitivelyDependentCA dependentNonCA dependentFixedOutput; do
+ testOneCopy "$attrPath"
+done
diff --git a/tests/ca/substitute.sh b/tests/ca/substitute.sh
new file mode 100644
index 000000000..b44fe499a
--- /dev/null
+++ b/tests/ca/substitute.sh
@@ -0,0 +1,24 @@
+#!/usr/bin/env bash
+
+# Ensure that binary substitution works properly with ca derivations
+
+source common.sh
+
+sed -i 's/experimental-features .*/& ca-derivations ca-references/' "$NIX_CONF_DIR"/nix.conf
+
+rm -rf $TEST_ROOT/binary_cache
+
+export REMOTE_STORE=file://$TEST_ROOT/binary_cache
+
+buildDrvs () {
+ nix build --file ./content-addressed.nix -L --no-link "$@"
+}
+
+# Populate the remote cache
+clearStore
+buildDrvs --post-build-hook ../push-to-store.sh
+
+# Restart the build on an empty store, ensuring that nothing gets built locally and everything is substituted
+clearStore
+buildDrvs --substitute --substituters $REMOTE_STORE --no-require-sigs -j0
+
diff --git a/tests/common.sh.in b/tests/common.sh.in
index e3bcab507..d31d3fbb8 100644
--- a/tests/common.sh.in
+++ b/tests/common.sh.in
@@ -11,7 +11,7 @@ export NIX_LOCALSTATE_DIR=$TEST_ROOT/var
export NIX_LOG_DIR=$TEST_ROOT/var/log/nix
export NIX_STATE_DIR=$TEST_ROOT/var/nix
export NIX_CONF_DIR=$TEST_ROOT/etc
-export NIX_DAEMON_SOCKET_PATH=$TEST_ROOT/daemon-socket
+export NIX_DAEMON_SOCKET_PATH=$TEST_ROOT/dSocket
unset NIX_USER_CONF_FILES
export _NIX_TEST_SHARED=$TEST_ROOT/shared
if [[ -n $NIX_STORE ]]; then
@@ -29,6 +29,12 @@ unset XDG_CACHE_HOME
mkdir -p $TEST_HOME
export PATH=@bindir@:$PATH
+if [[ -n "${NIX_CLIENT_PACKAGE:-}" ]]; then
+ export PATH="$NIX_CLIENT_PACKAGE/bin":$PATH
+fi
+if [[ -n "${NIX_DAEMON_PACKAGE:-}" ]]; then
+ export NIX_DAEMON_COMMAND="$NIX_DAEMON_PACKAGE/bin/nix-daemon"
+fi
coreutils=@coreutils@
export dot=@dot@
@@ -57,7 +63,6 @@ clearStore() {
mkdir "$NIX_STORE_DIR"
rm -rf "$NIX_STATE_DIR"
mkdir "$NIX_STATE_DIR"
- nix-store --init
clearProfiles
}
@@ -73,7 +78,7 @@ startDaemon() {
# Start the daemon, wait for the socket to appear. !!!
# ‘nix-daemon’ should have an option to fork into the background.
rm -f $NIX_STATE_DIR/daemon-socket/socket
- nix daemon &
+ ${NIX_DAEMON_COMMAND:-nix daemon} &
for ((i = 0; i < 30; i++)); do
if [ -e $NIX_DAEMON_SOCKET_PATH ]; then break; fi
sleep 1
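The new NIX_CLIENT_PACKAGE and NIX_DAEMON_PACKAGE hooks let the suite pair a client from one Nix build with a daemon from another, which db-migration.sh below relies on. A minimal sketch of a mixed-version run, with purely illustrative store paths:

    # Run the tests with the current client but an older daemon.
    # Both paths are placeholders for whatever builds are at hand.
    NIX_DAEMON_PACKAGE=/nix/store/<hash>-nix-2.3 \
    NIX_CLIENT_PACKAGE=/nix/store/<hash>-nix-2.4pre \
        make installcheck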
diff --git a/tests/config.sh b/tests/config.sh
index eaa46c395..01c78f2c3 100644
--- a/tests/config.sh
+++ b/tests/config.sh
@@ -1,15 +1,41 @@
source common.sh
+# Isolate the home for this test.
+# Other tests (e.g. flake registry tests) could be writing to $HOME in parallel.
+export HOME=$TEST_ROOT/userhome
+
+# Test that using XDG_CONFIG_HOME works
+# Assert the config folder didn't exist initially.
+[ ! -e "$HOME/.config" ]
+# Without XDG_CONFIG_HOME, Nix creates $HOME/.config
+unset XDG_CONFIG_HOME
+# Run against the nix registry to create the config dir
+# (Tip: this relies on removing non-existent entries being a no-op!)
+nix registry remove userhome-without-xdg
+# Verify that the config directory was created
+[ -e "$HOME/.config" ]
+# Remove the directory it created
+rm -rf "$HOME/.config"
+# Run the same test, but with XDG_CONFIG_HOME
+export XDG_CONFIG_HOME=$TEST_ROOT/confighome
+# Assert the XDG_CONFIG_HOME/nix path does not exist yet.
+[ ! -e "$TEST_ROOT/confighome/nix" ]
+nix registry remove userhome-with-xdg
+# Verify that the confighome path was created
+[ -e "$TEST_ROOT/confighome/nix" ]
+# Assert the .config folder hasn't been created.
+[ ! -e "$HOME/.config" ]
+
# Test that files are loaded from XDG by default
-export XDG_CONFIG_HOME=/tmp/home
-export XDG_CONFIG_DIRS=/tmp/dir1:/tmp/dir2
+export XDG_CONFIG_HOME=$TEST_ROOT/confighome
+export XDG_CONFIG_DIRS=$TEST_ROOT/dir1:$TEST_ROOT/dir2
files=$(nix-build --verbose --version | grep "User config" | cut -d ':' -f2- | xargs)
-[[ $files == "/tmp/home/nix/nix.conf:/tmp/dir1/nix/nix.conf:/tmp/dir2/nix/nix.conf" ]]
+[[ $files == "$TEST_ROOT/confighome/nix/nix.conf:$TEST_ROOT/dir1/nix/nix.conf:$TEST_ROOT/dir2/nix/nix.conf" ]]
# Test that setting NIX_USER_CONF_FILES overrides all the default user config files
-export NIX_USER_CONF_FILES=/tmp/file1.conf:/tmp/file2.conf
+export NIX_USER_CONF_FILES=$TEST_ROOT/file1.conf:$TEST_ROOT/file2.conf
files=$(nix-build --verbose --version | grep "User config" | cut -d ':' -f2- | xargs)
-[[ $files == "/tmp/file1.conf:/tmp/file2.conf" ]]
+[[ $files == "$TEST_ROOT/file1.conf:$TEST_ROOT/file2.conf" ]]
# Test that it's possible to load the config from a custom location
here=$(readlink -f "$(dirname "${BASH_SOURCE[0]}")")
@@ -24,4 +50,4 @@ exp_cores=$(nix show-config | grep '^cores' | cut -d '=' -f 2 | xargs)
exp_features=$(nix show-config | grep '^experimental-features' | cut -d '=' -f 2 | xargs)
[[ $prev != $exp_cores ]]
[[ $exp_cores == "4242" ]]
-[[ $exp_features == "nix-command flakes" ]] \ No newline at end of file
+[[ $exp_features == "nix-command flakes" ]]
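The lookup order exercised above is: NIX_USER_CONF_FILES, when set, replaces all default user config files; otherwise $XDG_CONFIG_HOME/nix/nix.conf comes first, followed by a nix/nix.conf under each entry of XDG_CONFIG_DIRS. A small sketch reproducing the check outside the harness (directories are illustrative):

    # Print the user config files Nix would consider for a given XDG layout.
    XDG_CONFIG_HOME=$PWD/confighome XDG_CONFIG_DIRS=$PWD/dir1:$PWD/dir2 \
        nix-build --verbose --version | grep "User config"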
diff --git a/tests/db-migration.sh b/tests/db-migration.sh
new file mode 100644
index 000000000..e0ff7d311
--- /dev/null
+++ b/tests/db-migration.sh
@@ -0,0 +1,26 @@
+# Test that we can successfully migrate from an older db schema
+
+# Only run this if we have an older Nix available
+# XXX: This assumes that the `daemon` package is older than the `client` one
+if [[ -z "$NIX_DAEMON_PACKAGE" ]]; then
+ exit 0
+fi
+
+source common.sh
+
+# Fill the db using the older Nix
+PATH_WITH_NEW_NIX="$PATH"
+export PATH="$NIX_DAEMON_PACKAGE/bin:$PATH"
+clearStore
+nix-build simple.nix --no-out-link
+nix-store --generate-binary-cache-key cache1.example.org $TEST_ROOT/sk1 $TEST_ROOT/pk1
+dependenciesOutPath=$(nix-build dependencies.nix --no-out-link --secret-key-files "$TEST_ROOT/sk1")
+fixedOutPath=$(IMPURE_VAR1=foo IMPURE_VAR2=bar nix-build fixed.nix -A good.0 --no-out-link)
+
+# Migrate to the new schema and ensure that everything's there
+export PATH="$PATH_WITH_NEW_NIX"
+info=$(nix path-info --json $dependenciesOutPath)
+[[ $info =~ '"ultimate":true' ]]
+[[ $info =~ 'cache1.example.org' ]]
+nix verify -r "$fixedOutPath"
+nix verify -r "$dependenciesOutPath" --sigs-needed 1 --trusted-public-keys $(cat $TEST_ROOT/pk1)
diff --git a/tests/fetchGit.sh b/tests/fetchGit.sh
index 1e8963d76..88744ee7f 100644
--- a/tests/fetchGit.sh
+++ b/tests/fetchGit.sh
@@ -179,3 +179,13 @@ git clone --depth 1 file://$repo $TEST_ROOT/shallow
path6=$(nix eval --impure --raw --expr "(builtins.fetchTree { type = \"git\"; url = \"file://$TEST_ROOT/shallow\"; ref = \"dev\"; shallow = true; }).outPath")
[[ $path3 = $path6 ]]
[[ $(nix eval --impure --expr "(builtins.fetchTree { type = \"git\"; url = \"file://$TEST_ROOT/shallow\"; ref = \"dev\"; shallow = true; }).revCount or 123") == 123 ]]
+
+# Explicit ref = "HEAD" should work, and produce the same outPath as without ref
+path7=$(nix eval --impure --raw --expr "(builtins.fetchGit { url = \"file://$repo\"; ref = \"HEAD\"; }).outPath")
+path8=$(nix eval --impure --raw --expr "(builtins.fetchGit { url = \"file://$repo\"; }).outPath")
+[[ $path7 = $path8 ]]
+
+# ref = "HEAD" should fetch the HEAD revision
+rev4=$(git -C $repo rev-parse HEAD)
+rev4_nix=$(nix eval --impure --raw --expr "(builtins.fetchGit { url = \"file://$repo\"; ref = \"HEAD\"; }).rev")
+[[ $rev4 = $rev4_nix ]]
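The added assertions pin down the behaviour of an explicit ref = "HEAD": it must yield the same outPath and rev as omitting ref altogether. As a standalone sketch (where $repo stands for any local checkout with at least one commit), both of these evaluations should print the same path:

    nix eval --impure --raw --expr \
        "(builtins.fetchGit { url = \"file://$repo\"; ref = \"HEAD\"; }).outPath"
    nix eval --impure --raw --expr \
        "(builtins.fetchGit { url = \"file://$repo\"; }).outPath"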
diff --git a/tests/flakes.sh b/tests/flakes.sh
index 25ba2ac43..e78e4a39d 100644
--- a/tests/flakes.sh
+++ b/tests/flakes.sh
@@ -25,6 +25,7 @@ templatesDir=$TEST_ROOT/templates
nonFlakeDir=$TEST_ROOT/nonFlake
flakeA=$TEST_ROOT/flakeA
flakeB=$TEST_ROOT/flakeB
+flakeGitBare=$TEST_ROOT/flakeGitBare
for repo in $flake1Dir $flake2Dir $flake3Dir $flake7Dir $templatesDir $nonFlakeDir $flakeA $flakeB; do
rm -rf $repo $repo.tmp
@@ -163,16 +164,17 @@ EOF
# Test 'nix flake list'.
[[ $(nix registry list | wc -l) == 7 ]]
-# Test 'nix flake info'.
-nix flake info flake1 | grep -q 'URL: .*flake1.*'
+# Test 'nix flake metadata'.
+nix flake metadata flake1
+nix flake metadata flake1 | grep -q 'Locked URL:.*flake1.*'
-# Test 'nix flake info' on a local flake.
-(cd $flake1Dir && nix flake info) | grep -q 'URL: .*flake1.*'
-(cd $flake1Dir && nix flake info .) | grep -q 'URL: .*flake1.*'
-nix flake info $flake1Dir | grep -q 'URL: .*flake1.*'
+# Test 'nix flake metadata' on a local flake.
+(cd $flake1Dir && nix flake metadata) | grep -q 'URL:.*flake1.*'
+(cd $flake1Dir && nix flake metadata .) | grep -q 'URL:.*flake1.*'
+nix flake metadata $flake1Dir | grep -q 'URL:.*flake1.*'
-# Test 'nix flake info --json'.
-json=$(nix flake info flake1 --json | jq .)
+# Test 'nix flake metadata --json'.
+json=$(nix flake metadata flake1 --json | jq .)
[[ $(echo "$json" | jq -r .description) = 'Bla bla' ]]
[[ -d $(echo "$json" | jq -r .path) ]]
[[ $(echo "$json" | jq -r .lastModified) = $(git -C $flake1Dir log -n1 --format=%ct) ]]
@@ -180,7 +182,7 @@ hash1=$(echo "$json" | jq -r .revision)
echo -n '# foo' >> $flake1Dir/flake.nix
git -C $flake1Dir commit -a -m 'Foo'
-hash2=$(nix flake info flake1 --json --refresh | jq -r .revision)
+hash2=$(nix flake metadata flake1 --json --refresh | jq -r .revision)
[[ $hash1 != $hash2 ]]
# Test 'nix build' on a flake.
@@ -604,6 +606,11 @@ nix flake update $flake3Dir
[[ $(jq -c .nodes.flake2.inputs.flake1 $flake3Dir/flake.lock) =~ '["foo"]' ]]
[[ $(jq .nodes.foo.locked.url $flake3Dir/flake.lock) =~ flake7 ]]
+# Test git+file with bare repo.
+rm -rf $flakeGitBare
+git clone --bare $flake1Dir $flakeGitBare
+nix build -o $TEST_ROOT/result git+file://$flakeGitBare
+
# Test Mercurial flakes.
rm -rf $flake5Dir
hg init $flake5Dir
@@ -624,7 +631,7 @@ hg commit --config ui.username=foobar@example.org $flake5Dir -m 'Initial commit'
nix build -o $TEST_ROOT/result hg+file://$flake5Dir
[[ -e $TEST_ROOT/result/hello ]]
-(! nix flake info --json hg+file://$flake5Dir | jq -e -r .revision)
+(! nix flake metadata --json hg+file://$flake5Dir | jq -e -r .revision)
nix eval hg+file://$flake5Dir#expr
@@ -632,13 +639,13 @@ nix eval hg+file://$flake5Dir#expr
(! nix eval hg+file://$flake5Dir#expr --no-allow-dirty)
-(! nix flake info --json hg+file://$flake5Dir | jq -e -r .revision)
+(! nix flake metadata --json hg+file://$flake5Dir | jq -e -r .revision)
hg commit --config ui.username=foobar@example.org $flake5Dir -m 'Add lock file'
-nix flake info --json hg+file://$flake5Dir --refresh | jq -e -r .revision
-nix flake info --json hg+file://$flake5Dir
-[[ $(nix flake info --json hg+file://$flake5Dir | jq -e -r .revCount) = 1 ]]
+nix flake metadata --json hg+file://$flake5Dir --refresh | jq -e -r .revision
+nix flake metadata --json hg+file://$flake5Dir
+[[ $(nix flake metadata --json hg+file://$flake5Dir | jq -e -r .revCount) = 1 ]]
nix build -o $TEST_ROOT/result hg+file://$flake5Dir --no-registries --no-allow-dirty
@@ -648,7 +655,7 @@ tar cfz $TEST_ROOT/flake.tar.gz -C $TEST_ROOT --exclude .hg flake5
nix build -o $TEST_ROOT/result file://$TEST_ROOT/flake.tar.gz
# Building with a tarball URL containing a SRI hash should also work.
-url=$(nix flake info --json file://$TEST_ROOT/flake.tar.gz | jq -r .url)
+url=$(nix flake metadata --json file://$TEST_ROOT/flake.tar.gz | jq -r .url)
[[ $url =~ sha256- ]]
nix build -o $TEST_ROOT/result $url
@@ -674,9 +681,8 @@ nix flake lock $flake3Dir
nix flake lock $flake3Dir --update-input flake2/flake1
[[ $(jq -r .nodes.flake1_2.locked.rev $flake3Dir/flake.lock) =~ $hash2 ]]
-# Test 'nix flake list-inputs'.
-[[ $(nix flake list-inputs $flake3Dir | wc -l) == 5 ]]
-nix flake list-inputs $flake3Dir --json | jq .
+# Test 'nix flake metadata --json'.
+nix flake metadata $flake3Dir --json | jq .
# Test circular flake dependencies.
cat > $flakeA/flake.nix <<EOF
@@ -715,4 +721,4 @@ git -C $flakeB commit -a -m 'Foo'
[[ $(nix eval --update-input b $flakeA#foo) = 1912 ]]
# Test list-inputs with circular dependencies
-nix flake list-inputs $flakeA
+nix flake metadata $flakeA
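Throughout this file, 'nix flake info' and 'nix flake list-inputs' give way to the unified 'nix flake metadata', whose human-readable output prints 'URL:' and 'Locked URL:' lines and whose --json output still carries the fields the assertions inspect. A sketch of the calls the updated tests rely on (the flake reference is illustrative):

    # Human-readable metadata, plus the JSON fields checked above.
    nix flake metadata flake1
    nix flake metadata flake1 --json | jq -r .description
    nix flake metadata flake1 --json | jq -r .revision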
diff --git a/tests/local.mk b/tests/local.mk
index 60c0432a9..747f38a29 100644
--- a/tests/local.mk
+++ b/tests/local.mk
@@ -7,6 +7,7 @@ nix_tests = \
referrers.sh user-envs.sh logging.sh nix-build.sh misc.sh fixed.sh \
gc-runtime.sh check-refs.sh filter-source.sh \
local-store.sh remote-store.sh export.sh export-graph.sh \
+ db-migration.sh \
timeout.sh secure-drv-outputs.sh nix-channel.sh \
multiple-outputs.sh import-derivation.sh fetchurl.sh optimise-store.sh \
binary-cache.sh \
@@ -17,6 +18,7 @@ nix_tests = \
linux-sandbox.sh \
build-dry.sh \
build-remote-input-addressed.sh \
+ build-remote-content-addressed-fixed.sh \
build-remote-content-addressed-floating.sh \
ssh-relay.sh \
nar-access.sh \
@@ -38,11 +40,13 @@ nix_tests = \
recursive.sh \
describe-stores.sh \
flakes.sh \
- content-addressed.sh \
- nix-copy-content-addressed.sh \
text-hashed-output.sh \
build.sh \
- compute-levels.sh
+ compute-levels.sh \
+ ca/build.sh \
+ ca/substitute.sh \
+ ca/signatures.sh \
+ ca/nix-copy.sh
# parallel.sh
install-tests += $(foreach x, $(nix_tests), tests/$(x))
diff --git a/tests/push-to-store.sh b/tests/push-to-store.sh
index 6aadb916b..25352c751 100755
--- a/tests/push-to-store.sh
+++ b/tests/push-to-store.sh
@@ -1,4 +1,6 @@
#!/bin/sh
-echo Pushing "$@" to "$REMOTE_STORE"
-printf "%s" "$OUT_PATHS" | xargs -d: nix copy --to "$REMOTE_STORE" --no-require-sigs
+set -x
+
+echo Pushing "$OUT_PATHS" to "$REMOTE_STORE"
+printf "%s" "$DRV_PATH" | xargs nix copy --to "$REMOTE_STORE" --no-require-sigs
diff --git a/tests/remote-store.sh b/tests/remote-store.sh
index f7ae1a2ed..31210ab47 100644
--- a/tests/remote-store.sh
+++ b/tests/remote-store.sh
@@ -23,12 +23,12 @@ startDaemon
storeCleared=1 NIX_REMOTE_=$NIX_REMOTE $SHELL ./user-envs.sh
+nix-store --gc --max-freed 1K
+
nix-store --dump-db > $TEST_ROOT/d1
NIX_REMOTE= nix-store --dump-db > $TEST_ROOT/d2
cmp $TEST_ROOT/d1 $TEST_ROOT/d2
-nix-store --gc --max-freed 1K
-
killDaemon
user=$(whoami)