-rw-r--r--  .github/ISSUE_TEMPLATE/feature_request.md | 2
-rw-r--r--  .github/ISSUE_TEMPLATE/missing_documentation.md | 28
-rw-r--r--  .github/stale.yml | 9
-rw-r--r--  .github/workflows/backport.yml | 6
-rw-r--r--  .github/workflows/ci.yml | 46
-rw-r--r--  .github/workflows/hydra_status.yml | 4
-rw-r--r--  .gitignore | 4
-rw-r--r--  .version | 2
-rw-r--r--  Makefile.config.in | 5
-rw-r--r--  README.md | 2
-rw-r--r--  boehmgc-coroutine-sp-fallback.diff | 32
-rw-r--r--  configure.ac | 17
-rw-r--r--  doc/manual/generate-manpage.nix | 187
-rw-r--r--  doc/manual/local.mk | 16
-rw-r--r--  doc/manual/redirects.js | 207
-rw-r--r--  doc/manual/src/SUMMARY.md.in | 25
-rw-r--r--  doc/manual/src/advanced-topics/distributed-builds.md | 4
-rw-r--r--  doc/manual/src/command-ref/nix-build.md | 6
-rw-r--r--  doc/manual/src/command-ref/nix-copy-closure.md | 4
-rw-r--r--  doc/manual/src/command-ref/nix-env.md | 6
-rw-r--r--  doc/manual/src/command-ref/nix-instantiate.md | 4
-rw-r--r--  doc/manual/src/command-ref/nix-shell.md | 6
-rw-r--r--  doc/manual/src/command-ref/nix-store.md | 2
-rw-r--r--  doc/manual/src/command-ref/opt-common.md | 4
-rw-r--r--  doc/manual/src/contributing/hacking.md | 5
-rw-r--r--  doc/manual/src/expressions/arguments-variables.md | 80
-rw-r--r--  doc/manual/src/expressions/build-script.md | 70
-rw-r--r--  doc/manual/src/expressions/expression-language.md | 12
-rw-r--r--  doc/manual/src/expressions/expression-syntax.md | 93
-rw-r--r--  doc/manual/src/expressions/generic-builder.md | 66
-rw-r--r--  doc/manual/src/expressions/language-values.md | 251
-rw-r--r--  doc/manual/src/expressions/simple-building-testing.md | 61
-rw-r--r--  doc/manual/src/expressions/simple-expression.md | 23
-rw-r--r--  doc/manual/src/expressions/writing-nix-expressions.md | 12
-rw-r--r--  doc/manual/src/glossary.md | 54
-rw-r--r--  doc/manual/src/installation/installing-binary.md | 32
-rw-r--r--  doc/manual/src/language/advanced-attributes.md (renamed from doc/manual/src/expressions/advanced-attributes.md) | 0
-rw-r--r--  doc/manual/src/language/builtin-constants.md (renamed from doc/manual/src/expressions/builtin-constants.md) | 0
-rw-r--r--  doc/manual/src/language/builtins-prefix.md (renamed from doc/manual/src/expressions/builtins-prefix.md) | 0
-rw-r--r--  doc/manual/src/language/builtins-suffix.md (renamed from doc/manual/src/expressions/builtins-suffix.md) | 0
-rw-r--r--  doc/manual/src/language/constructs.md (renamed from doc/manual/src/expressions/language-constructs.md) | 0
-rw-r--r--  doc/manual/src/language/derivations.md (renamed from doc/manual/src/expressions/derivations.md) | 0
-rw-r--r--  doc/manual/src/language/index.md | 33
-rw-r--r--  doc/manual/src/language/operators.md (renamed from doc/manual/src/expressions/language-operators.md) | 2
-rw-r--r--  doc/manual/src/language/values.md | 261
-rw-r--r--  doc/manual/src/package-management/package-management.md | 3
-rw-r--r--  doc/manual/src/release-notes/rl-2.10.md | 31
-rw-r--r--  doc/manual/src/release-notes/rl-2.11.md | 5
-rw-r--r--  doc/manual/src/release-notes/rl-next.md | 7
-rw-r--r--  doc/manual/utils.nix | 26
-rw-r--r--  docker.nix | 29
-rw-r--r--  flake.lock | 6
-rw-r--r--  flake.nix | 46
-rw-r--r--  misc/zsh/completion.zsh | 11
-rw-r--r--  mk/libraries.mk | 2
-rw-r--r--  scripts/install-darwin-multi-user.sh | 2
-rw-r--r--  scripts/install-multi-user.sh | 124
-rw-r--r--  scripts/install-nix-from-closure.sh | 33
-rwxr-xr-x  scripts/install.in | 4
-rw-r--r--  scripts/local.mk | 2
-rw-r--r--  scripts/nix-profile-daemon.fish.in | 35
-rw-r--r--  scripts/nix-profile.fish.in | 35
-rw-r--r--  scripts/nix-profile.sh.in | 1
-rw-r--r--  src/libcmd/command.cc | 2
-rw-r--r--  src/libcmd/command.hh | 18
-rw-r--r--  src/libcmd/installables.cc | 58
-rw-r--r--  src/libcmd/installables.hh | 2
-rw-r--r--  src/libcmd/markdown.cc | 2
-rw-r--r--  src/libcmd/repl.cc | 137
-rw-r--r--  src/libexpr/eval-cache.cc | 9
-rw-r--r--  src/libexpr/eval.cc | 24
-rw-r--r--  src/libexpr/eval.hh | 10
-rw-r--r--  src/libexpr/fetchurl.nix | 8
-rw-r--r--  src/libexpr/flake/config.cc | 2
-rw-r--r--  src/libexpr/flake/flake.cc | 38
-rw-r--r--  src/libexpr/flake/flakeref.hh | 2
-rw-r--r--  src/libexpr/flake/lockfile.cc | 2
-rw-r--r--  src/libexpr/parser.y | 6
-rw-r--r--  src/libexpr/primops.cc | 42
-rw-r--r--  src/libexpr/primops/fetchTree.cc | 4
-rw-r--r--  src/libexpr/tests/primops.cc | 24
-rw-r--r--  src/libexpr/value-to-json.cc | 21
-rw-r--r--  src/libexpr/value-to-json.hh | 4
-rw-r--r--  src/libexpr/value.hh | 2
-rw-r--r--  src/libfetchers/fetch-settings.hh | 2
-rw-r--r--  src/libfetchers/git.cc | 4
-rw-r--r--  src/libfetchers/github.cc | 9
-rw-r--r--  src/libmain/loggers.cc | 7
-rw-r--r--  src/libmain/progress-bar.cc | 66
-rw-r--r--  src/libmain/progress-bar.hh | 4
-rw-r--r--  src/libmain/shared.cc | 23
-rw-r--r--  src/libstore/build/derivation-goal.cc | 11
-rw-r--r--  src/libstore/build/hook-instance.cc | 25
-rw-r--r--  src/libstore/build/local-derivation-goal.cc | 65
-rw-r--r--  src/libstore/build/substitution-goal.cc | 2
-rw-r--r--  src/libstore/builtins/unpack-channel.cc | 3
-rw-r--r--  src/libstore/daemon.cc | 2
-rw-r--r--  src/libstore/filetransfer.cc | 4
-rw-r--r--  src/libstore/gc.cc | 17
-rw-r--r--  src/libstore/globals.cc | 14
-rw-r--r--  src/libstore/globals.hh | 34
-rw-r--r--  src/libstore/local-binary-cache-store.cc | 4
-rw-r--r--  src/libstore/local-store.cc | 44
-rw-r--r--  src/libstore/local-store.hh | 2
-rw-r--r--  src/libstore/local.mk | 11
-rw-r--r--  src/libstore/lock.cc | 23
-rw-r--r--  src/libstore/nar-accessor.cc | 3
-rw-r--r--  src/libstore/nar-info-disk-cache.cc | 7
-rw-r--r--  src/libstore/nar-info.cc | 5
-rw-r--r--  src/libstore/nar-info.hh | 1
-rw-r--r--  src/libstore/optimise-store.cc | 6
-rw-r--r--  src/libstore/remote-store.cc | 19
-rw-r--r--  src/libstore/remote-store.hh | 6
-rw-r--r--  src/libstore/sandbox-defaults.sb | 6
-rw-r--r--  src/libstore/sandbox-network.sb | 4
-rw-r--r--  src/libstore/store-api.cc | 240
-rw-r--r--  src/libstore/store-api.hh | 11
-rw-r--r--  src/libutil/archive.cc | 7
-rw-r--r--  src/libutil/archive.hh | 1
-rw-r--r--  src/libutil/args.cc | 16
-rw-r--r--  src/libutil/args.hh | 9
-rw-r--r--  src/libutil/error.hh | 10
-rw-r--r--  src/libutil/experimental-features.cc | 1
-rw-r--r--  src/libutil/experimental-features.hh | 1
-rw-r--r--  src/libutil/filesystem.cc | 172
-rw-r--r--  src/libutil/json.cc | 27
-rw-r--r--  src/libutil/json.hh | 11
-rw-r--r--  src/libutil/logging.hh | 3
-rw-r--r--  src/libutil/serialise.cc | 20
-rw-r--r--  src/libutil/serialise.hh | 4
-rw-r--r--  src/libutil/tests/json.cc | 4
-rw-r--r--  src/libutil/util.cc | 221
-rw-r--r--  src/libutil/util.hh | 28
-rw-r--r--  src/nix-build/nix-build.cc | 48
-rw-r--r--  src/nix-collect-garbage/nix-collect-garbage.cc | 1
-rw-r--r--  src/nix-env/nix-env.cc | 4
-rw-r--r--  src/nix-instantiate/nix-instantiate.cc | 5
-rw-r--r--  src/nix-store/nix-store.cc | 2
-rw-r--r--  src/nix/bundle.md | 2
-rw-r--r--  src/nix/develop.cc | 17
-rw-r--r--  src/nix/develop.md | 6
-rw-r--r--  src/nix/eval.cc | 3
-rw-r--r--  src/nix/flake-update.md | 2
-rw-r--r--  src/nix/flake.cc | 37
-rw-r--r--  src/nix/get-env.sh | 5
-rw-r--r--  src/nix/main.cc | 17
-rw-r--r--  src/nix/make-content-addressed.md | 2
-rw-r--r--  src/nix/profile.md | 2
-rw-r--r--  src/nix/registry.md | 2
-rw-r--r--  src/nix/repl.md | 30
-rw-r--r--  src/nix/run.cc | 2
-rw-r--r--  src/nix/search.cc | 4
-rw-r--r--  src/nix/shell.md | 6
-rw-r--r--  src/nix/verify.cc | 2
-rw-r--r--  tests/build-dry.sh | 4
-rw-r--r--  tests/ca-shell.nix | 2
-rw-r--r--  tests/ca/content-addressed.nix | 2
-rw-r--r--  tests/check.sh | 14
-rw-r--r--  tests/common.sh.in | 21
-rw-r--r--  tests/completions.sh | 62
-rw-r--r--  tests/flakes/bundle.sh (renamed from tests/flake-bundler.sh) | 9
-rw-r--r--  tests/flakes/check.sh | 89
-rw-r--r--  tests/flakes/circular.sh | 49
-rw-r--r--  tests/flakes/common.sh | 73
-rw-r--r--  tests/flakes/config.sh (renamed from tests/flake-local-settings.sh) | 5
-rw-r--r--  tests/flakes/flakes.sh (renamed from tests/flakes.sh) | 411
-rw-r--r--  tests/flakes/follow-paths.sh | 150
-rw-r--r--  tests/flakes/init.sh | 87
-rw-r--r--  tests/flakes/mercurial.sh | 46
-rw-r--r--  tests/flakes/run.sh (renamed from tests/flakes-run.sh) | 4
-rw-r--r--  tests/flakes/search-root.sh (renamed from tests/flake-searching.sh) | 24
-rw-r--r--  tests/fmt.sh | 7
-rw-r--r--  tests/github-flakes.nix | 14
-rw-r--r--  tests/installer/default.nix | 220
-rw-r--r--  tests/installer/vagrant_insecure_key | 27
-rw-r--r--  tests/lang.sh | 2
-rw-r--r--  tests/local.mk | 21
-rw-r--r--  tests/nix-shell.sh | 8
-rw-r--r--  tests/plugins.sh | 5
-rw-r--r--  tests/pure-eval.sh | 2
-rw-r--r--  tests/repl.sh | 60
-rw-r--r--  tests/search.sh | 1
-rw-r--r--  tests/signing.sh | 2
-rw-r--r--  tests/sourcehut-flakes.nix | 13
184 files changed, 3357 insertions, 1997 deletions
diff --git a/.github/ISSUE_TEMPLATE/feature_request.md b/.github/ISSUE_TEMPLATE/feature_request.md
index 392ed30c6..4fe86d5ec 100644
--- a/.github/ISSUE_TEMPLATE/feature_request.md
+++ b/.github/ISSUE_TEMPLATE/feature_request.md
@@ -2,7 +2,7 @@
name: Feature request
about: Suggest an idea for this project
title: ''
-labels: improvement
+labels: feature
assignees: ''
---
diff --git a/.github/ISSUE_TEMPLATE/missing_documentation.md b/.github/ISSUE_TEMPLATE/missing_documentation.md
new file mode 100644
index 000000000..fbabd868e
--- /dev/null
+++ b/.github/ISSUE_TEMPLATE/missing_documentation.md
@@ -0,0 +1,28 @@
+---
+name: Missing or incorrect documentation
+about: Help us improve the reference manual
+title: ''
+labels: documentation
+assignees: ''
+
+---
+
+## Problem
+
+<!-- describe your problem -->
+
+## Checklist
+
+<!-- make sure this issue is not redundant or obsolete -->
+
+- [ ] checked [latest Nix manual] \([source])
+- [ ] checked [open documentation issues and pull requests] for possible duplicates
+
+[latest Nix manual]: https://nixos.org/manual/nix/unstable/
+[source]: https://github.com/NixOS/nix/tree/master/doc/manual/src
+[open documentation issues and pull requests]: https://github.com/NixOS/nix/labels/documentation
+
+## Proposal
+
+<!-- propose a solution -->
+
diff --git a/.github/stale.yml b/.github/stale.yml
index fe24942f4..ee831135a 100644
--- a/.github/stale.yml
+++ b/.github/stale.yml
@@ -1,10 +1,9 @@
# Configuration for probot-stale - https://github.com/probot/stale
daysUntilStale: 180
-daysUntilClose: 365
+daysUntilClose: false
exemptLabels:
- "critical"
+ - "never-stale"
staleLabel: "stale"
-markComment: |
- I marked this as stale due to inactivity. &rarr; [More info](https://github.com/NixOS/nix/blob/master/.github/STALE-BOT.md)
-closeComment: |
- I closed this issue due to inactivity. &rarr; [More info](https://github.com/NixOS/nix/blob/master/.github/STALE-BOT.md)
+markComment: false
+closeComment: false
diff --git a/.github/workflows/backport.yml b/.github/workflows/backport.yml
index 3a2d4de0e..75be788ef 100644
--- a/.github/workflows/backport.yml
+++ b/.github/workflows/backport.yml
@@ -2,9 +2,15 @@ name: Backport
on:
pull_request_target:
types: [closed, labeled]
+permissions:
+ contents: read
jobs:
backport:
name: Backport Pull Request
+ permissions:
+ # for zeebe-io/backport-action
+ contents: write
+ pull-requests: write
if: github.repository_owner == 'NixOS' && github.event.pull_request.merged == true && (github.event_name != 'labeled' || startsWith('backport', github.event.label.name))
runs-on: ubuntu-latest
steps:
diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index fc6531ea5..628d1d192 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -4,10 +4,12 @@ on:
pull_request:
push:
+permissions: read-all
+
jobs:
tests:
- needs: [check_cachix]
+ needs: [check_secrets]
strategy:
matrix:
os: [ubuntu-latest, macos-latest]
@@ -20,28 +22,34 @@ jobs:
- uses: cachix/install-nix-action@v17
- run: echo CACHIX_NAME="$(echo $GITHUB_REPOSITORY-install-tests | tr "[A-Z]/" "[a-z]-")" >> $GITHUB_ENV
- uses: cachix/cachix-action@v10
- if: needs.check_cachix.outputs.secret == 'true'
+ if: needs.check_secrets.outputs.cachix == 'true'
with:
name: '${{ env.CACHIX_NAME }}'
signingKey: '${{ secrets.CACHIX_SIGNING_KEY }}'
authToken: '${{ secrets.CACHIX_AUTH_TOKEN }}'
- run: nix --experimental-features 'nix-command flakes' flake check -L
- check_cachix:
- name: Cachix secret present for installer tests
+ check_secrets:
+ permissions:
+ contents: none
+ name: Check Cachix and Docker secrets present for installer tests
runs-on: ubuntu-latest
outputs:
- secret: ${{ steps.secret.outputs.secret }}
+ cachix: ${{ steps.secret.outputs.cachix }}
+ docker: ${{ steps.secret.outputs.docker }}
steps:
- - name: Check for Cachix secret
+ - name: Check for secrets
id: secret
env:
_CACHIX_SECRETS: ${{ secrets.CACHIX_SIGNING_KEY }}${{ secrets.CACHIX_AUTH_TOKEN }}
- run: echo "::set-output name=secret::${{ env._CACHIX_SECRETS != '' }}"
+ _DOCKER_SECRETS: ${{ secrets.DOCKERHUB_USERNAME }}${{ secrets.DOCKERHUB_TOKEN }}
+ run: |
+ echo "::set-output name=cachix::${{ env._CACHIX_SECRETS != '' }}"
+ echo "::set-output name=docker::${{ env._DOCKER_SECRETS != '' }}"
installer:
- needs: [tests, check_cachix]
- if: github.event_name == 'push' && needs.check_cachix.outputs.secret == 'true'
+ needs: [tests, check_secrets]
+ if: github.event_name == 'push' && needs.check_secrets.outputs.cachix == 'true'
runs-on: ubuntu-latest
outputs:
installerURL: ${{ steps.prepare-installer.outputs.installerURL }}
@@ -60,8 +68,8 @@ jobs:
run: scripts/prepare-installer-for-github-actions
installer_test:
- needs: [installer, check_cachix]
- if: github.event_name == 'push' && needs.check_cachix.outputs.secret == 'true'
+ needs: [installer, check_secrets]
+ if: github.event_name == 'push' && needs.check_secrets.outputs.cachix == 'true'
strategy:
matrix:
os: [ubuntu-latest, macos-latest]
@@ -73,14 +81,22 @@ jobs:
with:
install_url: '${{needs.installer.outputs.installerURL}}'
install_options: "--tarball-url-prefix https://${{ env.CACHIX_NAME }}.cachix.org/serve"
- - run: nix-instantiate -E 'builtins.currentTime' --eval
+ - run: sudo apt install fish zsh
+ if: matrix.os == 'ubuntu-latest'
+ - run: brew install fish
+ if: matrix.os == 'macos-latest'
+ - run: exec bash -c "nix-instantiate -E 'builtins.currentTime' --eval"
+ - run: exec sh -c "nix-instantiate -E 'builtins.currentTime' --eval"
+ - run: exec zsh -c "nix-instantiate -E 'builtins.currentTime' --eval"
+ - run: exec fish -c "nix-instantiate -E 'builtins.currentTime' --eval"
docker_push_image:
- needs: [check_cachix, tests]
+ needs: [check_secrets, tests]
if: >-
github.event_name == 'push' &&
github.ref_name == 'master' &&
- needs.check_cachix.outputs.secret == 'true'
+ needs.check_secrets.outputs.cachix == 'true' &&
+ needs.check_secrets.outputs.docker == 'true'
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
@@ -90,7 +106,7 @@ jobs:
- run: echo CACHIX_NAME="$(echo $GITHUB_REPOSITORY-install-tests | tr "[A-Z]/" "[a-z]-")" >> $GITHUB_ENV
- run: echo NIX_VERSION="$(nix --experimental-features 'nix-command flakes' eval .\#default.version | tr -d \")" >> $GITHUB_ENV
- uses: cachix/cachix-action@v10
- if: needs.check_cachix.outputs.secret == 'true'
+ if: needs.check_secrets.outputs.cachix == 'true'
with:
name: '${{ env.CACHIX_NAME }}'
signingKey: '${{ secrets.CACHIX_SIGNING_KEY }}'
diff --git a/.github/workflows/hydra_status.yml b/.github/workflows/hydra_status.yml
index 53e69cb2d..38a9c0877 100644
--- a/.github/workflows/hydra_status.yml
+++ b/.github/workflows/hydra_status.yml
@@ -1,8 +1,12 @@
name: Hydra status
+
+permissions: read-all
+
on:
schedule:
- cron: "12,42 * * * *"
workflow_dispatch:
+
jobs:
check_hydra_status:
name: Check Hydra status
diff --git a/.gitignore b/.gitignore
index ba8e95191..8e0db013f 100644
--- a/.gitignore
+++ b/.gitignore
@@ -22,11 +22,13 @@ perl/Makefile.config
/doc/manual/src/SUMMARY.md
/doc/manual/src/command-ref/new-cli
/doc/manual/src/command-ref/conf-file.md
-/doc/manual/src/expressions/builtins.md
+/doc/manual/src/language/builtins.md
# /scripts/
/scripts/nix-profile.sh
/scripts/nix-profile-daemon.sh
+/scripts/nix-profile.fish
+/scripts/nix-profile-daemon.fish
# /src/libexpr/
/src/libexpr/lexer-tab.cc
diff --git a/.version b/.version
index f161b5d80..3ca2c9b2c 100644
--- a/.version
+++ b/.version
@@ -1 +1 @@
-2.10.0
\ No newline at end of file
+2.12.0
\ No newline at end of file
diff --git a/Makefile.config.in b/Makefile.config.in
index d724853fa..1c5405c6d 100644
--- a/Makefile.config.in
+++ b/Makefile.config.in
@@ -1,4 +1,3 @@
-HOST_OS = @host_os@
AR = @AR@
BDW_GC_LIBS = @BDW_GC_LIBS@
BOOST_LDFLAGS = @BOOST_LDFLAGS@
@@ -13,13 +12,14 @@ ENABLE_S3 = @ENABLE_S3@
GTEST_LIBS = @GTEST_LIBS@
HAVE_LIBCPUID = @HAVE_LIBCPUID@
HAVE_SECCOMP = @HAVE_SECCOMP@
+HOST_OS = @host_os@
LDFLAGS = @LDFLAGS@
LIBARCHIVE_LIBS = @LIBARCHIVE_LIBS@
LIBBROTLI_LIBS = @LIBBROTLI_LIBS@
LIBCURL_LIBS = @LIBCURL_LIBS@
+LIBSECCOMP_LIBS = @LIBSECCOMP_LIBS@
LOWDOWN_LIBS = @LOWDOWN_LIBS@
OPENSSL_LIBS = @OPENSSL_LIBS@
-LIBSECCOMP_LIBS = @LIBSECCOMP_LIBS@
PACKAGE_NAME = @PACKAGE_NAME@
PACKAGE_VERSION = @PACKAGE_VERSION@
SHELL = @bash@
@@ -31,6 +31,7 @@ datadir = @datadir@
datarootdir = @datarootdir@
doc_generate = @doc_generate@
docdir = @docdir@
+embedded_sandbox_shell = @embedded_sandbox_shell@
exec_prefix = @exec_prefix@
includedir = @includedir@
libdir = @libdir@
diff --git a/README.md b/README.md
index 80d6f128c..8a02c4c75 100644
--- a/README.md
+++ b/README.md
@@ -20,7 +20,7 @@ Information on additional installation methods is available on the [Nix download
## Building And Developing
-See our [Hacking guide](https://hydra.nixos.org/job/nix/master/build.x86_64-linux/latest/download-by-type/doc/manual/contributing/hacking.html) in our manual for instruction on how to
+See our [Hacking guide](https://nixos.org/manual/nix/stable/contributing/hacking.html) in our manual for instruction on how to
build nix from source with nix-build or how to get a development environment.
## Additional Resources
diff --git a/boehmgc-coroutine-sp-fallback.diff b/boehmgc-coroutine-sp-fallback.diff
index e659bf470..8fdafbecb 100644
--- a/boehmgc-coroutine-sp-fallback.diff
+++ b/boehmgc-coroutine-sp-fallback.diff
@@ -1,3 +1,35 @@
+diff --git a/darwin_stop_world.c b/darwin_stop_world.c
+index 3dbaa3fb..36a1d1f7 100644
+--- a/darwin_stop_world.c
++++ b/darwin_stop_world.c
+@@ -352,6 +352,7 @@ GC_INNER void GC_push_all_stacks(void)
+ int nthreads = 0;
+ word total_size = 0;
+ mach_msg_type_number_t listcount = (mach_msg_type_number_t)THREAD_TABLE_SZ;
++ size_t stack_limit;
+ if (!EXPECT(GC_thr_initialized, TRUE))
+ GC_thr_init();
+
+@@ -407,6 +408,19 @@ GC_INNER void GC_push_all_stacks(void)
+ GC_push_all_stack_sections(lo, hi, p->traced_stack_sect);
+ }
+ if (altstack_lo) {
++ // When a thread goes into a coroutine, we lose its original sp until
++ // control flow returns to the thread.
++ // While in the coroutine, the sp points outside the thread stack,
++ // so we can detect this and push the entire thread stack instead,
++ // as an approximation.
++ // We assume that the coroutine has similarly added its entire stack.
++ // This could be made accurate by cooperating with the application
++ // via new functions and/or callbacks.
++ stack_limit = pthread_get_stacksize_np(p->id);
++ if (altstack_lo >= altstack_hi || altstack_lo < altstack_hi - stack_limit) { // sp outside stack
++ altstack_lo = altstack_hi - stack_limit;
++ }
++
+ total_size += altstack_hi - altstack_lo;
+ GC_push_all_stack(altstack_lo, altstack_hi);
+ }
diff --git a/pthread_stop_world.c b/pthread_stop_world.c
index 4b2c429..1fb4c52 100644
--- a/pthread_stop_world.c
diff --git a/configure.ac b/configure.ac
index 15d5606c9..64fa12fc7 100644
--- a/configure.ac
+++ b/configure.ac
@@ -296,15 +296,6 @@ AC_CHECK_FUNCS([setresuid setreuid lchown])
AC_CHECK_FUNCS([strsignal posix_fallocate sysconf])
-# This is needed if bzip2 is a static library, and the Nix libraries
-# are dynamic.
-case "${host_os}" in
- darwin*)
- LDFLAGS="-all_load $LDFLAGS"
- ;;
-esac
-
-
AC_ARG_WITH(sandbox-shell, AS_HELP_STRING([--with-sandbox-shell=PATH],[path of a statically-linked shell to use as /bin/sh in sandboxes]),
sandbox_shell=$withval)
AC_SUBST(sandbox_shell)
@@ -320,6 +311,14 @@ if test ${cross_compiling:-no} = no && ! test -z ${sandbox_shell+x}; then
fi
fi
+AC_ARG_ENABLE(embedded-sandbox-shell, AS_HELP_STRING([--enable-embedded-sandbox-shell],[include the sandbox shell in the Nix binary [default=no]]),
+ embedded_sandbox_shell=$enableval, embedded_sandbox_shell=no)
+AC_SUBST(embedded_sandbox_shell)
+if test "$embedded_sandbox_shell" = yes; then
+ AC_DEFINE(HAVE_EMBEDDED_SANDBOX_SHELL, 1, [Include the sandbox shell in the Nix binary.])
+fi
+
+
# Expand all variables in config.status.
test "$prefix" = NONE && prefix=$ac_default_prefix
test "$exec_prefix" = NONE && exec_prefix='${prefix}'
diff --git a/doc/manual/generate-manpage.nix b/doc/manual/generate-manpage.nix
index 244cfa0c2..18a1a8bfe 100644
--- a/doc/manual/generate-manpage.nix
+++ b/doc/manual/generate-manpage.nix
@@ -1,99 +1,110 @@
-{ command, renderLinks ? false }:
+{ command }:
with builtins;
with import ./utils.nix;
let
- showCommand =
- { command, def, filename }:
- ''
- **Warning**: This program is **experimental** and its interface is subject to change.
- ''
- + "# Name\n\n"
- + "`${command}` - ${def.description}\n\n"
- + "# Synopsis\n\n"
- + showSynopsis { inherit command; args = def.args; }
- + (if def.commands or {} != {}
- then
- let
- categories = sort (x: y: x.id < y.id) (unique (map (cmd: cmd.category) (attrValues def.commands)));
- listCommands = cmds:
- concatStrings (map (name:
- "* "
- + (if renderLinks
- then "[`${command} ${name}`](./${appendName filename name}.md)"
- else "`${command} ${name}`")
- + " - ${cmds.${name}.description}\n")
- (attrNames cmds));
- in
- "where *subcommand* is one of the following:\n\n"
- # FIXME: group by category
- + (if length categories > 1
- then
- concatStrings (map
- (cat:
- "**${toString cat.description}:**\n\n"
- + listCommands (filterAttrs (n: v: v.category == cat) def.commands)
- + "\n"
- ) categories)
- + "\n"
- else
- listCommands def.commands
- + "\n")
- else "")
- + (if def ? doc
- then def.doc + "\n\n"
- else "")
- + (let s = showOptions def.flags; in
- if s != ""
- then "# Options\n\n${s}"
- else "")
- ;
+ showCommand = { command, details, filename }:
+ let
+ result = ''
+ > **Warning** \
+ > This program is **experimental** and its interface is subject to change.
+
+ # Name
+
+ `${command}` - ${details.description}
+
+ # Synopsis
+
+ ${showSynopsis command details.args}
+
+ ${maybeSubcommands}
+
+ ${maybeDocumentation}
+
+ ${maybeOptions}
+ '';
+ showSynopsis = command: args:
+ let
+ showArgument = arg: "*${arg.label}*" + (if arg ? arity then "" else "...");
+ arguments = concatStringsSep " " (map showArgument args);
+ in ''
+ `${command}` [*option*...] ${arguments}
+ '';
+ maybeSubcommands = if details ? commands && details.commands != {}
+ then ''
+ where *subcommand* is one of the following:
+
+ ${subcommands}
+ ''
+ else "";
+ subcommands = if length categories > 1
+ then listCategories
+ else listSubcommands details.commands;
+ categories = sort (x: y: x.id < y.id) (unique (map (cmd: cmd.category) (attrValues details.commands)));
+ listCategories = concatStrings (map showCategory categories);
+ showCategory = cat: ''
+ **${toString cat.description}:**
+
+ ${listSubcommands (filterAttrs (n: v: v.category == cat) details.commands)}
+ '';
+ listSubcommands = cmds: concatStrings (attrValues (mapAttrs showSubcommand cmds));
+ showSubcommand = name: subcmd: ''
+ * [`${command} ${name}`](./${appendName filename name}.md) - ${subcmd.description}
+ '';
+ maybeDocumentation = if details ? doc then details.doc else "";
+ maybeOptions = if details.flags == {} then "" else ''
+ # Options
+
+ ${showOptions details.flags}
+ '';
+ showOptions = options:
+ let
+ showCategory = cat: ''
+ ${if cat != "" then "**${cat}:**" else ""}
+
+ ${listOptions (filterAttrs (n: v: v.category == cat) options)}
+ '';
+ listOptions = opts: concatStringsSep "\n" (attrValues (mapAttrs showOption opts));
+ showOption = name: option:
+ let
+ shortName = if option ? shortName then "/ `-${option.shortName}`" else "";
+ labels = if option ? labels then (concatStringsSep " " (map (s: "*${s}*") option.labels)) else "";
+ in trim ''
+ - `--${name}` ${shortName} ${labels}
+
+ ${option.description}
+ '';
+ categories = sort builtins.lessThan (unique (map (cmd: cmd.category) (attrValues options)));
+ in concatStrings (map showCategory categories);
+ in squash result;
appendName = filename: name: (if filename == "nix" then "nix3" else filename) + "-" + name;
- showOptions = flags:
+ processCommand = { command, details, filename }:
let
- categories = sort builtins.lessThan (unique (map (cmd: cmd.category) (attrValues flags)));
- in
- concatStrings (map
- (cat:
- (if cat != ""
- then "**${cat}:**\n\n"
- else "")
- + concatStrings
- (map (longName:
- let
- flag = flags.${longName};
- in
- " - `--${longName}`"
- + (if flag ? shortName then " / `-${flag.shortName}`" else "")
- + (if flag ? labels then " " + (concatStringsSep " " (map (s: "*${s}*") flag.labels)) else "")
- + " \n"
- + " " + flag.description + "\n\n"
- ) (attrNames (filterAttrs (n: v: v.category == cat) flags))))
- categories);
-
- showSynopsis =
- { command, args }:
- "`${command}` [*option*...] ${concatStringsSep " "
- (map (arg: "*${arg.label}*" + (if arg ? arity then "" else "...")) args)}\n\n";
-
- processCommand = { command, def, filename }:
- [ { name = filename + ".md"; value = showCommand { inherit command def filename; }; inherit command; } ]
- ++ concatMap
- (name: processCommand {
- filename = appendName filename name;
- command = command + " " + name;
- def = def.commands.${name};
- })
- (attrNames def.commands or {});
-
-in
+ cmd = {
+ inherit command;
+ name = filename + ".md";
+ value = showCommand { inherit command details filename; };
+ };
+ subcommand = subCmd: processCommand {
+ command = command + " " + subCmd;
+ details = details.commands.${subCmd};
+ filename = appendName filename subCmd;
+ };
+ in [ cmd ] ++ concatMap subcommand (attrNames details.commands or {});
-let
- manpages = processCommand { filename = "nix"; command = "nix"; def = builtins.fromJSON command; };
- summary = concatStrings (map (manpage: " - [${manpage.command}](command-ref/new-cli/${manpage.name})\n") manpages);
-in
-(listToAttrs manpages) // { "SUMMARY.md" = summary; }
+ manpages = processCommand {
+ command = "nix";
+ details = builtins.fromJSON command;
+ filename = "nix";
+ };
+
+ tableOfContents = let
+ showEntry = page:
+ " - [${page.command}](command-ref/new-cli/${page.name})";
+ in concatStringsSep "\n" (map showEntry manpages) + "\n";
+
+in (listToAttrs manpages) // { "SUMMARY.md" = tableOfContents; }
diff --git a/doc/manual/local.mk b/doc/manual/local.mk
index 371ed6f21..364e02967 100644
--- a/doc/manual/local.mk
+++ b/doc/manual/local.mk
@@ -1,5 +1,9 @@
ifeq ($(doc_generate),yes)
+MANUAL_SRCS := \
+ $(call rwildcard, $(d)/src, *.md) \
+ $(call rwildcard, $(d)/src, */*.md)
+
# Generate man pages.
man-pages := $(foreach n, \
nix-env.1 nix-build.1 nix-shell.1 nix-store.1 nix-instantiate.1 \
@@ -46,7 +50,7 @@ $(d)/src/SUMMARY.md: $(d)/src/SUMMARY.md.in $(d)/src/command-ref/new-cli
$(d)/src/command-ref/new-cli: $(d)/nix.json $(d)/generate-manpage.nix $(bindir)/nix
@rm -rf $@
- $(trace-gen) $(nix-eval) --write-to $@ --expr 'import doc/manual/generate-manpage.nix { command = builtins.readFile $<; renderLinks = true; }'
+ $(trace-gen) $(nix-eval) --write-to $@ --expr 'import doc/manual/generate-manpage.nix { command = builtins.readFile $<; }'
$(d)/src/command-ref/conf-file.md: $(d)/conf-file.json $(d)/generate-options.nix $(d)/src/command-ref/conf-file-prefix.md $(bindir)/nix
@cat doc/manual/src/command-ref/conf-file-prefix.md > $@.tmp
@@ -61,10 +65,10 @@ $(d)/conf-file.json: $(bindir)/nix
$(trace-gen) $(dummy-env) $(bindir)/nix show-config --json --experimental-features nix-command > $@.tmp
@mv $@.tmp $@
-$(d)/src/expressions/builtins.md: $(d)/builtins.json $(d)/generate-builtins.nix $(d)/src/expressions/builtins-prefix.md $(bindir)/nix
- @cat doc/manual/src/expressions/builtins-prefix.md > $@.tmp
+$(d)/src/language/builtins.md: $(d)/builtins.json $(d)/generate-builtins.nix $(d)/src/language/builtins-prefix.md $(bindir)/nix
+ @cat doc/manual/src/language/builtins-prefix.md > $@.tmp
$(trace-gen) $(nix-eval) --expr 'import doc/manual/generate-builtins.nix (builtins.fromJSON (builtins.readFile $<))' >> $@.tmp
- @cat doc/manual/src/expressions/builtins-suffix.md >> $@.tmp
+ @cat doc/manual/src/language/builtins-suffix.md >> $@.tmp
@mv $@.tmp $@
$(d)/builtins.json: $(bindir)/nix
@@ -92,12 +96,12 @@ doc/manual/generated/man1/nix3-manpages: $(d)/src/command-ref/new-cli
if [[ $$name = SUMMARY ]]; then continue; fi; \
printf "Title: %s\n\n" "$$name" > $$tmpFile; \
cat $$i >> $$tmpFile; \
- lowdown -sT man -M section=1 $$tmpFile -o $(DESTDIR)$$(dirname $@)/$$name.1; \
+ lowdown -sT man --nroff-nolinks -M section=1 $$tmpFile -o $(DESTDIR)$$(dirname $@)/$$name.1; \
rm $$tmpFile; \
done
@touch $@
-$(docdir)/manual/index.html: $(MANUAL_SRCS) $(d)/book.toml $(d)/anchors.jq $(d)/custom.css $(d)/src/SUMMARY.md $(d)/src/command-ref/new-cli $(d)/src/command-ref/conf-file.md $(d)/src/expressions/builtins.md $(call rwildcard, $(d)/src, *.md)
+$(docdir)/manual/index.html: $(MANUAL_SRCS) $(d)/book.toml $(d)/anchors.jq $(d)/custom.css $(d)/src/SUMMARY.md $(d)/src/command-ref/new-cli $(d)/src/command-ref/conf-file.md $(d)/src/language/builtins.md
$(trace-gen) RUST_LOG=warn mdbook build doc/manual -d $(DESTDIR)$(docdir)/manual
endif
diff --git a/doc/manual/redirects.js b/doc/manual/redirects.js
index 19f928c7e..167e221b8 100644
--- a/doc/manual/redirects.js
+++ b/doc/manual/redirects.js
@@ -132,113 +132,106 @@ var redirects = {
"#sec-common-options": "command-ref/opt-common.html",
"#ch-utilities": "command-ref/utilities.html",
"#chap-hacking": "contributing/hacking.html",
- "#adv-attr-allowSubstitutes": "expressions/advanced-attributes.html#adv-attr-allowSubstitutes",
- "#adv-attr-allowedReferences": "expressions/advanced-attributes.html#adv-attr-allowedReferences",
- "#adv-attr-allowedRequisites": "expressions/advanced-attributes.html#adv-attr-allowedRequisites",
- "#adv-attr-disallowedReferences": "expressions/advanced-attributes.html#adv-attr-disallowedReferences",
- "#adv-attr-disallowedRequisites": "expressions/advanced-attributes.html#adv-attr-disallowedRequisites",
- "#adv-attr-exportReferencesGraph": "expressions/advanced-attributes.html#adv-attr-exportReferencesGraph",
- "#adv-attr-impureEnvVars": "expressions/advanced-attributes.html#adv-attr-impureEnvVars",
- "#adv-attr-outputHash": "expressions/advanced-attributes.html#adv-attr-outputHash",
- "#adv-attr-outputHashAlgo": "expressions/advanced-attributes.html#adv-attr-outputHashAlgo",
- "#adv-attr-outputHashMode": "expressions/advanced-attributes.html#adv-attr-outputHashMode",
- "#adv-attr-passAsFile": "expressions/advanced-attributes.html#adv-attr-passAsFile",
- "#adv-attr-preferLocalBuild": "expressions/advanced-attributes.html#adv-attr-preferLocalBuild",
- "#fixed-output-drvs": "expressions/advanced-attributes.html#adv-attr-outputHash",
- "#sec-advanced-attributes": "expressions/advanced-attributes.html",
- "#sec-arguments": "expressions/arguments-variables.html",
- "#sec-build-script": "expressions/build-script.html",
- "#builtin-abort": "expressions/builtins.html#builtins-abort",
- "#builtin-add": "expressions/builtins.html#builtins-add",
- "#builtin-all": "expressions/builtins.html#builtins-all",
- "#builtin-any": "expressions/builtins.html#builtins-any",
- "#builtin-attrNames": "expressions/builtins.html#builtins-attrNames",
- "#builtin-attrValues": "expressions/builtins.html#builtins-attrValues",
- "#builtin-baseNameOf": "expressions/builtins.html#builtins-baseNameOf",
- "#builtin-bitAnd": "expressions/builtins.html#builtins-bitAnd",
- "#builtin-bitOr": "expressions/builtins.html#builtins-bitOr",
- "#builtin-bitXor": "expressions/builtins.html#builtins-bitXor",
- "#builtin-builtins": "expressions/builtins.html#builtins-builtins",
- "#builtin-compareVersions": "expressions/builtins.html#builtins-compareVersions",
- "#builtin-concatLists": "expressions/builtins.html#builtins-concatLists",
- "#builtin-concatStringsSep": "expressions/builtins.html#builtins-concatStringsSep",
- "#builtin-currentSystem": "expressions/builtins.html#builtins-currentSystem",
- "#builtin-deepSeq": "expressions/builtins.html#builtins-deepSeq",
- "#builtin-derivation": "expressions/builtins.html#builtins-derivation",
- "#builtin-dirOf": "expressions/builtins.html#builtins-dirOf",
- "#builtin-div": "expressions/builtins.html#builtins-div",
- "#builtin-elem": "expressions/builtins.html#builtins-elem",
- "#builtin-elemAt": "expressions/builtins.html#builtins-elemAt",
- "#builtin-fetchGit": "expressions/builtins.html#builtins-fetchGit",
- "#builtin-fetchTarball": "expressions/builtins.html#builtins-fetchTarball",
- "#builtin-fetchurl": "expressions/builtins.html#builtins-fetchurl",
- "#builtin-filterSource": "expressions/builtins.html#builtins-filterSource",
- "#builtin-foldl-prime": "expressions/builtins.html#builtins-foldl-prime",
- "#builtin-fromJSON": "expressions/builtins.html#builtins-fromJSON",
- "#builtin-functionArgs": "expressions/builtins.html#builtins-functionArgs",
- "#builtin-genList": "expressions/builtins.html#builtins-genList",
- "#builtin-getAttr": "expressions/builtins.html#builtins-getAttr",
- "#builtin-getEnv": "expressions/builtins.html#builtins-getEnv",
- "#builtin-hasAttr": "expressions/builtins.html#builtins-hasAttr",
- "#builtin-hashFile": "expressions/builtins.html#builtins-hashFile",
- "#builtin-hashString": "expressions/builtins.html#builtins-hashString",
- "#builtin-head": "expressions/builtins.html#builtins-head",
- "#builtin-import": "expressions/builtins.html#builtins-import",
- "#builtin-intersectAttrs": "expressions/builtins.html#builtins-intersectAttrs",
- "#builtin-isAttrs": "expressions/builtins.html#builtins-isAttrs",
- "#builtin-isBool": "expressions/builtins.html#builtins-isBool",
- "#builtin-isFloat": "expressions/builtins.html#builtins-isFloat",
- "#builtin-isFunction": "expressions/builtins.html#builtins-isFunction",
- "#builtin-isInt": "expressions/builtins.html#builtins-isInt",
- "#builtin-isList": "expressions/builtins.html#builtins-isList",
- "#builtin-isNull": "expressions/builtins.html#builtins-isNull",
- "#builtin-isString": "expressions/builtins.html#builtins-isString",
- "#builtin-length": "expressions/builtins.html#builtins-length",
- "#builtin-lessThan": "expressions/builtins.html#builtins-lessThan",
- "#builtin-listToAttrs": "expressions/builtins.html#builtins-listToAttrs",
- "#builtin-map": "expressions/builtins.html#builtins-map",
- "#builtin-match": "expressions/builtins.html#builtins-match",
- "#builtin-mul": "expressions/builtins.html#builtins-mul",
- "#builtin-parseDrvName": "expressions/builtins.html#builtins-parseDrvName",
- "#builtin-path": "expressions/builtins.html#builtins-path",
- "#builtin-pathExists": "expressions/builtins.html#builtins-pathExists",
- "#builtin-placeholder": "expressions/builtins.html#builtins-placeholder",
- "#builtin-readDir": "expressions/builtins.html#builtins-readDir",
- "#builtin-readFile": "expressions/builtins.html#builtins-readFile",
- "#builtin-removeAttrs": "expressions/builtins.html#builtins-removeAttrs",
- "#builtin-replaceStrings": "expressions/builtins.html#builtins-replaceStrings",
- "#builtin-seq": "expressions/builtins.html#builtins-seq",
- "#builtin-sort": "expressions/builtins.html#builtins-sort",
- "#builtin-split": "expressions/builtins.html#builtins-split",
- "#builtin-splitVersion": "expressions/builtins.html#builtins-splitVersion",
- "#builtin-stringLength": "expressions/builtins.html#builtins-stringLength",
- "#builtin-sub": "expressions/builtins.html#builtins-sub",
- "#builtin-substring": "expressions/builtins.html#builtins-substring",
- "#builtin-tail": "expressions/builtins.html#builtins-tail",
- "#builtin-throw": "expressions/builtins.html#builtins-throw",
- "#builtin-toFile": "expressions/builtins.html#builtins-toFile",
- "#builtin-toJSON": "expressions/builtins.html#builtins-toJSON",
- "#builtin-toPath": "expressions/builtins.html#builtins-toPath",
- "#builtin-toString": "expressions/builtins.html#builtins-toString",
- "#builtin-toXML": "expressions/builtins.html#builtins-toXML",
- "#builtin-trace": "expressions/builtins.html#builtins-trace",
- "#builtin-tryEval": "expressions/builtins.html#builtins-tryEval",
- "#builtin-typeOf": "expressions/builtins.html#builtins-typeOf",
- "#ssec-builtins": "expressions/builtins.html",
- "#attr-system": "expressions/derivations.html#attr-system",
- "#ssec-derivation": "expressions/derivations.html",
- "#ch-expression-language": "expressions/expression-language.html",
- "#sec-expression-syntax": "expressions/expression-syntax.html",
- "#sec-generic-builder": "expressions/generic-builder.html",
- "#sec-constructs": "expressions/language-constructs.html",
- "#sect-let-expressions": "expressions/language-constructs.html#let-expressions",
- "#ss-functions": "expressions/language-constructs.html#functions",
- "#sec-language-operators": "expressions/language-operators.html",
- "#table-operators": "expressions/language-operators.html",
- "#ssec-values": "expressions/language-values.html",
- "#sec-building-simple": "expressions/simple-building-testing.html",
- "#ch-simple-expression": "expressions/simple-expression.html",
- "#chap-writing-nix-expressions": "expressions/writing-nix-expressions.html",
+ "#adv-attr-allowSubstitutes": "language/advanced-attributes.html#adv-attr-allowSubstitutes",
+ "#adv-attr-allowedReferences": "language/advanced-attributes.html#adv-attr-allowedReferences",
+ "#adv-attr-allowedRequisites": "language/advanced-attributes.html#adv-attr-allowedRequisites",
+ "#adv-attr-disallowedReferences": "language/advanced-attributes.html#adv-attr-disallowedReferences",
+ "#adv-attr-disallowedRequisites": "language/advanced-attributes.html#adv-attr-disallowedRequisites",
+ "#adv-attr-exportReferencesGraph": "language/advanced-attributes.html#adv-attr-exportReferencesGraph",
+ "#adv-attr-impureEnvVars": "language/advanced-attributes.html#adv-attr-impureEnvVars",
+ "#adv-attr-outputHash": "language/advanced-attributes.html#adv-attr-outputHash",
+ "#adv-attr-outputHashAlgo": "language/advanced-attributes.html#adv-attr-outputHashAlgo",
+ "#adv-attr-outputHashMode": "language/advanced-attributes.html#adv-attr-outputHashMode",
+ "#adv-attr-passAsFile": "language/advanced-attributes.html#adv-attr-passAsFile",
+ "#adv-attr-preferLocalBuild": "language/advanced-attributes.html#adv-attr-preferLocalBuild",
+ "#fixed-output-drvs": "language/advanced-attributes.html#adv-attr-outputHash",
+ "#sec-advanced-attributes": "language/advanced-attributes.html",
+ "#builtin-abort": "language/builtins.html#builtins-abort",
+ "#builtin-add": "language/builtins.html#builtins-add",
+ "#builtin-all": "language/builtins.html#builtins-all",
+ "#builtin-any": "language/builtins.html#builtins-any",
+ "#builtin-attrNames": "language/builtins.html#builtins-attrNames",
+ "#builtin-attrValues": "language/builtins.html#builtins-attrValues",
+ "#builtin-baseNameOf": "language/builtins.html#builtins-baseNameOf",
+ "#builtin-bitAnd": "language/builtins.html#builtins-bitAnd",
+ "#builtin-bitOr": "language/builtins.html#builtins-bitOr",
+ "#builtin-bitXor": "language/builtins.html#builtins-bitXor",
+ "#builtin-builtins": "language/builtins.html#builtins-builtins",
+ "#builtin-compareVersions": "language/builtins.html#builtins-compareVersions",
+ "#builtin-concatLists": "language/builtins.html#builtins-concatLists",
+ "#builtin-concatStringsSep": "language/builtins.html#builtins-concatStringsSep",
+ "#builtin-currentSystem": "language/builtins.html#builtins-currentSystem",
+ "#builtin-deepSeq": "language/builtins.html#builtins-deepSeq",
+ "#builtin-derivation": "language/builtins.html#builtins-derivation",
+ "#builtin-dirOf": "language/builtins.html#builtins-dirOf",
+ "#builtin-div": "language/builtins.html#builtins-div",
+ "#builtin-elem": "language/builtins.html#builtins-elem",
+ "#builtin-elemAt": "language/builtins.html#builtins-elemAt",
+ "#builtin-fetchGit": "language/builtins.html#builtins-fetchGit",
+ "#builtin-fetchTarball": "language/builtins.html#builtins-fetchTarball",
+ "#builtin-fetchurl": "language/builtins.html#builtins-fetchurl",
+ "#builtin-filterSource": "language/builtins.html#builtins-filterSource",
+ "#builtin-foldl-prime": "language/builtins.html#builtins-foldl-prime",
+ "#builtin-fromJSON": "language/builtins.html#builtins-fromJSON",
+ "#builtin-functionArgs": "language/builtins.html#builtins-functionArgs",
+ "#builtin-genList": "language/builtins.html#builtins-genList",
+ "#builtin-getAttr": "language/builtins.html#builtins-getAttr",
+ "#builtin-getEnv": "language/builtins.html#builtins-getEnv",
+ "#builtin-hasAttr": "language/builtins.html#builtins-hasAttr",
+ "#builtin-hashFile": "language/builtins.html#builtins-hashFile",
+ "#builtin-hashString": "language/builtins.html#builtins-hashString",
+ "#builtin-head": "language/builtins.html#builtins-head",
+ "#builtin-import": "language/builtins.html#builtins-import",
+ "#builtin-intersectAttrs": "language/builtins.html#builtins-intersectAttrs",
+ "#builtin-isAttrs": "language/builtins.html#builtins-isAttrs",
+ "#builtin-isBool": "language/builtins.html#builtins-isBool",
+ "#builtin-isFloat": "language/builtins.html#builtins-isFloat",
+ "#builtin-isFunction": "language/builtins.html#builtins-isFunction",
+ "#builtin-isInt": "language/builtins.html#builtins-isInt",
+ "#builtin-isList": "language/builtins.html#builtins-isList",
+ "#builtin-isNull": "language/builtins.html#builtins-isNull",
+ "#builtin-isString": "language/builtins.html#builtins-isString",
+ "#builtin-length": "language/builtins.html#builtins-length",
+ "#builtin-lessThan": "language/builtins.html#builtins-lessThan",
+ "#builtin-listToAttrs": "language/builtins.html#builtins-listToAttrs",
+ "#builtin-map": "language/builtins.html#builtins-map",
+ "#builtin-match": "language/builtins.html#builtins-match",
+ "#builtin-mul": "language/builtins.html#builtins-mul",
+ "#builtin-parseDrvName": "language/builtins.html#builtins-parseDrvName",
+ "#builtin-path": "language/builtins.html#builtins-path",
+ "#builtin-pathExists": "language/builtins.html#builtins-pathExists",
+ "#builtin-placeholder": "language/builtins.html#builtins-placeholder",
+ "#builtin-readDir": "language/builtins.html#builtins-readDir",
+ "#builtin-readFile": "language/builtins.html#builtins-readFile",
+ "#builtin-removeAttrs": "language/builtins.html#builtins-removeAttrs",
+ "#builtin-replaceStrings": "language/builtins.html#builtins-replaceStrings",
+ "#builtin-seq": "language/builtins.html#builtins-seq",
+ "#builtin-sort": "language/builtins.html#builtins-sort",
+ "#builtin-split": "language/builtins.html#builtins-split",
+ "#builtin-splitVersion": "language/builtins.html#builtins-splitVersion",
+ "#builtin-stringLength": "language/builtins.html#builtins-stringLength",
+ "#builtin-sub": "language/builtins.html#builtins-sub",
+ "#builtin-substring": "language/builtins.html#builtins-substring",
+ "#builtin-tail": "language/builtins.html#builtins-tail",
+ "#builtin-throw": "language/builtins.html#builtins-throw",
+ "#builtin-toFile": "language/builtins.html#builtins-toFile",
+ "#builtin-toJSON": "language/builtins.html#builtins-toJSON",
+ "#builtin-toPath": "language/builtins.html#builtins-toPath",
+ "#builtin-toString": "language/builtins.html#builtins-toString",
+ "#builtin-toXML": "language/builtins.html#builtins-toXML",
+ "#builtin-trace": "language/builtins.html#builtins-trace",
+ "#builtin-tryEval": "language/builtins.html#builtins-tryEval",
+ "#builtin-typeOf": "language/builtins.html#builtins-typeOf",
+ "#ssec-builtins": "language/builtins.html",
+ "#attr-system": "language/derivations.html#attr-system",
+ "#ssec-derivation": "language/derivations.html",
+ "#ch-expression-language": "language/index.html",
+ "#sec-constructs": "language/constructs.html",
+ "#sect-let-language": "language/constructs.html#let-language",
+ "#ss-functions": "language/constructs.html#functions",
+ "#sec-language-operators": "language/operators.html",
+ "#table-operators": "language/operators.html",
+ "#ssec-values": "language/values.html",
"#gloss-closure": "glossary.html#gloss-closure",
"#gloss-derivation": "glossary.html#gloss-derivation",
"#gloss-deriver": "glossary.html#gloss-deriver",
diff --git a/doc/manual/src/SUMMARY.md.in b/doc/manual/src/SUMMARY.md.in
index 825a8b4c0..908e7e3d9 100644
--- a/doc/manual/src/SUMMARY.md.in
+++ b/doc/manual/src/SUMMARY.md.in
@@ -26,21 +26,14 @@
- [Copying Closures via SSH](package-management/copy-closure.md)
- [Serving a Nix store via SSH](package-management/ssh-substituter.md)
- [Serving a Nix store via S3](package-management/s3-substituter.md)
-- [Writing Nix Expressions](expressions/writing-nix-expressions.md)
- - [A Simple Nix Expression](expressions/simple-expression.md)
- - [Expression Syntax](expressions/expression-syntax.md)
- - [Build Script](expressions/build-script.md)
- - [Arguments and Variables](expressions/arguments-variables.md)
- - [Building and Testing](expressions/simple-building-testing.md)
- - [Generic Builder Syntax](expressions/generic-builder.md)
- - [Writing Nix Expressions](expressions/expression-language.md)
- - [Values](expressions/language-values.md)
- - [Language Constructs](expressions/language-constructs.md)
- - [Operators](expressions/language-operators.md)
- - [Derivations](expressions/derivations.md)
- - [Advanced Attributes](expressions/advanced-attributes.md)
- - [Built-in Constants](expressions/builtin-constants.md)
- - [Built-in Functions](expressions/builtins.md)
+- [Nix Language](language/index.md)
+ - [Data Types](language/values.md)
+ - [Language Constructs](language/constructs.md)
+ - [Operators](language/operators.md)
+ - [Derivations](language/derivations.md)
+ - [Advanced Attributes](language/advanced-attributes.md)
+ - [Built-in Constants](language/builtin-constants.md)
+ - [Built-in Functions](language/builtins.md)
- [Advanced Topics](advanced-topics/advanced-topics.md)
- [Remote Builds](advanced-topics/distributed-builds.md)
- [Tuning Cores and Jobs](advanced-topics/cores-vs-jobs.md)
@@ -72,6 +65,8 @@
- [CLI guideline](contributing/cli-guideline.md)
- [Release Notes](release-notes/release-notes.md)
- [Release X.Y (202?-??-??)](release-notes/rl-next.md)
+ - [Release 2.11 (2022-08-25)](release-notes/rl-2.11.md)
+ - [Release 2.10 (2022-07-11)](release-notes/rl-2.10.md)
- [Release 2.9 (2022-05-30)](release-notes/rl-2.9.md)
- [Release 2.8 (2022-04-19)](release-notes/rl-2.8.md)
- [Release 2.7 (2022-03-07)](release-notes/rl-2.7.md)
diff --git a/doc/manual/src/advanced-topics/distributed-builds.md b/doc/manual/src/advanced-topics/distributed-builds.md
index b0d7fbf1a..fefd10100 100644
--- a/doc/manual/src/advanced-topics/distributed-builds.md
+++ b/doc/manual/src/advanced-topics/distributed-builds.md
@@ -12,14 +12,14 @@ machine is accessible via SSH and that it has Nix installed. You can
test whether connecting to the remote Nix instance works, e.g.
```console
-$ nix ping-store --store ssh://mac
+$ nix store ping --store ssh://mac
```
will try to connect to the machine named `mac`. It is possible to
specify an SSH identity file as part of the remote store URI, e.g.
```console
-$ nix ping-store --store ssh://mac?ssh-key=/home/alice/my-key
+$ nix store ping --store ssh://mac?ssh-key=/home/alice/my-key
```
Since builds should be non-interactive, the key should not have a
diff --git a/doc/manual/src/command-ref/nix-build.md b/doc/manual/src/command-ref/nix-build.md
index aacb32a25..49c6f3f55 100644
--- a/doc/manual/src/command-ref/nix-build.md
+++ b/doc/manual/src/command-ref/nix-build.md
@@ -12,6 +12,12 @@
[`--dry-run`]
[{`--out-link` | `-o`} *outlink*]
+# Disambiguation
+
+This man page describes the command `nix-build`, which is distinct from `nix
+build`. For documentation on the latter, run `nix build --help` or see `man
+nix3-build`.
+
# Description
The `nix-build` command builds the derivations described by the Nix
diff --git a/doc/manual/src/command-ref/nix-copy-closure.md b/doc/manual/src/command-ref/nix-copy-closure.md
index 7047d3012..9a29030bd 100644
--- a/doc/manual/src/command-ref/nix-copy-closure.md
+++ b/doc/manual/src/command-ref/nix-copy-closure.md
@@ -30,8 +30,8 @@ Since `nix-copy-closure` calls `ssh`, you may be asked to type in the
appropriate password or passphrase. In fact, you may be asked _twice_
because `nix-copy-closure` currently connects twice to the remote
machine, first to get the set of paths missing on the target machine,
-and second to send the dump of those paths. If this bothers you, use
-`ssh-agent`.
+and second to send the dump of those paths. When using public key
+authentication, you can avoid typing the passphrase with `ssh-agent`.
# Options
diff --git a/doc/manual/src/command-ref/nix-env.md b/doc/manual/src/command-ref/nix-env.md
index 8d6abaf52..a5df35d77 100644
--- a/doc/manual/src/command-ref/nix-env.md
+++ b/doc/manual/src/command-ref/nix-env.md
@@ -31,7 +31,7 @@ subcommand to be performed. These are documented below.
Several commands, such as `nix-env -q` and `nix-env -i`, take a list of
arguments that specify the packages on which to operate. These are
extended regular expressions that must match the entire name of the
-package. (For details on regular expressions, see regex7.) The match is
+package. (For details on regular expressions, see **regex**(7).) The match is
case-sensitive. The regular expression can optionally be followed by a
dash and a version number; if omitted, any version of the package will
match. Here are some examples:
@@ -198,7 +198,7 @@ a number of possible ways:
another.
- If `--from-expression` is given, *args* are Nix
- [functions](../expressions/language-constructs.md#functions)
+ [functions](../language/constructs.md#functions)
that are called with the active Nix expression as their single
argument. The derivations returned by those function calls are
installed. This allows derivations to be specified in an
@@ -412,7 +412,7 @@ The upgrade operation determines whether a derivation `y` is an upgrade
of a derivation `x` by looking at their respective `name` attributes.
The names (e.g., `gcc-3.3.1` are split into two parts: the package name
(`gcc`), and the version (`3.3.1`). The version part starts after the
-first dash not followed by a letter. `x` is considered an upgrade of `y`
+first dash not followed by a letter. `y` is considered an upgrade of `x`
if their package names match, and the version of `y` is higher than that
of `x`.
diff --git a/doc/manual/src/command-ref/nix-instantiate.md b/doc/manual/src/command-ref/nix-instantiate.md
index 2e198daed..8f143729e 100644
--- a/doc/manual/src/command-ref/nix-instantiate.md
+++ b/doc/manual/src/command-ref/nix-instantiate.md
@@ -51,7 +51,7 @@ standard input.
- `--strict`\
When used with `--eval`, recursively evaluate list elements and
attributes. Normally, such sub-expressions are left unevaluated
- (since the Nix expression language is lazy).
+ (since the Nix language is lazy).
> **Warning**
>
@@ -66,7 +66,7 @@ standard input.
When used with `--eval`, print the resulting value as an XML
representation of the abstract syntax tree rather than as an ATerm.
The schema is the same as that used by the [`toXML`
- built-in](../expressions/builtins.md).
+ built-in](../language/builtins.md).
- `--read-write-mode`\
When used with `--eval`, perform evaluation in read/write mode so
diff --git a/doc/manual/src/command-ref/nix-shell.md b/doc/manual/src/command-ref/nix-shell.md
index a2b6d8a8e..840bccd25 100644
--- a/doc/manual/src/command-ref/nix-shell.md
+++ b/doc/manual/src/command-ref/nix-shell.md
@@ -15,6 +15,12 @@
[`--keep` *name*]
{{`--packages` | `-p`} {*packages* | *expressions*} … | [*path*]}
+# Disambiguation
+
+This man page describes the command `nix-shell`, which is distinct from `nix
+shell`. For documentation on the latter, run `nix shell --help` or see `man
+nix3-shell`.
+
# Description
The command `nix-shell` will build the dependencies of the specified
diff --git a/doc/manual/src/command-ref/nix-store.md b/doc/manual/src/command-ref/nix-store.md
index dc8faba68..ecd838e8d 100644
--- a/doc/manual/src/command-ref/nix-store.md
+++ b/doc/manual/src/command-ref/nix-store.md
@@ -121,7 +121,7 @@ Special exit codes:
- `102`\
Hash mismatch, the build output was rejected because it does not
match the [`outputHash` attribute of the
- derivation](../expressions/advanced-attributes.md).
+ derivation](../language/advanced-attributes.md).
- `104`\
Not deterministic, the build succeeded in check mode but the
diff --git a/doc/manual/src/command-ref/opt-common.md b/doc/manual/src/command-ref/opt-common.md
index 51d7de18a..e612c416f 100644
--- a/doc/manual/src/command-ref/opt-common.md
+++ b/doc/manual/src/command-ref/opt-common.md
@@ -145,7 +145,7 @@ Most Nix commands accept the following command-line options:
expression evaluator will automatically try to call functions that
it encounters. It can automatically call functions for which every
argument has a [default
- value](../expressions/language-constructs.md#functions) (e.g.,
+ value](../language/constructs.md#functions) (e.g.,
`{ argName ? defaultValue }: ...`). With `--arg`, you can also
call functions that have arguments without a default value (or
override a default value). That is, if the evaluator encounters a
@@ -164,7 +164,7 @@ Most Nix commands accept the following command-line options:
So if you call this Nix expression (e.g., when you do `nix-env -iA
pkgname`), the function will be called automatically using the
- value [`builtins.currentSystem`](../expressions/builtins.md) for
+ value [`builtins.currentSystem`](../language/builtins.md) for
the `system` argument. You can override this using `--arg`, e.g.,
`nix-env -iA pkgname --arg system \"i686-freebsd\"`. (Note that
since the argument is a Nix string literal, you have to escape the
diff --git a/doc/manual/src/contributing/hacking.md b/doc/manual/src/contributing/hacking.md
index 5bee7932b..9f7d5057b 100644
--- a/doc/manual/src/contributing/hacking.md
+++ b/doc/manual/src/contributing/hacking.md
@@ -42,7 +42,7 @@ $ nix develop
```
To get a shell with a different compilation environment (e.g. stdenv,
-gccStdenv, clangStdenv, clang11Stdenv):
+gccStdenv, clangStdenv, clang11Stdenv, ccacheStdenv):
```console
$ nix-shell -A devShells.x86_64-linux.clang11StdenvPackages
@@ -54,6 +54,9 @@ or if you have a flake-enabled nix:
$ nix develop .#clang11StdenvPackages
```
+Note: you can use `ccacheStdenv` to drastically improve rebuild
+time. By default, ccache keeps artifacts in `~/.cache/ccache/`.
+
To build Nix itself in this shell:
```console
diff --git a/doc/manual/src/expressions/arguments-variables.md b/doc/manual/src/expressions/arguments-variables.md
deleted file mode 100644
index 12198c879..000000000
--- a/doc/manual/src/expressions/arguments-variables.md
+++ /dev/null
@@ -1,80 +0,0 @@
-# Arguments and Variables
-
-The [Nix expression for GNU Hello](expression-syntax.md) is a
-function; it is missing some arguments that have to be filled in
-somewhere. In the Nix Packages collection this is done in the file
-`pkgs/top-level/all-packages.nix`, where all Nix expressions for
-packages are imported and called with the appropriate arguments. Here
-are some fragments of `all-packages.nix`, with annotations of what
-they mean:
-
-```nix
-...
-
-rec { ①
-
- hello = import ../applications/misc/hello/ex-1 ② { ③
- inherit fetchurl stdenv perl;
- };
-
- perl = import ../development/interpreters/perl { ④
- inherit fetchurl stdenv;
- };
-
- fetchurl = import ../build-support/fetchurl {
- inherit stdenv; ...
- };
-
- stdenv = ...;
-
-}
-```
-
-1. This file defines a set of attributes, all of which are concrete
- derivations (i.e., not functions). In fact, we define a *mutually
- recursive* set of attributes. That is, the attributes can refer to
- each other. This is precisely what we want since we want to “plug”
- the various packages into each other.
-
-2. Here we *import* the Nix expression for GNU Hello. The import
- operation just loads and returns the specified Nix expression. In
- fact, we could just have put the contents of the Nix expression
- for GNU Hello in `all-packages.nix` at this point. That would be
- completely equivalent, but it would make `all-packages.nix` rather
- bulky.
-
- Note that we refer to `../applications/misc/hello/ex-1`, not
- `../applications/misc/hello/ex-1/default.nix`. When you try to
- import a directory, Nix automatically appends `/default.nix` to the
- file name.
-
-3. This is where the actual composition takes place. Here we *call* the
- function imported from `../applications/misc/hello/ex-1` with a set
- containing the things that the function expects, namely `fetchurl`,
- `stdenv`, and `perl`. We use inherit again to use the attributes
- defined in the surrounding scope (we could also have written
- `fetchurl = fetchurl;`, etc.).
-
- The result of this function call is an actual derivation that can be
- built by Nix (since when we fill in the arguments of the function,
- what we get is its body, which is the call to `stdenv.mkDerivation`
- in the [Nix expression for GNU Hello](expression-syntax.md)).
-
- > **Note**
- >
- > Nixpkgs has a convenience function `callPackage` that imports and
- > calls a function, filling in any missing arguments by passing the
- > corresponding attribute from the Nixpkgs set, like this:
- >
- > ```nix
- > hello = callPackage ../applications/misc/hello/ex-1 { };
- > ```
- >
- > If necessary, you can set or override arguments:
- >
- > ```nix
- > hello = callPackage ../applications/misc/hello/ex-1 { stdenv = myStdenv; };
- > ```
-
-4. Likewise, we have to instantiate Perl, `fetchurl`, and the standard
- environment.
diff --git a/doc/manual/src/expressions/build-script.md b/doc/manual/src/expressions/build-script.md
deleted file mode 100644
index b1eacae88..000000000
--- a/doc/manual/src/expressions/build-script.md
+++ /dev/null
@@ -1,70 +0,0 @@
-# Build Script
-
-Here is the builder referenced from Hello's Nix expression (stored in
-`pkgs/applications/misc/hello/ex-1/builder.sh`):
-
-```bash
-source $stdenv/setup ①
-
-PATH=$perl/bin:$PATH ②
-
-tar xvfz $src ③
-cd hello-*
-./configure --prefix=$out ④
-make ⑤
-make install
-```
-
-The builder can actually be made a lot shorter by using the *generic
-builder* functions provided by `stdenv`, but here we write out the build
-steps to elucidate what a builder does. It performs the following steps:
-
-1. When Nix runs a builder, it initially completely clears the
- environment (except for the attributes declared in the derivation).
- This is done to prevent undeclared inputs from being used in the
- build process. If for example the `PATH` contained `/usr/bin`, then
- you might accidentally use `/usr/bin/gcc`.
-
- So the first step is to set up the environment. This is done by
- calling the `setup` script of the standard environment. The
- environment variable `stdenv` points to the location of the
- standard environment being used. (It wasn't specified explicitly
- as an attribute in Hello's Nix expression, but `mkDerivation` adds
- it automatically.)
-
-2. Since Hello needs Perl, we have to make sure that Perl is in the
- `PATH`. The `perl` environment variable points to the location of
- the Perl package (since it was passed in as an attribute to the
- derivation), so `$perl/bin` is the directory containing the Perl
- interpreter.
-
-3. Now we have to unpack the sources. The `src` attribute was bound to
- the result of fetching the Hello source tarball from the network, so
- the `src` environment variable points to the location in the Nix
- store to which the tarball was downloaded. After unpacking, we `cd`
- to the resulting source directory.
-
- The whole build is performed in a temporary directory created in
- `/tmp`, by the way. This directory is removed after the builder
- finishes, so there is no need to clean up the sources afterwards.
- Also, the temporary directory is always newly created, so you don't
- have to worry about files from previous builds interfering with the
- current build.
-
-4. GNU Hello is a typical Autoconf-based package, so we first have to
- run its `configure` script. In Nix every package is stored in a
- separate location in the Nix store, for instance
- `/nix/store/9a54ba97fb71b65fda531012d0443ce2-hello-2.1.1`. Nix
- computes this path by cryptographically hashing all attributes of
- the derivation. The path is passed to the builder through the `out`
- environment variable. So here we give `configure` the parameter
- `--prefix=$out` to cause Hello to be installed in the expected
- location.
-
-5. Finally we build Hello (`make`) and install it into the location
- specified by `out` (`make install`).
-
-If you are wondering about the absence of error checking on the result
-of various commands called in the builder: this is because the shell
-script is evaluated with Bash's `-e` option, which causes the script to
-be aborted if any command fails without an error check.
diff --git a/doc/manual/src/expressions/expression-language.md b/doc/manual/src/expressions/expression-language.md
deleted file mode 100644
index 267fcb983..000000000
--- a/doc/manual/src/expressions/expression-language.md
+++ /dev/null
@@ -1,12 +0,0 @@
-# Nix Expression Language
-
-The Nix expression language is a pure, lazy, functional language. Purity
-means that operations in the language don't have side-effects (for
-instance, there is no variable assignment). Laziness means that
-arguments to functions are evaluated only when they are needed.
-Functional means that functions are “normal” values that can be passed
-around and manipulated in interesting ways. The language is not a
-full-featured, general purpose language. Its main job is to describe
-packages, compositions of packages, and the variability within packages.
-
-This section presents the various features of the language.
diff --git a/doc/manual/src/expressions/expression-syntax.md b/doc/manual/src/expressions/expression-syntax.md
deleted file mode 100644
index 6b93e692c..000000000
--- a/doc/manual/src/expressions/expression-syntax.md
+++ /dev/null
@@ -1,93 +0,0 @@
-# Expression Syntax
-
-Here is a Nix expression for GNU Hello:
-
-```nix
-{ stdenv, fetchurl, perl }: ①
-
-stdenv.mkDerivation { ②
- name = "hello-2.1.1"; ③
- builder = ./builder.sh; ④
- src = fetchurl { ⑤
- url = "ftp://ftp.nluug.nl/pub/gnu/hello/hello-2.1.1.tar.gz";
- sha256 = "1md7jsfd8pa45z73bz1kszpp01yw6x5ljkjk2hx7wl800any6465";
- };
- inherit perl; ⑥
-}
-```
-
-This file is actually already in the Nix Packages collection in
-`pkgs/applications/misc/hello/ex-1/default.nix`. It is customary to
-place each package in a separate directory and call the single Nix
-expression in that directory `default.nix`. The file has the following
-elements (referenced from the figure by number):
-
-1. This states that the expression is a *function* that expects to be
- called with three arguments: `stdenv`, `fetchurl`, and `perl`. They
- are needed to build Hello, but we don't know how to build them here;
- that's why they are function arguments. `stdenv` is a package that
- is used by almost all Nix Packages; it provides a
- “standard” environment consisting of the things you would expect
- in a basic Unix environment: a C/C++ compiler (GCC, to be precise),
- the Bash shell, fundamental Unix tools such as `cp`, `grep`, `tar`,
- etc. `fetchurl` is a function that downloads files. `perl` is the
- Perl interpreter.
-
- Nix functions generally have the form `{ x, y, ..., z }: e` where
- `x`, `y`, etc. are the names of the expected arguments, and where
- *e* is the body of the function. So here, the entire remainder of
- the file is the body of the function; when given the required
- arguments, the body should describe how to build an instance of
- the Hello package.
-
-2. So we have to build a package. Building something from other stuff
- is called a *derivation* in Nix (as opposed to sources, which are
- built by humans instead of computers). We perform a derivation by
- calling `stdenv.mkDerivation`. `mkDerivation` is a function
- provided by `stdenv` that builds a package from a set of
- *attributes*. A set is just a list of key/value pairs where each
- key is a string and each value is an arbitrary Nix
- expression. They take the general form `{ name1 = expr1; ...
- nameN = exprN; }`.
-
-3. The attribute `name` specifies the symbolic name and version of
- the package. Nix doesn't really care about these things, but they
- are used by for instance `nix-env -q` to show a “human-readable”
- name for packages. This attribute is required by `mkDerivation`.
-
-4. The attribute `builder` specifies the builder. This attribute can
- sometimes be omitted, in which case `mkDerivation` will fill in a
- default builder (which does a `configure; make; make install`, in
- essence). Hello is sufficiently simple that the default builder
- would suffice, but in this case, we will show an actual builder
- for educational purposes. The value `./builder.sh` refers to the
- shell script shown in the [next section](build-script.md),
- discussed below.
-
-5. The builder has to know what the sources of the package are. Here,
- the attribute `src` is bound to the result of a call to the
- `fetchurl` function. Given a URL and a SHA-256 hash of the expected
- contents of the file at that URL, this function builds a derivation
- that downloads the file and checks its hash. So the sources are a
- dependency that like all other dependencies is built before Hello
- itself is built.
-
- Instead of `src` any other name could have been used, and in fact
- there can be any number of sources (bound to different attributes).
- However, `src` is customary, and it's also expected by the default
- builder (which we don't use in this example).
-
-6. Since the derivation requires Perl, we have to pass the value of the
- `perl` function argument to the builder. All attributes in the set
- are actually passed as environment variables to the builder, so
- declaring an attribute
-
- ```nix
- perl = perl;
- ```
-
- will do the trick: it binds an attribute `perl` to the function
- argument which also happens to be called `perl`. However, it looks a
- bit silly, so there is a shorter syntax. The `inherit` keyword
- causes the specified attributes to be bound to whatever variables
- with the same name happen to be in scope.
diff --git a/doc/manual/src/expressions/generic-builder.md b/doc/manual/src/expressions/generic-builder.md
deleted file mode 100644
index cf26b5f82..000000000
--- a/doc/manual/src/expressions/generic-builder.md
+++ /dev/null
@@ -1,66 +0,0 @@
-# Generic Builder Syntax
-
-Recall that the [build script for GNU Hello](build-script.md) looked
-something like this:
-
-```bash
-PATH=$perl/bin:$PATH
-tar xvfz $src
-cd hello-*
-./configure --prefix=$out
-make
-make install
-```
-
-The builders for almost all Unix packages look like this — set up some
-environment variables, unpack the sources, configure, build, and
-install. For this reason the standard environment provides some Bash
-functions that automate the build process. Here is what a builder using
-the generic build facilities looks like:
-
-```bash
-buildInputs="$perl" ①
-
-source $stdenv/setup ②
-
-genericBuild ③
-```
-
-Here is what each line means:
-
-1. The `buildInputs` variable tells `setup` to use the indicated
- packages as “inputs”. This means that if a package provides a `bin`
- subdirectory, it's added to `PATH`; if it has a `include`
- subdirectory, it's added to GCC's header search path; and so on.
- (This is implemented in a modular way: `setup` tries to source the
- file `pkg/nix-support/setup-hook` of all dependencies. These “setup
- hooks” can then set up whatever environment variables they want; for
- instance, the setup hook for Perl sets the `PERL5LIB` environment
- variable to contain the `lib/site_perl` directories of all inputs.)
-
-2. The function `genericBuild` is defined in the file `$stdenv/setup`.
-
-3. The final step calls the shell function `genericBuild`, which
- performs the steps that were done explicitly in the previous build
- script. The generic builder is smart enough to figure out whether
- to unpack the sources using `gzip`, `bzip2`, etc. It can be
- customised in many ways; see the Nixpkgs manual for details.
-
-Discerning readers will note that the `buildInputs` could just as well
-have been set in the Nix expression, like this:
-
-```nix
- buildInputs = [ perl ];
-```
-
-The `perl` attribute can then be removed, and the builder becomes even
-shorter:
-
-```bash
-source $stdenv/setup
-genericBuild
-```
-
-In fact, `mkDerivation` provides a default builder that looks exactly
-like that, so it is actually possible to omit the builder for Hello
-entirely.
diff --git a/doc/manual/src/expressions/language-values.md b/doc/manual/src/expressions/language-values.md
deleted file mode 100644
index 75ae9f2eb..000000000
--- a/doc/manual/src/expressions/language-values.md
+++ /dev/null
@@ -1,251 +0,0 @@
-# Values
-
-## Simple Values
-
-Nix has the following basic data types:
-
- - *Strings* can be written in three ways.
-
- The most common way is to enclose the string between double quotes,
- e.g., `"foo bar"`. Strings can span multiple lines. The special
- characters `"` and `\` and the character sequence `${` must be
- escaped by prefixing them with a backslash (`\`). Newlines, carriage
- returns and tabs can be written as `\n`, `\r` and `\t`,
- respectively.
-
- You can include the result of an expression into a string by
- enclosing it in `${...}`, a feature known as *antiquotation*. The
- enclosed expression must evaluate to something that can be coerced
- into a string (meaning that it must be a string, a path, or a
- derivation). For instance, rather than writing
-
- ```nix
- "--with-freetype2-library=" + freetype + "/lib"
- ```
-
- (where `freetype` is a derivation), you can instead write the more
- natural
-
- ```nix
- "--with-freetype2-library=${freetype}/lib"
- ```
-
- The latter is automatically translated to the former. A more
- complicated example (from the Nix expression for
- [Qt](http://www.trolltech.com/products/qt)):
-
- ```nix
- configureFlags = "
- -system-zlib -system-libpng -system-libjpeg
- ${if openglSupport then "-dlopen-opengl
- -L${mesa}/lib -I${mesa}/include
- -L${libXmu}/lib -I${libXmu}/include" else ""}
- ${if threadSupport then "-thread" else "-no-thread"}
- ";
- ```
-
- Note that Nix expressions and strings can be arbitrarily nested; in
- this case the outer string contains various antiquotations that
- themselves contain strings (e.g., `"-thread"`), some of which in
- turn contain expressions (e.g., `${mesa}`).
-
- The second way to write string literals is as an *indented string*,
- which is enclosed between pairs of *double single-quotes*, like so:
-
- ```nix
- ''
- This is the first line.
- This is the second line.
- This is the third line.
- ''
- ```
-
- This kind of string literal intelligently strips indentation from
- the start of each line. To be precise, it strips from each line a
- number of spaces equal to the minimal indentation of the string as a
- whole (disregarding the indentation of empty lines). For instance,
- the first and second line are indented two spaces, while the third
- line is indented four spaces. Thus, two spaces are stripped from
- each line, so the resulting string is
-
- ```nix
- "This is the first line.\nThis is the second line.\n This is the third line.\n"
- ```
-
- Note that the whitespace and newline following the opening `''` is
- ignored if there is no non-whitespace text on the initial line.
-
- Antiquotation (`${expr}`) is supported in indented strings.
-
- Since `${` and `''` have special meaning in indented strings, you
- need a way to quote them. `$` can be escaped by prefixing it with
- `''` (that is, two single quotes), i.e., `''$`. `''` can be escaped
- by prefixing it with `'`, i.e., `'''`. `$` removes any special
- meaning from the following `$`. Linefeed, carriage-return and tab
- characters can be written as `''\n`, `''\r`, `''\t`, and `''\`
- escapes any other character.
-
- Indented strings are primarily useful in that they allow multi-line
- string literals to follow the indentation of the enclosing Nix
- expression, and that less escaping is typically necessary for
- strings representing languages such as shell scripts and
- configuration files because `''` is much less common than `"`.
- Example:
-
- ```nix
- stdenv.mkDerivation {
- ...
- postInstall =
- ''
- mkdir $out/bin $out/etc
- cp foo $out/bin
- echo "Hello World" > $out/etc/foo.conf
- ${if enableBar then "cp bar $out/bin" else ""}
- '';
- ...
- }
- ```
-
- Finally, as a convenience, *URIs* as defined in appendix B of
- [RFC 2396](http://www.ietf.org/rfc/rfc2396.txt) can be written *as
- is*, without quotes. For instance, the string
- `"http://example.org/foo.tar.bz2"` can also be written as
- `http://example.org/foo.tar.bz2`.
-
- - Numbers, which can be *integers* (like `123`) or *floating point*
- (like `123.43` or `.27e13`).
-
- Numbers are type-compatible: pure integer operations will always
- return integers, whereas any operation involving at least one
- floating point number will have a floating point number as a result.
-
- - *Paths*, e.g., `/bin/sh` or `./builder.sh`. A path must contain at
- least one slash to be recognised as such. For instance, `builder.sh`
- is not a path: it's parsed as an expression that selects the
- attribute `sh` from the variable `builder`. If the file name is
- relative, i.e., if it does not begin with a slash, it is made
- absolute at parse time relative to the directory of the Nix
- expression that contained it. For instance, if a Nix expression in
- `/foo/bar/bla.nix` refers to `../xyzzy/fnord.nix`, the absolute path
- is `/foo/xyzzy/fnord.nix`.
-
- If the first component of a path is a `~`, it is interpreted as if
- the rest of the path were relative to the user's home directory.
- e.g. `~/foo` would be equivalent to `/home/edolstra/foo` for a user
- whose home directory is `/home/edolstra`.
-
- Paths can also be specified between angle brackets, e.g.
- `<nixpkgs>`. This means that the directories listed in the
- environment variable `NIX_PATH` will be searched for the given file
- or directory name.
-
- Antiquotation is supported in any paths except those in angle brackets.
- `./${foo}-${bar}.nix` is a more convenient way of writing
- `./. + "/" + foo + "-" + bar + ".nix"` or `./. + "/${foo}-${bar}.nix"`. At
- least one slash must appear *before* any antiquotations for this to be
- recognized as a path. `a.${foo}/b.${bar}` is a syntactically valid division
- operation. `./a.${foo}/b.${bar}` is a path.
-
- - *Booleans* with values `true` and `false`.
-
- - The null value, denoted as `null`.
-
-## Lists
-
-Lists are formed by enclosing a whitespace-separated list of values
-between square brackets. For example,
-
-```nix
-[ 123 ./foo.nix "abc" (f { x = y; }) ]
-```
-
-defines a list of four elements, the last being the result of a call to
-the function `f`. Note that function calls have to be enclosed in
-parentheses. If they had been omitted, e.g.,
-
-```nix
-[ 123 ./foo.nix "abc" f { x = y; } ]
-```
-
-the result would be a list of five elements, the fourth one being a
-function and the fifth being a set.
-
-Note that lists are only lazy in values, and they are strict in length.
-
-## Sets
-
-Sets are really the core of the language, since ultimately the Nix
-language is all about creating derivations, which are really just sets
-of attributes to be passed to build scripts.
-
-Sets are just a list of name/value pairs (called *attributes*) enclosed
-in curly brackets, where each value is an arbitrary expression
-terminated by a semicolon. For example:
-
-```nix
-{ x = 123;
- text = "Hello";
- y = f { bla = 456; };
-}
-```
-
-This defines a set with attributes named `x`, `text`, `y`. The order of
-the attributes is irrelevant. An attribute name may only occur once.
-
-Attributes can be selected from a set using the `.` operator. For
-instance,
-
-```nix
-{ a = "Foo"; b = "Bar"; }.a
-```
-
-evaluates to `"Foo"`. It is possible to provide a default value in an
-attribute selection using the `or` keyword. For example,
-
-```nix
-{ a = "Foo"; b = "Bar"; }.c or "Xyzzy"
-```
-
-will evaluate to `"Xyzzy"` because there is no `c` attribute in the set.
-
-You can use arbitrary double-quoted strings as attribute names:
-
-```nix
-{ "foo ${bar}" = 123; "nix-1.0" = 456; }."foo ${bar}"
-```
-
-This will evaluate to `123` (Assuming `bar` is antiquotable). In the
-case where an attribute name is just a single antiquotation, the quotes
-can be dropped:
-
-```nix
-{ foo = 123; }.${bar} or 456
-```
-
-This will evaluate to `123` if `bar` evaluates to `"foo"` when coerced
-to a string and `456` otherwise (again assuming `bar` is antiquotable).
-
-In the special case where an attribute name inside of a set declaration
-evaluates to `null` (which is normally an error, as `null` is not
-antiquotable), that attribute is simply not added to the set:
-
-```nix
-{ ${if foo then "bar" else null} = true; }
-```
-
-This will evaluate to `{}` if `foo` evaluates to `false`.
-
-A set that has a `__functor` attribute whose value is callable (i.e. is
-itself a function or a set with a `__functor` attribute whose value is
-callable) can be applied as if it were a function, with the set itself
-passed in first , e.g.,
-
-```nix
-let add = { __functor = self: x: x + self.x; };
- inc = add // { x = 1; };
-in inc 1
-```
-
-evaluates to `2`. This can be used to attach metadata to a function
-without the caller needing to treat it specially, or to implement a form
-of object-oriented programming, for example.
diff --git a/doc/manual/src/expressions/simple-building-testing.md b/doc/manual/src/expressions/simple-building-testing.md
deleted file mode 100644
index 7f0d8f841..000000000
--- a/doc/manual/src/expressions/simple-building-testing.md
+++ /dev/null
@@ -1,61 +0,0 @@
-# Building and Testing
-
-You can now try to build Hello. Of course, you could do `nix-env -f . -iA
-hello`, but you may not want to install a possibly broken package just
-yet. The best way to test the package is by using the command
-`nix-build`, which builds a Nix expression and creates a symlink named
-`result` in the current directory:
-
-```console
-$ nix-build -A hello
-building path `/nix/store/632d2b22514d...-hello-2.1.1'
-hello-2.1.1/
-hello-2.1.1/intl/
-hello-2.1.1/intl/ChangeLog
-...
-
-$ ls -l result
-lrwxrwxrwx ... 2006-09-29 10:43 result -> /nix/store/632d2b22514d...-hello-2.1.1
-
-$ ./result/bin/hello
-Hello, world!
-```
-
-The `-A` option selects the `hello` attribute. This is faster than
-using the symbolic package name specified by the `name` attribute
-(which also happens to be `hello`) and is unambiguous (there can be
-multiple packages with the symbolic name `hello`, but there can be
-only one attribute in a set named `hello`).
-
-`nix-build` registers the `./result` symlink as a garbage collection
-root, so unless and until you delete the `./result` symlink, the output
-of the build will be safely kept on your system. You can use
-`nix-build`’s `-o` switch to give the symlink another name.
-
-Nix has transactional semantics. Once a build finishes successfully, Nix
-makes a note of this in its database: it registers that the path denoted
-by `out` is now “valid”. If you try to build the derivation again, Nix
-will see that the path is already valid and finish immediately. If a
-build fails, either because it returns a non-zero exit code, because Nix
-or the builder are killed, or because the machine crashes, then the
-output paths will not be registered as valid. If you try to build the
-derivation again, Nix will remove the output paths if they exist (e.g.,
-because the builder died half-way through `make
-install`) and try again. Note that there is no “negative caching”: Nix
-doesn't remember that a build failed, and so a failed build can always
-be repeated. This is because Nix cannot distinguish between permanent
-failures (e.g., a compiler error due to a syntax error in the source)
-and transient failures (e.g., a disk full condition).
-
-Nix also performs locking. If you run multiple Nix builds
-simultaneously, and they try to build the same derivation, the first Nix
-instance that gets there will perform the build, while the others block
-(or perform other derivations if available) until the build finishes:
-
-```console
-$ nix-build -A hello
-waiting for lock on `/nix/store/0h5b7hp8d4hqfrw8igvx97x1xawrjnac-hello-2.1.1x'
-```
-
-So it is always safe to run multiple instances of Nix in parallel (which
-isn’t the case with, say, `make`).
diff --git a/doc/manual/src/expressions/simple-expression.md b/doc/manual/src/expressions/simple-expression.md
deleted file mode 100644
index 857f71b9b..000000000
--- a/doc/manual/src/expressions/simple-expression.md
+++ /dev/null
@@ -1,23 +0,0 @@
-# A Simple Nix Expression
-
-This section shows how to add and test the [GNU Hello
-package](http://www.gnu.org/software/hello/hello.html) to the Nix
-Packages collection. Hello is a program that prints out the text “Hello,
-world\!”.
-
-To add a package to the Nix Packages collection, you generally need to
-do three things:
-
-1. Write a Nix expression for the package. This is a file that
- describes all the inputs involved in building the package, such as
- dependencies, sources, and so on.
-
-2. Write a *builder*. This is a shell script that builds the package
- from the inputs. (In fact, it can be written in any language, but
- typically it's a `bash` shell script.)
-
-3. Add the package to the file `pkgs/top-level/all-packages.nix`. The
- Nix expression written in the first step is a *function*; it
- requires other packages in order to build it. In this step you put
- it all together, i.e., you call the function with the right
- arguments to build the actual package.
diff --git a/doc/manual/src/expressions/writing-nix-expressions.md b/doc/manual/src/expressions/writing-nix-expressions.md
deleted file mode 100644
index 5664108e7..000000000
--- a/doc/manual/src/expressions/writing-nix-expressions.md
+++ /dev/null
@@ -1,12 +0,0 @@
-This chapter shows you how to write Nix expressions, which instruct Nix
-how to build packages. It starts with a simple example (a Nix expression
-for GNU Hello), and then moves on to a more in-depth look at the Nix
-expression language.
-
-> **Note**
->
-> This chapter is mostly about the Nix expression language. For more
-> extensive information on adding packages to the Nix Packages
-> collection (such as functions in the standard environment and coding
-> conventions), please consult [its
-> manual](http://nixos.org/nixpkgs/manual/).
diff --git a/doc/manual/src/glossary.md b/doc/manual/src/glossary.md
index 3448b971b..70a0eb994 100644
--- a/doc/manual/src/glossary.md
+++ b/doc/manual/src/glossary.md
@@ -3,14 +3,48 @@
- [derivation]{#gloss-derivation}\
A description of a build action. The result of a derivation is a
store object. Derivations are typically specified in Nix expressions
- using the [`derivation` primitive](expressions/derivations.md). These are
+ using the [`derivation` primitive](language/derivations.md). These are
translated into low-level *store derivations* (implicitly by
`nix-env` and `nix-build`, or explicitly by `nix-instantiate`).
+ - [content-addressed derivation]{#gloss-content-addressed-derivation}\
+ A derivation which has the
+ [`__contentAddressed`](language/advanced-attributes.md#adv-attr-__contentAddressed)
+ attribute set to `true`.
+
+ - [fixed-output derivation]{#gloss-fixed-output-derivation}\
+ A derivation which includes the
+ [`outputHash`](language/advanced-attributes.md#adv-attr-outputHash) attribute.
+
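+    For example, the result of a Nixpkgs `fetchurl` call like the following is a
+    fixed-output derivation, since the given hash pins its expected output:
+
+    ```nix
+    fetchurl {
+      url = "ftp://ftp.nluug.nl/pub/gnu/hello/hello-2.1.1.tar.gz";
+      sha256 = "1md7jsfd8pa45z73bz1kszpp01yw6x5ljkjk2hx7wl800any6465";
+    }
+    ```
+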
- [store]{#gloss-store}\
The location in the file system where store objects live. Typically
`/nix/store`.
+ From the perspective of the location where Nix is
+ invoked, the Nix store can be referred to
+ as a "_local_" or a "_remote_" one:
+
+ + A *local store* exists on the filesystem of
+ the machine where Nix is invoked. You can use other
+ local stores by passing the `--store` flag to the
+ `nix` command. Local stores can be used for building derivations.
+
+ + A *remote store* exists anywhere other than the
+ local filesystem. One example is the `/nix/store`
+ directory on another machine, accessed via `ssh` or
+ served by the `nix-serve` Perl script.
+
+ - [chroot store]{#gloss-chroot-store}\
+ A local store whose canonical path is anything other than `/nix/store`.
+
+ - [binary cache]{#gloss-binary-cache}\
+ A *binary cache* is a Nix store which uses a different format: its
+ metadata and signatures are kept in `.narinfo` files rather than in a
+ Nix database. This different format simplifies serving store objects
+ over the network, but cannot host builds. Examples of binary caches
+ include S3 buckets and the [NixOS binary
+ cache](https://cache.nixos.org).
+
- [store path]{#gloss-store-path}\
The location in the file system of a store object, i.e., an
immediate child of the Nix store directory.
@@ -22,6 +56,19 @@
derivation outputs (objects produced by running a build action), or
derivations (files describing a build action).
+ - [input-addressed store object]{#gloss-input-addressed-store-object}\
+ A store object produced by building a
+ non-[content-addressed](#gloss-content-addressed-derivation),
+ non-[fixed-output](#gloss-fixed-output-derivation)
+ derivation.
+
+ - [output-addressed store object]{#gloss-output-addressed-store-object}\
+ A store object whose store path is determined by hashing its content. This
+ includes derivations, the outputs of
+ [content-addressed derivations](#gloss-content-addressed-derivation),
+ and the outputs of
+ [fixed-output derivations](#gloss-fixed-output-derivation).
+
- [substitute]{#gloss-substitute}\
A substitute is a command invocation stored in the Nix database that
describes how to build a store object, bypassing the normal build
@@ -29,6 +76,11 @@
store object by downloading a pre-built version of the store object
from some server.
+ - [substituter]{#gloss-substituter}\
+ A *substituter* is an additional store from which Nix will
+ copy store objects it doesn't have. For details, see the
+ [`substituters` option](command-ref/conf-file.html#conf-substituters).
+
- [purity]{#gloss-purity}\
The assumption that equal Nix derivations when run always produce
the same output. This cannot be guaranteed in general (e.g., a
diff --git a/doc/manual/src/installation/installing-binary.md b/doc/manual/src/installation/installing-binary.md
index 9fb9c80c3..2d007ca1b 100644
--- a/doc/manual/src/installation/installing-binary.md
+++ b/doc/manual/src/installation/installing-binary.md
@@ -13,7 +13,7 @@ for your platform:
- multi-user on macOS
> **Notes on read-only filesystem root in macOS 10.15 Catalina +**
- >
+ >
> - It took some time to support this cleanly. You may see posts,
> examples, and tutorials using obsolete workarounds.
> - Supporting it cleanly made macOS installs too complex to qualify
@@ -31,8 +31,8 @@ $ sh <(curl -L https://nixos.org/nix/install) --no-daemon
```
This will perform a single-user installation of Nix, meaning that `/nix`
-is owned by the invoking user. You should run this under your usual user
-account, *not* as root. The script will invoke `sudo` to create `/nix`
+is owned by the invoking user. You can run this under your usual user
+account or root. The script will invoke `sudo` to create `/nix`
if it doesn’t already exist. If you don’t have `sudo`, you should
manually create `/nix` first as root, e.g.:
@@ -71,11 +71,11 @@ $ sh <(curl -L https://nixos.org/nix/install) --daemon
The multi-user installation of Nix will create build users between the
user IDs 30001 and 30032, and a group with the group ID 30000. You
-should run this under your usual user account, *not* as root. The script
+can run this under your usual user account or root. The script
will invoke `sudo` as needed.
> **Note**
->
+>
> If you need Nix to use a different group ID or user ID set, you will
> have to download the tarball manually and [edit the install
> script](#installing-from-a-binary-tarball).
@@ -148,7 +148,8 @@ and `/etc/zshrc` which you may remove.
This will remove all the build users that no longer serve a purpose.
4. Edit fstab using `sudo vifs` to remove the line mounting the Nix Store
- volume on `/nix`, which looks like this,
+ volume on `/nix`, which looks like
+ `UUID=<uuid> /nix apfs rw,noauto,nobrowse,suid,owners` or
`LABEL=Nix\040Store /nix apfs rw,nobrowse`. This will prevent automatic
mounting of the Nix Store volume.
@@ -167,7 +168,7 @@ and `/etc/zshrc` which you may remove.
removed next.
7. Remove the Nix Store volume:
-
+
```console
sudo diskutil apfs deleteVolume /nix
```
@@ -175,8 +176,20 @@ and `/etc/zshrc` which you may remove.
This will remove the Nix Store volume and everything that was added to the
store.
+ If the output indicates that the command couldn't remove the volume, you should
+ make sure you don't have an _unmounted_ Nix Store volume. Look for a
+ "Nix Store" volume in the output of the following command:
+
+ ```console
+ diskutil list
+ ```
+
+ If you _do_ see a "Nix Store" volume, delete it by re-running the
+ `diskutil apfs deleteVolume` command, but replace `/nix` with the store volume's `diskXsY`
+ identifier.
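+   For example, assuming the volume shows up as `disk1s7` (an illustrative
+   identifier):
+
+   ```console
+   sudo diskutil apfs deleteVolume disk1s7
+   ```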
+
> **Note**
->
+>
> After you complete the steps here, you will still have an empty `/nix`
> directory. This is an expected sign of a successful uninstall. The empty
> `/nix` directory will disappear the next time you reboot.
@@ -191,8 +204,7 @@ and `/etc/zshrc` which you may remove.
<!-- Note: anchors above to catch permalinks to old explanations -->
We believe we have ironed out how to cleanly support the read-only root
-on modern macOS. New installs will do this automatically, and you can
-also re-run a new installer to convert your existing setup.
+on modern macOS. New installs will do this automatically.
This section previously detailed the situation, options, and trade-offs,
but it now only outlines what the installer does. You don't need to know
diff --git a/doc/manual/src/expressions/advanced-attributes.md b/doc/manual/src/language/advanced-attributes.md
index 2e7e80ed0..2e7e80ed0 100644
--- a/doc/manual/src/expressions/advanced-attributes.md
+++ b/doc/manual/src/language/advanced-attributes.md
diff --git a/doc/manual/src/expressions/builtin-constants.md b/doc/manual/src/language/builtin-constants.md
index 78d066a82..78d066a82 100644
--- a/doc/manual/src/expressions/builtin-constants.md
+++ b/doc/manual/src/language/builtin-constants.md
diff --git a/doc/manual/src/expressions/builtins-prefix.md b/doc/manual/src/language/builtins-prefix.md
index c631a8453..c631a8453 100644
--- a/doc/manual/src/expressions/builtins-prefix.md
+++ b/doc/manual/src/language/builtins-prefix.md
diff --git a/doc/manual/src/expressions/builtins-suffix.md b/doc/manual/src/language/builtins-suffix.md
index a74db2857..a74db2857 100644
--- a/doc/manual/src/expressions/builtins-suffix.md
+++ b/doc/manual/src/language/builtins-suffix.md
diff --git a/doc/manual/src/expressions/language-constructs.md b/doc/manual/src/language/constructs.md
index 1c01f2cc7..1c01f2cc7 100644
--- a/doc/manual/src/expressions/language-constructs.md
+++ b/doc/manual/src/language/constructs.md
diff --git a/doc/manual/src/expressions/derivations.md b/doc/manual/src/language/derivations.md
index 3391ec0d8..3391ec0d8 100644
--- a/doc/manual/src/expressions/derivations.md
+++ b/doc/manual/src/language/derivations.md
diff --git a/doc/manual/src/language/index.md b/doc/manual/src/language/index.md
new file mode 100644
index 000000000..a4b402f8b
--- /dev/null
+++ b/doc/manual/src/language/index.md
@@ -0,0 +1,33 @@
+# Nix Language
+
+The Nix language is
+
+- *domain-specific*
+
+ It only exists for the Nix package manager:
+ to describe packages and configurations as well as their variants and compositions.
+ It is not intended for general purpose use.
+
+- *declarative*
+
+ There is no notion of executing sequential steps.
+ Dependencies between operations are established only through data.
+
+- *pure*
+
+ Values cannot change during computation.
+ Functions always produce the same output if their input does not change.
+
+- *functional*
+
+ Functions are like any other value.
+ Functions can be assigned to names, taken as arguments, or returned by functions.
+
+- *lazy*
+
+ Expressions are only evaluated when their value is needed.
+
+- *dynamically typed*
+
+ Type errors are only detected when expressions are evaluated.
+
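+As a small illustration (all names below are arbitrary), a single expression
+can touch several of these properties at once:
+
+```nix
+let
+  increment = x: x + 1;         # functional: functions are ordinary values
+  unused = abort "never runs";  # lazy: never demanded, so never evaluated
+in increment 2                  # evaluates to 3
+```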
diff --git a/doc/manual/src/expressions/language-operators.md b/doc/manual/src/language/operators.md
index 268b44f4c..32398189d 100644
--- a/doc/manual/src/expressions/language-operators.md
+++ b/doc/manual/src/language/operators.md
@@ -1,6 +1,6 @@
# Operators
-The table below lists the operators in the Nix expression language, in
+The table below lists the operators in the Nix language, in
order of precedence (from strongest to weakest binding).
| Name | Syntax | Associativity | Description | Precedence |
diff --git a/doc/manual/src/language/values.md b/doc/manual/src/language/values.md
new file mode 100644
index 000000000..f09400d02
--- /dev/null
+++ b/doc/manual/src/language/values.md
@@ -0,0 +1,261 @@
+# Data Types
+
+## Primitives
+
+- <a id="type-string" href="#type-string">String</a>
+
+ *Strings* can be written in three ways.
+
+ The most common way is to enclose the string between double quotes,
+ e.g., `"foo bar"`. Strings can span multiple lines. The special
+ characters `"` and `\` and the character sequence `${` must be
+ escaped by prefixing them with a backslash (`\`). Newlines, carriage
+ returns and tabs can be written as `\n`, `\r` and `\t`,
+ respectively.
+
+ You can include the result of an expression into a string by
+ enclosing it in `${...}`, a feature known as *antiquotation*. The
+ enclosed expression must evaluate to something that can be coerced
+ into a string (meaning that it must be a string, a path, or a
+ derivation). For instance, rather than writing
+
+ ```nix
+ "--with-freetype2-library=" + freetype + "/lib"
+ ```
+
+ (where `freetype` is a derivation), you can instead write the more
+ natural
+
+ ```nix
+ "--with-freetype2-library=${freetype}/lib"
+ ```
+
+ The latter is automatically translated to the former. A more
+ complicated example (from the Nix expression for
+ [Qt](http://www.trolltech.com/products/qt)):
+
+ ```nix
+ configureFlags = "
+ -system-zlib -system-libpng -system-libjpeg
+ ${if openglSupport then "-dlopen-opengl
+ -L${mesa}/lib -I${mesa}/include
+ -L${libXmu}/lib -I${libXmu}/include" else ""}
+ ${if threadSupport then "-thread" else "-no-thread"}
+ ";
+ ```
+
+ Note that Nix expressions and strings can be arbitrarily nested; in
+ this case the outer string contains various antiquotations that
+ themselves contain strings (e.g., `"-thread"`), some of which in
+ turn contain expressions (e.g., `${mesa}`).
+
+ The second way to write string literals is as an *indented string*,
+ which is enclosed between pairs of *double single-quotes*, like so:
+
+ ```nix
+ ''
+ This is the first line.
+ This is the second line.
+ This is the third line.
+ ''
+ ```
+
+ This kind of string literal intelligently strips indentation from
+ the start of each line. To be precise, it strips from each line a
+ number of spaces equal to the minimal indentation of the string as a
+ whole (disregarding the indentation of empty lines). For instance,
+ the first and second line are indented two spaces, while the third
+ line is indented four spaces. Thus, two spaces are stripped from
+ each line, so the resulting string is
+
+ ```nix
+ "This is the first line.\nThis is the second line.\n This is the third line.\n"
+ ```
+
+ Note that the whitespace and newline following the opening `''` are
+ ignored if there is no non-whitespace text on the initial line.
+
+ Antiquotation (`${expr}`) is supported in indented strings.
+
+ Since `${` and `''` have special meaning in indented strings, you
+ need a way to quote them. `$` can be escaped by prefixing it with
+ `''` (that is, two single quotes), i.e., `''$`. `''` can be escaped
+ by prefixing it with `'`, i.e., `'''`. `$` removes any special
+ meaning from the following `$`. Linefeed, carriage-return and tab
+ characters can be written as `''\n`, `''\r`, `''\t`, and `''\`
+ escapes any other character.
+
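+  For example, a short illustration of these escapes:
+
+  ```nix
+  ''
+    This costs ''${amount} dollars.
+    Two single quotes: '''
+  ''
+  ```
+
+  This evaluates to a string containing a literal `${amount}` and a literal `''`.
+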
+ Indented strings are primarily useful in that they allow multi-line
+ string literals to follow the indentation of the enclosing Nix
+ expression, and that less escaping is typically necessary for
+ strings representing languages such as shell scripts and
+ configuration files because `''` is much less common than `"`.
+ Example:
+
+ ```nix
+ stdenv.mkDerivation {
+ ...
+ postInstall =
+ ''
+ mkdir $out/bin $out/etc
+ cp foo $out/bin
+ echo "Hello World" > $out/etc/foo.conf
+ ${if enableBar then "cp bar $out/bin" else ""}
+ '';
+ ...
+ }
+ ```
+
+ Finally, as a convenience, *URIs* as defined in appendix B of
+ [RFC 2396](http://www.ietf.org/rfc/rfc2396.txt) can be written *as
+ is*, without quotes. For instance, the string
+ `"http://example.org/foo.tar.bz2"` can also be written as
+ `http://example.org/foo.tar.bz2`.
+
+- <a id="type-number" href="#type-number">Number</a>
+
+ Numbers, which can be *integers* (like `123`) or *floating point*
+ (like `123.43` or `.27e13`).
+
+ Numbers are type-compatible: pure integer operations will always
+ return integers, whereas any operation involving at least one
+ floating point number will have a floating point number as a result.
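+  For example (illustrating this rule only):
+
+  ```nix
+  [
+    (1 + 2)    # pure integer addition, yields the integer 3
+    (1 + 2.0)  # mixed addition, yields the floating point number 3.0
+  ]
+  ```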
+
+- <a id="type-path" href="#type-path">Path</a>
+
+ *Paths*, e.g., `/bin/sh` or `./builder.sh`. A path must contain at
+ least one slash to be recognised as such. For instance, `builder.sh`
+ is not a path: it's parsed as an expression that selects the
+ attribute `sh` from the variable `builder`. If the file name is
+ relative, i.e., if it does not begin with a slash, it is made
+ absolute at parse time relative to the directory of the Nix
+ expression that contained it. For instance, if a Nix expression in
+ `/foo/bar/bla.nix` refers to `../xyzzy/fnord.nix`, the absolute path
+ is `/foo/xyzzy/fnord.nix`.
+
+ If the first component of a path is a `~`, it is interpreted as if
+ the rest of the path were relative to the user's home directory.
+ For example, `~/foo` would be equivalent to `/home/edolstra/foo` for a user
+ whose home directory is `/home/edolstra`.
+
+ Paths can also be specified between angle brackets, e.g.
+ `<nixpkgs>`. This means that the directories listed in the
+ environment variable `NIX_PATH` will be searched for the given file
+ or directory name.
+
+ Antiquotation is supported in any paths except those in angle brackets.
+ `./${foo}-${bar}.nix` is a more convenient way of writing
+ `./. + "/" + foo + "-" + bar + ".nix"` or `./. + "/${foo}-${bar}.nix"`. At
+ least one slash must appear *before* any antiquotations for this to be
+ recognized as a path. `a.${foo}/b.${bar}` is a syntactically valid division
+ operation. `./a.${foo}/b.${bar}` is a path.
+
+- <a id="type-boolean" href="#type-boolean">Boolean</a>
+
+ *Booleans* with values `true` and `false`.
+
+- <a id="type-null" href="#type-null">Null</a>
+
+ The null value, denoted as `null`.
+
+## List
+
+Lists are formed by enclosing a whitespace-separated list of values
+between square brackets. For example,
+
+```nix
+[ 123 ./foo.nix "abc" (f { x = y; }) ]
+```
+
+defines a list of four elements, the last being the result of a call to
+the function `f`. Note that function calls have to be enclosed in
+parentheses. If they had been omitted, e.g.,
+
+```nix
+[ 123 ./foo.nix "abc" f { x = y; } ]
+```
+
+the result would be a list of five elements, the fourth one being a
+function and the fifth being a set.
+
+Note that lists are only lazy in values, and they are strict in length.
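+For example, taking the length of a list never forces its elements, so the
+following evaluates to `3` without raising the error:
+
+```nix
+builtins.length [ 1 (throw "not evaluated") 3 ]
+```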
+
+## Attribute Set
+
+An attribute set is a collection of name-value pairs (called *attributes*) enclosed in curly brackets (`{ }`).
+
+Names and values are separated by an equal sign (`=`).
+Each value is an arbitrary expression terminated by a semicolon (`;`).
+
+Attributes can appear in any order.
+An attribute name may only occur once.
+
+Example:
+
+```nix
+{
+ x = 123;
+ text = "Hello";
+ y = f { bla = 456; };
+}
+```
+
+This defines a set with attributes named `x`, `text`, `y`.
+
+Attributes can be selected from a set using the `.` operator. For
+instance,
+
+```nix
+{ a = "Foo"; b = "Bar"; }.a
+```
+
+evaluates to `"Foo"`. It is possible to provide a default value in an
+attribute selection using the `or` keyword. For example,
+
+```nix
+{ a = "Foo"; b = "Bar"; }.c or "Xyzzy"
+```
+
+will evaluate to `"Xyzzy"` because there is no `c` attribute in the set.
+
+You can use arbitrary double-quoted strings as attribute names:
+
+```nix
+{ "foo ${bar}" = 123; "nix-1.0" = 456; }."foo ${bar}"
+```
+
+This will evaluate to `123` (assuming `bar` is antiquotable). In the
+case where an attribute name is just a single antiquotation, the quotes
+can be dropped:
+
+```nix
+{ foo = 123; }.${bar} or 456
+```
+
+This will evaluate to `123` if `bar` evaluates to `"foo"` when coerced
+to a string and `456` otherwise (again assuming `bar` is antiquotable).
+
+In the special case where an attribute name inside of a set declaration
+evaluates to `null` (which is normally an error, as `null` is not
+antiquotable), that attribute is simply not added to the set:
+
+```nix
+{ ${if foo then "bar" else null} = true; }
+```
+
+This will evaluate to `{}` if `foo` evaluates to `false`.
+
+A set that has a `__functor` attribute whose value is callable (i.e. is
+itself a function or a set with a `__functor` attribute whose value is
+callable) can be applied as if it were a function, with the set itself
+passed in first, e.g.,
+
+```nix
+let add = { __functor = self: x: x + self.x; };
+ inc = add // { x = 1; };
+in inc 1
+```
+
+evaluates to `2`. This can be used to attach metadata to a function
+without the caller needing to treat it specially, or to implement a form
+of object-oriented programming, for example.
diff --git a/doc/manual/src/package-management/package-management.md b/doc/manual/src/package-management/package-management.md
index bd26a09ab..d528112e2 100644
--- a/doc/manual/src/package-management/package-management.md
+++ b/doc/manual/src/package-management/package-management.md
@@ -1,5 +1,4 @@
This chapter discusses how to do package management with Nix, i.e.,
how to obtain, install, upgrade, and erase packages. This is the
“user’s” perspective of the Nix system — people who want to *create*
-packages should consult the [chapter on writing Nix
-expressions](../expressions/writing-nix-expressions.md).
+packages should consult the chapter on the [Nix language](../language/index.md).
diff --git a/doc/manual/src/release-notes/rl-2.10.md b/doc/manual/src/release-notes/rl-2.10.md
new file mode 100644
index 000000000..b99dbeef0
--- /dev/null
+++ b/doc/manual/src/release-notes/rl-2.10.md
@@ -0,0 +1,31 @@
+# Release 2.10 (2022-07-11)
+
+* `nix repl` now takes installables on the command line, unifying the usage
+ with other commands that use `--file` and `--expr`. Primary breaking change
+ with other commands that use `--file` and `--expr`. The primary breaking change
+ is for the common usage of `nix repl '<nixpkgs>'`, which can be recovered with
+
+ This is currently guarded by the `repl-flake` experimental feature.
+
+* A new function `builtins.traceVerbose` is available. It is similar
+ to `builtins.trace` if the `trace-verbose` setting is set to true,
+ and it is a no-op otherwise.
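+  For example (a sketch mirroring the call form of `builtins.trace`):
+
+  ```nix
+  let x = 40 + 2;
+  in builtins.traceVerbose "x was demanded" x
+  ```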
+
+* `nix search` has a new flag `--exclude` to filter out packages.
+
+* On Linux, if `/nix` doesn't exist and cannot be created and you're
+ not running as root, Nix will automatically use
+ `~/.local/share/nix/root` as a chroot store. This enables non-root
+ users to download the statically linked Nix binary and have it work
+ out of the box, e.g.
+
+ ```
+ # ~/nix run nixpkgs#hello
+ warning: '/nix' does not exists, so Nix will use '/home/ubuntu/.local/share/nix/root' as a chroot store
+ Hello, world!
+ ```
+
+* `flake-registry.json` is now fetched from `channels.nixos.org`.
+
+* Nix can now be built with LTO by passing `--enable-lto` to `configure`.
+ LTO is currently only supported when building with GCC.
diff --git a/doc/manual/src/release-notes/rl-2.11.md b/doc/manual/src/release-notes/rl-2.11.md
new file mode 100644
index 000000000..b322a4e5e
--- /dev/null
+++ b/doc/manual/src/release-notes/rl-2.11.md
@@ -0,0 +1,5 @@
+# Release 2.11 (2022-08-24)
+
+* `nix copy` now copies the store paths in parallel as much as possible (again).
+ This doesn't apply to the `daemon` and `ssh-ng` stores, which copy everything
+ in one batch to avoid latency issues.
diff --git a/doc/manual/src/release-notes/rl-next.md b/doc/manual/src/release-notes/rl-next.md
index 52e3b6240..68f7d1a9d 100644
--- a/doc/manual/src/release-notes/rl-next.md
+++ b/doc/manual/src/release-notes/rl-next.md
@@ -1,4 +1,7 @@
# Release X.Y (202?-??-??)
-* Nix can now be built with LTO by passing `--enable-lto` to `configure`.
- LTO is currently only supported when building with GCC.
+* `<nix/fetchurl.nix>` now accepts an additional argument `impure` which
+ defaults to `false`. If it is set to `true`, the `hash` and `sha256`
+ arguments will be ignored and the resulting derivation will have
+ `__impure` set to `true`, making it an impure derivation.
+
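+  A sketch of what such a call might look like (assuming the experimental
+  impure derivations feature is enabled):
+
+  ```nix
+  import <nix/fetchurl.nix> {
+    url = "https://example.org/latest/data.json";
+    impure = true;
+  }
+  ```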
diff --git a/doc/manual/utils.nix b/doc/manual/utils.nix
index d4b18472f..d0643ef46 100644
--- a/doc/manual/utils.nix
+++ b/doc/manual/utils.nix
@@ -5,6 +5,32 @@ rec {
concatStrings = concatStringsSep "";
+ replaceStringsRec = from: to: string:
+ # recursively replace occurrences of `from` with `to` within `string`
+ # example:
+ # replaceStringsRec "--" "-" "hello-----world"
+ # => "hello-world"
+ let
+ replaced = replaceStrings [ from ] [ to ] string;
+ in
+ if replaced == string then string else replaceStringsRec from to replaced;
+
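+  # collapse runs of three or more newlines into two, i.e. allow at most one blank line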
+ squash = replaceStringsRec "\n\n\n" "\n\n";
+
+ trim = string:
+ # trim trailing spaces and squash non-leading spaces
+ let
+ trimLine = line:
+ let
+ # separate leading spaces from the rest
+ parts = split "(^ *)" line;
+ spaces = head (elemAt parts 1);
+ rest = elemAt parts 2;
+ # drop trailing spaces
+ body = head (split " *$" rest);
+ in spaces + replaceStringsRec " " " " body;
+ in concatStringsSep "\n" (map trimLine (splitLines string));
+
# FIXME: O(n^2)
unique = foldl' (acc: e: if elem e acc then acc else acc ++ [ e ]) [];
diff --git a/docker.nix b/docker.nix
index 0cd64856f..bb2b4e7ff 100644
--- a/docker.nix
+++ b/docker.nix
@@ -2,8 +2,12 @@
, lib ? pkgs.lib
, name ? "nix"
, tag ? "latest"
+, bundleNixpkgs ? true
, channelName ? "nixpkgs"
, channelURL ? "https://nixos.org/channels/nixpkgs-unstable"
+, extraPkgs ? []
+, maxLayers ? 100
+, nixConf ? {}
}:
let
defaultPkgs = with pkgs; [
@@ -23,13 +27,13 @@ let
iana-etc
git
openssh
- ];
+ ] ++ extraPkgs;
users = {
root = {
uid = 0;
- shell = "/bin/bash";
+ shell = "${pkgs.bashInteractive}/bin/bash";
home = "/root";
gid = 0;
};
@@ -121,20 +125,27 @@ let
(lib.attrValues (lib.mapAttrs groupToGroup groups))
);
- nixConf = {
+ defaultNixConf = {
sandbox = "false";
build-users-group = "nixbld";
- trusted-public-keys = "cache.nixos.org-1:6NCHdD59X431o0gWypbMrAURkbJ16ZPMQFGspcDShjY=";
+ trusted-public-keys = [ "cache.nixos.org-1:6NCHdD59X431o0gWypbMrAURkbJ16ZPMQFGspcDShjY=" ];
};
- nixConfContents = (lib.concatStringsSep "\n" (lib.mapAttrsFlatten (n: v: "${n} = ${v}") nixConf)) + "\n";
+
+ nixConfContents = (lib.concatStringsSep "\n" (lib.mapAttrsFlatten (n: v:
+ let
+ vStr = if builtins.isList v then lib.concatStringsSep " " v else v;
+ in
+ "${n} = ${vStr}") (defaultNixConf // nixConf))) + "\n";
baseSystem =
let
nixpkgs = pkgs.path;
- channel = pkgs.runCommand "channel-nixos" { } ''
+ channel = pkgs.runCommand "channel-nixos" { inherit bundleNixpkgs; } ''
mkdir $out
- ln -s ${nixpkgs} $out/nixpkgs
- echo "[]" > $out/manifest.nix
+ if [ "$bundleNixpkgs" ]; then
+ ln -s ${nixpkgs} $out/nixpkgs
+ echo "[]" > $out/manifest.nix
+ fi
'';
rootEnv = pkgs.buildPackages.buildEnv {
name = "root-profile-env";
@@ -229,7 +240,7 @@ let
in
pkgs.dockerTools.buildLayeredImageWithNixDb {
- inherit name tag;
+ inherit name tag maxLayers;
contents = [ baseSystem ];
diff --git a/flake.lock b/flake.lock
index 01e4f506a..a66c9cb1b 100644
--- a/flake.lock
+++ b/flake.lock
@@ -18,11 +18,11 @@
},
"nixpkgs": {
"locked": {
- "lastModified": 1653988320,
- "narHash": "sha256-ZaqFFsSDipZ6KVqriwM34T739+KLYJvNmCWzErjAg7c=",
+ "lastModified": 1657693803,
+ "narHash": "sha256-G++2CJ9u0E7NNTAi9n5G8TdDmGJXcIjkJ3NF8cetQB8=",
"owner": "NixOS",
"repo": "nixpkgs",
- "rev": "2fa57ed190fd6c7c746319444f34b5917666e5c1",
+ "rev": "365e1b3a859281cf11b94f87231adeabbdd878a2",
"type": "github"
},
"original": {
diff --git a/flake.nix b/flake.nix
index d683570af..cc2a48d9c 100644
--- a/flake.nix
+++ b/flake.nix
@@ -23,7 +23,7 @@
crossSystems = [ "armv6l-linux" "armv7l-linux" ];
- stdenvs = [ "gccStdenv" "clangStdenv" "clang11Stdenv" "stdenv" "libcxxStdenv" ];
+ stdenvs = [ "gccStdenv" "clangStdenv" "clang11Stdenv" "stdenv" "libcxxStdenv" "ccacheStdenv" ];
forAllSystems = f: nixpkgs.lib.genAttrs systems (system: f system);
forAllSystemsAndStdenvs = f: forAllSystems (system:
@@ -54,7 +54,7 @@
# we want most of the time and for backwards compatibility
forAllSystems (system: stdenvsPackages.${system} // stdenvsPackages.${system}.stdenvPackages);
- commonDeps = pkgs: with pkgs; rec {
+ commonDeps = { pkgs, isStatic ? false }: with pkgs; rec {
# Use "busybox-sandbox-shell" if present,
# if not (legacy) fallback and hope it's sufficient.
sh = pkgs.busybox-sandbox-shell or (busybox.override {
@@ -85,6 +85,8 @@
lib.optionals stdenv.isLinux [
"--with-boost=${boost}/lib"
"--with-sandbox-shell=${sh}/bin/busybox"
+ ]
+ ++ lib.optionals (stdenv.isLinux && !(isStatic && stdenv.system == "aarch64-linux")) [
"LDFLAGS=-fuse-ld=gold"
];
@@ -106,7 +108,7 @@
++ lib.optionals stdenv.hostPlatform.isLinux [(buildPackages.util-linuxMinimal or buildPackages.utillinuxMinimal)];
buildDeps =
- [ curl
+ [ (curl.override { patchNetrcRegression = true; })
bzip2 xz brotli editline
openssl sqlite
libarchive
@@ -170,7 +172,7 @@
echo "file installer $out/install" >> $out/nix-support/hydra-build-products
'';
- testNixVersions = pkgs: client: daemon: with commonDeps pkgs; with pkgs.lib; pkgs.stdenv.mkDerivation {
+ testNixVersions = pkgs: client: daemon: with commonDeps { inherit pkgs; }; with pkgs.lib; pkgs.stdenv.mkDerivation {
NIX_DAEMON_PACKAGE = daemon;
NIX_CLIENT_PACKAGE = client;
name =
@@ -258,6 +260,7 @@
echo "file binary-dist $fn" >> $out/nix-support/hydra-build-products
tar cvfJ $fn \
--owner=0 --group=0 --mode=u+rw,uga+r \
+ --mtime='1970-01-01' \
--absolute-names \
--hard-dereference \
--transform "s,$TMPDIR/install,$dir/install," \
@@ -281,7 +284,7 @@
# Forward from the previous stage as we don’t want it to pick the lowdown override
nixUnstable = prev.nixUnstable;
- nix = with final; with commonDeps pkgs; currentStdenv.mkDerivation {
+ nix = with final; with commonDeps { inherit pkgs; }; currentStdenv.mkDerivation {
name = "nix-${version}";
inherit version;
@@ -361,7 +364,7 @@
buildInputs =
[ nix
- curl
+ (curl.override { patchNetrcRegression = true; })
bzip2
xz
pkgs.perl
@@ -448,7 +451,7 @@
# Line coverage analysis.
coverage =
with nixpkgsFor.x86_64-linux;
- with commonDeps pkgs;
+ with commonDeps { inherit pkgs; };
releaseTools.coverageAnalysis {
name = "nix-coverage-${version}";
@@ -543,6 +546,11 @@
# againstLatestStable = testNixVersions pkgs pkgs.nix pkgs.nixStable;
} "touch $out");
+ installerTests = import ./tests/installer {
+ binaryTarballs = self.hydraJobs.binaryTarball;
+ inherit nixpkgsFor;
+ };
+
};
checks = forAllSystems (system: {
@@ -559,7 +567,7 @@
} // (nixpkgs.lib.optionalAttrs (builtins.elem system linux64BitSystems) {
nix-static = let
nixpkgs = nixpkgsFor.${system}.pkgsStatic;
- in with commonDeps nixpkgs; nixpkgs.stdenv.mkDerivation {
+ in with commonDeps { pkgs = nixpkgs; isStatic = true; }; nixpkgs.stdenv.mkDerivation {
name = "nix-${version}";
src = self;
@@ -571,14 +579,24 @@
nativeBuildInputs = nativeBuildDeps;
buildInputs = buildDeps ++ propagatedDeps;
- configureFlags = [ "--sysconfdir=/etc" ];
+ # Work around pkgsStatic disabling all tests.
+ # Remove in NixOS 22.11, see https://github.com/NixOS/nixpkgs/pull/140271.
+ preHook =
+ ''
+ doCheck=1
+ doInstallCheck=1
+ '';
+
+ configureFlags =
+ configureFlags ++
+ [ "--sysconfdir=/etc"
+ "--enable-embedded-sandbox-shell"
+ ];
enableParallelBuilding = true;
makeFlags = "profiledir=$(out)/etc/profile.d";
- doCheck = true;
-
installFlags = "sysconfdir=$(out)/etc";
postInstall = ''
@@ -588,7 +606,6 @@
echo "file binary-dist $out/bin/nix" >> $out/nix-support/hydra-build-products
'';
- doInstallCheck = true;
installCheckFlags = "sysconfdir=$(out)/etc";
stripAllList = ["bin"];
@@ -597,6 +614,7 @@
hardeningDisable = [ "pie" ];
};
+
dockerImage =
let
pkgs = nixpkgsFor.${system};
@@ -620,7 +638,7 @@
inherit system crossSystem;
overlays = [ self.overlays.default ];
};
- in with commonDeps nixpkgsCross; nixpkgsCross.stdenv.mkDerivation {
+ in with commonDeps { pkgs = nixpkgsCross; }; nixpkgsCross.stdenv.mkDerivation {
name = "nix-${version}";
src = self;
@@ -663,7 +681,7 @@
devShells = forAllSystems (system:
forAllStdenvs (stdenv:
with nixpkgsFor.${system};
- with commonDeps pkgs;
+ with commonDeps { inherit pkgs; };
nixpkgsFor.${system}.${stdenv}.mkDerivation {
name = "nix";
diff --git a/misc/zsh/completion.zsh b/misc/zsh/completion.zsh
index e702c721e..f9b3dca74 100644
--- a/misc/zsh/completion.zsh
+++ b/misc/zsh/completion.zsh
@@ -10,14 +10,15 @@ function _nix() {
local -a suggestions
declare -a suggestions
for suggestion in ${res:1}; do
- # FIXME: This doesn't work properly if the suggestion word contains a `:`
- # itself
- suggestions+="${suggestion/ /:}"
+ suggestions+=("${suggestion%% *}")
done
+ local -a args
if [[ "$tpe" == filenames ]]; then
- compadd -f
+ args+=('-f')
+ elif [[ "$tpe" == attrs ]]; then
+ args+=('-S' '')
fi
- _describe 'nix' suggestions
+ compadd -J nix "${args[@]}" -a suggestions
}
_nix "$@"
diff --git a/mk/libraries.mk b/mk/libraries.mk
index 876148a55..6541775f3 100644
--- a/mk/libraries.mk
+++ b/mk/libraries.mk
@@ -125,7 +125,7 @@ define build-library
$(1)_PATH := $$(_d)/$$($(1)_NAME).a
$$($(1)_PATH): $$($(1)_OBJS) | $$(_d)/
- +$$(trace-ld) $(LD) -Ur -o $$(_d)/$$($(1)_NAME).o $$?
+ +$$(trace-ld) $(LD) -Ur -o $$(_d)/$$($(1)_NAME).o $$^
$$(trace-ar) $(AR) crs $$@ $$(_d)/$$($(1)_NAME).o
$(1)_LDFLAGS_USE += $$($(1)_PATH) $$($(1)_LDFLAGS)
diff --git a/scripts/install-darwin-multi-user.sh b/scripts/install-darwin-multi-user.sh
index afaa6783b..5111a5dde 100644
--- a/scripts/install-darwin-multi-user.sh
+++ b/scripts/install-darwin-multi-user.sh
@@ -167,7 +167,7 @@ poly_user_shell_get() {
}
poly_user_shell_set() {
- _sudo "in order to give $1 a safe home directory" \
+ _sudo "in order to give $1 a safe shell" \
/usr/bin/dscl . -create "/Users/$1" "UserShell" "$2"
}
diff --git a/scripts/install-multi-user.sh b/scripts/install-multi-user.sh
index b79a9c23a..a39339050 100644
--- a/scripts/install-multi-user.sh
+++ b/scripts/install-multi-user.sh
@@ -37,6 +37,19 @@ readonly PROFILE_TARGETS=("/etc/bashrc" "/etc/profile.d/nix.sh" "/etc/zshrc" "/e
readonly PROFILE_BACKUP_SUFFIX=".backup-before-nix"
readonly PROFILE_NIX_FILE="$NIX_ROOT/var/nix/profiles/default/etc/profile.d/nix-daemon.sh"
+# Fish has different syntax than zsh/bash, so treat it separately
+readonly PROFILE_FISH_SUFFIX="conf.d/nix.fish"
+readonly PROFILE_FISH_PREFIXES=(
+    # each of these is a common value of $__fish_sysconf_dir,
+    # under which Fish will look for a file named
+    # $PROFILE_FISH_SUFFIX.
+ "/etc/fish" # standard
+ "/usr/local/etc/fish" # their installer .pkg for macOS
+ "/opt/homebrew/etc/fish" # homebrew
+ "/opt/local/etc/fish" # macports
+)
+readonly PROFILE_NIX_FILE_FISH="$NIX_ROOT/var/nix/profiles/default/etc/profile.d/nix-daemon.fish"
+
readonly NIX_INSTALLED_NIX="@nix@"
readonly NIX_INSTALLED_CACERT="@cacert@"
#readonly NIX_INSTALLED_NIX="/nix/store/j8dbv5w6jl34caywh2ygdy88knx1mdf7-nix-2.3.6"
@@ -59,6 +72,30 @@ headless() {
fi
}
+is_root() {
+ if [ "$EUID" -eq 0 ]; then
+ return 0
+ else
+ return 1
+ fi
+}
+
+is_os_linux() {
+ if [ "$(uname -s)" = "Linux" ]; then
+ return 0
+ else
+ return 1
+ fi
+}
+
+is_os_darwin() {
+ if [ "$(uname -s)" = "Darwin" ]; then
+ return 0
+ else
+ return 1
+ fi
+}
+
contact_us() {
echo "You can open an issue at https://github.com/nixos/nix/issues"
echo ""
@@ -313,14 +350,23 @@ __sudo() {
_sudo() {
local expl="$1"
shift
- if ! headless; then
+ if ! headless || is_root; then
__sudo "$expl" "$*" >&2
fi
- sudo "$@"
+
+ if is_root; then
+ env "$@"
+ else
+ sudo "$@"
+ fi
}
+# Ensure that $TMPDIR exists if defined.
+if [[ -n "${TMPDIR:-}" ]] && [[ ! -d "${TMPDIR:-}" ]]; then
+ mkdir -m 0700 -p "${TMPDIR:-}"
+fi
-readonly SCRATCH=$(mktemp -d "${TMPDIR:-/tmp/}tmp.XXXXXXXXXX")
+readonly SCRATCH=$(mktemp -d)
finish_cleanup() {
rm -rf "$SCRATCH"
}
@@ -329,7 +375,7 @@ finish_fail() {
finish_cleanup
failure <<EOF
-Jeeze, something went wrong. If you can take all the output and open
+Oh no, something went wrong. If you can take all the output and open
an issue, we'd love to fix the problem so nobody else has this issue.
:(
@@ -423,7 +469,7 @@ EOF
fi
done
- if [ "$(uname -s)" = "Linux" ] && [ ! -e /run/systemd/system ]; then
+ if is_os_linux && [ ! -e /run/systemd/system ]; then
warning <<EOF
We did not detect systemd on your system. With a multi-user install
without systemd you will have to manually configure your init system to
@@ -638,6 +684,17 @@ place_channel_configuration() {
fi
}
+check_selinux() {
+ if command -v getenforce > /dev/null 2>&1; then
+ if [ "$(getenforce)" = "Enforcing" ]; then
+ failure <<EOF
+Nix does not work with SELinux enabled yet!
+See https://github.com/NixOS/nix/issues/2374
+EOF
+ fi
+ fi
+}
+
welcome_to_nix() {
ok "Welcome to the Multi-User Nix Installation"
@@ -766,7 +823,7 @@ EOF
fi
_sudo "to load data for the first time in to the Nix Database" \
- "$NIX_INSTALLED_NIX/bin/nix-store" --load-db < ./.reginfo
+ HOME="$ROOT_HOME" "$NIX_INSTALLED_NIX/bin/nix-store" --load-db < ./.reginfo
echo " Just finished getting the nix database ready."
)
@@ -784,6 +841,19 @@ fi
EOF
}
+# Fish has differing syntax
+fish_source_lines() {
+ cat <<EOF
+
+# Nix
+if test -e '$PROFILE_NIX_FILE_FISH'
+ . '$PROFILE_NIX_FILE_FISH'
+end
+# End Nix
+
+EOF
+}
+
configure_shell_profile() {
task "Setting up shell profiles: ${PROFILE_TARGETS[*]}"
for profile_target in "${PROFILE_TARGETS[@]}"; do
@@ -805,6 +875,27 @@ configure_shell_profile() {
tee -a "$profile_target"
fi
done
+
+    task "Setting up shell profiles for Fish with ${PROFILE_FISH_SUFFIX} inside ${PROFILE_FISH_PREFIXES[*]}"
+ for fish_prefix in "${PROFILE_FISH_PREFIXES[@]}"; do
+ if [ ! -d "$fish_prefix" ]; then
+            # if this directory (e.g. /etc/fish) does not exist, Fish is not
+            # installed with this sysconfdir, so skip it.
+ continue
+ fi
+
+ profile_target="${fish_prefix}/${PROFILE_FISH_SUFFIX}"
+ conf_dir=$(dirname "$profile_target")
+ if [ ! -d "$conf_dir" ]; then
+ _sudo "create $conf_dir for our Fish hook" \
+ mkdir "$conf_dir"
+ fi
+
+ fish_source_lines \
+ | _sudo "write nix-daemon settings to $profile_target" \
+ tee "$profile_target"
+ done
+
# TODO: should we suggest '. $PROFILE_NIX_FILE'? It would get them on
# their way less disruptively, but a counter-argument is that they won't
# immediately notice if something didn't get set up right?
@@ -854,22 +945,14 @@ EOF
install -m 0664 "$SCRATCH/nix.conf" /etc/nix/nix.conf
}
+
main() {
- # TODO: I've moved this out of validate_starting_assumptions so we
- # can fail faster in this case. Sourcing install-darwin... now runs
- # `touch /` to detect Read-only root, but it could update times on
- # pre-Catalina macOS if run as root user.
- if [ "$EUID" -eq 0 ]; then
- failure <<EOF
-Please do not run this script with root privileges. I will call sudo
-when I need to.
-EOF
- fi
+ check_selinux
- if [ "$(uname -s)" = "Darwin" ]; then
+ if is_os_darwin; then
# shellcheck source=./install-darwin-multi-user.sh
. "$EXTRACTED_NIX_PATH/install-darwin-multi-user.sh"
- elif [ "$(uname -s)" = "Linux" ]; then
+ elif is_os_linux; then
# shellcheck source=./install-systemd-multi-user.sh
. "$EXTRACTED_NIX_PATH/install-systemd-multi-user.sh" # most of this works on non-systemd distros also
else
@@ -877,7 +960,10 @@ EOF
fi
welcome_to_nix
- chat_about_sudo
+
+ if ! is_root; then
+ chat_about_sudo
+ fi
cure_artifacts
# TODO: there's a tension between cure and validate. I moved the
diff --git a/scripts/install-nix-from-closure.sh b/scripts/install-nix-from-closure.sh
index d543b4463..d4eed2efe 100644
--- a/scripts/install-nix-from-closure.sh
+++ b/scripts/install-nix-from-closure.sh
@@ -148,7 +148,9 @@ if ! [ -w "$dest" ]; then
exit 1
fi
-mkdir -p "$dest/store"
+# The auto-chroot code in openFromNonUri() checks for the
+# non-existence of /nix/var/nix, so we need to create it here.
+mkdir -p "$dest/store" "$dest/var/nix"
printf "copying Nix to %s..." "${dest}/store" >&2
# Insert a newline if no progress is shown.
@@ -207,31 +209,50 @@ if [ -z "$NIX_INSTALLER_NO_CHANNEL_ADD" ]; then
fi
added=
-p=$HOME/.nix-profile/etc/profile.d/nix.sh
+p=
+p_sh=$HOME/.nix-profile/etc/profile.d/nix.sh
+p_fish=$HOME/.nix-profile/etc/profile.d/nix.fish
if [ -z "$NIX_INSTALLER_NO_MODIFY_PROFILE" ]; then
# Make the shell source nix.sh during login.
for i in .bash_profile .bash_login .profile; do
fn="$HOME/$i"
if [ -w "$fn" ]; then
- if ! grep -q "$p" "$fn"; then
+ if ! grep -q "$p_sh" "$fn"; then
echo "modifying $fn..." >&2
- printf '\nif [ -e %s ]; then . %s; fi # added by Nix installer\n' "$p" "$p" >> "$fn"
+ printf '\nif [ -e %s ]; then . %s; fi # added by Nix installer\n' "$p_sh" "$p_sh" >> "$fn"
fi
added=1
+ p=${p_sh}
break
fi
done
for i in .zshenv .zshrc; do
fn="$HOME/$i"
if [ -w "$fn" ]; then
- if ! grep -q "$p" "$fn"; then
+ if ! grep -q "$p_sh" "$fn"; then
echo "modifying $fn..." >&2
- printf '\nif [ -e %s ]; then . %s; fi # added by Nix installer\n' "$p" "$p" >> "$fn"
+ printf '\nif [ -e %s ]; then . %s; fi # added by Nix installer\n' "$p_sh" "$p_sh" >> "$fn"
fi
added=1
+ p=${p_sh}
break
fi
done
+
+ if [ -d "$HOME/.config/fish" ]; then
+ fishdir=$HOME/.config/fish/conf.d
+ if [ ! -d "$fishdir" ]; then
+ mkdir -p "$fishdir"
+ fi
+
+ fn="$fishdir/nix.fish"
+ echo "placing $fn..." >&2
+ printf '\nif test -e %s; . %s; end # added by Nix installer\n' "$p_fish" "$p_fish" > "$fn"
+ added=1
+ p=${p_fish}
+ fi
+else
+ p=${p_sh}
fi
if [ -z "$added" ]; then
diff --git a/scripts/install.in b/scripts/install.in
index af5f71080..7d2e52b26 100755
--- a/scripts/install.in
+++ b/scripts/install.in
@@ -40,12 +40,12 @@ case "$(uname -s).$(uname -m)" in
path=@tarballPath_aarch64-linux@
system=aarch64-linux
;;
- Linux.armv6l_linux)
+ Linux.armv6l)
hash=@tarballHash_armv6l-linux@
path=@tarballPath_armv6l-linux@
system=armv6l-linux
;;
- Linux.armv7l_linux)
+ Linux.armv7l)
hash=@tarballHash_armv7l-linux@
path=@tarballPath_armv7l-linux@
system=armv7l-linux
diff --git a/scripts/local.mk b/scripts/local.mk
index b8477178e..46255e432 100644
--- a/scripts/local.mk
+++ b/scripts/local.mk
@@ -6,6 +6,8 @@ noinst-scripts += $(nix_noinst_scripts)
profiledir = $(sysconfdir)/profile.d
$(eval $(call install-file-as, $(d)/nix-profile.sh, $(profiledir)/nix.sh, 0644))
+$(eval $(call install-file-as, $(d)/nix-profile.fish, $(profiledir)/nix.fish, 0644))
$(eval $(call install-file-as, $(d)/nix-profile-daemon.sh, $(profiledir)/nix-daemon.sh, 0644))
+$(eval $(call install-file-as, $(d)/nix-profile-daemon.fish, $(profiledir)/nix-daemon.fish, 0644))
clean-files += $(nix_noinst_scripts)
diff --git a/scripts/nix-profile-daemon.fish.in b/scripts/nix-profile-daemon.fish.in
new file mode 100644
index 000000000..3d587dd7f
--- /dev/null
+++ b/scripts/nix-profile-daemon.fish.in
@@ -0,0 +1,35 @@
+# Only execute this file once per shell.
+if test -n "$__ETC_PROFILE_NIX_SOURCED"
+ exit
+end
+
+set __ETC_PROFILE_NIX_SOURCED 1
+
+set --export NIX_PROFILES "@localstatedir@/nix/profiles/default $HOME/.nix-profile"
+
+# Set $NIX_SSL_CERT_FILE so that Nixpkgs applications like curl work.
+if test -n "$NIX_SSL_CERT_FILE"
+ : # Allow users to override the NIX_SSL_CERT_FILE
+else if test -e /etc/ssl/certs/ca-certificates.crt # NixOS, Ubuntu, Debian, Gentoo, Arch
+ set --export NIX_SSL_CERT_FILE /etc/ssl/certs/ca-certificates.crt
+else if test -e /etc/ssl/ca-bundle.pem # openSUSE Tumbleweed
+ set --export NIX_SSL_CERT_FILE /etc/ssl/ca-bundle.pem
+else if test -e /etc/ssl/certs/ca-bundle.crt # Old NixOS
+ set --export NIX_SSL_CERT_FILE /etc/ssl/certs/ca-bundle.crt
+else if test -e /etc/pki/tls/certs/ca-bundle.crt # Fedora, CentOS
+ set --export NIX_SSL_CERT_FILE /etc/pki/tls/certs/ca-bundle.crt
+else if test -e "$NIX_LINK/etc/ssl/certs/ca-bundle.crt" # fall back to cacert in Nix profile
+ set --export NIX_SSL_CERT_FILE "$NIX_LINK/etc/ssl/certs/ca-bundle.crt"
+else if test -e "$NIX_LINK/etc/ca-bundle.crt" # old cacert in Nix profile
+ set --export NIX_SSL_CERT_FILE "$NIX_LINK/etc/ca-bundle.crt"
+else
+ # Fall back to what is in the nix profiles, favouring whatever is defined last.
+ for i in $NIX_PROFILES
+ if test -e "$i/etc/ssl/certs/ca-bundle.crt"
+ set --export NIX_SSL_CERT_FILE "$i/etc/ssl/certs/ca-bundle.crt"
+ end
+ end
+end
+
+fish_add_path --prepend --global "@localstatedir@/nix/profiles/default/bin"
+fish_add_path --prepend --global "$HOME/.nix-profile/bin"
diff --git a/scripts/nix-profile.fish.in b/scripts/nix-profile.fish.in
new file mode 100644
index 000000000..59d247771
--- /dev/null
+++ b/scripts/nix-profile.fish.in
@@ -0,0 +1,35 @@
+if test -n "$HOME" && test -n "$USER"
+
+ # Set up the per-user profile.
+
+ set NIX_LINK $HOME/.nix-profile
+
+ # Set up environment.
+ # This part should be kept in sync with nixpkgs:nixos/modules/programs/environment.nix
+ set --export NIX_PROFILES "@localstatedir@/nix/profiles/default $HOME/.nix-profile"
+
+ # Set $NIX_SSL_CERT_FILE so that Nixpkgs applications like curl work.
+    if test -n "$NIX_SSL_CERT_FILE"
+ : # Allow users to override the NIX_SSL_CERT_FILE
+ else if test -e /etc/ssl/certs/ca-certificates.crt # NixOS, Ubuntu, Debian, Gentoo, Arch
+ set --export NIX_SSL_CERT_FILE /etc/ssl/certs/ca-certificates.crt
+ else if test -e /etc/ssl/ca-bundle.pem # openSUSE Tumbleweed
+ set --export NIX_SSL_CERT_FILE /etc/ssl/ca-bundle.pem
+ else if test -e /etc/ssl/certs/ca-bundle.crt # Old NixOS
+ set --export NIX_SSL_CERT_FILE /etc/ssl/certs/ca-bundle.crt
+ else if test -e /etc/pki/tls/certs/ca-bundle.crt # Fedora, CentOS
+ set --export NIX_SSL_CERT_FILE /etc/pki/tls/certs/ca-bundle.crt
+ else if test -e "$NIX_LINK/etc/ssl/certs/ca-bundle.crt" # fall back to cacert in Nix profile
+ set --export NIX_SSL_CERT_FILE "$NIX_LINK/etc/ssl/certs/ca-bundle.crt"
+ else if test -e "$NIX_LINK/etc/ca-bundle.crt" # old cacert in Nix profile
+ set --export NIX_SSL_CERT_FILE "$NIX_LINK/etc/ca-bundle.crt"
+ end
+
+    # Only use MANPATH if it is already set. In general `man` will simply
+    # pick up `.nix-profile/share/man` because it is close to `.nix-profile/bin`,
+    # which is in the $PATH. For more info, run `manpath -d`.
+ set --export --prepend --path MANPATH "$NIX_LINK/share/man"
+
+ fish_add_path --prepend --global "$NIX_LINK/bin"
+ set --erase NIX_LINK
+end
diff --git a/scripts/nix-profile.sh.in b/scripts/nix-profile.sh.in
index 45cbcbe74..5636085d4 100644
--- a/scripts/nix-profile.sh.in
+++ b/scripts/nix-profile.sh.in
@@ -1,7 +1,6 @@
if [ -n "$HOME" ] && [ -n "$USER" ]; then
# Set up the per-user profile.
- # This part should be kept in sync with nixpkgs:nixos/modules/programs/shell.nix
NIX_LINK=$HOME/.nix-profile
diff --git a/src/libcmd/command.cc b/src/libcmd/command.cc
index 7f8072d75..14bb27936 100644
--- a/src/libcmd/command.cc
+++ b/src/libcmd/command.cc
@@ -120,7 +120,7 @@ ref<EvalState> EvalCommand::getEvalState()
;
if (startReplOnEvalErrors) {
- evalState->debugRepl = &runRepl;
+ evalState->debugRepl = &runRepl;
};
}
return ref<EvalState>(evalState);
diff --git a/src/libcmd/command.hh b/src/libcmd/command.hh
index 8982f21d0..3b4b40981 100644
--- a/src/libcmd/command.hh
+++ b/src/libcmd/command.hh
@@ -58,6 +58,7 @@ struct CopyCommand : virtual StoreCommand
struct EvalCommand : virtual StoreCommand, MixEvalArgs
{
bool startReplOnEvalErrors = false;
+ bool ignoreExceptionsDuringTry = false;
EvalCommand();
@@ -77,10 +78,16 @@ struct MixFlakeOptions : virtual Args, EvalCommand
{
flake::LockFlags lockFlags;
+ std::optional<std::string> needsFlakeInputCompletion = {};
+
MixFlakeOptions();
- virtual std::optional<FlakeRef> getFlakeRefForCompletion()
+ virtual std::vector<std::string> getFlakesForCompletion()
{ return {}; }
+
+ void completeFlakeInput(std::string_view prefix);
+
+ void completionHook() override;
};
struct SourceExprCommand : virtual Args, MixFlakeOptions
@@ -116,12 +123,13 @@ struct InstallablesCommand : virtual Args, SourceExprCommand
InstallablesCommand();
void prepare() override;
+ Installables load();
virtual bool useDefaultInstallables() { return true; }
- std::optional<FlakeRef> getFlakeRefForCompletion() override;
+ std::vector<std::string> getFlakesForCompletion() override;
-private:
+protected:
std::vector<std::string> _installables;
};
@@ -135,9 +143,9 @@ struct InstallableCommand : virtual Args, SourceExprCommand
void prepare() override;
- std::optional<FlakeRef> getFlakeRefForCompletion() override
+ std::vector<std::string> getFlakesForCompletion() override
{
- return parseFlakeRefWithFragment(_installable, absPath(".")).first;
+ return {_installable};
}
private:
diff --git a/src/libcmd/installables.cc b/src/libcmd/installables.cc
index 3cf25e2bc..e097f23b3 100644
--- a/src/libcmd/installables.cc
+++ b/src/libcmd/installables.cc
@@ -23,17 +23,6 @@
namespace nix {
-void completeFlakeInputPath(
- ref<EvalState> evalState,
- const FlakeRef & flakeRef,
- std::string_view prefix)
-{
- auto flake = flake::getFlake(*evalState, flakeRef, true);
- for (auto & input : flake.inputs)
- if (hasPrefix(input.first, prefix))
- completions->add(input.first);
-}
-
MixFlakeOptions::MixFlakeOptions()
{
auto category = "Common flake-related options";
@@ -86,8 +75,7 @@ MixFlakeOptions::MixFlakeOptions()
lockFlags.inputUpdates.insert(flake::parseInputPath(s));
}},
.completer = {[&](size_t, std::string_view prefix) {
- if (auto flakeRef = getFlakeRefForCompletion())
- completeFlakeInputPath(getEvalState(), *flakeRef, prefix);
+ needsFlakeInputCompletion = {std::string(prefix)};
}}
});
@@ -103,12 +91,10 @@ MixFlakeOptions::MixFlakeOptions()
parseFlakeRef(flakeRef, absPath("."), true));
}},
.completer = {[&](size_t n, std::string_view prefix) {
- if (n == 0) {
- if (auto flakeRef = getFlakeRefForCompletion())
- completeFlakeInputPath(getEvalState(), *flakeRef, prefix);
- } else if (n == 1) {
+ if (n == 0)
+ needsFlakeInputCompletion = {std::string(prefix)};
+ else if (n == 1)
completeFlakeRef(getEvalState()->store, prefix);
- }
}}
});
@@ -139,6 +125,24 @@ MixFlakeOptions::MixFlakeOptions()
});
}
+void MixFlakeOptions::completeFlakeInput(std::string_view prefix)
+{
+ auto evalState = getEvalState();
+ for (auto & flakeRefS : getFlakesForCompletion()) {
+ auto flakeRef = parseFlakeRefWithFragment(expandTilde(flakeRefS), absPath(".")).first;
+ auto flake = flake::getFlake(*evalState, flakeRef, true);
+ for (auto & input : flake.inputs)
+ if (hasPrefix(input.first, prefix))
+ completions->add(input.first);
+ }
+}
+
+void MixFlakeOptions::completionHook()
+{
+ if (auto & prefix = needsFlakeInputCompletion)
+ completeFlakeInput(*prefix);
+}
+
SourceExprCommand::SourceExprCommand(bool supportReadOnlyMode)
{
addFlag({
@@ -612,6 +616,8 @@ InstallableFlake::InstallableFlake(
std::tuple<std::string, FlakeRef, InstallableValue::DerivationInfo> InstallableFlake::toDerivation()
{
+ Activity act(*logger, lvlTalkative, actUnknown, fmt("evaluating derivation '%s'", what()));
+
auto attr = getCursor(*state);
auto attrPath = attr->getAttrPathStr();
@@ -920,6 +926,9 @@ std::vector<std::pair<std::shared_ptr<Installable>, BuiltPath>> Installable::bui
break;
case Realise::Outputs: {
+ if (settings.printMissing)
+ printMissing(store, pathsToBuild, lvlInfo);
+
for (auto & buildResult : store->buildPathsWithResults(pathsToBuild, bMode, evalStore)) {
if (!buildResult.success())
buildResult.rethrow();
@@ -1033,21 +1042,26 @@ InstallablesCommand::InstallablesCommand()
void InstallablesCommand::prepare()
{
+ installables = load();
+}
+
+Installables InstallablesCommand::load() {
+ Installables installables;
if (_installables.empty() && useDefaultInstallables())
// FIXME: commands like "nix profile install" should not have a
// default, probably.
_installables.push_back(".");
- installables = parseInstallables(getStore(), _installables);
+ return parseInstallables(getStore(), _installables);
}
-std::optional<FlakeRef> InstallablesCommand::getFlakeRefForCompletion()
+std::vector<std::string> InstallablesCommand::getFlakesForCompletion()
{
if (_installables.empty()) {
if (useDefaultInstallables())
- return parseFlakeRefWithFragment(".", absPath(".")).first;
+ return {"."};
return {};
}
- return parseFlakeRefWithFragment(_installables.front(), absPath(".")).first;
+ return _installables;
}
InstallableCommand::InstallableCommand(bool supportReadOnlyMode)
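
A rough sketch of the deferred-completion pattern the installables.cc hunk above introduces: the flag's completer only records the requested prefix in needsFlakeInputCompletion, and completionHook(), which runs after all arguments are parsed, performs the actual flake lookup. The stand-alone C++ sketch below uses illustrative names and plain standard-library types, not Nix's Args/EvalState machinery.

    #include <iostream>
    #include <optional>
    #include <string>
    #include <string_view>
    #include <vector>

    // Illustrative stand-in for the deferred completion in MixFlakeOptions.
    struct FlakeOptions {
        std::optional<std::string> pendingPrefix;            // set by the completer
        std::vector<std::string> inputs{"nixpkgs", "flake-utils"};

        void completer(std::string_view prefix) {
            pendingPrefix = std::string(prefix);             // cheap: just remember it
        }

        void completionHook() {
            if (!pendingPrefix) return;                      // nothing was requested
            for (auto & input : inputs)                      // expensive lookup happens here
                if (input.rfind(*pendingPrefix, 0) == 0)     // prefix match
                    std::cout << input << "\n";
        }
    };

    int main() {
        FlakeOptions opts;
        opts.completer("nix");
        opts.completionHook();                               // prints "nixpkgs"
    }
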
diff --git a/src/libcmd/installables.hh b/src/libcmd/installables.hh
index 5d715210e..948f78919 100644
--- a/src/libcmd/installables.hh
+++ b/src/libcmd/installables.hh
@@ -132,6 +132,8 @@ struct Installable
const std::vector<std::shared_ptr<Installable>> & installables);
};
+typedef std::vector<std::shared_ptr<Installable>> Installables;
+
struct InstallableValue : Installable
{
ref<EvalState> state;
diff --git a/src/libcmd/markdown.cc b/src/libcmd/markdown.cc
index 71f9c8dff..668a07763 100644
--- a/src/libcmd/markdown.cc
+++ b/src/libcmd/markdown.cc
@@ -18,7 +18,7 @@ std::string renderMarkdownToTerminal(std::string_view markdown)
.hmargin = 0,
.vmargin = 0,
.feat = LOWDOWN_COMMONMARK | LOWDOWN_FENCED | LOWDOWN_DEFLIST | LOWDOWN_TABLES,
- .oflags = 0,
+ .oflags = LOWDOWN_TERM_NOLINK,
};
auto doc = lowdown_doc_new(&opts);
diff --git a/src/libcmd/repl.cc b/src/libcmd/repl.cc
index 458e824c5..61c05050f 100644
--- a/src/libcmd/repl.cc
+++ b/src/libcmd/repl.cc
@@ -22,6 +22,7 @@ extern "C" {
#include "ansicolor.hh"
#include "shared.hh"
#include "eval.hh"
+#include "eval-cache.hh"
#include "eval-inline.hh"
#include "attr-path.hh"
#include "store-api.hh"
@@ -34,6 +35,7 @@ extern "C" {
#include "finally.hh"
#include "markdown.hh"
#include "local-fs-store.hh"
+#include "progress-bar.hh"
#if HAVE_BOEHMGC
#define GC_INCLUDE_NEW
@@ -54,6 +56,8 @@ struct NixRepl
size_t debugTraceIndex;
Strings loadedFiles;
+ typedef std::vector<std::pair<Value*,std::string>> AnnotatedValues;
+ std::function<AnnotatedValues()> getValues;
const static int envSize = 32768;
std::shared_ptr<StaticEnv> staticEnv;
@@ -63,13 +67,15 @@ struct NixRepl
const Path historyFile;
- NixRepl(ref<EvalState> state);
+    NixRepl(const Strings & searchPath, nix::ref<Store> store, ref<EvalState> state,
+ std::function<AnnotatedValues()> getValues);
~NixRepl();
- void mainLoop(const std::vector<std::string> & files);
+ void mainLoop();
StringSet completePrefix(const std::string & prefix);
bool getLine(std::string & input, const std::string & prompt);
StorePath getDerivationPath(Value & v);
bool processLine(std::string line);
+
void loadFile(const Path & path);
void loadFlake(const std::string & flakeRef);
void initEnv();
@@ -96,9 +102,11 @@ std::string removeWhitespace(std::string s)
}
-NixRepl::NixRepl(ref<EvalState> state)
+NixRepl::NixRepl(const Strings & searchPath, nix::ref<Store> store, ref<EvalState> state,
+ std::function<NixRepl::AnnotatedValues()> getValues)
: state(state)
, debugTraceIndex(0)
+ , getValues(getValues)
, staticEnv(new StaticEnv(false, state->staticBaseEnv.get()))
, historyFile(getDataDir() + "/nix/repl-history")
{
@@ -111,23 +119,20 @@ NixRepl::~NixRepl()
write_history(historyFile.c_str());
}
-std::string runNix(Path program, const Strings & args,
+void runNix(Path program, const Strings & args,
const std::optional<std::string> & input = {})
{
auto subprocessEnv = getEnv();
subprocessEnv["NIX_CONFIG"] = globalConfig.toKeyValue();
- auto res = runProgram(RunOptions {
+ runProgram2(RunOptions {
.program = settings.nixBinDir+ "/" + program,
.args = args,
.environment = subprocessEnv,
.input = input,
});
- if (!statusOk(res.first))
- throw ExecError(res.first, "program '%1%' %2%", program, statusToString(res.first));
-
- return res.second;
+ return;
}
static NixRepl * curRepl; // ugly
@@ -228,22 +233,20 @@ static std::ostream & showDebugTrace(std::ostream & out, const PosTable & positi
return out;
}
-void NixRepl::mainLoop(const std::vector<std::string> & files)
+void NixRepl::mainLoop()
{
std::string error = ANSI_RED "error:" ANSI_NORMAL " ";
notice("Welcome to Nix " + nixVersion + ". Type :? for help.\n");
- if (!files.empty()) {
- for (auto & i : files)
- loadedFiles.push_back(i);
- }
-
loadFiles();
- if (!loadedFiles.empty()) notice("");
// Allow nix-repl specific settings in .inputrc
rl_readline_name = "nix-repl";
- createDirs(dirOf(historyFile));
+ try {
+ createDirs(dirOf(historyFile));
+ } catch (SysError & e) {
+ logWarning(e.info());
+ }
#ifndef READLINE
el_hist_size = 1000;
#endif
@@ -254,6 +257,10 @@ void NixRepl::mainLoop(const std::vector<std::string> & files)
rl_set_list_possib_func(listPossibleCallback);
#endif
+ /* Stop the progress bar because it interferes with the display of
+ the repl. */
+ stopProgressBar();
+
std::string input;
while (true) {
@@ -749,7 +756,6 @@ bool NixRepl::processLine(std::string line)
return true;
}
-
void NixRepl::loadFile(const Path & path)
{
loadedFiles.remove(path);
@@ -809,13 +815,15 @@ void NixRepl::loadFiles()
Strings old = loadedFiles;
loadedFiles.clear();
- bool first = true;
for (auto & i : old) {
- if (!first) notice("");
- first = false;
notice("Loading '%1%'...", i);
loadFile(i);
}
+
+ for (auto & [i, what] : getValues()) {
+ notice("Loading installable '%1%'...", what);
+ addAttrsToScope(*i);
+ }
}
@@ -1015,7 +1023,17 @@ void runRepl(
ref<EvalState>evalState,
const ValMap & extraEnv)
{
- auto repl = std::make_unique<NixRepl>(evalState);
+ auto getValues = [&]()->NixRepl::AnnotatedValues{
+ NixRepl::AnnotatedValues values;
+ return values;
+ };
+ const Strings & searchPath = {};
+ auto repl = std::make_unique<NixRepl>(
+ searchPath,
+ openStore(),
+ evalState,
+ getValues
+ );
repl->initEnv();
@@ -1023,20 +1041,44 @@ void runRepl(
for (auto & [name, value] : extraEnv)
repl->addVarToScope(repl->state->symbols.create(name), *value);
- repl->mainLoop({});
+ repl->mainLoop();
}
-struct CmdRepl : StoreCommand, MixEvalArgs
+struct CmdRepl : InstallablesCommand
{
+ CmdRepl() {
+ evalSettings.pureEval = false;
+ }
+
+ void prepare()
+ {
+ if (!settings.isExperimentalFeatureEnabled(Xp::ReplFlake) && !(file) && this->_installables.size() >= 1) {
+ warn("future versions of Nix will require using `--file` to load a file");
+ if (this->_installables.size() > 1)
+ warn("more than one input file is not currently supported");
+ auto filePath = this->_installables[0].data();
+ file = std::optional(filePath);
+ _installables.front() = _installables.back();
+ _installables.pop_back();
+ }
+ installables = InstallablesCommand::load();
+ }
+
std::vector<std::string> files;
- CmdRepl()
+ Strings getDefaultFlakeAttrPaths() override
+ {
+ return {""};
+ }
+
+ bool useDefaultInstallables() override
{
- expectArgs({
- .label = "files",
- .handler = {&files},
- .completer = completePath
- });
+ return file.has_value() or expr.has_value();
+ }
+
+ bool forceImpureByDefault() override
+ {
+ return true;
}
std::string description() override
@@ -1053,14 +1095,37 @@ struct CmdRepl : StoreCommand, MixEvalArgs
void run(ref<Store> store) override
{
- evalSettings.pureEval = false;
-
- auto evalState = make_ref<EvalState>(searchPath, store);
-
- auto repl = std::make_unique<NixRepl>(evalState);
+ auto state = getEvalState();
+ auto getValues = [&]()->NixRepl::AnnotatedValues{
+ auto installables = load();
+ NixRepl::AnnotatedValues values;
+ for (auto & installable: installables){
+ auto what = installable->what();
+ if (file){
+ auto [val, pos] = installable->toValue(*state);
+ auto what = installable->what();
+ state->forceValue(*val, pos);
+ auto autoArgs = getAutoArgs(*state);
+ auto valPost = state->allocValue();
+ state->autoCallFunction(*autoArgs, *val, *valPost);
+ state->forceValue(*valPost, pos);
+ values.push_back( {valPost, what });
+ } else {
+ auto [val, pos] = installable->toValue(*state);
+ values.push_back( {val, what} );
+ }
+ }
+ return values;
+ };
+ auto repl = std::make_unique<NixRepl>(
+ searchPath,
+ openStore(),
+ state,
+ getValues
+ );
repl->autoArgs = getAutoArgs(*repl->state);
repl->initEnv();
- repl->mainLoop(files);
+ repl->mainLoop();
}
};
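
The repl.cc rework replaces the list-of-files constructor with a getValues callback that yields (value, description) pairs on demand, so both `nix repl` and the debugger can decide lazily what ends up in scope. A simplified sketch of that callback shape, with a placeholder Value type instead of nix::Value:

    #include <functional>
    #include <iostream>
    #include <string>
    #include <utility>
    #include <vector>

    using Value = int;                                        // placeholder for nix::Value
    using AnnotatedValues = std::vector<std::pair<Value, std::string>>;

    struct Repl {
        std::function<AnnotatedValues()> getValues;           // supplied by the caller

        void loadFiles() {
            for (auto & [v, what] : getValues())              // evaluated only when needed
                std::cout << "Loading installable '" << what << "'...\n";
        }
    };

    int main() {
        Repl repl{ [] { return AnnotatedValues{{42, "answer"}}; } };
        repl.loadFiles();
    }
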
diff --git a/src/libexpr/eval-cache.cc b/src/libexpr/eval-cache.cc
index d77b25898..b259eec63 100644
--- a/src/libexpr/eval-cache.cc
+++ b/src/libexpr/eval-cache.cc
@@ -282,7 +282,7 @@ struct AttrDb
auto queryAttribute(state->queryAttribute.use()(key.first)(symbols[key.second]));
if (!queryAttribute.next()) return {};
- auto rowId = (AttrType) queryAttribute.getInt(0);
+ auto rowId = (AttrId) queryAttribute.getInt(0);
auto type = (AttrType) queryAttribute.getInt(1);
switch (type) {
@@ -486,7 +486,7 @@ std::shared_ptr<AttrCursor> AttrCursor::maybeGetAttr(Symbol name, bool forceErro
return nullptr;
else if (std::get_if<failed_t>(&attr->second)) {
if (forceErrors)
- debug("reevaluating failed cached attribute '%s'");
+ debug("reevaluating failed cached attribute '%s'", getAttrPathStr(name));
else
throw CachedEvalError("cached failure of attribute '%s'", getAttrPathStr(name));
} else
@@ -507,11 +507,6 @@ std::shared_ptr<AttrCursor> AttrCursor::maybeGetAttr(Symbol name, bool forceErro
return nullptr;
//throw TypeError("'%s' is not an attribute set", getAttrPathStr());
- for (auto & attr : *v.attrs) {
- if (root->db)
- root->db->setPlaceholder({cachedValue->first, attr.name});
- }
-
auto attr = v.attrs->get(name);
if (!attr) {
diff --git a/src/libexpr/eval.cc b/src/libexpr/eval.cc
index 40462afdf..e3716f217 100644
--- a/src/libexpr/eval.cc
+++ b/src/libexpr/eval.cc
@@ -464,9 +464,10 @@ EvalState::EvalState(
, emptyBindings(0)
, store(store)
, buildStore(buildStore ? buildStore : store)
- , debugRepl(0)
+ , debugRepl(nullptr)
, debugStop(false)
, debugQuit(false)
+ , trylevel(0)
, regexCache(makeRegexCache())
#if HAVE_BOEHMGC
, valueAllocCache(std::allocate_shared<void *>(traceable_allocator<void *>(), nullptr))
@@ -832,7 +833,14 @@ void EvalState::runDebugRepl(const Error * error, const Env & env, const Expr &
: nullptr;
if (error)
- printError("%s\n\n" ANSI_BOLD "Starting REPL to allow you to inspect the current state of the evaluator.\n" ANSI_NORMAL, error->what());
+ {
+ printError("%s\n\n", error->what());
+
+ if (trylevel > 0 && error->info().level != lvlInfo)
+ printError("This exception occurred in a 'tryEval' call. Use " ANSI_GREEN "--ignore-try" ANSI_NORMAL " to skip these.\n");
+
+ printError(ANSI_BOLD "Starting REPL to allow you to inspect the current state of the evaluator.\n" ANSI_NORMAL);
+ }
auto se = getStaticEnv(expr);
if (se) {
@@ -2493,18 +2501,18 @@ void EvalState::printStats()
}
{
auto list = topObj.list("functions");
- for (auto & i : functionCalls) {
+ for (auto & [fun, count] : functionCalls) {
auto obj = list.object();
- if (i.first->name)
- obj.attr("name", (const std::string &) i.first->name);
+ if (fun->name)
+ obj.attr("name", (std::string_view) symbols[fun->name]);
else
obj.attr("name", nullptr);
- if (auto pos = positions[i.first->pos]) {
- obj.attr("file", (const std::string &) pos.file);
+ if (auto pos = positions[fun->pos]) {
+ obj.attr("file", (std::string_view) pos.file);
obj.attr("line", pos.line);
obj.attr("column", pos.column);
}
- obj.attr("count", i.second);
+ obj.attr("count", count);
}
}
{
diff --git a/src/libexpr/eval.hh b/src/libexpr/eval.hh
index 4eaa3c9b0..f07f15d43 100644
--- a/src/libexpr/eval.hh
+++ b/src/libexpr/eval.hh
@@ -130,6 +130,7 @@ public:
void (* debugRepl)(ref<EvalState> es, const ValMap & extraEnv);
bool debugStop;
bool debugQuit;
+ int trylevel;
std::list<DebugTrace> debugTraces;
std::map<const Expr*, const std::shared_ptr<const StaticEnv>> exprEnvs;
const std::shared_ptr<const StaticEnv> getStaticEnv(const Expr & expr) const
@@ -646,6 +647,15 @@ struct EvalSettings : Config
Setting<bool> useEvalCache{this, true, "eval-cache",
"Whether to use the flake evaluation cache."};
+
+ Setting<bool> ignoreExceptionsDuringTry{this, false, "ignore-try",
+ R"(
+ If set to true, ignore exceptions inside 'tryEval' calls when evaluating nix expressions in
+ debug mode (using the --debugger flag). By default the debugger will pause on all exceptions.
+ )"};
+
+ Setting<bool> traceVerbose{this, false, "trace-verbose",
+ "Whether `builtins.traceVerbose` should trace its first argument when evaluated."};
};
extern EvalSettings evalSettings;
diff --git a/src/libexpr/fetchurl.nix b/src/libexpr/fetchurl.nix
index 02531103b..9d1b61d7f 100644
--- a/src/libexpr/fetchurl.nix
+++ b/src/libexpr/fetchurl.nix
@@ -12,13 +12,13 @@
, executable ? false
, unpack ? false
, name ? baseNameOf (toString url)
+, impure ? false
}:
-derivation {
+derivation ({
builder = "builtin:fetchurl";
# New-style output content requirements.
- inherit outputHashAlgo outputHash;
outputHashMode = if unpack || executable then "recursive" else "flat";
inherit name url executable unpack;
@@ -38,4 +38,6 @@ derivation {
# To make "nix-prefetch-url" work.
urls = [ url ];
-}
+} // (if impure
+ then { __impure = true; }
+ else { inherit outputHashAlgo outputHash; }))
diff --git a/src/libexpr/flake/config.cc b/src/libexpr/flake/config.cc
index 3e9d264b4..6df95f1f0 100644
--- a/src/libexpr/flake/config.cc
+++ b/src/libexpr/flake/config.cc
@@ -68,7 +68,7 @@ void ConfigFile::apply()
}
}
if (!trusted) {
- warn("ignoring untrusted flake configuration setting '%s'", name);
+ warn("ignoring untrusted flake configuration setting '%s'.\nPass '%s' to trust it", name, "--accept-flake-config");
continue;
}
}
diff --git a/src/libexpr/flake/flake.cc b/src/libexpr/flake/flake.cc
index 35c841897..119c556ac 100644
--- a/src/libexpr/flake/flake.cc
+++ b/src/libexpr/flake/flake.cc
@@ -341,7 +341,6 @@ LockedFlake lockFlake(
debug("old lock file: %s", oldLockFile);
- // FIXME: check whether all overrides are used.
std::map<InputPath, FlakeInput> overrides;
std::set<InputPath> overridesUsed, updatesUsed;
@@ -384,6 +383,18 @@ LockedFlake lockFlake(
}
}
+ /* Check whether this input has overrides for a
+ non-existent input. */
+ for (auto [inputPath, inputOverride] : overrides) {
+ auto inputPath2(inputPath);
+ auto follow = inputPath2.back();
+ inputPath2.pop_back();
+ if (inputPath2 == inputPathPrefix && !flakeInputs.count(follow))
+ warn(
+ "input '%s' has an override for a non-existent input '%s'",
+ printInputPath(inputPathPrefix), follow);
+ }
+
/* Go over the flake inputs, resolve/fetch them if
necessary (i.e. if they're new or the flakeref changed
from what's in the lock file). */
@@ -472,12 +483,12 @@ LockedFlake lockFlake(
} else if (auto follows = std::get_if<1>(&i.second)) {
if (! trustLock) {
// It is possible that the flake has changed,
- // so we must confirm all the follows that are in the lockfile are also in the flake.
+ // so we must confirm all the follows that are in the lock file are also in the flake.
auto overridePath(inputPath);
overridePath.push_back(i.first);
auto o = overrides.find(overridePath);
// If the override disappeared, we have to refetch the flake,
- // since some of the inputs may not be present in the lockfile.
+ // since some of the inputs may not be present in the lock file.
if (o == overrides.end()) {
mustRefetch = true;
// There's no point populating the rest of the fake inputs,
@@ -513,6 +524,15 @@ LockedFlake lockFlake(
if (!lockFlags.allowMutable && !input.ref->input.isLocked())
throw Error("cannot update flake input '%s' in pure mode", inputPathS);
+ /* Note: in case of an --override-input, we use
+ the *original* ref (input2.ref) for the
+ "original" field, rather than the
+ override. This ensures that the override isn't
+ nuked the next time we update the lock
+ file. That is, overrides are sticky unless you
+ use --no-write-lock-file. */
+ auto ref = input2.ref ? *input2.ref : *input.ref;
+
if (input.isFlake) {
Path localPath = parentPath;
FlakeRef localRef = *input.ref;
@@ -524,15 +544,7 @@ LockedFlake lockFlake(
auto inputFlake = getFlake(state, localRef, useRegistries, flakeCache, inputPath);
- /* Note: in case of an --override-input, we use
- the *original* ref (input2.ref) for the
- "original" field, rather than the
- override. This ensures that the override isn't
- nuked the next time we update the lock
- file. That is, overrides are sticky unless you
- use --no-write-lock-file. */
- auto childNode = std::make_shared<LockedNode>(
- inputFlake.lockedRef, input2.ref ? *input2.ref : *input.ref);
+ auto childNode = std::make_shared<LockedNode>(inputFlake.lockedRef, ref);
node->inputs.insert_or_assign(id, childNode);
@@ -560,7 +572,7 @@ LockedFlake lockFlake(
auto [sourceInfo, resolvedRef, lockedRef] = fetchOrSubstituteTree(
state, *input.ref, useRegistries, flakeCache);
node->inputs.insert_or_assign(id,
- std::make_shared<LockedNode>(lockedRef, *input.ref, false));
+ std::make_shared<LockedNode>(lockedRef, ref, false));
}
}
diff --git a/src/libexpr/flake/flakeref.hh b/src/libexpr/flake/flakeref.hh
index a9182f4bf..fe4f67193 100644
--- a/src/libexpr/flake/flakeref.hh
+++ b/src/libexpr/flake/flakeref.hh
@@ -28,7 +28,7 @@ typedef std::string FlakeId;
* object that fetcher generates (usually via
* FlakeRef::fromAttrs(attrs) or parseFlakeRef(url) calls).
*
- * The actual fetch not have been performed yet (i.e. a FlakeRef may
+ * The actual fetch may not have been performed yet (i.e. a FlakeRef may
* be lazy), but the fetcher can be invoked at any time via the
* FlakeRef to ensure the store is populated with this input.
*/
diff --git a/src/libexpr/flake/lockfile.cc b/src/libexpr/flake/lockfile.cc
index 60b52d578..629d2e669 100644
--- a/src/libexpr/flake/lockfile.cc
+++ b/src/libexpr/flake/lockfile.cc
@@ -36,7 +36,7 @@ LockedNode::LockedNode(const nlohmann::json & json)
, isFlake(json.find("flake") != json.end() ? (bool) json["flake"] : true)
{
if (!lockedRef.input.isLocked())
- throw Error("lockfile contains mutable lock '%s'",
+ throw Error("lock file contains mutable lock '%s'",
fetchers::attrsToJSON(lockedRef.input.toAttrs()));
}
diff --git a/src/libexpr/parser.y b/src/libexpr/parser.y
index 8cbc2da4d..7c9b5a2db 100644
--- a/src/libexpr/parser.y
+++ b/src/libexpr/parser.y
@@ -520,6 +520,12 @@ path_start
$$ = new ExprPath(path);
}
| HPATH {
+ if (evalSettings.pureEval) {
+ throw Error(
+            "the path '%s' cannot be resolved in pure mode",
+ std::string_view($1.p, $1.l)
+ );
+ }
Path path(getHome() + std::string($1.p + 1, $1.l - 1));
$$ = new ExprPath(path);
}
diff --git a/src/libexpr/primops.cc b/src/libexpr/primops.cc
index eea274301..28b998474 100644
--- a/src/libexpr/primops.cc
+++ b/src/libexpr/primops.cc
@@ -851,6 +851,18 @@ static RegisterPrimOp primop_floor({
static void prim_tryEval(EvalState & state, const PosIdx pos, Value * * args, Value & v)
{
auto attrs = state.buildBindings(2);
+
+ /* increment state.trylevel, and decrement it when this function returns. */
+ MaintainCount trylevel(state.trylevel);
+
+ void (* savedDebugRepl)(ref<EvalState> es, const ValMap & extraEnv) = nullptr;
+ if (state.debugRepl && evalSettings.ignoreExceptionsDuringTry)
+ {
+        /* to prevent starting the repl from exceptions within a tryEval, null it. */
+ savedDebugRepl = state.debugRepl;
+ state.debugRepl = nullptr;
+ }
+
try {
state.forceValue(*args[0], pos);
attrs.insert(state.sValue, args[0]);
@@ -859,6 +871,11 @@ static void prim_tryEval(EvalState & state, const PosIdx pos, Value * * args, Va
attrs.alloc(state.sValue).mkBool(false);
attrs.alloc("success").mkBool(false);
}
+
+ // restore the debugRepl pointer if we saved it earlier.
+ if (savedDebugRepl)
+ state.debugRepl = savedDebugRepl;
+
v.mkAttrs(attrs);
}
@@ -970,6 +987,15 @@ static RegisterPrimOp primop_trace({
});
+/* Takes two arguments and evaluates to the second one. Used as the
+ * builtins.traceVerbose implementation when --trace-verbose is not enabled
+ */
+static void prim_second(EvalState & state, const PosIdx pos, Value * * args, Value & v)
+{
+ state.forceValue(*args[1], pos);
+ v = *args[1];
+}
+
/*************************************************************
* Derivations
*************************************************************/
@@ -2428,8 +2454,8 @@ static RegisterPrimOp primop_intersectAttrs({
.name = "__intersectAttrs",
.args = {"e1", "e2"},
.doc = R"(
- Return a set consisting of the attributes in the set *e2* that also
- exist in the set *e1*.
+ Return a set consisting of the attributes in the set *e2* which have the
+ same name as some attribute in *e1*.
)",
.fun = prim_intersectAttrs,
});
@@ -3926,6 +3952,18 @@ void EvalState::createBaseEnv()
addPrimOp("__exec", 1, prim_exec);
}
+ addPrimOp({
+ .fun = evalSettings.traceVerbose ? prim_trace : prim_second,
+ .arity = 2,
+ .name = "__traceVerbose",
+ .args = { "e1", "e2" },
+ .doc = R"(
+ Evaluate *e1* and print its abstract syntax representation on standard
+ error if `--trace-verbose` is enabled. Then return *e2*. This function
+ is useful for debugging.
+ )",
+ });
+
/* Add a value containing the current Nix expression search path. */
mkList(v, searchPath.size());
int n = 0;
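
Two details of the prim_tryEval hunk are easy to miss: MaintainCount keeps state.trylevel correct even if forcing the value throws, and the debugRepl pointer is temporarily nulled so that, with ignore-try set, a failure inside tryEval does not drop the user into the debugger. A compressed stand-alone sketch of the same idea, with a hand-rolled guard standing in for Nix's MaintainCount:

    #include <iostream>

    struct CountGuard {                         // roughly what MaintainCount does
        int & level;
        explicit CountGuard(int & l) : level(l) { ++level; }
        ~CountGuard() { --level; }              // runs even when an exception unwinds
    };

    int tryLevel = 0;
    void (*debugHook)() = [] { std::cout << "debugger would start here\n"; };

    bool tryEval(bool shouldThrow, bool ignoreInsideTry) {
        CountGuard guard(tryLevel);             // nested tryEval calls are counted
        auto saved = ignoreInsideTry ? debugHook : nullptr;
        if (saved) debugHook = nullptr;         // suppress the hook inside the try
        bool ok = true;
        try { if (shouldThrow) throw 1; }
        catch (...) { if (debugHook) debugHook(); ok = false; }
        if (saved) debugHook = saved;           // restore it afterwards
        return ok;
    }

    int main() {
        std::cout << tryEval(true, true) << "\n";   // 0, no debugger message
        std::cout << tryEval(true, false) << "\n";  // 0, after the debugger message
    }
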
diff --git a/src/libexpr/primops/fetchTree.cc b/src/libexpr/primops/fetchTree.cc
index e5eeea520..84e7f5c02 100644
--- a/src/libexpr/primops/fetchTree.cc
+++ b/src/libexpr/primops/fetchTree.cc
@@ -364,6 +364,10 @@ static RegisterPrimOp primop_fetchGit({
A Boolean parameter that specifies whether submodules should be
checked out. Defaults to `false`.
+ - shallow\
+ A Boolean parameter that specifies whether fetching a shallow clone
+ is allowed. Defaults to `false`.
+
- allRefs\
Whether to fetch all refs of the repository. With this argument being
true, it's possible to load a `rev` from *any* `ref` (by default only
diff --git a/src/libexpr/tests/primops.cc b/src/libexpr/tests/primops.cc
index f65b6593d..16cf66d2c 100644
--- a/src/libexpr/tests/primops.cc
+++ b/src/libexpr/tests/primops.cc
@@ -540,22 +540,22 @@ namespace nix {
ASSERT_THAT(v, IsStringEq(output));
}
-#define CASE(input, output) (std::make_tuple(std::string_view("builtins.toString " #input), std::string_view(output)))
+#define CASE(input, output) (std::make_tuple(std::string_view("builtins.toString " input), std::string_view(output)))
INSTANTIATE_TEST_SUITE_P(
toString,
ToStringPrimOpTest,
testing::Values(
- CASE("foo", "foo"),
- CASE(1, "1"),
- CASE([1 2 3], "1 2 3"),
- CASE(.123, "0.123000"),
- CASE(true, "1"),
- CASE(false, ""),
- CASE(null, ""),
- CASE({ v = "bar"; __toString = self: self.v; }, "bar"),
- CASE({ v = "bar"; __toString = self: self.v; outPath = "foo"; }, "bar"),
- CASE({ outPath = "foo"; }, "foo"),
- CASE(./test, "/test")
+ CASE(R"("foo")", "foo"),
+ CASE(R"(1)", "1"),
+ CASE(R"([1 2 3])", "1 2 3"),
+ CASE(R"(.123)", "0.123000"),
+ CASE(R"(true)", "1"),
+ CASE(R"(false)", ""),
+ CASE(R"(null)", ""),
+ CASE(R"({ v = "bar"; __toString = self: self.v; })", "bar"),
+ CASE(R"({ v = "bar"; __toString = self: self.v; outPath = "foo"; })", "bar"),
+ CASE(R"({ outPath = "foo"; })", "foo"),
+ CASE(R"(./test)", "/test")
)
);
#undef CASE
diff --git a/src/libexpr/value-to-json.cc b/src/libexpr/value-to-json.cc
index 03504db61..4d63d8b49 100644
--- a/src/libexpr/value-to-json.cc
+++ b/src/libexpr/value-to-json.cc
@@ -10,7 +10,7 @@
namespace nix {
void printValueAsJSON(EvalState & state, bool strict,
- Value & v, const PosIdx pos, JSONPlaceholder & out, PathSet & context)
+ Value & v, const PosIdx pos, JSONPlaceholder & out, PathSet & context, bool copyToStore)
{
checkInterrupt();
@@ -32,7 +32,10 @@ void printValueAsJSON(EvalState & state, bool strict,
break;
case nPath:
- out.write(state.copyPathToStore(context, v.path));
+ if (copyToStore)
+ out.write(state.copyPathToStore(context, v.path));
+ else
+ out.write(v.path);
break;
case nNull:
@@ -54,10 +57,10 @@ void printValueAsJSON(EvalState & state, bool strict,
for (auto & j : names) {
Attr & a(*v.attrs->find(state.symbols.create(j)));
auto placeholder(obj.placeholder(j));
- printValueAsJSON(state, strict, *a.value, a.pos, placeholder, context);
+ printValueAsJSON(state, strict, *a.value, a.pos, placeholder, context, copyToStore);
}
} else
- printValueAsJSON(state, strict, *i->value, i->pos, out, context);
+ printValueAsJSON(state, strict, *i->value, i->pos, out, context, copyToStore);
break;
}
@@ -65,13 +68,13 @@ void printValueAsJSON(EvalState & state, bool strict,
auto list(out.list());
for (auto elem : v.listItems()) {
auto placeholder(list.placeholder());
- printValueAsJSON(state, strict, *elem, pos, placeholder, context);
+ printValueAsJSON(state, strict, *elem, pos, placeholder, context, copyToStore);
}
break;
}
case nExternal:
- v.external->printValueAsJSON(state, strict, out, context);
+ v.external->printValueAsJSON(state, strict, out, context, copyToStore);
break;
case nFloat:
@@ -91,14 +94,14 @@ void printValueAsJSON(EvalState & state, bool strict,
}
void printValueAsJSON(EvalState & state, bool strict,
- Value & v, const PosIdx pos, std::ostream & str, PathSet & context)
+ Value & v, const PosIdx pos, std::ostream & str, PathSet & context, bool copyToStore)
{
JSONPlaceholder out(str);
- printValueAsJSON(state, strict, v, pos, out, context);
+ printValueAsJSON(state, strict, v, pos, out, context, copyToStore);
}
void ExternalValueBase::printValueAsJSON(EvalState & state, bool strict,
- JSONPlaceholder & out, PathSet & context) const
+ JSONPlaceholder & out, PathSet & context, bool copyToStore) const
{
state.debugThrowLastTrace(TypeError("cannot convert %1% to JSON", showType()));
}
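
The value-to-json.cc change is mechanical but worth seeing in isolation: a single copyToStore flag is added to the public entry points (defaulting to true) and handed to every recursive call, so path values can be emitted verbatim instead of being copied to the store first. A minimal sketch of that threading, with placeholder types rather than Nix's Value and JSONPlaceholder:

    #include <iostream>
    #include <string>
    #include <vector>

    struct Val {                                   // placeholder for nix::Value
        std::string path;                          // non-empty means "this is a path value"
        std::vector<Val> children;
    };

    void printAsJSON(const Val & v, std::ostream & out, bool copyToStore = true) {
        if (!v.path.empty())
            out << (copyToStore ? "<path copied to store>" : v.path) << "\n";
        for (auto & child : v.children)
            printAsJSON(child, out, copyToStore);  // the flag follows the recursion
    }

    int main() {
        Val root{"", {{"/etc/nixos", {}}}};
        printAsJSON(root, std::cout, false);       // prints the path verbatim
    }
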
diff --git a/src/libexpr/value-to-json.hh b/src/libexpr/value-to-json.hh
index c020a817a..7ddc8a5b1 100644
--- a/src/libexpr/value-to-json.hh
+++ b/src/libexpr/value-to-json.hh
@@ -11,9 +11,9 @@ namespace nix {
class JSONPlaceholder;
void printValueAsJSON(EvalState & state, bool strict,
- Value & v, const PosIdx pos, JSONPlaceholder & out, PathSet & context);
+ Value & v, const PosIdx pos, JSONPlaceholder & out, PathSet & context, bool copyToStore = true);
void printValueAsJSON(EvalState & state, bool strict,
- Value & v, const PosIdx pos, std::ostream & str, PathSet & context);
+ Value & v, const PosIdx pos, std::ostream & str, PathSet & context, bool copyToStore = true);
}
diff --git a/src/libexpr/value.hh b/src/libexpr/value.hh
index 2008df74d..590ba7783 100644
--- a/src/libexpr/value.hh
+++ b/src/libexpr/value.hh
@@ -99,7 +99,7 @@ class ExternalValueBase
/* Print the value as JSON. Defaults to unconvertable, i.e. throws an error */
virtual void printValueAsJSON(EvalState & state, bool strict,
- JSONPlaceholder & out, PathSet & context) const;
+ JSONPlaceholder & out, PathSet & context, bool copyToStore = true) const;
/* Print the value as XML. Defaults to unevaluated */
virtual void printValueAsXML(EvalState & state, bool strict, bool location,
diff --git a/src/libfetchers/fetch-settings.hh b/src/libfetchers/fetch-settings.hh
index 04c9feda0..6452143a1 100644
--- a/src/libfetchers/fetch-settings.hh
+++ b/src/libfetchers/fetch-settings.hh
@@ -70,7 +70,7 @@ struct FetchSettings : public Config
Setting<bool> warnDirty{this, true, "warn-dirty",
"Whether to warn about dirty Git/Mercurial trees."};
- Setting<std::string> flakeRegistry{this, "https://github.com/NixOS/flake-registry/raw/master/flake-registry.json", "flake-registry",
+ Setting<std::string> flakeRegistry{this, "https://channels.nixos.org/flake-registry.json", "flake-registry",
"Path or URI of the global flake registry."};
Setting<bool> useRegistries{this, true, "use-registries",
diff --git a/src/libfetchers/git.cc b/src/libfetchers/git.cc
index 35fdf807a..c1a21e764 100644
--- a/src/libfetchers/git.cc
+++ b/src/libfetchers/git.cc
@@ -370,7 +370,7 @@ struct GitInputScheme : InputScheme
auto gitDir = ".git";
runProgram("git", true,
- { "-C", *sourcePath, "--git-dir", gitDir, "add", "--force", "--intent-to-add", "--", std::string(file) });
+ { "-C", *sourcePath, "--git-dir", gitDir, "add", "--intent-to-add", "--", std::string(file) });
if (commitMsg)
runProgram("git", true,
@@ -574,7 +574,7 @@ struct GitInputScheme : InputScheme
bool isShallow = chomp(runProgram("git", true, { "-C", repoDir, "--git-dir", gitDir, "rev-parse", "--is-shallow-repository" })) == "true";
if (isShallow && !shallow)
- throw Error("'%s' is a shallow Git repository, but a non-shallow repository is needed", actualUrl);
+ throw Error("'%s' is a shallow Git repository, but shallow repositories are only allowed when `shallow = true;` is specified.", actualUrl);
// FIXME: check whether rev is an ancestor of ref.
diff --git a/src/libfetchers/github.cc b/src/libfetchers/github.cc
index 0631fb6e8..a491d82a6 100644
--- a/src/libfetchers/github.cc
+++ b/src/libfetchers/github.cc
@@ -381,7 +381,7 @@ struct SourceHutInputScheme : GitArchiveInputScheme
Headers headers = makeHeadersWithAuthTokens(host);
- std::string ref_uri;
+ std::string refUri;
if (ref == "HEAD") {
auto file = store->toRealPath(
downloadFile(store, fmt("%s/HEAD", base_url), "source", false, headers).storePath);
@@ -393,10 +393,11 @@ struct SourceHutInputScheme : GitArchiveInputScheme
if (!remoteLine) {
throw BadURL("in '%d', couldn't resolve HEAD ref '%d'", input.to_string(), ref);
}
- ref_uri = remoteLine->target;
+ refUri = remoteLine->target;
} else {
- ref_uri = fmt("refs/(heads|tags)/%s", ref);
+ refUri = fmt("refs/(heads|tags)/%s", ref);
}
+ std::regex refRegex(refUri);
auto file = store->toRealPath(
downloadFile(store, fmt("%s/info/refs", base_url), "source", false, headers).storePath);
@@ -406,7 +407,7 @@ struct SourceHutInputScheme : GitArchiveInputScheme
std::optional<std::string> id;
while(!id && getline(is, line)) {
auto parsedLine = git::parseLsRemoteLine(line);
- if (parsedLine && parsedLine->reference == ref_uri)
+ if (parsedLine && parsedLine->reference && std::regex_match(*parsedLine->reference, refRegex))
id = parsedLine->target;
}
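
The SourceHutInputScheme change above stops comparing the ls-remote reference for exact equality and instead matches it against a regex such as refs/(heads|tags)/<ref>, so a bare name resolves whether it is a branch or a tag. A small stand-alone illustration of that matching; the ref and the candidate lines are made up:

    #include <iostream>
    #include <regex>
    #include <string>
    #include <vector>

    int main() {
        std::string ref = "v1.2";
        std::regex refRegex("refs/(heads|tags)/" + ref);     // built as in the hunk above

        std::vector<std::string> candidates = {"refs/heads/main", "refs/tags/v1.2"};
        for (auto & c : candidates)
            if (std::regex_match(c, refRegex))
                std::cout << "resolved to " << c << "\n";    // refs/tags/v1.2
    }
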
diff --git a/src/libmain/loggers.cc b/src/libmain/loggers.cc
index cdf23859b..cda5cb939 100644
--- a/src/libmain/loggers.cc
+++ b/src/libmain/loggers.cc
@@ -30,8 +30,11 @@ Logger * makeDefaultLogger() {
return makeJSONLogger(*makeSimpleLogger(true));
case LogFormat::bar:
return makeProgressBar();
- case LogFormat::barWithLogs:
- return makeProgressBar(true);
+ case LogFormat::barWithLogs: {
+ auto logger = makeProgressBar();
+ logger->setPrintBuildLogs(true);
+ return logger;
+ }
default:
abort();
}
diff --git a/src/libmain/progress-bar.cc b/src/libmain/progress-bar.cc
index f4306ab91..0bbeaff8d 100644
--- a/src/libmain/progress-bar.cc
+++ b/src/libmain/progress-bar.cc
@@ -8,6 +8,7 @@
#include <map>
#include <thread>
#include <iostream>
+#include <chrono>
namespace nix {
@@ -48,6 +49,7 @@ private:
bool visible = true;
ActivityId parent;
std::optional<std::string> name;
+ std::chrono::time_point<std::chrono::steady_clock> startTime;
};
struct ActivitiesByType
@@ -79,22 +81,22 @@ private:
std::condition_variable quitCV, updateCV;
- bool printBuildLogs;
+ bool printBuildLogs = false;
bool isTTY;
public:
- ProgressBar(bool printBuildLogs, bool isTTY)
- : printBuildLogs(printBuildLogs)
- , isTTY(isTTY)
+ ProgressBar(bool isTTY)
+ : isTTY(isTTY)
{
state_.lock()->active = isTTY;
updateThread = std::thread([&]() {
auto state(state_.lock());
+ auto nextWakeup = std::chrono::milliseconds::max();
while (state->active) {
if (!state->haveUpdate)
- state.wait(updateCV);
- draw(*state);
+ state.wait_for(updateCV, nextWakeup);
+ nextWakeup = draw(*state);
state.wait_for(quitCV, std::chrono::milliseconds(50));
}
});
@@ -118,7 +120,8 @@ public:
updateThread.join();
}
- bool isVerbose() override {
+ bool isVerbose() override
+ {
return printBuildLogs;
}
@@ -159,11 +162,13 @@ public:
if (lvl <= verbosity && !s.empty() && type != actBuildWaiting)
log(*state, lvl, s + "...");
- state->activities.emplace_back(ActInfo());
+ state->activities.emplace_back(ActInfo {
+ .s = s,
+ .type = type,
+ .parent = parent,
+ .startTime = std::chrono::steady_clock::now()
+ });
auto i = std::prev(state->activities.end());
- i->s = s;
- i->type = type;
- i->parent = parent;
state->its.emplace(act, i);
state->activitiesByType[type].its.emplace(act, i);
@@ -327,10 +332,12 @@ public:
updateCV.notify_one();
}
- void draw(State & state)
+ std::chrono::milliseconds draw(State & state)
{
+ auto nextWakeup = std::chrono::milliseconds::max();
+
state.haveUpdate = false;
- if (!state.active) return;
+ if (!state.active) return nextWakeup;
std::string line;
@@ -341,12 +348,25 @@ public:
line += "]";
}
+ auto now = std::chrono::steady_clock::now();
+
if (!state.activities.empty()) {
if (!status.empty()) line += " ";
auto i = state.activities.rbegin();
- while (i != state.activities.rend() && (!i->visible || (i->s.empty() && i->lastLine.empty())))
+ while (i != state.activities.rend()) {
+ if (i->visible && (!i->s.empty() || !i->lastLine.empty())) {
+ /* Don't show activities until some time has
+ passed, to avoid displaying very short
+ activities. */
+ auto delay = std::chrono::milliseconds(10);
+ if (i->startTime + delay < now)
+ break;
+ else
+ nextWakeup = std::min(nextWakeup, std::chrono::duration_cast<std::chrono::milliseconds>(delay - (now - i->startTime)));
+ }
++i;
+ }
if (i != state.activities.rend()) {
line += i->s;
@@ -366,6 +386,8 @@ public:
if (width <= 0) width = std::numeric_limits<decltype(width)>::max();
writeToStderr("\r" + filterANSIEscapes(line, false, width) + ANSI_NORMAL + "\e[K");
+
+ return nextWakeup;
}
std::string getStatus(State & state)
@@ -480,19 +502,21 @@ public:
draw(*state);
return s[0];
}
+
+ virtual void setPrintBuildLogs(bool printBuildLogs)
+ {
+ this->printBuildLogs = printBuildLogs;
+ }
};
-Logger * makeProgressBar(bool printBuildLogs)
+Logger * makeProgressBar()
{
- return new ProgressBar(
- printBuildLogs,
- shouldANSI()
- );
+ return new ProgressBar(shouldANSI());
}
-void startProgressBar(bool printBuildLogs)
+void startProgressBar()
{
- logger = makeProgressBar(printBuildLogs);
+ logger = makeProgressBar();
}
void stopProgressBar()
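
The progress-bar.cc rework makes draw() report how long the update thread may sleep before a still-hidden activity becomes old enough to display, and the thread waits on the condition variable with that timeout instead of blocking indefinitely. A simplified sketch of that scheduling loop; Nix wraps the state in its own lock type and uses milliseconds::max(), while this sketch uses a plain mutex and a bounded default timeout:

    #include <chrono>
    #include <condition_variable>
    #include <iostream>
    #include <mutex>
    #include <thread>

    std::mutex m;
    std::condition_variable updateCV;
    bool haveUpdate = false;
    bool active = true;

    // Render the bar; report how soon a redraw is needed even without new updates.
    std::chrono::milliseconds draw() {
        std::cout << "draw\n";
        return std::chrono::milliseconds(10);      // e.g. an activity becomes visible in 10 ms
    }

    void updateLoop() {
        std::unique_lock<std::mutex> lock(m);
        std::chrono::milliseconds nextWakeup = std::chrono::hours(1);   // bounded stand-in for max()
        while (active) {
            if (!haveUpdate)
                updateCV.wait_for(lock, nextWakeup);   // woken by updates or by the redraw deadline
            haveUpdate = false;
            nextWakeup = draw();
        }
    }

    int main() {
        std::thread t(updateLoop);
        { std::lock_guard<std::mutex> g(m); haveUpdate = true; }   // simulate an activity update
        updateCV.notify_one();
        std::this_thread::sleep_for(std::chrono::milliseconds(50));
        { std::lock_guard<std::mutex> g(m); active = false; }
        updateCV.notify_one();
        t.join();
    }
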
diff --git a/src/libmain/progress-bar.hh b/src/libmain/progress-bar.hh
index 7f0dafecf..3a76f8448 100644
--- a/src/libmain/progress-bar.hh
+++ b/src/libmain/progress-bar.hh
@@ -4,9 +4,9 @@
namespace nix {
-Logger * makeProgressBar(bool printBuildLogs = false);
+Logger * makeProgressBar();
-void startProgressBar(bool printBuildLogs = false);
+void startProgressBar();
void stopProgressBar();
diff --git a/src/libmain/shared.cc b/src/libmain/shared.cc
index 31454e49d..c1cf38565 100644
--- a/src/libmain/shared.cc
+++ b/src/libmain/shared.cc
@@ -4,6 +4,7 @@
#include "gc-store.hh"
#include "util.hh"
#include "loggers.hh"
+#include "progress-bar.hh"
#include <algorithm>
#include <cctype>
@@ -181,8 +182,9 @@ void initNix()
/* Reset SIGCHLD to its default. */
struct sigaction act;
sigemptyset(&act.sa_mask);
- act.sa_handler = SIG_DFL;
act.sa_flags = 0;
+
+ act.sa_handler = SIG_DFL;
if (sigaction(SIGCHLD, &act, 0))
throw SysError("resetting SIGCHLD");
@@ -194,9 +196,20 @@ void initNix()
     /* HACK: on darwin, we can’t use sigprocmask with SIGWINCH.
* Instead, add a dummy sigaction handler, and signalHandlerThread
* can handle the rest. */
- struct sigaction sa;
- sa.sa_handler = sigHandler;
- if (sigaction(SIGWINCH, &sa, 0)) throw SysError("handling SIGWINCH");
+ act.sa_handler = sigHandler;
+ if (sigaction(SIGWINCH, &act, 0)) throw SysError("handling SIGWINCH");
+
+ /* Disable SA_RESTART for interrupts, so that system calls on this thread
+ * error with EINTR like they do on Linux.
+ * Most signals on BSD systems default to SA_RESTART on, but Nix
+ * expects EINTR from syscalls to properly exit. */
+ act.sa_handler = SIG_DFL;
+ if (sigaction(SIGINT, &act, 0)) throw SysError("handling SIGINT");
+ if (sigaction(SIGTERM, &act, 0)) throw SysError("handling SIGTERM");
+ if (sigaction(SIGHUP, &act, 0)) throw SysError("handling SIGHUP");
+ if (sigaction(SIGPIPE, &act, 0)) throw SysError("handling SIGPIPE");
+ if (sigaction(SIGQUIT, &act, 0)) throw SysError("handling SIGQUIT");
+ if (sigaction(SIGTRAP, &act, 0)) throw SysError("handling SIGTRAP");
#endif
/* Register a SIGSEGV handler to detect stack overflows. */
@@ -410,6 +423,8 @@ RunPager::RunPager()
if (!pager) pager = getenv("PAGER");
if (pager && ((std::string) pager == "" || (std::string) pager == "cat")) return;
+ stopProgressBar();
+
Pipe toPager;
toPager.create();
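
The initNix() hunk above deliberately reuses one sigaction with sa_flags = 0 for SIGINT, SIGTERM and friends on Darwin, so that blocking system calls fail with EINTR instead of being transparently restarted (as the diff's comment notes, many signals default to SA_RESTART on BSD systems). A stand-alone illustration of the difference; run it and press Ctrl-C while it waits on stdin:

    #include <cerrno>
    #include <csignal>
    #include <cstdio>
    #include <unistd.h>

    static void onSignal(int) { /* its only job is to interrupt the blocking syscall */ }

    int main() {
        struct sigaction act {};
        sigemptyset(&act.sa_mask);
        act.sa_handler = onSignal;
        act.sa_flags = 0;                          // no SA_RESTART, as in the hunk above
        sigaction(SIGINT, &act, nullptr);

        char buf[1];
        if (read(STDIN_FILENO, buf, 1) == -1 && errno == EINTR)
            fprintf(stderr, "read() returned EINTR, so the caller can react\n");
    }
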
diff --git a/src/libstore/build/derivation-goal.cc b/src/libstore/build/derivation-goal.cc
index 3fff2385f..41d2e2a1c 100644
--- a/src/libstore/build/derivation-goal.cc
+++ b/src/libstore/build/derivation-goal.cc
@@ -344,7 +344,7 @@ void DerivationGoal::gaveUpOnSubstitution()
for (auto & i : dynamic_cast<Derivation *>(drv.get())->inputDrvs) {
/* Ensure that pure, non-fixed-output derivations don't
depend on impure derivations. */
- if (drv->type().isPure() && !drv->type().isFixed()) {
+ if (settings.isExperimentalFeatureEnabled(Xp::ImpureDerivations) && drv->type().isPure() && !drv->type().isFixed()) {
auto inputDrv = worker.evalStore.readDerivation(i.first);
if (!inputDrv.type().isPure())
throw Error("pure derivation '%s' depends on impure derivation '%s'",
@@ -705,8 +705,7 @@ static void movePath(const Path & src, const Path & dst)
if (changePerm)
chmod_(src, st.st_mode | S_IWUSR);
- if (rename(src.c_str(), dst.c_str()))
- throw SysError("renaming '%1%' to '%2%'", src, dst);
+ renameFile(src, dst);
if (changePerm)
chmod_(dst, st.st_mode);
@@ -914,12 +913,6 @@ void DerivationGoal::buildDone()
outputPaths
);
- if (buildMode == bmCheck) {
- cleanupPostOutputsRegisteredModeCheck();
- done(BuildResult::Built, std::move(builtOutputs));
- return;
- }
-
cleanupPostOutputsRegisteredModeNonCheck();
/* Repeat the build if necessary. */
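
Both movePath() bodies in this commit swap an inline rename()-plus-SysError pair for a renameFile() helper, which is presumably provided by Nix's util code. A stand-alone approximation of what such a wrapper looks like (not necessarily Nix's exact implementation, which may also need to cope with cross-filesystem moves):

    #include <cstdio>
    #include <iostream>
    #include <stdexcept>
    #include <string>

    // Wrap std::rename so call sites don't repeat the error handling that the
    // removed inline rename() calls carried.
    void renameFile(const std::string & src, const std::string & dst)
    {
        if (std::rename(src.c_str(), dst.c_str()) != 0)
            throw std::runtime_error("renaming '" + src + "' to '" + dst + "'");
    }

    int main()
    {
        try { renameFile("/nonexistent/a", "/nonexistent/b"); }
        catch (const std::exception & e) { std::cout << e.what() << "\n"; }
    }
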
diff --git a/src/libstore/build/hook-instance.cc b/src/libstore/build/hook-instance.cc
index 0f6f580be..1f19ddccc 100644
--- a/src/libstore/build/hook-instance.cc
+++ b/src/libstore/build/hook-instance.cc
@@ -7,6 +7,22 @@ HookInstance::HookInstance()
{
debug("starting build hook '%s'", settings.buildHook);
+ auto buildHookArgs = tokenizeString<std::list<std::string>>(settings.buildHook.get());
+
+ if (buildHookArgs.empty())
+ throw Error("'build-hook' setting is empty");
+
+ auto buildHook = buildHookArgs.front();
+ buildHookArgs.pop_front();
+
+ Strings args;
+
+ for (auto & arg : buildHookArgs)
+ args.push_back(arg);
+
+ args.push_back(std::string(baseNameOf(settings.buildHook.get())));
+ args.push_back(std::to_string(verbosity));
+
/* Create a pipe to get the output of the child. */
fromHook.create();
@@ -36,14 +52,9 @@ HookInstance::HookInstance()
if (dup2(builderOut.readSide.get(), 5) == -1)
throw SysError("dupping builder's stdout/stderr");
- Strings args = {
- std::string(baseNameOf(settings.buildHook.get())),
- std::to_string(verbosity),
- };
-
- execv(settings.buildHook.get().c_str(), stringsToCharPtrs(args).data());
+ execv(buildHook.c_str(), stringsToCharPtrs(args).data());
- throw SysError("executing '%s'", settings.buildHook);
+ throw SysError("executing '%s'", buildHook);
});
pid.setSeparatePG(true);
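
The hook-instance change turns the build-hook setting from a single program path into a whitespace-separated command line: the first word is the executable, the remaining words become extra arguments. A rough, simplified sketch of that split-and-exec pattern (not the exact argv layout Nix builds, which appends the hook's base name and the verbosity last, and uses its own tokenizeString helper):

    #include <list>
    #include <sstream>
    #include <string>
    #include <vector>
    #include <cstdio>
    #include <unistd.h>

    int main()
    {
        std::string setting = "/run/current-system/sw/bin/nix __build-remote"; // example value
        std::list<std::string> words;
        std::istringstream in(setting);
        for (std::string w; in >> w; ) words.push_back(w);   // roughly tokenizeString

        if (words.empty()) return 1;                          // "'build-hook' setting is empty"
        std::string program = words.front();
        words.pop_front();

        std::vector<char *> argv;
        argv.push_back(const_cast<char *>(program.c_str())); // argv[0]
        for (auto & w : words) argv.push_back(const_cast<char *>(w.c_str()));
        argv.push_back(nullptr);

        execv(program.c_str(), argv.data());
        perror("execv");                                      // only reached on failure
        return 1;
    }
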
diff --git a/src/libstore/build/local-derivation-goal.cc b/src/libstore/build/local-derivation-goal.cc
index 3ac9c20f9..18b682e13 100644
--- a/src/libstore/build/local-derivation-goal.cc
+++ b/src/libstore/build/local-derivation-goal.cc
@@ -223,8 +223,7 @@ static void movePath(const Path & src, const Path & dst)
if (changePerm)
chmod_(src, st.st_mode | S_IWUSR);
- if (rename(src.c_str(), dst.c_str()))
- throw SysError("renaming '%1%' to '%2%'", src, dst);
+ renameFile(src, dst);
if (changePerm)
chmod_(dst, st.st_mode);
@@ -311,7 +310,7 @@ bool LocalDerivationGoal::cleanupDecideWhetherDiskFull()
if (buildMode != bmCheck && status.known->isValid()) continue;
auto p = worker.store.printStorePath(status.known->path);
if (pathExists(chrootRootDir + p))
- rename((chrootRootDir + p).c_str(), p.c_str());
+ renameFile((chrootRootDir + p), p);
}
return diskFull;
@@ -845,18 +844,43 @@ void LocalDerivationGoal::startBuilder()
/* Some distros patch Linux to not allow unprivileged
* user namespaces. If we get EPERM or EINVAL, try
* without CLONE_NEWUSER and see if that works.
+ * Details: https://salsa.debian.org/kernel-team/linux/-/commit/d98e00eda6bea437e39b9e80444eee84a32438a6
*/
usingUserNamespace = false;
flags &= ~CLONE_NEWUSER;
child = clone(childEntry, stack + stackSize, flags, this);
}
- /* Otherwise exit with EPERM so we can handle this in the
- parent. This is only done when sandbox-fallback is set
- to true (the default). */
- if (child == -1 && (errno == EPERM || errno == EINVAL) && settings.sandboxFallback)
- _exit(1);
- if (child == -1) throw SysError("cloning builder process");
-
+ if (child == -1) {
+ switch(errno) {
+ case EPERM:
+ case EINVAL: {
+ int errno_ = errno;
+ if (!userNamespacesEnabled && errno==EPERM)
+ notice("user namespaces appear to be disabled; they are required for sandboxing; check /proc/sys/user/max_user_namespaces");
+ if (userNamespacesEnabled) {
+ Path procSysKernelUnprivilegedUsernsClone = "/proc/sys/kernel/unprivileged_userns_clone";
+ if (pathExists(procSysKernelUnprivilegedUsernsClone)
+ && trim(readFile(procSysKernelUnprivilegedUsernsClone)) == "0") {
+ notice("user namespaces appear to be disabled; they are required for sandboxing; check /proc/sys/kernel/unprivileged_userns_clone");
+ }
+ }
+ Path procSelfNsUser = "/proc/self/ns/user";
+ if (!pathExists(procSelfNsUser))
+ notice("/proc/self/ns/user does not exist; your kernel was likely built without CONFIG_USER_NS=y, which is required for sandboxing");
+ /* Otherwise exit with EPERM so we can handle this in the
+ parent. This is only done when sandbox-fallback is set
+ to true (the default). */
+ if (settings.sandboxFallback)
+ _exit(1);
+ /* Mention sandbox-fallback in the error message so the user
+ knows that having it disabled contributed to the
+ unrecoverability of this failure */
+ throw SysError(errno_, "creating sandboxed builder process using clone(), without sandbox-fallback");
+ }
+ default:
+ throw SysError("creating sandboxed builder process using clone()");
+ }
+ }
writeFull(builderOut.writeSide.get(),
fmt("%d %d\n", usingUserNamespace, child));
_exit(0);
@@ -1717,7 +1741,19 @@ void LocalDerivationGoal::runChild()
for (auto & i : dirsInChroot) {
if (i.second.source == "/proc") continue; // backwards compatibility
- doBind(i.second.source, chrootRootDir + i.first, i.second.optional);
+
+ #if HAVE_EMBEDDED_SANDBOX_SHELL
+ if (i.second.source == "__embedded_sandbox_shell__") {
+ static unsigned char sh[] = {
+ #include "embedded-sandbox-shell.gen.hh"
+ };
+ auto dst = chrootRootDir + i.first;
+ createDirs(dirOf(dst));
+ writeFile(dst, std::string_view((const char *) sh, sizeof(sh)));
+ chmod_(dst, 0555);
+ } else
+ #endif
+ doBind(i.second.source, chrootRootDir + i.first, i.second.optional);
}
/* Bind a new instance of procfs on /proc. */
@@ -2338,10 +2374,8 @@ DrvOutputs LocalDerivationGoal::registerOutputs()
if (*scratchPath != finalPath) {
// Also rewrite the output path
auto source = sinkToSource([&](Sink & nextSink) {
- StringSink sink;
- dumpPath(actualPath, sink);
RewritingSink rsink2(oldHashPart, std::string(finalPath.hashPart()), nextSink);
- rsink2(sink.s);
+ dumpPath(actualPath, rsink2);
rsink2.flush();
});
Path tmpPath = actualPath + ".tmp";
@@ -2588,8 +2622,7 @@ DrvOutputs LocalDerivationGoal::registerOutputs()
Path prev = path + checkSuffix;
deletePath(prev);
Path dst = path + checkSuffix;
- if (rename(path.c_str(), dst.c_str()))
- throw SysError("renaming '%s' to '%s'", path, dst);
+ renameFile(path, dst);
}
}
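
The startBuilder() hunk retries clone() without CLONE_NEWUSER when unprivileged user namespaces are unavailable (EPERM or EINVAL), and only afterwards decides between the sandbox-fallback path and a hard failure with diagnostics. A condensed, Linux-only sketch of that retry shape (the real code also probes /proc and writes the result back to the parent):

    #ifndef _GNU_SOURCE
    #define _GNU_SOURCE   // for clone()
    #endif
    #include <sched.h>
    #include <cerrno>
    #include <cstdio>
    #include <cstdlib>
    #include <sys/wait.h>
    #include <unistd.h>

    static int childEntry(void *) { _exit(0); }

    int main()
    {
        const size_t stackSize = 1024 * 1024;
        char * stack = (char *) malloc(stackSize);
        if (!stack) return 1;

        int flags = CLONE_NEWUSER | SIGCHLD;
        pid_t child = clone(childEntry, stack + stackSize, flags, nullptr);
        if (child == -1 && (errno == EPERM || errno == EINVAL)) {
            // User namespaces are disabled or unsupported: retry without them,
            // mirroring the fallback in startBuilder().
            flags &= ~CLONE_NEWUSER;
            child = clone(childEntry, stack + stackSize, flags, nullptr);
        }
        if (child == -1) { perror("clone"); free(stack); return 1; }
        waitpid(child, nullptr, 0);
        free(stack);
        return 0;
    }
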
diff --git a/src/libstore/build/substitution-goal.cc b/src/libstore/build/substitution-goal.cc
index ca5218627..2af105b4d 100644
--- a/src/libstore/build/substitution-goal.cc
+++ b/src/libstore/build/substitution-goal.cc
@@ -154,7 +154,7 @@ void PathSubstitutionGoal::tryNext()
only after we've downloaded the path. */
if (!sub->isTrusted && worker.store.pathInfoIsUntrusted(*info))
{
- warn("the substitute for '%s' from '%s' is not signed by any of the keys in 'trusted-public-keys'",
+ warn("ignoring substitute for '%s' from '%s', as it's not signed by any of the keys in 'trusted-public-keys'",
worker.store.printStorePath(storePath), sub->getUri());
tryNext();
return;
diff --git a/src/libstore/builtins/unpack-channel.cc b/src/libstore/builtins/unpack-channel.cc
index 426d58a53..ba04bb16c 100644
--- a/src/libstore/builtins/unpack-channel.cc
+++ b/src/libstore/builtins/unpack-channel.cc
@@ -22,8 +22,7 @@ void builtinUnpackChannel(const BasicDerivation & drv)
auto entries = readDirectory(out);
if (entries.size() != 1)
throw Error("channel tarball '%s' contains more than one file", src);
- if (rename((out + "/" + entries[0].name).c_str(), (out + "/" + channelName).c_str()) == -1)
- throw SysError("renaming channel directory");
+ renameFile((out + "/" + entries[0].name), (out + "/" + channelName));
}
}
diff --git a/src/libstore/daemon.cc b/src/libstore/daemon.cc
index de69b50ee..48dd5c247 100644
--- a/src/libstore/daemon.cc
+++ b/src/libstore/daemon.cc
@@ -239,6 +239,8 @@ struct ClientSettings
else if (trusted
|| name == settings.buildTimeout.name
|| name == settings.buildRepeat.name
+ || name == settings.maxSilentTime.name
+ || name == settings.pollInterval.name
|| name == "connect-timeout"
|| (name == "builders" && value == ""))
settings.set(name, value);
diff --git a/src/libstore/filetransfer.cc b/src/libstore/filetransfer.cc
index 8454ad7d2..5746c32a3 100644
--- a/src/libstore/filetransfer.cc
+++ b/src/libstore/filetransfer.cc
@@ -308,6 +308,9 @@ struct curlFileTransfer : public FileTransfer
curl_easy_setopt(req, CURLOPT_HTTPHEADER, requestHeaders);
+ if (settings.downloadSpeed.get() > 0)
+ curl_easy_setopt(req, CURLOPT_MAX_RECV_SPEED_LARGE, (curl_off_t) (settings.downloadSpeed.get() * 1024));
+
if (request.head)
curl_easy_setopt(req, CURLOPT_NOBODY, 1);
@@ -319,7 +322,6 @@ struct curlFileTransfer : public FileTransfer
}
if (request.verifyTLS) {
- debug("verify TLS: Nix CA file = '%s'", settings.caFile);
if (settings.caFile != "")
curl_easy_setopt(req, CURLOPT_CAINFO, settings.caFile.c_str());
} else {
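
The new download-speed setting maps directly onto libcurl's receive-rate cap: CURLOPT_MAX_RECV_SPEED_LARGE takes a curl_off_t in bytes per second, hence the multiplication by 1024 above. A minimal standalone illustration (error handling elided):

    #include <curl/curl.h>

    int main()
    {
        curl_global_init(CURL_GLOBAL_DEFAULT);
        CURL * req = curl_easy_init();
        if (!req) return 1;

        curl_easy_setopt(req, CURLOPT_URL, "https://cache.nixos.org/nix-cache-info");

        long kibPerSec = 512;   // what a user would put in download-speed
        curl_easy_setopt(req, CURLOPT_MAX_RECV_SPEED_LARGE,
            (curl_off_t) (kibPerSec * 1024));   // libcurl wants bytes/second

        CURLcode res = curl_easy_perform(req);
        curl_easy_cleanup(req);
        curl_global_cleanup();
        return res == CURLE_OK ? 0 : 1;
    }
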
diff --git a/src/libstore/gc.cc b/src/libstore/gc.cc
index f65fb1b2e..9ef8972f3 100644
--- a/src/libstore/gc.cc
+++ b/src/libstore/gc.cc
@@ -39,9 +39,7 @@ static void makeSymlink(const Path & link, const Path & target)
createSymlink(target, tempLink);
/* Atomically replace the old one. */
- if (rename(tempLink.c_str(), link.c_str()) == -1)
- throw SysError("cannot rename '%1%' to '%2%'",
- tempLink , link);
+ renameFile(tempLink, link);
}
@@ -135,6 +133,7 @@ void LocalStore::addTempRoot(const StorePath & path)
state->fdRootsSocket.close();
goto restart;
}
+ throw;
}
}
@@ -153,6 +152,7 @@ void LocalStore::addTempRoot(const StorePath & path)
state->fdRootsSocket.close();
goto restart;
}
+ throw;
} catch (EndOfFile & e) {
debug("GC socket disconnected");
state->fdRootsSocket.close();
@@ -619,6 +619,17 @@ void LocalStore::collectGarbage(const GCOptions & options, GCResults & results)
Path path = storeDir + "/" + std::string(baseName);
Path realPath = realStoreDir + "/" + std::string(baseName);
+ /* There may be temp directories in the store that are still in use
+ by another process. We need to be sure that we can acquire an
+ exclusive lock before deleting them. */
+ if (baseName.find("tmp-", 0) == 0) {
+ AutoCloseFD tmpDirFd = open(realPath.c_str(), O_RDONLY | O_DIRECTORY);
+ if (tmpDirFd.get() == -1 || !lockFile(tmpDirFd.get(), ltWrite, false)) {
+ debug("skipping locked tempdir '%s'", realPath);
+ return;
+ }
+ }
+
printInfo("deleting '%1%'", path);
results.paths.insert(path);
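
The GC now refuses to delete a tmp-* directory unless it can take a write lock on it, which pairs with the lock that the reworked createTempDirInStore() (later in this diff) holds while a path is being added. The probe is essentially "open the directory, try a non-blocking exclusive lock, skip on failure"; here is a sketch using plain flock() rather than Nix's lockFile wrapper (which is fcntl-based):

    #include <fcntl.h>
    #include <sys/file.h>
    #include <unistd.h>
    #include <string>
    #include <cstdio>

    // Returns true if the directory is not locked by anyone else and may be deleted.
    static bool canDeleteTempDir(const std::string & realPath)
    {
        int fd = open(realPath.c_str(), O_RDONLY | O_DIRECTORY);
        if (fd == -1) return false;                        // vanished or unreadable: skip it
        bool unlocked = flock(fd, LOCK_EX | LOCK_NB) == 0; // non-blocking, like lockFile(..., false)
        close(fd);                                          // also drops the lock we just took
        return unlocked;
    }

    int main()
    {
        printf("%s\n", canDeleteTempDir("/tmp") ? "deletable" : "skipping locked tempdir");
        return 0;
    }
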
diff --git a/src/libstore/globals.cc b/src/libstore/globals.cc
index cc009a026..d724897bb 100644
--- a/src/libstore/globals.cc
+++ b/src/libstore/globals.cc
@@ -36,7 +36,6 @@ Settings::Settings()
, nixStateDir(canonPath(getEnv("NIX_STATE_DIR").value_or(NIX_STATE_DIR)))
, nixConfDir(canonPath(getEnv("NIX_CONF_DIR").value_or(NIX_CONF_DIR)))
, nixUserConfFiles(getUserConfigFiles())
- , nixLibexecDir(canonPath(getEnv("NIX_LIBEXEC_DIR").value_or(NIX_LIBEXEC_DIR)))
, nixBinDir(canonPath(getEnv("NIX_BIN_DIR").value_or(NIX_BIN_DIR)))
, nixManDir(canonPath(NIX_MAN_DIR))
, nixDaemonSocketFile(canonPath(getEnv("NIX_DAEMON_SOCKET_PATH").value_or(nixStateDir + DEFAULT_SOCKET_PATH)))
@@ -67,12 +66,13 @@ Settings::Settings()
sandboxPaths = tokenizeString<StringSet>("/bin/sh=" SANDBOX_SHELL);
#endif
-
-/* chroot-like behavior from Apple's sandbox */
+ /* chroot-like behavior from Apple's sandbox */
#if __APPLE__
sandboxPaths = tokenizeString<StringSet>("/System/Library/Frameworks /System/Library/PrivateFrameworks /bin/sh /bin/bash /private/tmp /private/var/tmp /usr/lib");
allowedImpureHostPrefixes = tokenizeString<StringSet>("/System/Library /usr/lib /dev /bin/sh");
#endif
+
+ buildHook = getSelfExe().value_or("nix") + " __build-remote";
}
void loadConfFile()
@@ -114,7 +114,13 @@ std::vector<Path> getUserConfigFiles()
unsigned int Settings::getDefaultCores()
{
- return std::max(1U, std::thread::hardware_concurrency());
+ const unsigned int concurrency = std::max(1U, std::thread::hardware_concurrency());
+ const unsigned int maxCPU = getMaxCPU();
+
+ if (maxCPU > 0)
+ return maxCPU;
+ else
+ return concurrency;
}
StringSet Settings::getDefaultSystemFeatures()
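
getDefaultCores() now prefers an effective CPU limit (from getMaxCPU()) over raw hardware concurrency, so a container with a cgroup CPU quota no longer defaults `cores` to the host's CPU count. Assuming a cgroup v2 layout, the limit can be derived from cpu.max roughly as follows (a sketch with a hypothetical stand-in helper; the real getMaxCPU() and its fallbacks are not part of this diff):

    #include <algorithm>
    #include <cmath>
    #include <fstream>
    #include <iostream>
    #include <string>
    #include <thread>

    // Hypothetical stand-in for getMaxCPU(): parse "<quota> <period>" from cgroup v2.
    static unsigned int maxCpuFromCgroup()
    {
        std::ifstream f("/sys/fs/cgroup/cpu.max");
        std::string quota, period;
        if (!(f >> quota >> period) || quota == "max") return 0;   // 0 = no limit found
        return (unsigned int) std::ceil(std::stod(quota) / std::stod(period));
    }

    int main()
    {
        unsigned int concurrency = std::max(1U, std::thread::hardware_concurrency());
        unsigned int maxCPU = maxCpuFromCgroup();
        std::cout << "default cores: " << (maxCPU > 0 ? maxCPU : concurrency) << "\n";
        return 0;
    }
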
diff --git a/src/libstore/globals.hh b/src/libstore/globals.hh
index feb6899cd..3dcf3d479 100644
--- a/src/libstore/globals.hh
+++ b/src/libstore/globals.hh
@@ -79,9 +79,6 @@ public:
/* A list of user configuration files to load. */
std::vector<Path> nixUserConfFiles;
- /* The directory where internal helper programs are stored. */
- Path nixLibexecDir;
-
/* The directory where the main programs are stored. */
Path nixBinDir;
@@ -195,7 +192,7 @@ public:
)",
{"build-timeout"}};
- PathSetting buildHook{this, true, nixLibexecDir + "/nix/build-remote", "build-hook",
+ PathSetting buildHook{this, true, "", "build-hook",
"The path of the helper program that executes builds to remote machines."};
Setting<std::string> builders{
@@ -563,9 +560,15 @@ public:
R"(
If set to `true` (the default), any non-content-addressed path added
or copied to the Nix store (e.g. when substituting from a binary
- cache) must have a valid signature, that is, be signed using one of
- the keys listed in `trusted-public-keys` or `secret-key-files`. Set
- to `false` to disable signature checking.
+ cache) must have a signature by a trusted key. A trusted key is one
+ listed in `trusted-public-keys`, or a public key counterpart to a
+ private key stored in a file listed in `secret-key-files`.
+
+ Set to `false` to disable signature checking and trust all
+ non-content-addressed paths unconditionally.
+
+ (Content-addressed paths are inherently trustworthy and thus
+ unaffected by this configuration option.)
)"};
Setting<StringSet> extraPlatforms{
@@ -616,6 +619,14 @@ public:
are tried based on their Priority value, which each substituter can set
independently. Lower value means higher priority.
The default is `https://cache.nixos.org`, with a Priority of 40.
+
+ Nix will copy a store path from a remote store only if one
+ of the following is true:
+
+ - the store object is signed by one of the [`trusted-public-keys`](#conf-trusted-public-keys)
+ - the substituter is in the [`trusted-substituters`](#conf-trusted-substituters) list
+ - the [`require-sigs`](#conf-require-sigs) option has been set to `false`
+ - the store object is [output-addressed](glossary.md#gloss-output-addressed-store-object)
)",
{"binary-caches"}};
@@ -749,6 +760,13 @@ public:
/nix/store/xfghy8ixrhz3kyy6p724iv3cxji088dx-bash-4.4-p23`.
)"};
+ Setting<unsigned int> downloadSpeed {
+ this, 0, "download-speed",
+ R"(
+ Specify the maximum transfer rate in kilobytes per second you want
+ Nix to use for downloads.
+ )"};
+
Setting<std::string> netrcFile{
this, fmt("%s/%s", nixConfDir, "netrc"), "netrc-file",
R"(
@@ -802,7 +820,7 @@ public:
)"};
Setting<StringSet> ignoredAcls{
- this, {"security.selinux", "system.nfs4_acl"}, "ignored-acls",
+ this, {"security.selinux", "system.nfs4_acl", "security.csm"}, "ignored-acls",
R"(
A list of ACLs that should be ignored. Normally, Nix attempts to
remove all ACLs from files and directories in the Nix store, but
diff --git a/src/libstore/local-binary-cache-store.cc b/src/libstore/local-binary-cache-store.cc
index a3c3e4806..f20b1fa02 100644
--- a/src/libstore/local-binary-cache-store.cc
+++ b/src/libstore/local-binary-cache-store.cc
@@ -57,8 +57,7 @@ protected:
AutoDelete del(tmp, false);
StreamToSourceAdapter source(istream);
writeFile(tmp, source);
- if (rename(tmp.c_str(), path2.c_str()))
- throw SysError("renaming '%1%' to '%2%'", tmp, path2);
+ renameFile(tmp, path2);
del.cancel();
}
@@ -69,6 +68,7 @@ protected:
} catch (SysError & e) {
if (e.errNo == ENOENT)
throw NoSuchBinaryCacheFile("file '%s' does not exist in binary cache", path);
+ throw;
}
}
diff --git a/src/libstore/local-store.cc b/src/libstore/local-store.cc
index eba3b0fa5..d374d4558 100644
--- a/src/libstore/local-store.cc
+++ b/src/libstore/local-store.cc
@@ -158,7 +158,7 @@ void migrateCASchema(SQLite& db, Path schemaPath, AutoCloseFD& lockFd)
txn.commit();
}
- writeFile(schemaPath, fmt("%d", nixCASchemaVersion));
+ writeFile(schemaPath, fmt("%d", nixCASchemaVersion), 0666, true);
lockFile(lockFd.get(), ltRead, true);
}
}
@@ -281,7 +281,7 @@ LocalStore::LocalStore(const Params & params)
else if (curSchema == 0) { /* new store */
curSchema = nixSchemaVersion;
openDB(*state, true);
- writeFile(schemaPath, (format("%1%") % nixSchemaVersion).str());
+ writeFile(schemaPath, (format("%1%") % nixSchemaVersion).str(), 0666, true);
}
else if (curSchema < nixSchemaVersion) {
@@ -329,7 +329,7 @@ LocalStore::LocalStore(const Params & params)
txn.commit();
}
- writeFile(schemaPath, (format("%1%") % nixSchemaVersion).str());
+ writeFile(schemaPath, (format("%1%") % nixSchemaVersion).str(), 0666, true);
lockFile(globalLock.get(), ltRead, true);
}
@@ -751,7 +751,7 @@ void LocalStore::registerDrvOutput(const Realisation & info, CheckSigsFlag check
if (checkSigs == NoCheckSigs || !realisationIsUntrusted(info))
registerDrvOutput(info);
else
- throw Error("cannot register realisation '%s' because it lacks a valid signature", info.outPath.to_string());
+ throw Error("cannot register realisation '%s' because it lacks a signature by a trusted key", info.outPath.to_string());
}
void LocalStore::registerDrvOutput(const Realisation & info)
@@ -1266,7 +1266,7 @@ void LocalStore::addToStore(const ValidPathInfo & info, Source & source,
RepairFlag repair, CheckSigsFlag checkSigs)
{
if (checkSigs && pathInfoIsUntrusted(info))
- throw Error("cannot add path '%s' because it lacks a valid signature", printStorePath(info.path));
+ throw Error("cannot add path '%s' because it lacks a signature by a trusted key", printStorePath(info.path));
addTempRoot(info.path);
@@ -1382,13 +1382,15 @@ StorePath LocalStore::addToStoreFromDump(Source & source0, std::string_view name
std::unique_ptr<AutoDelete> delTempDir;
Path tempPath;
+ Path tempDir;
+ AutoCloseFD tempDirFd;
if (!inMemory) {
/* Drain what we pulled so far, and then keep on pulling */
StringSource dumpSource { dump };
ChainSource bothSource { dumpSource, source };
- auto tempDir = createTempDir(realStoreDir, "add");
+ std::tie(tempDir, tempDirFd) = createTempDirInStore();
delTempDir = std::make_unique<AutoDelete>(tempDir);
tempPath = tempDir + "/x";
@@ -1430,8 +1432,7 @@ StorePath LocalStore::addToStoreFromDump(Source & source0, std::string_view name
writeFile(realPath, dumpSource);
} else {
/* Move the temporary path we restored above. */
- if (rename(tempPath.c_str(), realPath.c_str()))
- throw Error("renaming '%s' to '%s'", tempPath, realPath);
+ moveFile(tempPath, realPath);
}
/* For computing the nar hash. In recursive SHA-256 mode, this
@@ -1508,18 +1509,24 @@ StorePath LocalStore::addTextToStore(
/* Create a temporary directory in the store that won't be
- garbage-collected. */
-Path LocalStore::createTempDirInStore()
+ garbage-collected until the returned FD is closed. */
+std::pair<Path, AutoCloseFD> LocalStore::createTempDirInStore()
{
- Path tmpDir;
+ Path tmpDirFn;
+ AutoCloseFD tmpDirFd;
+ bool lockedByUs = false;
do {
/* There is a slight possibility that `tmpDir' gets deleted by
- the GC between createTempDir() and addTempRoot(), so repeat
- until `tmpDir' exists. */
- tmpDir = createTempDir(realStoreDir);
- addTempRoot(parseStorePath(tmpDir));
- } while (!pathExists(tmpDir));
- return tmpDir;
+ the GC between createTempDir() and when we acquire a lock on it.
+ We'll repeat until 'tmpDir' exists and we've locked it. */
+ tmpDirFn = createTempDir(realStoreDir, "tmp");
+ tmpDirFd = open(tmpDirFn.c_str(), O_RDONLY | O_DIRECTORY);
+ if (tmpDirFd.get() < 0) {
+ continue;
+ }
+ lockedByUs = lockFile(tmpDirFd.get(), ltWrite, true);
+ } while (!pathExists(tmpDirFn) || !lockedByUs);
+ return {tmpDirFn, std::move(tmpDirFd)};
}
@@ -1942,8 +1949,7 @@ void LocalStore::addBuildLog(const StorePath & drvPath, std::string_view log)
writeFile(tmpFile, compress("bzip2", log));
- if (rename(tmpFile.c_str(), logPath.c_str()) != 0)
- throw SysError("renaming '%1%' to '%2%'", tmpFile, logPath);
+ renameFile(tmpFile, logPath);
}
std::optional<std::string> LocalStore::getVersion()
diff --git a/src/libstore/local-store.hh b/src/libstore/local-store.hh
index 70d225be3..bd0ce1fe6 100644
--- a/src/libstore/local-store.hh
+++ b/src/libstore/local-store.hh
@@ -256,7 +256,7 @@ private:
void findRuntimeRoots(Roots & roots, bool censor);
- Path createTempDirInStore();
+ std::pair<Path, AutoCloseFD> createTempDirInStore();
void checkDerivationOutputs(const StorePath & drvPath, const Derivation & drv);
diff --git a/src/libstore/local.mk b/src/libstore/local.mk
index b992bcbc0..1d26ac918 100644
--- a/src/libstore/local.mk
+++ b/src/libstore/local.mk
@@ -39,14 +39,23 @@ libstore_CXXFLAGS += \
-DNIX_STATE_DIR=\"$(localstatedir)/nix\" \
-DNIX_LOG_DIR=\"$(localstatedir)/log/nix\" \
-DNIX_CONF_DIR=\"$(sysconfdir)/nix\" \
- -DNIX_LIBEXEC_DIR=\"$(libexecdir)\" \
-DNIX_BIN_DIR=\"$(bindir)\" \
-DNIX_MAN_DIR=\"$(mandir)\" \
-DLSOF=\"$(lsof)\"
+ifeq ($(embedded_sandbox_shell),yes)
+libstore_CXXFLAGS += -DSANDBOX_SHELL=\"__embedded_sandbox_shell__\"
+
+$(d)/build/local-derivation-goal.cc: $(d)/embedded-sandbox-shell.gen.hh
+
+$(d)/embedded-sandbox-shell.gen.hh: $(sandbox_shell)
+ $(trace-gen) hexdump -v -e '1/1 "0x%x," "\n"' < $< > $@.tmp
+ @mv $@.tmp $@
+else
ifneq ($(sandbox_shell),)
libstore_CXXFLAGS += -DSANDBOX_SHELL="\"$(sandbox_shell)\""
endif
+endif
$(d)/local-store.cc: $(d)/schema.sql.gen.hh $(d)/ca-specific-schema.sql.gen.hh
diff --git a/src/libstore/lock.cc b/src/libstore/lock.cc
index f1356fdca..fa718f55d 100644
--- a/src/libstore/lock.cc
+++ b/src/libstore/lock.cc
@@ -67,13 +67,26 @@ bool UserLock::findFreeUser() {
#if __linux__
/* Get the list of supplementary groups of this build user. This
is usually either empty or contains a group such as "kvm". */
- supplementaryGIDs.resize(10);
- int ngroups = supplementaryGIDs.size();
- int err = getgrouplist(pw->pw_name, pw->pw_gid,
- supplementaryGIDs.data(), &ngroups);
+ int ngroups = 32; // arbitrary initial guess
+ supplementaryGIDs.resize(ngroups);
+
+ int err = getgrouplist(pw->pw_name, pw->pw_gid, supplementaryGIDs.data(),
+ &ngroups);
+
+ // Our initial size of 32 wasn't sufficient; the correct size has
+ // been stored in ngroups, so we try again.
+ if (err == -1) {
+ supplementaryGIDs.resize(ngroups);
+ err = getgrouplist(pw->pw_name, pw->pw_gid, supplementaryGIDs.data(),
+ &ngroups);
+ }
+
+ // If it failed once more, then something must be broken.
if (err == -1)
- throw Error("failed to get list of supplementary groups for '%1%'", pw->pw_name);
+ throw Error("failed to get list of supplementary groups for '%1%'",
+ pw->pw_name);
+ // Finally, trim back the GID list to its real size
supplementaryGIDs.resize(ngroups);
#endif
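
The getgrouplist() retry above is the standard two-pass idiom: the first call fails with -1 when the buffer is too small but writes the required size back through ngroups, so a single resize-and-retry is enough. A self-contained version of the same pattern (Linux, where the groups buffer is gid_t*):

    #include <grp.h>
    #include <pwd.h>
    #include <unistd.h>
    #include <vector>
    #include <cstdio>

    int main()
    {
        struct passwd * pw = getpwuid(getuid());
        if (!pw) return 1;

        int ngroups = 32;                       // arbitrary initial guess, as in the patch
        std::vector<gid_t> gids(ngroups);

        if (getgrouplist(pw->pw_name, pw->pw_gid, gids.data(), &ngroups) == -1) {
            gids.resize(ngroups);               // ngroups now holds the real count
            if (getgrouplist(pw->pw_name, pw->pw_gid, gids.data(), &ngroups) == -1)
                return 1;                       // still failing: something else is wrong
        }
        gids.resize(ngroups);                   // trim to the actual number of groups

        for (gid_t g : gids) printf("%u\n", (unsigned) g);
        return 0;
    }
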
diff --git a/src/libstore/nar-accessor.cc b/src/libstore/nar-accessor.cc
index 72d41cc94..398147fc3 100644
--- a/src/libstore/nar-accessor.cc
+++ b/src/libstore/nar-accessor.cc
@@ -75,6 +75,9 @@ struct NarAccessor : public FSAccessor
createMember(path, {FSAccessor::Type::tRegular, false, 0, 0});
}
+ void closeRegularFile() override
+ { }
+
void isExecutable() override
{
parents.top()->isExecutable = true;
diff --git a/src/libstore/nar-info-disk-cache.cc b/src/libstore/nar-info-disk-cache.cc
index 9dd81ddfb..f4ea739b0 100644
--- a/src/libstore/nar-info-disk-cache.cc
+++ b/src/libstore/nar-info-disk-cache.cc
@@ -62,6 +62,9 @@ public:
/* How often to purge expired entries from the cache. */
const int purgeInterval = 24 * 3600;
+ /* How long to cache binary cache info (i.e. /nix-cache-info) */
+ const int cacheInfoTtl = 7 * 24 * 3600;
+
struct Cache
{
int id;
@@ -98,7 +101,7 @@ public:
"insert or replace into BinaryCaches(url, timestamp, storeDir, wantMassQuery, priority) values (?, ?, ?, ?, ?)");
state->queryCache.create(state->db,
- "select id, storeDir, wantMassQuery, priority from BinaryCaches where url = ?");
+ "select id, storeDir, wantMassQuery, priority from BinaryCaches where url = ? and timestamp > ?");
state->insertNAR.create(state->db,
"insert or replace into NARs(cache, hashPart, namePart, url, compression, fileHash, fileSize, narHash, "
@@ -183,7 +186,7 @@ public:
auto i = state->caches.find(uri);
if (i == state->caches.end()) {
- auto queryCache(state->queryCache.use()(uri));
+ auto queryCache(state->queryCache.use()(uri)(time(0) - cacheInfoTtl));
if (!queryCache.next())
return std::nullopt;
state->caches.emplace(uri,
diff --git a/src/libstore/nar-info.cc b/src/libstore/nar-info.cc
index 2d75e7a82..071d8355e 100644
--- a/src/libstore/nar-info.cc
+++ b/src/libstore/nar-info.cc
@@ -69,8 +69,6 @@ NarInfo::NarInfo(const Store & store, const std::string & s, const std::string &
if (value != "unknown-deriver")
deriver = StorePath(value);
}
- else if (name == "System")
- system = value;
else if (name == "Sig")
sigs.insert(value);
else if (name == "CA") {
@@ -106,9 +104,6 @@ std::string NarInfo::to_string(const Store & store) const
if (deriver)
res += "Deriver: " + std::string(deriver->to_string()) + "\n";
- if (!system.empty())
- res += "System: " + system + "\n";
-
for (auto sig : sigs)
res += "Sig: " + sig + "\n";
diff --git a/src/libstore/nar-info.hh b/src/libstore/nar-info.hh
index 39ced76e5..01683ec73 100644
--- a/src/libstore/nar-info.hh
+++ b/src/libstore/nar-info.hh
@@ -14,7 +14,6 @@ struct NarInfo : ValidPathInfo
std::string compression;
std::optional<Hash> fileHash;
uint64_t fileSize = 0;
- std::string system;
NarInfo() = delete;
NarInfo(StorePath && path, Hash narHash) : ValidPathInfo(std::move(path), narHash) { }
diff --git a/src/libstore/optimise-store.cc b/src/libstore/optimise-store.cc
index 8af9b1dde..4d2781180 100644
--- a/src/libstore/optimise-store.cc
+++ b/src/libstore/optimise-store.cc
@@ -229,7 +229,9 @@ void LocalStore::optimisePath_(Activity * act, OptimiseStats & stats,
}
/* Atomically replace the old file with the new hard link. */
- if (rename(tempLink.c_str(), path.c_str()) == -1) {
+ try {
+ renameFile(tempLink, path);
+ } catch (SysError & e) {
if (unlink(tempLink.c_str()) == -1)
printError("unable to unlink '%1%'", tempLink);
if (errno == EMLINK) {
@@ -240,7 +242,7 @@ void LocalStore::optimisePath_(Activity * act, OptimiseStats & stats,
debug("'%s' has reached maximum number of links", linkPath);
return;
}
- throw SysError("cannot rename '%1%' to '%2%'", tempLink, path);
+ throw;
}
stats.filesLinked++;
diff --git a/src/libstore/remote-store.cc b/src/libstore/remote-store.cc
index bc36aef5d..96a29155c 100644
--- a/src/libstore/remote-store.cc
+++ b/src/libstore/remote-store.cc
@@ -580,7 +580,6 @@ ref<const ValidPathInfo> RemoteStore::addCAToStore(
try {
conn->to.written = 0;
- conn->to.warn = true;
connections->incCapacity();
{
Finally cleanup([&]() { connections->decCapacity(); });
@@ -591,7 +590,6 @@ ref<const ValidPathInfo> RemoteStore::addCAToStore(
dumpString(contents, conn->to);
}
}
- conn->to.warn = false;
conn.processStderr();
} catch (SysError & e) {
/* Daemon closed while we were sending the path. Probably OOM
@@ -674,6 +672,23 @@ void RemoteStore::addToStore(const ValidPathInfo & info, Source & source,
void RemoteStore::addMultipleToStore(
+ PathsSource & pathsToCopy,
+ Activity & act,
+ RepairFlag repair,
+ CheckSigsFlag checkSigs)
+{
+ auto source = sinkToSource([&](Sink & sink) {
+ sink << pathsToCopy.size();
+ for (auto & [pathInfo, pathSource] : pathsToCopy) {
+ pathInfo.write(sink, *this, 16);
+ pathSource->drainInto(sink);
+ }
+ });
+
+ addMultipleToStore(*source, repair, checkSigs);
+}
+
+void RemoteStore::addMultipleToStore(
Source & source,
RepairFlag repair,
CheckSigsFlag checkSigs)
diff --git a/src/libstore/remote-store.hh b/src/libstore/remote-store.hh
index 8493be6fc..11d089cd2 100644
--- a/src/libstore/remote-store.hh
+++ b/src/libstore/remote-store.hh
@@ -88,6 +88,12 @@ public:
RepairFlag repair,
CheckSigsFlag checkSigs) override;
+ void addMultipleToStore(
+ PathsSource & pathsToCopy,
+ Activity & act,
+ RepairFlag repair,
+ CheckSigsFlag checkSigs) override;
+
StorePath addTextToStore(
std::string_view name,
std::string_view s,
diff --git a/src/libstore/sandbox-defaults.sb b/src/libstore/sandbox-defaults.sb
index 56b35c3fe..d9d710559 100644
--- a/src/libstore/sandbox-defaults.sb
+++ b/src/libstore/sandbox-defaults.sb
@@ -98,7 +98,9 @@
(allow file*
(literal "/private/var/select/sh"))
-; Allow Rosetta 2 to run x86_64 binaries on aarch64-darwin.
+; Allow Rosetta 2 to run x86_64 binaries on aarch64-darwin (and vice versa).
(allow file-read*
(subpath "/Library/Apple/usr/libexec/oah")
- (subpath "/System/Library/Apple/usr/libexec/oah"))
+ (subpath "/System/Library/Apple/usr/libexec/oah")
+ (subpath "/System/Library/LaunchDaemons/com.apple.oahd.plist")
+ (subpath "/Library/Apple/System/Library/LaunchDaemons/com.apple.oahd.plist"))
diff --git a/src/libstore/sandbox-network.sb b/src/libstore/sandbox-network.sb
index 56beec761..19e9eea9a 100644
--- a/src/libstore/sandbox-network.sb
+++ b/src/libstore/sandbox-network.sb
@@ -14,3 +14,7 @@
; Allow DNS lookups.
(allow network-outbound (remote unix-socket (path-literal "/private/var/run/mDNSResponder")))
+
+; Allow access to trustd.
+(allow mach-lookup (global-name "com.apple.trustd"))
+(allow mach-lookup (global-name "com.apple.trustd.agent"))
diff --git a/src/libstore/store-api.cc b/src/libstore/store-api.cc
index 8861274a2..86b12257a 100644
--- a/src/libstore/store-api.cc
+++ b/src/libstore/store-api.cc
@@ -258,6 +258,84 @@ StorePath Store::addToStore(
return addToStoreFromDump(*source, name, method, hashAlgo, repair, references);
}
+void Store::addMultipleToStore(
+ PathsSource & pathsToCopy,
+ Activity & act,
+ RepairFlag repair,
+ CheckSigsFlag checkSigs)
+{
+ std::atomic<size_t> nrDone{0};
+ std::atomic<size_t> nrFailed{0};
+ std::atomic<uint64_t> bytesExpected{0};
+ std::atomic<uint64_t> nrRunning{0};
+
+ using PathWithInfo = std::pair<ValidPathInfo, std::unique_ptr<Source>>;
+
+ std::map<StorePath, PathWithInfo *> infosMap;
+ StorePathSet storePathsToAdd;
+ for (auto & thingToAdd : pathsToCopy) {
+ infosMap.insert_or_assign(thingToAdd.first.path, &thingToAdd);
+ storePathsToAdd.insert(thingToAdd.first.path);
+ }
+
+ auto showProgress = [&]() {
+ act.progress(nrDone, pathsToCopy.size(), nrRunning, nrFailed);
+ };
+
+ ThreadPool pool;
+
+ processGraph<StorePath>(pool,
+ storePathsToAdd,
+
+ [&](const StorePath & path) {
+
+ auto & [info, _] = *infosMap.at(path);
+
+ if (isValidPath(info.path)) {
+ nrDone++;
+ showProgress();
+ return StorePathSet();
+ }
+
+ bytesExpected += info.narSize;
+ act.setExpected(actCopyPath, bytesExpected);
+
+ return info.references;
+ },
+
+ [&](const StorePath & path) {
+ checkInterrupt();
+
+ auto & [info_, source_] = *infosMap.at(path);
+ auto info = info_;
+ info.ultimate = false;
+
+ /* Make sure that the Source object is destroyed when
+ we're done. In particular, a SinkToSource object must
+ be destroyed to ensure that the destructors on its
+ stack frame are run; this includes
+ LegacySSHStore::narFromPath()'s connection lock. */
+ auto source = std::move(source_);
+
+ if (!isValidPath(info.path)) {
+ MaintainCount<decltype(nrRunning)> mc(nrRunning);
+ showProgress();
+ try {
+ addToStore(info, *source, repair, checkSigs);
+ } catch (Error & e) {
+ nrFailed++;
+ if (!settings.keepGoing)
+ throw e;
+ printMsg(lvlError, "could not copy %s: %s", printStorePath(path), e.what());
+ showProgress();
+ return;
+ }
+ }
+
+ nrDone++;
+ showProgress();
+ });
+}
void Store::addMultipleToStore(
Source & source,
@@ -992,113 +1070,61 @@ std::map<StorePath, StorePath> copyPaths(
for (auto & path : storePaths)
if (!valid.count(path)) missing.insert(path);
+ Activity act(*logger, lvlInfo, actCopyPaths, fmt("copying %d paths", missing.size()));
+
+ // In the general case, `addMultipleToStore` requires a sorted list of
+ // store paths to add, so sort them right now
+ auto sortedMissing = srcStore.topoSortPaths(missing);
+ std::reverse(sortedMissing.begin(), sortedMissing.end());
+
std::map<StorePath, StorePath> pathsMap;
for (auto & path : storePaths)
pathsMap.insert_or_assign(path, path);
- Activity act(*logger, lvlInfo, actCopyPaths, fmt("copying %d paths", missing.size()));
+ Store::PathsSource pathsToCopy;
+
+ auto computeStorePathForDst = [&](const ValidPathInfo & currentPathInfo) -> StorePath {
+ auto storePathForSrc = currentPathInfo.path;
+ auto storePathForDst = storePathForSrc;
+ if (currentPathInfo.ca && currentPathInfo.references.empty()) {
+ storePathForDst = dstStore.makeFixedOutputPathFromCA(storePathForSrc.name(), *currentPathInfo.ca);
+ if (dstStore.storeDir == srcStore.storeDir)
+ assert(storePathForDst == storePathForSrc);
+ if (storePathForDst != storePathForSrc)
+ debug("replaced path '%s' to '%s' for substituter '%s'",
+ srcStore.printStorePath(storePathForSrc),
+ dstStore.printStorePath(storePathForDst),
+ dstStore.getUri());
+ }
+ return storePathForDst;
+ };
- auto sorted = srcStore.topoSortPaths(missing);
- std::reverse(sorted.begin(), sorted.end());
+ for (auto & missingPath : sortedMissing) {
+ auto info = srcStore.queryPathInfo(missingPath);
- auto source = sinkToSource([&](Sink & sink) {
- sink << sorted.size();
- for (auto & storePath : sorted) {
+ auto storePathForDst = computeStorePathForDst(*info);
+ pathsMap.insert_or_assign(missingPath, storePathForDst);
+
+ ValidPathInfo infoForDst = *info;
+ infoForDst.path = storePathForDst;
+
+ auto source = sinkToSource([&](Sink & sink) {
+ // We can reasonably assume that the copy will happen whenever we
+ // read the path, so log something about that at that point
auto srcUri = srcStore.getUri();
auto dstUri = dstStore.getUri();
- auto storePathS = srcStore.printStorePath(storePath);
+ auto storePathS = srcStore.printStorePath(missingPath);
Activity act(*logger, lvlInfo, actCopyPath,
makeCopyPathMessage(srcUri, dstUri, storePathS),
{storePathS, srcUri, dstUri});
PushActivity pact(act.id);
- auto info = srcStore.queryPathInfo(storePath);
- info->write(sink, srcStore, 16);
- srcStore.narFromPath(storePath, sink);
- }
- });
-
- dstStore.addMultipleToStore(*source, repair, checkSigs);
-
- #if 0
- std::atomic<size_t> nrDone{0};
- std::atomic<size_t> nrFailed{0};
- std::atomic<uint64_t> bytesExpected{0};
- std::atomic<uint64_t> nrRunning{0};
-
- auto showProgress = [&]() {
- act.progress(nrDone, missing.size(), nrRunning, nrFailed);
- };
-
- ThreadPool pool;
-
- processGraph<StorePath>(pool,
- StorePathSet(missing.begin(), missing.end()),
-
- [&](const StorePath & storePath) {
- auto info = srcStore.queryPathInfo(storePath);
- auto storePathForDst = storePath;
- if (info->ca && info->references.empty()) {
- storePathForDst = dstStore.makeFixedOutputPathFromCA(storePath.name(), *info->ca);
- if (dstStore.storeDir == srcStore.storeDir)
- assert(storePathForDst == storePath);
- if (storePathForDst != storePath)
- debug("replaced path '%s' to '%s' for substituter '%s'",
- srcStore.printStorePath(storePath),
- dstStore.printStorePath(storePathForDst),
- dstStore.getUri());
- }
- pathsMap.insert_or_assign(storePath, storePathForDst);
-
- if (dstStore.isValidPath(storePath)) {
- nrDone++;
- showProgress();
- return StorePathSet();
- }
-
- bytesExpected += info->narSize;
- act.setExpected(actCopyPath, bytesExpected);
-
- return info->references;
- },
-
- [&](const StorePath & storePath) {
- checkInterrupt();
-
- auto info = srcStore.queryPathInfo(storePath);
-
- auto storePathForDst = storePath;
- if (info->ca && info->references.empty()) {
- storePathForDst = dstStore.makeFixedOutputPathFromCA(storePath.name(), *info->ca);
- if (dstStore.storeDir == srcStore.storeDir)
- assert(storePathForDst == storePath);
- if (storePathForDst != storePath)
- debug("replaced path '%s' to '%s' for substituter '%s'",
- srcStore.printStorePath(storePath),
- dstStore.printStorePath(storePathForDst),
- dstStore.getUri());
- }
- pathsMap.insert_or_assign(storePath, storePathForDst);
-
- if (!dstStore.isValidPath(storePathForDst)) {
- MaintainCount<decltype(nrRunning)> mc(nrRunning);
- showProgress();
- try {
- copyStorePath(srcStore, dstStore, storePath, repair, checkSigs);
- } catch (Error &e) {
- nrFailed++;
- if (!settings.keepGoing)
- throw e;
- printMsg(lvlError, "could not copy %s: %s", dstStore.printStorePath(storePath), e.what());
- showProgress();
- return;
- }
- }
-
- nrDone++;
- showProgress();
+ srcStore.narFromPath(missingPath, sink);
});
- #endif
+ pathsToCopy.push_back(std::pair{infoForDst, std::move(source)});
+ }
+
+ dstStore.addMultipleToStore(pathsToCopy, act, repair, checkSigs);
return pathsMap;
}
@@ -1302,7 +1328,8 @@ std::pair<std::string, Store::Params> splitUriAndParams(const std::string & uri_
return {uri, params};
}
-static bool isNonUriPath(const std::string & spec) {
+static bool isNonUriPath(const std::string & spec)
+{
return
// is not a URL
spec.find("://") == std::string::npos
@@ -1319,6 +1346,31 @@ std::shared_ptr<Store> openFromNonUri(const std::string & uri, const Store::Para
return std::make_shared<LocalStore>(params);
else if (pathExists(settings.nixDaemonSocketFile))
return std::make_shared<UDSRemoteStore>(params);
+ #if __linux__
+ else if (!pathExists(stateDir)
+ && params.empty()
+ && getuid() != 0
+ && !getEnv("NIX_STORE_DIR").has_value()
+ && !getEnv("NIX_STATE_DIR").has_value())
+ {
+ /* If /nix doesn't exist, there is no daemon socket, and
+ we're not root, then automatically set up a chroot
+ store in ~/.local/share/nix/root. */
+ auto chrootStore = getDataDir() + "/nix/root";
+ if (!pathExists(chrootStore)) {
+ try {
+ createDirs(chrootStore);
+ } catch (Error & e) {
+ return std::make_shared<LocalStore>(params);
+ }
+ warn("'/nix' does not exist, so Nix will use '%s' as a chroot store", chrootStore);
+ } else
+ debug("'/nix' does not exist, so Nix will use '%s' as a chroot store", chrootStore);
+ Store::Params params2;
+ params2["root"] = chrootStore;
+ return std::make_shared<LocalStore>(params2);
+ }
+ #endif
else
return std::make_shared<LocalStore>(params);
} else if (uri == "daemon") {
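
The new branch in openFromNonUri() means a non-root user on a machine without /nix transparently gets a chroot store under ~/.local/share/nix/root. The effect is the same as passing the `root` parameter explicitly; a sketch, assuming it is built against libstore and that openStore()'s extra-params overload behaves as it does elsewhere in this codebase:

    #include "store-api.hh"   // sketch: assumes building inside the Nix source tree

    int main()
    {
        using namespace nix;
        // Equivalent to what openFromNonUri() now does automatically when /nix is
        // missing, there is no daemon socket, and the caller is not root.
        Store::Params params;
        params["root"] = "/home/alice/.local/share/nix/root";   // i.e. getDataDir() + "/nix/root"
        auto store = openStore("local", params);                 // LocalStore rooted under $HOME
        return 0;
    }
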
diff --git a/src/libstore/store-api.hh b/src/libstore/store-api.hh
index 0c8a4db56..c8a667c6d 100644
--- a/src/libstore/store-api.hh
+++ b/src/libstore/store-api.hh
@@ -1,5 +1,6 @@
#pragma once
+#include "nar-info.hh"
#include "realisation.hh"
#include "path.hh"
#include "derived-path.hh"
@@ -359,12 +360,22 @@ public:
virtual void addToStore(const ValidPathInfo & info, Source & narSource,
RepairFlag repair = NoRepair, CheckSigsFlag checkSigs = CheckSigs) = 0;
+ // A list of paths infos along with a source providing the content of the
+ // associated store path
+ using PathsSource = std::vector<std::pair<ValidPathInfo, std::unique_ptr<Source>>>;
+
/* Import multiple paths into the store. */
virtual void addMultipleToStore(
Source & source,
RepairFlag repair = NoRepair,
CheckSigsFlag checkSigs = CheckSigs);
+ virtual void addMultipleToStore(
+ PathsSource & pathsToCopy,
+ Activity & act,
+ RepairFlag repair = NoRepair,
+ CheckSigsFlag checkSigs = CheckSigs);
+
/* Copy the contents of a path to the store and register the
validity the resulting path. The resulting path is returned.
The function object `filter' can be used to exclude files (see
diff --git a/src/libutil/archive.cc b/src/libutil/archive.cc
index 30b471af5..4b0636129 100644
--- a/src/libutil/archive.cc
+++ b/src/libutil/archive.cc
@@ -234,6 +234,7 @@ static void parse(ParseSink & sink, Source & source, const Path & path)
else if (s == "contents" && type == tpRegular) {
parseContents(sink, source, path);
+ sink.closeRegularFile();
}
else if (s == "executable" && type == tpRegular) {
@@ -324,6 +325,12 @@ struct RestoreSink : ParseSink
if (!fd) throw SysError("creating file '%1%'", p);
}
+ void closeRegularFile() override
+ {
+ /* Call close explicitly to make sure the error is checked */
+ fd.close();
+ }
+
void isExecutable() override
{
struct stat st;
diff --git a/src/libutil/archive.hh b/src/libutil/archive.hh
index 79ce08df0..ac4183bf5 100644
--- a/src/libutil/archive.hh
+++ b/src/libutil/archive.hh
@@ -60,6 +60,7 @@ struct ParseSink
virtual void createDirectory(const Path & path) { };
virtual void createRegularFile(const Path & path) { };
+ virtual void closeRegularFile() { };
virtual void isExecutable() { };
virtual void preallocateContents(uint64_t size) { };
virtual void receiveContents(std::string_view data) { };
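
Giving ParseSink a closeRegularFile() hook lets RestoreSink close the file descriptor explicitly, so errors that only surface at close() time (quota, NFS, I/O errors) become exceptions instead of being swallowed by a destructor. The underlying point in plain POSIX terms (a sketch, not the Nix AutoCloseFD class):

    #include <cerrno>
    #include <cstring>
    #include <fcntl.h>
    #include <stdexcept>
    #include <unistd.h>

    static void writeAndClose(const char * path, const char * data, size_t len)
    {
        int fd = open(path, O_WRONLY | O_CREAT | O_TRUNC, 0644);
        if (fd == -1) throw std::runtime_error(strerror(errno));
        if (write(fd, data, len) == -1) { close(fd); throw std::runtime_error(strerror(errno)); }
        // close() can itself report a deferred write failure; checking it here is
        // what RestoreSink::closeRegularFile() achieves via fd.close() instead of
        // relying on the destructor, which cannot throw.
        if (close(fd) == -1) throw std::runtime_error(strerror(errno));
    }

    int main()
    {
        try { writeAndClose("/tmp/close-check-example", "hello\n", 6); }
        catch (std::exception &) { return 1; }
        return 0;
    }
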
diff --git a/src/libutil/args.cc b/src/libutil/args.cc
index 4b8c55686..753980fd4 100644
--- a/src/libutil/args.cc
+++ b/src/libutil/args.cc
@@ -124,7 +124,7 @@ bool Args::processFlag(Strings::iterator & pos, Strings::iterator end)
bool anyCompleted = false;
for (size_t n = 0 ; n < flag.handler.arity; ++n) {
if (pos == end) {
- if (flag.handler.arity == ArityAny) break;
+ if (flag.handler.arity == ArityAny || anyCompleted) break;
throw UsageError("flag '%s' requires %d argument(s)", name, flag.handler.arity);
}
if (auto prefix = needsCompletion(*pos)) {
@@ -216,7 +216,7 @@ nlohmann::json Args::toJSON()
if (flag->shortName)
j["shortName"] = std::string(1, flag->shortName);
if (flag->description != "")
- j["description"] = flag->description;
+ j["description"] = trim(flag->description);
j["category"] = flag->category;
if (flag->handler.arity != ArityAny)
j["arity"] = flag->handler.arity;
@@ -237,7 +237,7 @@ nlohmann::json Args::toJSON()
}
auto res = nlohmann::json::object();
- res["description"] = description();
+ res["description"] = trim(description());
res["flags"] = std::move(flags);
res["args"] = std::move(args);
auto s = doc();
@@ -362,6 +362,14 @@ bool MultiCommand::processArgs(const Strings & args, bool finish)
return Args::processArgs(args, finish);
}
+void MultiCommand::completionHook()
+{
+ if (command)
+ return command->second->completionHook();
+ else
+ return Args::completionHook();
+}
+
nlohmann::json MultiCommand::toJSON()
{
auto cmds = nlohmann::json::object();
@@ -371,7 +379,7 @@ nlohmann::json MultiCommand::toJSON()
auto j = command->toJSON();
auto cat = nlohmann::json::object();
cat["id"] = command->category();
- cat["description"] = categories[command->category()];
+ cat["description"] = trim(categories[command->category()]);
j["category"] = std::move(cat);
cmds[name] = std::move(j);
}
diff --git a/src/libutil/args.hh b/src/libutil/args.hh
index fdd036f9a..84866f12b 100644
--- a/src/libutil/args.hh
+++ b/src/libutil/args.hh
@@ -25,6 +25,8 @@ public:
/* Return a short one-line description of the command. */
virtual std::string description() { return ""; }
+ virtual bool forceImpureByDefault() { return false; }
+
/* Return documentation about this command, in Markdown format. */
virtual std::string doc() { return ""; }
@@ -146,6 +148,11 @@ protected:
argument (if any) have been processed. */
virtual void initialFlagsProcessed() {}
+ /* Called after the command line has been processed if we need to generate
+ completions. Useful for commands that need to know the whole command line
+ in order to know what completions to generate. */
+ virtual void completionHook() { }
+
public:
void addFlag(Flag && flag);
@@ -221,6 +228,8 @@ public:
bool processArgs(const Strings & args, bool finish) override;
+ void completionHook() override;
+
nlohmann::json toJSON() override;
};
diff --git a/src/libutil/error.hh b/src/libutil/error.hh
index a53e9802e..3d1479c54 100644
--- a/src/libutil/error.hh
+++ b/src/libutil/error.hh
@@ -204,13 +204,19 @@ public:
int errNo;
template<typename... Args>
- SysError(const Args & ... args)
+ SysError(int errNo_, const Args & ... args)
: Error("")
{
- errNo = errno;
+ errNo = errNo_;
auto hf = hintfmt(args...);
err.msg = hintfmt("%1%: %2%", normaltxt(hf.str()), strerror(errNo));
}
+
+ template<typename... Args>
+ SysError(const Args & ... args)
+ : SysError(errno, args ...)
+ {
+ }
};
}
diff --git a/src/libutil/experimental-features.cc b/src/libutil/experimental-features.cc
index 315de64a4..fa79cca6b 100644
--- a/src/libutil/experimental-features.cc
+++ b/src/libutil/experimental-features.cc
@@ -13,6 +13,7 @@ std::map<ExperimentalFeature, std::string> stringifiedXpFeatures = {
{ Xp::RecursiveNix, "recursive-nix" },
{ Xp::NoUrlLiterals, "no-url-literals" },
{ Xp::FetchClosure, "fetch-closure" },
+ { Xp::ReplFlake, "repl-flake" },
};
const std::optional<ExperimentalFeature> parseExperimentalFeature(const std::string_view & name)
diff --git a/src/libutil/experimental-features.hh b/src/libutil/experimental-features.hh
index 57512830c..d09ab025c 100644
--- a/src/libutil/experimental-features.hh
+++ b/src/libutil/experimental-features.hh
@@ -22,6 +22,7 @@ enum struct ExperimentalFeature
RecursiveNix,
NoUrlLiterals,
FetchClosure,
+ ReplFlake,
};
/**
diff --git a/src/libutil/filesystem.cc b/src/libutil/filesystem.cc
new file mode 100644
index 000000000..403389e60
--- /dev/null
+++ b/src/libutil/filesystem.cc
@@ -0,0 +1,172 @@
+#include <sys/time.h>
+#include <filesystem>
+
+#include "finally.hh"
+#include "util.hh"
+#include "types.hh"
+
+namespace fs = std::filesystem;
+
+namespace nix {
+
+static Path tempName(Path tmpRoot, const Path & prefix, bool includePid,
+ int & counter)
+{
+ tmpRoot = canonPath(tmpRoot.empty() ? getEnv("TMPDIR").value_or("/tmp") : tmpRoot, true);
+ if (includePid)
+ return (format("%1%/%2%-%3%-%4%") % tmpRoot % prefix % getpid() % counter++).str();
+ else
+ return (format("%1%/%2%-%3%") % tmpRoot % prefix % counter++).str();
+}
+
+Path createTempDir(const Path & tmpRoot, const Path & prefix,
+ bool includePid, bool useGlobalCounter, mode_t mode)
+{
+ static int globalCounter = 0;
+ int localCounter = 0;
+ int & counter(useGlobalCounter ? globalCounter : localCounter);
+
+ while (1) {
+ checkInterrupt();
+ Path tmpDir = tempName(tmpRoot, prefix, includePid, counter);
+ if (mkdir(tmpDir.c_str(), mode) == 0) {
+#if __FreeBSD__
+ /* Explicitly set the group of the directory. This is to
+ work around around problems caused by BSD's group
+ ownership semantics (directories inherit the group of
+ the parent). For instance, the group of /tmp on
+ FreeBSD is "wheel", so all directories created in /tmp
+ will be owned by "wheel"; but if the user is not in
+ "wheel", then "tar" will fail to unpack archives that
+ have the setgid bit set on directories. */
+ if (chown(tmpDir.c_str(), (uid_t) -1, getegid()) != 0)
+ throw SysError("setting group of directory '%1%'", tmpDir);
+#endif
+ return tmpDir;
+ }
+ if (errno != EEXIST)
+ throw SysError("creating directory '%1%'", tmpDir);
+ }
+}
+
+
+std::pair<AutoCloseFD, Path> createTempFile(const Path & prefix)
+{
+ Path tmpl(getEnv("TMPDIR").value_or("/tmp") + "/" + prefix + ".XXXXXX");
+ // Strictly speaking, this is UB, but who cares...
+ // FIXME: use O_TMPFILE.
+ AutoCloseFD fd(mkstemp((char *) tmpl.c_str()));
+ if (!fd)
+ throw SysError("creating temporary file '%s'", tmpl);
+ closeOnExec(fd.get());
+ return {std::move(fd), tmpl};
+}
+
+void createSymlink(const Path & target, const Path & link,
+ std::optional<time_t> mtime)
+{
+ if (symlink(target.c_str(), link.c_str()))
+ throw SysError("creating symlink from '%1%' to '%2%'", link, target);
+ if (mtime) {
+ struct timeval times[2];
+ times[0].tv_sec = *mtime;
+ times[0].tv_usec = 0;
+ times[1].tv_sec = *mtime;
+ times[1].tv_usec = 0;
+ if (lutimes(link.c_str(), times))
+ throw SysError("setting time of symlink '%s'", link);
+ }
+}
+
+void replaceSymlink(const Path & target, const Path & link,
+ std::optional<time_t> mtime)
+{
+ for (unsigned int n = 0; true; n++) {
+ Path tmp = canonPath(fmt("%s/.%d_%s", dirOf(link), n, baseNameOf(link)));
+
+ try {
+ createSymlink(target, tmp, mtime);
+ } catch (SysError & e) {
+ if (e.errNo == EEXIST) continue;
+ throw;
+ }
+
+ renameFile(tmp, link);
+
+ break;
+ }
+}
+
+void setWriteTime(const fs::path & p, const struct stat & st)
+{
+ struct timeval times[2];
+ times[0] = {
+ .tv_sec = st.st_atime,
+ .tv_usec = 0,
+ };
+ times[1] = {
+ .tv_sec = st.st_mtime,
+ .tv_usec = 0,
+ };
+ if (lutimes(p.c_str(), times) != 0)
+ throw SysError("changing modification time of '%s'", p);
+}
+
+void copy(const fs::directory_entry & from, const fs::path & to, bool andDelete)
+{
+ // TODO: Rewrite the `is_*` to use `symlink_status()`
+ auto statOfFrom = lstat(from.path().c_str());
+ auto fromStatus = from.symlink_status();
+
+ // Mark the directory as writable so that we can delete its children
+ if (andDelete && fs::is_directory(fromStatus)) {
+ fs::permissions(from.path(), fs::perms::owner_write, fs::perm_options::add | fs::perm_options::nofollow);
+ }
+
+
+ if (fs::is_symlink(fromStatus) || fs::is_regular_file(fromStatus)) {
+ fs::copy(from.path(), to, fs::copy_options::copy_symlinks | fs::copy_options::overwrite_existing);
+ } else if (fs::is_directory(fromStatus)) {
+ fs::create_directory(to);
+ for (auto & entry : fs::directory_iterator(from.path())) {
+ copy(entry, to / entry.path().filename(), andDelete);
+ }
+ } else {
+ throw Error("file '%s' has an unsupported type", from.path());
+ }
+
+ setWriteTime(to, statOfFrom);
+ if (andDelete) {
+ if (!fs::is_symlink(fromStatus))
+ fs::permissions(from.path(), fs::perms::owner_write, fs::perm_options::add | fs::perm_options::nofollow);
+ fs::remove(from.path());
+ }
+}
+
+void renameFile(const Path & oldName, const Path & newName)
+{
+ fs::rename(oldName, newName);
+}
+
+void moveFile(const Path & oldName, const Path & newName)
+{
+ try {
+ renameFile(oldName, newName);
+ } catch (fs::filesystem_error & e) {
+ auto oldPath = fs::path(oldName);
+ auto newPath = fs::path(newName);
+ // For the move to be as atomic as possible, copy to a temporary
+ // directory
+ fs::path temp = createTempDir(newPath.parent_path(), "rename-tmp");
+ Finally removeTemp = [&]() { fs::remove(temp); };
+ auto tempCopyTarget = temp / "copy-target";
+ if (e.code().value() == EXDEV) {
+ fs::remove(newPath);
+ warn("Can’t rename %s as %s, copying instead", oldName, newName);
+ copy(fs::directory_entry(oldPath), tempCopyTarget, true);
+ renameFile(tempCopyTarget, newPath);
+ }
+ }
+}
+
+}
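
The new moveFile() leans on the fact that rename() across filesystems fails with EXDEV, in which case it falls back to copy-then-delete via a temporary directory next to the destination. The detection pattern it relies on, in isolation (paths are placeholders; if source and destination live on the same filesystem the rename simply succeeds):

    #include <cerrno>
    #include <cstdlib>
    #include <filesystem>
    #include <fstream>
    #include <iostream>

    namespace fs = std::filesystem;

    int main()
    {
        fs::path src = "/tmp/exdev-example";
        std::ofstream(src) << "payload\n";

        const char * home = std::getenv("HOME");
        fs::path dst = fs::path(home ? home : "/tmp") / "exdev-example";

        try {
            fs::rename(src, dst);
        } catch (fs::filesystem_error & e) {
            if (e.code().value() == EXDEV) {
                std::cerr << "cross-device rename: copy + remove instead, as moveFile() does\n";
                fs::copy(src, dst, fs::copy_options::overwrite_existing);
                fs::remove(src);
            } else
                throw;
        }
        return 0;
    }
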
diff --git a/src/libutil/json.cc b/src/libutil/json.cc
index b0a5d7e75..2f9e97ff5 100644
--- a/src/libutil/json.cc
+++ b/src/libutil/json.cc
@@ -6,7 +6,8 @@
namespace nix {
-void toJSON(std::ostream & str, const char * start, const char * end)
+template<>
+void toJSON<std::string_view>(std::ostream & str, const std::string_view & s)
{
constexpr size_t BUF_SIZE = 4096;
char buf[BUF_SIZE + 7]; // BUF_SIZE + largest single sequence of puts
@@ -21,7 +22,7 @@ void toJSON(std::ostream & str, const char * start, const char * end)
};
put('"');
- for (auto i = start; i != end; i++) {
+ for (auto i = s.begin(); i != s.end(); i++) {
if (bufPos >= BUF_SIZE) flush();
if (*i == '\"' || *i == '\\') { put('\\'); put(*i); }
else if (*i == '\n') { put('\\'); put('n'); }
@@ -44,7 +45,7 @@ void toJSON(std::ostream & str, const char * start, const char * end)
void toJSON(std::ostream & str, const char * s)
{
- if (!s) str << "null"; else toJSON(str, s, s + strlen(s));
+ if (!s) str << "null"; else toJSON(str, std::string_view(s));
}
template<> void toJSON<int>(std::ostream & str, const int & n) { str << n; }
@@ -55,11 +56,7 @@ template<> void toJSON<long long>(std::ostream & str, const long long & n) { str
template<> void toJSON<unsigned long long>(std::ostream & str, const unsigned long long & n) { str << n; }
template<> void toJSON<float>(std::ostream & str, const float & n) { str << n; }
template<> void toJSON<double>(std::ostream & str, const double & n) { str << n; }
-
-template<> void toJSON<std::string>(std::ostream & str, const std::string & s)
-{
- toJSON(str, s.c_str(), s.c_str() + s.size());
-}
+template<> void toJSON<std::string>(std::ostream & str, const std::string & s) { toJSON(str, (std::string_view) s); }
template<> void toJSON<bool>(std::ostream & str, const bool & b)
{
@@ -154,7 +151,7 @@ JSONObject::~JSONObject()
}
}
-void JSONObject::attr(const std::string & s)
+void JSONObject::attr(std::string_view s)
{
comma();
toJSON(state->str, s);
@@ -162,19 +159,19 @@ void JSONObject::attr(const std::string & s)
if (state->indent) state->str << ' ';
}
-JSONList JSONObject::list(const std::string & name)
+JSONList JSONObject::list(std::string_view name)
{
attr(name);
return JSONList(state);
}
-JSONObject JSONObject::object(const std::string & name)
+JSONObject JSONObject::object(std::string_view name)
{
attr(name);
return JSONObject(state);
}
-JSONPlaceholder JSONObject::placeholder(const std::string & name)
+JSONPlaceholder JSONObject::placeholder(std::string_view name)
{
attr(name);
return JSONPlaceholder(state);
@@ -196,7 +193,11 @@ JSONObject JSONPlaceholder::object()
JSONPlaceholder::~JSONPlaceholder()
{
- assert(!first || std::uncaught_exceptions());
+ if (first) {
+ assert(std::uncaught_exceptions());
+ if (state->stack != 0)
+ write(nullptr);
+ }
}
}
diff --git a/src/libutil/json.hh b/src/libutil/json.hh
index 83213ca66..3790b1a2e 100644
--- a/src/libutil/json.hh
+++ b/src/libutil/json.hh
@@ -6,7 +6,6 @@
namespace nix {
-void toJSON(std::ostream & str, const char * start, const char * end);
void toJSON(std::ostream & str, const char * s);
template<typename T>
@@ -107,7 +106,7 @@ private:
open();
}
- void attr(const std::string & s);
+ void attr(std::string_view s);
public:
@@ -128,18 +127,18 @@ public:
~JSONObject();
template<typename T>
- JSONObject & attr(const std::string & name, const T & v)
+ JSONObject & attr(std::string_view name, const T & v)
{
attr(name);
toJSON(state->str, v);
return *this;
}
- JSONList list(const std::string & name);
+ JSONList list(std::string_view name);
- JSONObject object(const std::string & name);
+ JSONObject object(std::string_view name);
- JSONPlaceholder placeholder(const std::string & name);
+ JSONPlaceholder placeholder(std::string_view name);
};
class JSONPlaceholder : JSONWriter
diff --git a/src/libutil/logging.hh b/src/libutil/logging.hh
index 6f81b92de..d0817b4a9 100644
--- a/src/libutil/logging.hh
+++ b/src/libutil/logging.hh
@@ -111,6 +111,9 @@ public:
virtual std::optional<char> ask(std::string_view s)
{ return {}; }
+
+ virtual void setPrintBuildLogs(bool printBuildLogs)
+ { }
};
ActivityId getCurActivity();
diff --git a/src/libutil/serialise.cc b/src/libutil/serialise.cc
index 8ff904583..2c3597775 100644
--- a/src/libutil/serialise.cc
+++ b/src/libutil/serialise.cc
@@ -48,24 +48,9 @@ FdSink::~FdSink()
}
-size_t threshold = 256 * 1024 * 1024;
-
-static void warnLargeDump()
-{
- warn("dumping very large path (> 256 MiB); this may run out of memory");
-}
-
-
void FdSink::write(std::string_view data)
{
written += data.size();
- static bool warned = false;
- if (warn && !warned) {
- if (written > threshold) {
- warnLargeDump();
- warned = true;
- }
- }
try {
writeFull(fd, data);
} catch (SysError & e) {
@@ -448,11 +433,6 @@ Error readError(Source & source)
void StringSink::operator () (std::string_view data)
{
- static bool warned = false;
- if (!warned && s.size() > threshold) {
- warnLargeDump();
- warned = true;
- }
s.append(data);
}
diff --git a/src/libutil/serialise.hh b/src/libutil/serialise.hh
index 13da26c6a..84847835a 100644
--- a/src/libutil/serialise.hh
+++ b/src/libutil/serialise.hh
@@ -97,19 +97,17 @@ protected:
struct FdSink : BufferedSink
{
int fd;
- bool warn = false;
size_t written = 0;
FdSink() : fd(-1) { }
FdSink(int fd) : fd(fd) { }
FdSink(FdSink&&) = default;
- FdSink& operator=(FdSink && s)
+ FdSink & operator=(FdSink && s)
{
flush();
fd = s.fd;
s.fd = -1;
- warn = s.warn;
written = s.written;
return *this;
}
diff --git a/src/libutil/tests/json.cc b/src/libutil/tests/json.cc
index dea73f53a..156286999 100644
--- a/src/libutil/tests/json.cc
+++ b/src/libutil/tests/json.cc
@@ -102,8 +102,8 @@ namespace nix {
TEST(toJSON, substringEscape) {
std::stringstream out;
- const char *s = "foo\t";
- toJSON(out, s+3, s + strlen(s));
+ std::string_view s = "foo\t";
+ toJSON(out, s.substr(3));
ASSERT_EQ(out.str(), "\"\\t\"");
}
diff --git a/src/libutil/util.cc b/src/libutil/util.cc
index 1c19938a8..623b74bdd 100644
--- a/src/libutil/util.cc
+++ b/src/libutil/util.cc
@@ -29,11 +29,15 @@
#ifdef __APPLE__
#include <sys/syscall.h>
+#include <mach-o/dyld.h>
#endif
#ifdef __linux__
#include <sys/prctl.h>
#include <sys/resource.h>
+
+#include <mntent.h>
+#include <cmath>
#endif
@@ -349,7 +353,7 @@ void readFile(const Path & path, Sink & sink)
}
-void writeFile(const Path & path, std::string_view s, mode_t mode)
+void writeFile(const Path & path, std::string_view s, mode_t mode, bool sync)
{
AutoCloseFD fd = open(path.c_str(), O_WRONLY | O_TRUNC | O_CREAT | O_CLOEXEC, mode);
if (!fd)
@@ -360,10 +364,16 @@ void writeFile(const Path & path, std::string_view s, mode_t mode)
e.addTrace({}, "writing file '%1%'", path);
throw;
}
+ if (sync)
+ fd.fsync();
+ // Explicitly close to make sure exceptions are propagated.
+ fd.close();
+ if (sync)
+ syncParent(path);
}
-void writeFile(const Path & path, Source & source, mode_t mode)
+void writeFile(const Path & path, Source & source, mode_t mode, bool sync)
{
AutoCloseFD fd = open(path.c_str(), O_WRONLY | O_TRUNC | O_CREAT | O_CLOEXEC, mode);
if (!fd)
@@ -382,6 +392,20 @@ void writeFile(const Path & path, Source & source, mode_t mode)
e.addTrace({}, "writing file '%1%'", path);
throw;
}
+ if (sync)
+ fd.fsync();
+ // Explicitly close to make sure exceptions are propagated.
+ fd.close();
+ if (sync)
+ syncParent(path);
+}
+
+void syncParent(const Path & path)
+{
+ AutoCloseFD fd = open(dirOf(path).c_str(), O_RDONLY, 0);
+ if (!fd)
+ throw SysError("opening file '%1%'", path);
+ fd.fsync();
}
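
With the new sync flag, writeFile() follows the usual crash-safe sequence: write the data, fsync the file, close it while checking for errors, then fsync the parent directory so the directory entry itself is durable, which is exactly what the added syncParent() provides. The same sequence spelled out with raw file descriptors (a sketch, not the Nix helpers):

    #include <fcntl.h>
    #include <stdexcept>
    #include <string>
    #include <unistd.h>

    static void writeFileSync(const std::string & path, const std::string & data)
    {
        int fd = open(path.c_str(), O_WRONLY | O_TRUNC | O_CREAT | O_CLOEXEC, 0666);
        if (fd == -1) throw std::runtime_error("opening " + path);

        bool ok = write(fd, data.data(), data.size()) == (ssize_t) data.size()
            && fsync(fd) == 0;                   // flush the file's contents
        if (close(fd) == -1) ok = false;         // surface any deferred write error
        if (!ok) throw std::runtime_error("writing " + path);

        // fsync the containing directory so the entry survives a crash
        // (this is what the new syncParent() does).
        std::string dir = path.substr(0, path.find_last_of('/'));   // roughly dirOf(path)
        int dfd = open(dir.empty() ? "/" : dir.c_str(), O_RDONLY);
        if (dfd == -1) throw std::runtime_error("opening " + dir);
        bool synced = fsync(dfd) == 0;
        close(dfd);
        if (!synced) throw std::runtime_error("syncing " + dir);
    }

    int main()
    {
        writeFileSync("/tmp/sync-write-example", "schema-version\n");
        return 0;
    }
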
std::string readLine(int fd)
@@ -504,61 +528,6 @@ void deletePath(const Path & path, uint64_t & bytesFreed)
}
-static Path tempName(Path tmpRoot, const Path & prefix, bool includePid,
- int & counter)
-{
- tmpRoot = canonPath(tmpRoot.empty() ? getEnv("TMPDIR").value_or("/tmp") : tmpRoot, true);
- if (includePid)
- return (format("%1%/%2%-%3%-%4%") % tmpRoot % prefix % getpid() % counter++).str();
- else
- return (format("%1%/%2%-%3%") % tmpRoot % prefix % counter++).str();
-}
-
-
-Path createTempDir(const Path & tmpRoot, const Path & prefix,
- bool includePid, bool useGlobalCounter, mode_t mode)
-{
- static int globalCounter = 0;
- int localCounter = 0;
- int & counter(useGlobalCounter ? globalCounter : localCounter);
-
- while (1) {
- checkInterrupt();
- Path tmpDir = tempName(tmpRoot, prefix, includePid, counter);
- if (mkdir(tmpDir.c_str(), mode) == 0) {
-#if __FreeBSD__
- /* Explicitly set the group of the directory. This is to
- work around around problems caused by BSD's group
- ownership semantics (directories inherit the group of
- the parent). For instance, the group of /tmp on
- FreeBSD is "wheel", so all directories created in /tmp
- will be owned by "wheel"; but if the user is not in
- "wheel", then "tar" will fail to unpack archives that
- have the setgid bit set on directories. */
- if (chown(tmpDir.c_str(), (uid_t) -1, getegid()) != 0)
- throw SysError("setting group of directory '%1%'", tmpDir);
-#endif
- return tmpDir;
- }
- if (errno != EEXIST)
- throw SysError("creating directory '%1%'", tmpDir);
- }
-}
-
-
-std::pair<AutoCloseFD, Path> createTempFile(const Path & prefix)
-{
- Path tmpl(getEnv("TMPDIR").value_or("/tmp") + "/" + prefix + ".XXXXXX");
- // Strictly speaking, this is UB, but who cares...
- // FIXME: use O_TMPFILE.
- AutoCloseFD fd(mkstemp((char *) tmpl.c_str()));
- if (!fd)
- throw SysError("creating temporary file '%s'", tmpl);
- closeOnExec(fd.get());
- return {std::move(fd), tmpl};
-}
-
-
std::string getUserName()
{
auto pw = getpwuid(geteuid());
@@ -573,7 +542,21 @@ Path getHome()
{
static Path homeDir = []()
{
+ std::optional<std::string> unownedUserHomeDir = {};
auto homeDir = getEnv("HOME");
+ if (homeDir) {
+ // Only use $HOME if it doesn't exist or is owned by the current user.
+ struct stat st;
+ int result = stat(homeDir->c_str(), &st);
+ if (result != 0) {
+ if (errno != ENOENT) {
+ warn("couldn't stat $HOME ('%s') for reason other than not existing ('%d'), falling back to the one defined in the 'passwd' file", *homeDir, errno);
+ homeDir.reset();
+ }
+ } else if (st.st_uid != geteuid()) {
+ unownedUserHomeDir.swap(homeDir);
+ }
+ }
if (!homeDir) {
std::vector<char> buf(16384);
struct passwd pwbuf;
@@ -582,6 +565,9 @@ Path getHome()
|| !pw || !pw->pw_dir || !pw->pw_dir[0])
throw Error("cannot determine user's home directory");
homeDir = pw->pw_dir;
+ if (unownedUserHomeDir.has_value() && unownedUserHomeDir != homeDir) {
+ warn("$HOME ('%s') is not owned by you, falling back to the one defined in the 'passwd' file ('%s')", *unownedUserHomeDir, *homeDir);
+ }
}
return *homeDir;
}();
@@ -619,6 +605,27 @@ Path getDataDir()
}
+std::optional<Path> getSelfExe()
+{
+ static auto cached = []() -> std::optional<Path>
+ {
+ #if __linux__
+ return readLink("/proc/self/exe");
+ #elif __APPLE__
+ char buf[1024];
+ uint32_t size = sizeof(buf);
+ if (_NSGetExecutablePath(buf, &size) == 0)
+ return buf;
+ else
+ return std::nullopt;
+ #else
+ return std::nullopt;
+ #endif
+ }();
+ return cached;
+}
+
+
Paths createDirs(const Path & path)
{
Paths created;
@@ -642,44 +649,6 @@ Paths createDirs(const Path & path)
}
-void createSymlink(const Path & target, const Path & link,
- std::optional<time_t> mtime)
-{
- if (symlink(target.c_str(), link.c_str()))
- throw SysError("creating symlink from '%1%' to '%2%'", link, target);
- if (mtime) {
- struct timeval times[2];
- times[0].tv_sec = *mtime;
- times[0].tv_usec = 0;
- times[1].tv_sec = *mtime;
- times[1].tv_usec = 0;
- if (lutimes(link.c_str(), times))
- throw SysError("setting time of symlink '%s'", link);
- }
-}
-
-
-void replaceSymlink(const Path & target, const Path & link,
- std::optional<time_t> mtime)
-{
- for (unsigned int n = 0; true; n++) {
- Path tmp = canonPath(fmt("%s/.%d_%s", dirOf(link), n, baseNameOf(link)));
-
- try {
- createSymlink(target, tmp, mtime);
- } catch (SysError & e) {
- if (e.errNo == EEXIST) continue;
- throw;
- }
-
- if (rename(tmp.c_str(), link.c_str()) != 0)
- throw SysError("renaming '%1%' to '%2%'", tmp, link);
-
- break;
- }
-}
-
-
void readFull(int fd, char * buf, size_t count)
{
while (count) {
@@ -752,7 +721,55 @@ void drainFD(int fd, Sink & sink, bool block)
}
}
+//////////////////////////////////////////////////////////////////////
+
+unsigned int getMaxCPU()
+{
+ #if __linux__
+ try {
+ FILE *fp = fopen("/proc/mounts", "r");
+ if (!fp)
+ return 0;
+
+ Strings cgPathParts;
+ struct mntent *ent;
+ while ((ent = getmntent(fp))) {
+ std::string mountType, mountPath;
+
+ mountType = ent->mnt_type;
+ mountPath = ent->mnt_dir;
+
+ if (mountType == "cgroup2") {
+ cgPathParts.push_back(mountPath);
+ break;
+ }
+ }
+
+ fclose(fp);
+
+ if (cgPathParts.size() > 0 && pathExists("/proc/self/cgroup")) {
+ std::string currentCgroup = readFile("/proc/self/cgroup");
+ Strings cgValues = tokenizeString<Strings>(currentCgroup, ":");
+ cgPathParts.push_back(trim(cgValues.back(), "\n"));
+ cgPathParts.push_back("cpu.max");
+ std::string fullCgPath = canonPath(concatStringsSep("/", cgPathParts));
+
+ if (pathExists(fullCgPath)) {
+ std::string cpuMax = readFile(fullCgPath);
+ std::vector<std::string> cpuMaxParts = tokenizeString<std::vector<std::string>>(cpuMax, " ");
+ std::string quota = cpuMaxParts[0];
+ std::string period = trim(cpuMaxParts[1], "\n");
+
+ if (quota != "max")
+ return std::ceil(std::stoi(quota) / std::stof(period));
+ }
+ }
+ } catch (Error &) { ignoreException(); }
+ #endif
+
+ return 0;
+}
//////////////////////////////////////////////////////////////////////
@@ -844,6 +861,20 @@ void AutoCloseFD::close()
}
}
+void AutoCloseFD::fsync()
+{
+ if (fd != -1) {
+ int result;
+#if __APPLE__
+ result = ::fcntl(fd, F_FULLFSYNC);
+#else
+ result = ::fsync(fd);
+#endif
+ if (result == -1)
+ throw SysError("fsync file descriptor %1%", fd);
+ }
+}
+
AutoCloseFD::operator bool() const
{
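The `getMaxCPU` helper added above derives a CPU count from the cgroup v2 `cpu.max` file, whose first field is the quota in microseconds and whose second field is the period. The following standalone snippet is only an illustration of that arithmetic on a made-up `cpu.max` value; it is not part of the patch.

```c++
#include <cmath>
#include <string>

// Illustration of the cpu.max arithmetic used by getMaxCPU() above.
// A hypothetical cpu.max of "200000 100000" grants a quota of 200000us
// per 100000us period, i.e. two CPUs' worth of time; "max 100000" would
// mean "unlimited", for which getMaxCPU() returns 0.
int main()
{
    std::string quota = "200000", period = "100000";
    unsigned int cpus = std::ceil(std::stoi(quota) / std::stof(period)); // ceil(2.0) == 2
    return cpus == 2 ? 0 : 1;
}
```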
diff --git a/src/libutil/util.hh b/src/libutil/util.hh
index 90418b04d..e5c678682 100644
--- a/src/libutil/util.hh
+++ b/src/libutil/util.hh
@@ -115,9 +115,12 @@ std::string readFile(const Path & path);
void readFile(const Path & path, Sink & sink);
/* Write a string to a file. */
-void writeFile(const Path & path, std::string_view s, mode_t mode = 0666);
+void writeFile(const Path & path, std::string_view s, mode_t mode = 0666, bool sync = false);
-void writeFile(const Path & path, Source & source, mode_t mode = 0666);
+void writeFile(const Path & path, Source & source, mode_t mode = 0666, bool sync = false);
+
+/* Flush a file's parent directory to disk */
+void syncParent(const Path & path);
/* Read a line from a file descriptor. */
std::string readLine(int fd);
@@ -149,10 +152,14 @@ std::vector<Path> getConfigDirs();
/* Return $XDG_DATA_HOME or $HOME/.local/share. */
Path getDataDir();
+/* Return the path of the current executable. */
+std::optional<Path> getSelfExe();
+
/* Create a directory and all its parents, if necessary. Returns the
list of created directories, in order of creation. */
Paths createDirs(const Path & path);
-inline Paths createDirs(PathView path) {
+inline Paths createDirs(PathView path)
+{
return createDirs(Path(path));
}
@@ -164,6 +171,17 @@ void createSymlink(const Path & target, const Path & link,
void replaceSymlink(const Path & target, const Path & link,
std::optional<time_t> mtime = {});
+void renameFile(const Path & src, const Path & dst);
+
+/**
+ * Similar to 'renameFile', but falls back to copy+remove if `src` and `dst`
+ * are on different filesystems.
+ *
+ * Beware that this might not be atomic, because of the copy that happens
+ * behind the scenes.
+ */
+void moveFile(const Path & src, const Path & dst);
+
/* Wrappers arount read()/write() that read/write exactly the
requested number of bytes. */
@@ -178,6 +196,9 @@ std::string drainFD(int fd, bool block = true, const size_t reserveSize=0);
void drainFD(int fd, Sink & sink, bool block = true);
+/* If cgroups are active, attempt to calculate the number of CPUs available.
+ If cgroups are unavailable or if cpu.max is set to "max", return 0. */
+unsigned int getMaxCPU();
/* Automatic cleanup of resources. */
@@ -213,6 +234,7 @@ public:
explicit operator bool() const;
int release();
void close();
+ void fsync();
};
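The header changes above add a `sync` flag to `writeFile`, plus the `syncParent`, `getSelfExe`, `moveFile` and `getMaxCPU` helpers. A minimal sketch of how a caller inside Nix might use the durable-write path and `getSelfExe`, assuming the usual `util.hh` include and the `nix` namespace (the path and fallback name here are made up):

```c++
#include "util.hh"

using namespace nix;

void sketch()
{
    // Durably write a file: with sync = true, writeFile() fsyncs the data,
    // closes the descriptor, then syncs the parent directory so the new
    // directory entry survives a crash.
    writeFile("/tmp/example.txt", "hello\n", 0666, /* sync */ true);

    // getSelfExe() yields the running binary's path on Linux/macOS and
    // std::nullopt elsewhere; fall back to a plain name if unknown.
    Path self = getSelfExe().value_or("nix");

    // getMaxCPU() reports the cgroup v2 CPU quota, or 0 if unlimited/unknown.
    unsigned int cpus = getMaxCPU();
    (void) self; (void) cpus;
}
```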
diff --git a/src/nix-build/nix-build.cc b/src/nix-build/nix-build.cc
index 426f23905..adcaab686 100644
--- a/src/nix-build/nix-build.cc
+++ b/src/nix-build/nix-build.cc
@@ -85,7 +85,6 @@ static void main_nix_build(int argc, char * * argv)
Strings attrPaths;
Strings left;
RepairFlag repair = NoRepair;
- Path gcRoot;
BuildMode buildMode = bmNormal;
bool readStdin = false;
@@ -167,9 +166,6 @@ static void main_nix_build(int argc, char * * argv)
else if (*arg == "--out-link" || *arg == "-o")
outLink = getArg(*arg, arg, end);
- else if (*arg == "--add-root")
- gcRoot = getArg(*arg, arg, end);
-
else if (*arg == "--dry-run")
dryRun = true;
@@ -257,11 +253,12 @@ static void main_nix_build(int argc, char * * argv)
auto autoArgs = myArgs.getAutoArgs(*state);
+ auto autoArgsWithInNixShell = autoArgs;
if (runEnv) {
- auto newArgs = state->buildBindings(autoArgs->size() + 1);
+ auto newArgs = state->buildBindings(autoArgsWithInNixShell->size() + 1);
newArgs.alloc("inNixShell").mkBool(true);
for (auto & i : *autoArgs) newArgs.insert(i);
- autoArgs = newArgs.finish();
+ autoArgsWithInNixShell = newArgs.finish();
}
if (packages) {
@@ -316,10 +313,39 @@ static void main_nix_build(int argc, char * * argv)
Value vRoot;
state->eval(e, vRoot);
+ std::function<bool(const Value & v)> takesNixShellAttr;
+ takesNixShellAttr = [&](const Value & v) {
+ if (!runEnv) {
+ return false;
+ }
+ bool add = false;
+ if (v.type() == nFunction && v.lambda.fun->hasFormals()) {
+ for (auto & i : v.lambda.fun->formals->formals) {
+ if (state->symbols[i.name] == "inNixShell") {
+ add = true;
+ break;
+ }
+ }
+ }
+ return add;
+ };
+
for (auto & i : attrPaths) {
- Value & v(*findAlongAttrPath(*state, i, *autoArgs, vRoot).first);
+ Value & v(*findAlongAttrPath(
+ *state,
+ i,
+ takesNixShellAttr(vRoot) ? *autoArgsWithInNixShell : *autoArgs,
+ vRoot
+ ).first);
state->forceValue(v, [&]() { return v.determinePos(noPos); });
- getDerivations(*state, v, "", *autoArgs, drvs, false);
+ getDerivations(
+ *state,
+ v,
+ "",
+ takesNixShellAttr(v) ? *autoArgsWithInNixShell : *autoArgs,
+ drvs,
+ false
+ );
}
}
@@ -371,7 +397,7 @@ static void main_nix_build(int argc, char * * argv)
auto bashDrv = drv->requireDrvPath();
pathsToBuild.push_back(DerivedPath::Built {
.drvPath = bashDrv,
- .outputs = {},
+ .outputs = {"out"},
});
pathsToCopy.insert(bashDrv);
shellDrv = bashDrv;
@@ -543,6 +569,8 @@ static void main_nix_build(int argc, char * * argv)
restoreProcessContext();
+ logger->stop();
+
execvp(shell->c_str(), argPtrs.data());
throw SysError("executing shell '%s'", *shell);
@@ -601,6 +629,8 @@ static void main_nix_build(int argc, char * * argv)
outPaths.push_back(outputPath);
}
+ logger->stop();
+
for (auto & path : outPaths)
std::cout << store->printStorePath(path) << '\n';
}
diff --git a/src/nix-collect-garbage/nix-collect-garbage.cc b/src/nix-collect-garbage/nix-collect-garbage.cc
index af6f1c88c..e413faffe 100644
--- a/src/nix-collect-garbage/nix-collect-garbage.cc
+++ b/src/nix-collect-garbage/nix-collect-garbage.cc
@@ -37,6 +37,7 @@ void removeOldGenerations(std::string dir)
link = readLink(path);
} catch (SysError & e) {
if (e.errNo == ENOENT) continue;
+ throw;
}
if (link.find("link") != std::string::npos) {
printInfo(format("removing old generations of profile %1%") % path);
diff --git a/src/nix-env/nix-env.cc b/src/nix-env/nix-env.cc
index a69d3700d..fdd66220a 100644
--- a/src/nix-env/nix-env.cc
+++ b/src/nix-env/nix-env.cc
@@ -940,12 +940,12 @@ static void queryJSON(Globals & globals, std::vector<DrvInfo> & elems, bool prin
JSONObject metaObj = pkgObj.object("meta");
StringSet metaNames = i.queryMetaNames();
for (auto & j : metaNames) {
- auto placeholder = metaObj.placeholder(j);
Value * v = i.queryMeta(j);
if (!v) {
printError("derivation '%s' has invalid meta attribute '%s'", i.queryName(), j);
- placeholder.write(nullptr);
+ metaObj.attr(j, nullptr);
} else {
+ auto placeholder = metaObj.placeholder(j);
PathSet context;
printValueAsJSON(*globals.state, true, *v, noPos, placeholder, context);
}
diff --git a/src/nix-instantiate/nix-instantiate.cc b/src/nix-instantiate/nix-instantiate.cc
index d3144e131..6b5ba595d 100644
--- a/src/nix-instantiate/nix-instantiate.cc
+++ b/src/nix-instantiate/nix-instantiate.cc
@@ -52,9 +52,10 @@ void processExpr(EvalState & state, const Strings & attrPaths,
state.autoCallFunction(autoArgs, v, vRes);
if (output == okXML)
printValueAsXML(state, strict, location, vRes, std::cout, context, noPos);
- else if (output == okJSON)
+ else if (output == okJSON) {
printValueAsJSON(state, strict, vRes, v.determinePos(noPos), std::cout, context);
- else {
+ std::cout << std::endl;
+ } else {
if (strict) state.forceValueDeep(vRes);
vRes.print(state.symbols, std::cout);
std::cout << std::endl;
diff --git a/src/nix-store/nix-store.cc b/src/nix-store/nix-store.cc
index b453ea1ca..23f2ad3cf 100644
--- a/src/nix-store/nix-store.cc
+++ b/src/nix-store/nix-store.cc
@@ -922,7 +922,7 @@ static void opServe(Strings opFlags, Strings opArgs)
if (GET_PROTOCOL_MINOR(clientVersion) >= 3)
out << status.timesBuilt << status.isNonDeterministic << status.startTime << status.stopTime;
- if (GET_PROTOCOL_MINOR(clientVersion >= 6)) {
+ if (GET_PROTOCOL_MINOR(clientVersion) >= 6) {
worker_proto::write(*store, out, status.builtOutputs);
}
diff --git a/src/nix/bundle.md b/src/nix/bundle.md
index 2bb70711f..a18161a3c 100644
--- a/src/nix/bundle.md
+++ b/src/nix/bundle.md
@@ -44,7 +44,7 @@ flake output attributes:
* `bundlers.<system>.default`
-If an attribute *name* is given, `nix run` tries the following flake
+If an attribute *name* is given, `nix bundle` tries the following flake
output attributes:
* `bundlers.<system>.<name>`
diff --git a/src/nix/develop.cc b/src/nix/develop.cc
index 2a3fc0213..4de109754 100644
--- a/src/nix/develop.cc
+++ b/src/nix/develop.cc
@@ -246,6 +246,7 @@ struct Common : InstallableCommand, MixProfile
"NIX_LOG_FD",
"NIX_REMOTE",
"PPID",
+ "SHELL",
"SHELLOPTS",
"SSL_CERT_FILE", // FIXME: only want to ignore /no-cert-file.crt
"TEMP",
@@ -276,15 +277,27 @@ struct Common : InstallableCommand, MixProfile
const BuildEnvironment & buildEnvironment,
const Path & outputsDir = absPath(".") + "/outputs")
{
+ // A list of colon-separated environment variables that should be
+ // prepended to, rather than overwritten, in order to keep the shell usable.
+ // Please keep this list minimal in order to avoid impurities.
+ static const char * const savedVars[] = {
+ "PATH", // for commands
+ "XDG_DATA_DIRS", // for loadable completion
+ };
+
std::ostringstream out;
out << "unset shellHook\n";
- out << "nix_saved_PATH=\"$PATH\"\n";
+ for (auto & var : savedVars) {
+ out << fmt("%s=${%s:-}\n", var, var);
+ out << fmt("nix_saved_%s=\"$%s\"\n", var, var);
+ }
buildEnvironment.toBash(out, ignoreVars);
- out << "PATH=\"$PATH:$nix_saved_PATH\"\n";
+ for (auto & var : savedVars)
+ out << fmt("%s=\"$%s:$nix_saved_%s\"\n", var, var, var);
out << "export NIX_BUILD_TOP=\"$(mktemp -d -t nix-shell.XXXXXX)\"\n";
for (auto & i : {"TMP", "TMPDIR", "TEMP", "TEMPDIR"})
diff --git a/src/nix/develop.md b/src/nix/develop.md
index e036ec6b9..4e8542d1b 100644
--- a/src/nix/develop.md
+++ b/src/nix/develop.md
@@ -66,6 +66,12 @@ R""(
`nixpkgs#glibc` in `~/my-glibc` and want to compile another package
against it.
+* Run a series of script commands:
+
+ ```console
+ # nix develop --command bash -c "mkdir build && cd build && cmake .. && make"
+ ```
+
# Description
`nix develop` starts a `bash` shell that provides an interactive build
diff --git a/src/nix/eval.cc b/src/nix/eval.cc
index 967dc8519..ddd2790c6 100644
--- a/src/nix/eval.cc
+++ b/src/nix/eval.cc
@@ -116,7 +116,8 @@ struct CmdEval : MixJSON, InstallableCommand
else if (json) {
JSONPlaceholder jsonOut(std::cout);
- printValueAsJSON(*state, true, *v, pos, jsonOut, context);
+ printValueAsJSON(*state, true, *v, pos, jsonOut, context, false);
+ std::cout << std::endl;
}
else {
diff --git a/src/nix/flake-update.md b/src/nix/flake-update.md
index 03b50e38e..2ee8a707d 100644
--- a/src/nix/flake-update.md
+++ b/src/nix/flake-update.md
@@ -6,7 +6,7 @@ R""(
lock file:
```console
- # nix flake update
+ # nix flake update --commit-lock-file
* Updated 'nix': 'github:NixOS/nix/9fab14adbc3810d5cc1f88672fde1eee4358405c' -> 'github:NixOS/nix/8927cba62f5afb33b01016d5c4f7f8b7d0adde3c'
* Updated 'nixpkgs': 'github:NixOS/nixpkgs/3d2d8f281a27d466fa54b469b5993f7dde198375' -> 'github:NixOS/nixpkgs/a3a3dda3bacf61e8a39258a0ed9c924eeca8e293'
diff --git a/src/nix/flake.cc b/src/nix/flake.cc
index 8370b8dcf..3967f1102 100644
--- a/src/nix/flake.cc
+++ b/src/nix/flake.cc
@@ -50,9 +50,9 @@ public:
return flake::lockFlake(*getEvalState(), getFlakeRef(), lockFlags);
}
- std::optional<FlakeRef> getFlakeRefForCompletion() override
+ std::vector<std::string> getFlakesForCompletion() override
{
- return getFlakeRef();
+ return {flakeUrl};
}
};
@@ -212,7 +212,8 @@ struct CmdFlakeMetadata : FlakeCommand, MixJSON
ANSI_BOLD "Last modified:" ANSI_NORMAL " %s",
std::put_time(std::localtime(&*lastModified), "%F %T"));
- logger->cout(ANSI_BOLD "Inputs:" ANSI_NORMAL);
+ if (!lockedFlake.lockFile.root->inputs.empty())
+ logger->cout(ANSI_BOLD "Inputs:" ANSI_NORMAL);
std::unordered_set<std::shared_ptr<Node>> visited;
@@ -740,7 +741,8 @@ struct CmdFlakeInitCommon : virtual Args, EvalCommand
"If you've set '%s' to a string, try using a path instead.",
templateDir, templateDirAttr->getAttrPathStr());
- std::vector<Path> files;
+ std::vector<Path> changedFiles;
+ std::vector<Path> conflictedFiles;
std::function<void(const Path & from, const Path & to)> copyDir;
copyDir = [&](const Path & from, const Path & to)
@@ -757,31 +759,41 @@ struct CmdFlakeInitCommon : virtual Args, EvalCommand
auto contents = readFile(from2);
if (pathExists(to2)) {
auto contents2 = readFile(to2);
- if (contents != contents2)
- throw Error("refusing to overwrite existing file '%s'", to2);
+ if (contents != contents2) {
+ printError("refusing to overwrite existing file '%s'\n please merge it manually with '%s'", to2, from2);
+ conflictedFiles.push_back(to2);
+ } else {
+ notice("skipping identical file: %s", from2);
+ }
+ continue;
} else
writeFile(to2, contents);
}
else if (S_ISLNK(st.st_mode)) {
auto target = readLink(from2);
if (pathExists(to2)) {
- if (readLink(to2) != target)
- throw Error("refusing to overwrite existing symlink '%s'", to2);
+ if (readLink(to2) != target) {
+ printError("refusing to overwrite existing file '%s'\n please merge it manually with '%s'", to2, from2);
+ conflictedFiles.push_back(to2);
+ } else {
+ notice("skipping identical file: %s", from2);
+ }
+ continue;
} else
createSymlink(target, to2);
}
else
throw Error("file '%s' has unsupported type", from2);
- files.push_back(to2);
+ changedFiles.push_back(to2);
notice("wrote: %s", to2);
}
};
copyDir(templateDir, flakeDir);
- if (pathExists(flakeDir + "/.git")) {
+ if (!changedFiles.empty() && pathExists(flakeDir + "/.git")) {
Strings args = { "-C", flakeDir, "add", "--intent-to-add", "--force", "--" };
- for (auto & s : files) args.push_back(s);
+ for (auto & s : changedFiles) args.push_back(s);
runProgram("git", true, args);
}
auto welcomeText = cursor->maybeGetAttr("welcomeText");
@@ -789,6 +801,9 @@ struct CmdFlakeInitCommon : virtual Args, EvalCommand
notice("\n");
notice(renderMarkdownToTerminal(welcomeText->getString()));
}
+
+ if (!conflictedFiles.empty())
+ throw Error("Encountered %d conflicts - see above", conflictedFiles.size());
}
};
diff --git a/src/nix/get-env.sh b/src/nix/get-env.sh
index 42c806450..a7a8a01b9 100644
--- a/src/nix/get-env.sh
+++ b/src/nix/get-env.sh
@@ -43,6 +43,7 @@ __dumpEnv() {
local __var_name="${BASH_REMATCH[2]}"
if [[ $__var_name =~ ^BASH_ || \
+ $__var_name =~ ^COMP_ || \
$__var_name = _ || \
$__var_name = DIRSTACK || \
$__var_name = EUID || \
@@ -54,7 +55,9 @@ __dumpEnv() {
$__var_name = PWD || \
$__var_name = RANDOM || \
$__var_name = SHLVL || \
- $__var_name = SECONDS \
+ $__var_name = SECONDS || \
+ $__var_name = EPOCHREALTIME || \
+ $__var_name = EPOCHSECONDS \
]]; then continue; fi
if [[ -z $__first ]]; then printf ',\n'; else __first=; fi
diff --git a/src/nix/main.cc b/src/nix/main.cc
index dadb54306..e0155cd5d 100644
--- a/src/nix/main.cc
+++ b/src/nix/main.cc
@@ -82,7 +82,7 @@ struct NixArgs : virtual MultiCommand, virtual MixCommonArgs
.shortName = 'L',
.description = "Print full build logs on standard error.",
.category = loggingCategory,
- .handler = {[&]() {setLogFormat(LogFormat::barWithLogs); }},
+ .handler = {[&]() { logger->setPrintBuildLogs(true); }},
});
addFlag({
@@ -266,6 +266,11 @@ void mainWrapped(int argc, char * * argv)
programPath = argv[0];
auto programName = std::string(baseNameOf(programPath));
+ if (argc > 0 && std::string_view(argv[0]) == "__build-remote") {
+ programName = "build-remote";
+ argv++; argc--;
+ }
+
{
auto legacy = (*RegisterLegacyCommand::commands)[programName];
if (legacy) return legacy(argc, argv);
@@ -320,7 +325,7 @@ void mainWrapped(int argc, char * * argv)
std::cout << "attrs\n"; break;
}
for (auto & s : *completions)
- std::cout << s.completion << "\t" << s.description << "\n";
+ std::cout << s.completion << "\t" << trim(s.description) << "\n";
}
});
@@ -342,7 +347,10 @@ void mainWrapped(int argc, char * * argv)
if (!completions) throw;
}
- if (completions) return;
+ if (completions) {
+ args.completionHook();
+ return;
+ }
if (args.showVersion) {
printVersion(programName);
@@ -380,6 +388,9 @@ void mainWrapped(int argc, char * * argv)
settings.ttlPositiveNarInfoCache = 0;
}
+ if (args.command->second->forceImpureByDefault() && !evalSettings.pureEval.overridden) {
+ evalSettings.pureEval = false;
+ }
args.command->second->prepare();
args.command->second->run();
}
diff --git a/src/nix/make-content-addressed.md b/src/nix/make-content-addressed.md
index 215683e6d..32eecc880 100644
--- a/src/nix/make-content-addressed.md
+++ b/src/nix/make-content-addressed.md
@@ -22,7 +22,7 @@ R""(
```console
# nix copy --to /tmp/nix --trusted-public-keys '' nixpkgs#hello
- cannot add path '/nix/store/zy9wbxwcygrwnh8n2w9qbbcr6zk87m26-libunistring-0.9.10' because it lacks a valid signature
+ cannot add path '/nix/store/zy9wbxwcygrwnh8n2w9qbbcr6zk87m26-libunistring-0.9.10' because it lacks a signature by a trusted key
```
* Create a content-addressed representation of the current NixOS
diff --git a/src/nix/profile.md b/src/nix/profile.md
index 8dade051d..be3c5ba1a 100644
--- a/src/nix/profile.md
+++ b/src/nix/profile.md
@@ -11,7 +11,7 @@ them to be rolled back easily.
The default profile used by `nix profile` is `$HOME/.nix-profile`,
which, if it does not exist, is created as a symlink to
-`/nix/var/nix/profiles/per-user/default` if Nix is invoked by the
+`/nix/var/nix/profiles/default` if Nix is invoked by the
`root` user, or `/nix/var/nix/profiles/per-user/`*username* otherwise.
You can specify another profile location using `--profile` *path*.
diff --git a/src/nix/registry.md b/src/nix/registry.md
index d5c9ef442..bd3575d1b 100644
--- a/src/nix/registry.md
+++ b/src/nix/registry.md
@@ -29,7 +29,7 @@ highest precedence:
can be specified using the NixOS option `nix.registry`.
* The user registry `~/.config/nix/registry.json`. This registry can
- be modified by commands such as `nix flake pin`.
+ be modified by commands such as `nix registry pin`.
* Overrides specified on the command line using the option
`--override-flake`.
diff --git a/src/nix/repl.md b/src/nix/repl.md
index 9b6f2bee3..23ef0f4e6 100644
--- a/src/nix/repl.md
+++ b/src/nix/repl.md
@@ -24,10 +24,34 @@ R""(
* Interact with Nixpkgs in the REPL:
```console
- # nix repl '<nixpkgs>'
+ # nix repl --file example.nix
+ Loading Installable ''...
+ Added 3 variables.
- Loading '<nixpkgs>'...
- Added 12428 variables.
+ # nix repl --expr '{a={b=3;c=4;};}'
+ Loading Installable ''...
+ Added 1 variables.
+
+ # nix repl --expr '{a={b=3;c=4;};}' a
+ Loading Installable ''...
+ Added 1 variables.
+
+ # nix repl --extra-experimental-features 'flakes repl-flake' nixpkgs
+ Loading Installable 'flake:nixpkgs#'...
+ Added 5 variables.
+
+ nix-repl> legacyPackages.x86_64-linux.emacs.name
+ "emacs-27.1"
+
+ nix-repl> legacyPackages.x86_64-linux.emacs.name
+ "emacs-27.1"
+
+ nix-repl> :q
+
+ # nix repl --expr 'import <nixpkgs>{}'
+
+ Loading Installable ''...
+ Added 12439 variables.
nix-repl> emacs.name
"emacs-27.1"
diff --git a/src/nix/run.cc b/src/nix/run.cc
index 25a8fa8d3..45d2dfd0d 100644
--- a/src/nix/run.cc
+++ b/src/nix/run.cc
@@ -47,7 +47,7 @@ void runProgramInStore(ref<Store> store,
Strings helperArgs = { chrootHelperName, store->storeDir, store2->getRealStoreDir(), program };
for (auto & arg : args) helperArgs.push_back(arg);
- execv(readLink("/proc/self/exe").c_str(), stringsToCharPtrs(helperArgs).data());
+ execv(getSelfExe().value_or("nix").c_str(), stringsToCharPtrs(helperArgs).data());
throw SysError("could not execute chroot helper");
}
diff --git a/src/nix/search.cc b/src/nix/search.cc
index f1f5f9641..bdd45cbed 100644
--- a/src/nix/search.cc
+++ b/src/nix/search.cc
@@ -34,7 +34,9 @@ struct CmdSearch : InstallableCommand, MixJSON
.shortName = 'e',
.description = "Hide packages whose attribute path, name or description contain *regex*.",
.labels = {"regex"},
- .handler = Handler(&excludeRes),
+ .handler = {[this](std::string s) {
+ excludeRes.push_back(s);
+ }},
});
}
diff --git a/src/nix/shell.md b/src/nix/shell.md
index 90b81fb2f..9fa1031f5 100644
--- a/src/nix/shell.md
+++ b/src/nix/shell.md
@@ -23,6 +23,12 @@ R""(
Hi everybody!
```
+* Run multiple commands in a shell environment:
+
+ ```console
+ # nix shell nixpkgs#gnumake -c sh -c "cd src && make"
+ ```
+
* Run GNU Hello in a chroot store:
```console
diff --git a/src/nix/verify.cc b/src/nix/verify.cc
index e92df1303..efa2434dc 100644
--- a/src/nix/verify.cc
+++ b/src/nix/verify.cc
@@ -41,7 +41,7 @@ struct CmdVerify : StorePathsCommand
addFlag({
.longName = "sigs-needed",
.shortName = 'n',
- .description = "Require that each path has at least *n* valid signatures.",
+ .description = "Require that each path is signed by at least *n* different keys.",
.labels = {"n"},
.handler = {&sigsNeeded}
});
diff --git a/tests/build-dry.sh b/tests/build-dry.sh
index f0f38e9a0..5f29239dc 100644
--- a/tests/build-dry.sh
+++ b/tests/build-dry.sh
@@ -18,9 +18,6 @@ nix-build --no-out-link dependencies.nix --dry-run 2>&1 | grep "will be built"
# Now new command:
nix build -f dependencies.nix --dry-run 2>&1 | grep "will be built"
-# TODO: XXX: FIXME: #1793
-# Disable this part of the test until the problem is resolved:
-if [ -n "$ISSUE_1795_IS_FIXED" ]; then
clearStore
clearCache
@@ -28,7 +25,6 @@ clearCache
nix build -f dependencies.nix --dry-run 2>&1 | grep "will be built"
# Now old command:
nix-build --no-out-link dependencies.nix --dry-run 2>&1 | grep "will be built"
-fi
###################################################
# Check --dry-run doesn't create links with --dry-run
diff --git a/tests/ca-shell.nix b/tests/ca-shell.nix
index ad2ab6aff..36e1d1526 100644
--- a/tests/ca-shell.nix
+++ b/tests/ca-shell.nix
@@ -1 +1 @@
-{ ... }@args: import ./shell.nix (args // { contentAddressed = true; })
+{ inNixShell ? false, ... }@args: import ./shell.nix (args // { contentAddressed = true; })
diff --git a/tests/ca/content-addressed.nix b/tests/ca/content-addressed.nix
index 31c144ae0..81bc4bf5c 100644
--- a/tests/ca/content-addressed.nix
+++ b/tests/ca/content-addressed.nix
@@ -75,7 +75,7 @@ rec {
buildCommand = ''
mkdir -p $out/bin
echo ${rootCA} # Just to make it depend on it
- echo "" > $out/bin/${name}
+ echo "#! ${shell}" > $out/bin/${name}
chmod +x $out/bin/${name}
'';
};
diff --git a/tests/check.sh b/tests/check.sh
index ab48ff865..495202781 100644
--- a/tests/check.sh
+++ b/tests/check.sh
@@ -40,6 +40,14 @@ nix-build check.nix -A deterministic --argstr checkBuildId $checkBuildId \
if grep -q 'may not be deterministic' $TEST_ROOT/log; then false; fi
checkBuildTempDirRemoved $TEST_ROOT/log
+nix build -f check.nix deterministic --rebuild --repeat 1 \
+ --argstr checkBuildId $checkBuildId --keep-failed --no-link \
+ 2> $TEST_ROOT/log
+if grep -q 'checking is not possible' $TEST_ROOT/log; then false; fi
+# Repeat is set to 1, i.e. nix should build the 'deterministic' derivation twice.
+if [ "$(grep "checking outputs" $TEST_ROOT/log | wc -l)" -ne 2 ]; then false; fi
+checkBuildTempDirRemoved $TEST_ROOT/log
+
nix-build check.nix -A nondeterministic --argstr checkBuildId $checkBuildId \
--no-out-link 2> $TEST_ROOT/log
checkBuildTempDirRemoved $TEST_ROOT/log
@@ -50,6 +58,12 @@ grep 'may not be deterministic' $TEST_ROOT/log
[ "$status" = "104" ]
checkBuildTempDirRemoved $TEST_ROOT/log
+nix build -f check.nix nondeterministic --rebuild --repeat 1 \
+ --argstr checkBuildId $checkBuildId --keep-failed --no-link \
+ 2> $TEST_ROOT/log || status=$?
+grep 'may not be deterministic' $TEST_ROOT/log
+checkBuildTempDirRemoved $TEST_ROOT/log
+
nix-build check.nix -A nondeterministic --argstr checkBuildId $checkBuildId \
--no-out-link --check --keep-failed 2> $TEST_ROOT/log || status=$?
grep 'may not be deterministic' $TEST_ROOT/log
diff --git a/tests/common.sh.in b/tests/common.sh.in
index 6cb579e0d..73c2d2309 100644
--- a/tests/common.sh.in
+++ b/tests/common.sh.in
@@ -50,6 +50,8 @@ export busybox="@sandbox_shell@"
export version=@PACKAGE_VERSION@
export system=@system@
+export BUILD_SHARED_LIBS=@BUILD_SHARED_LIBS@
+
export IMPURE_VAR1=foo
export IMPURE_VAR2=bar
@@ -117,11 +119,11 @@ killDaemon() {
}
restartDaemon() {
- [[ -z "${pidDaemon:-}" ]] && return 0
+ [[ -z "${pidDaemon:-}" ]] && return 0
- killDaemon
- unset NIX_REMOTE
- startDaemon
+ killDaemon
+ unset NIX_REMOTE
+ startDaemon
}
if [[ $(uname) == Linux ]] && [[ -L /proc/self/ns/user ]] && unshare --user true; then
@@ -188,4 +190,15 @@ if [[ -n "${NIX_DAEMON_PACKAGE:-}" ]]; then
startDaemon
fi
+onError() {
+ set +x
+ echo "$0: test failed at:" >&2
+ for ((i = 1; i < ${#BASH_SOURCE[@]}; i++)); do
+ if [[ -z ${BASH_SOURCE[i]} ]]; then break; fi
+ echo " ${FUNCNAME[i]} in ${BASH_SOURCE[i]}:${BASH_LINENO[i-1]}" >&2
+ done
+}
+
+trap onError ERR
+
fi # COMMON_SH_SOURCED
diff --git a/tests/completions.sh b/tests/completions.sh
new file mode 100644
index 000000000..522aa1c86
--- /dev/null
+++ b/tests/completions.sh
@@ -0,0 +1,62 @@
+source common.sh
+
+cd "$TEST_ROOT"
+
+mkdir -p dep
+cat <<EOF > dep/flake.nix
+{
+ outputs = i: { };
+}
+EOF
+mkdir -p foo
+cat <<EOF > foo/flake.nix
+{
+ inputs.a.url = "path:$(realpath dep)";
+
+ outputs = i: {
+ sampleOutput = 1;
+ };
+}
+EOF
+mkdir -p bar
+cat <<EOF > bar/flake.nix
+{
+ inputs.b.url = "path:$(realpath dep)";
+
+ outputs = i: {
+ sampleOutput = 1;
+ };
+}
+EOF
+
+# Test the completion of a subcommand
+[[ "$(NIX_GET_COMPLETIONS=1 nix buil)" == $'normal\nbuild\t' ]]
+[[ "$(NIX_GET_COMPLETIONS=2 nix flake metad)" == $'normal\nmetadata\t' ]]
+
+# Filename completion
+[[ "$(NIX_GET_COMPLETIONS=2 nix build ./f)" == $'filenames\n./foo\t' ]]
+[[ "$(NIX_GET_COMPLETIONS=2 nix build ./nonexistent)" == $'filenames' ]]
+
+# Input override completion
+[[ "$(NIX_GET_COMPLETIONS=4 nix build ./foo --override-input '')" == $'normal\na\t' ]]
+[[ "$(NIX_GET_COMPLETIONS=5 nix flake show ./foo --override-input '')" == $'normal\na\t' ]]
+## With multiple input flakes
+[[ "$(NIX_GET_COMPLETIONS=5 nix build ./foo ./bar --override-input '')" == $'normal\na\t\nb\t' ]]
+## With tilde expansion
+[[ "$(HOME=$PWD NIX_GET_COMPLETIONS=4 nix build '~/foo' --override-input '')" == $'normal\na\t' ]]
+## Out of order
+[[ "$(NIX_GET_COMPLETIONS=3 nix build --update-input '' ./foo)" == $'normal\na\t' ]]
+[[ "$(NIX_GET_COMPLETIONS=4 nix build ./foo --update-input '' ./bar)" == $'normal\na\t\nb\t' ]]
+
+# Cli flag completion
+NIX_GET_COMPLETIONS=2 nix build --log-form | grep -- "--log-format"
+
+# Config option completion
+## With `--option`
+NIX_GET_COMPLETIONS=3 nix build --option allow-import-from | grep -- "allow-import-from-derivation"
+## As a CLI flag (not working at the moment)
+# NIX_GET_COMPLETIONS=2 nix build --allow-import-from | grep -- "allow-import-from-derivation"
+
+# Attr path completions
+[[ "$(NIX_GET_COMPLETIONS=2 nix eval ./foo\#sam)" == $'attrs\n./foo#sampleOutput\t' ]]
+[[ "$(NIX_GET_COMPLETIONS=4 nix eval --file ./foo/flake.nix outp)" == $'attrs\noutputs\t' ]]
diff --git a/tests/flake-bundler.sh b/tests/flakes/bundle.sh
index 9496b8f92..67bbb05ac 100644
--- a/tests/flake-bundler.sh
+++ b/tests/flakes/bundle.sh
@@ -1,9 +1,6 @@
source common.sh
-clearStore
-rm -rf $TEST_HOME/.cache $TEST_HOME/.config $TEST_HOME/.local
-
-cp ./simple.nix ./simple.builder.sh ./config.nix $TEST_HOME
+cp ../simple.nix ../simple.builder.sh ../config.nix $TEST_HOME
cd $TEST_HOME
@@ -25,6 +22,7 @@ cat <<EOF > flake.nix
};
}
EOF
+
nix build .#
nix bundle --bundler .# .#
nix bundle --bundler .#bundlers.$system.default .#packages.$system.default
@@ -32,6 +30,3 @@ nix bundle --bundler .#bundlers.$system.simple .#packages.$system.default
nix bundle --bundler .#bundlers.$system.default .#apps.$system.default
nix bundle --bundler .#bundlers.$system.simple .#apps.$system.default
-
-clearStore
-
diff --git a/tests/flakes/check.sh b/tests/flakes/check.sh
new file mode 100644
index 000000000..f572aa75c
--- /dev/null
+++ b/tests/flakes/check.sh
@@ -0,0 +1,89 @@
+source common.sh
+
+flakeDir=$TEST_ROOT/flake3
+mkdir -p $flakeDir
+
+cat > $flakeDir/flake.nix <<EOF
+{
+ outputs = { self }: {
+ overlay = final: prev: {
+ };
+ };
+}
+EOF
+
+nix flake check $flakeDir
+
+cat > $flakeDir/flake.nix <<EOF
+{
+ outputs = { self }: {
+ overlay = finalll: prev: {
+ };
+ };
+}
+EOF
+
+(! nix flake check $flakeDir)
+
+cat > $flakeDir/flake.nix <<EOF
+{
+ outputs = { self }: {
+ nixosModules.foo = {
+ a.b.c = 123;
+ foo = true;
+ };
+ };
+}
+EOF
+
+nix flake check $flakeDir
+
+cat > $flakeDir/flake.nix <<EOF
+{
+ outputs = { self }: {
+ nixosModules.foo = {
+ a.b.c = 123;
+ foo = assert false; true;
+ };
+ };
+}
+EOF
+
+(! nix flake check $flakeDir)
+
+cat > $flakeDir/flake.nix <<EOF
+{
+ outputs = { self }: {
+ nixosModule = { config, pkgs, ... }: {
+ a.b.c = 123;
+ };
+ };
+}
+EOF
+
+nix flake check $flakeDir
+
+cat > $flakeDir/flake.nix <<EOF
+{
+ outputs = { self }: {
+ nixosModule = { config, pkgs }: {
+ a.b.c = 123;
+ };
+ };
+}
+EOF
+
+(! nix flake check $flakeDir)
+
+cat > $flakeDir/flake.nix <<EOF
+{
+ outputs = { self }: {
+ packages.system-1.default = "foo";
+ packages.system-2.default = "bar";
+ };
+}
+EOF
+
+checkRes=$(nix flake check --keep-going $flakeDir 2>&1 && fail "nix flake check should have failed" || true)
+echo "$checkRes" | grep -q "packages.system-1.default"
+echo "$checkRes" | grep -q "packages.system-2.default"
diff --git a/tests/flakes/circular.sh b/tests/flakes/circular.sh
new file mode 100644
index 000000000..09cd02edf
--- /dev/null
+++ b/tests/flakes/circular.sh
@@ -0,0 +1,49 @@
+# Test circular flake dependencies.
+source ./common.sh
+
+requireGit
+
+flakeA=$TEST_ROOT/flakeA
+flakeB=$TEST_ROOT/flakeB
+
+createGitRepo $flakeA
+createGitRepo $flakeB
+
+cat > $flakeA/flake.nix <<EOF
+{
+ inputs.b.url = git+file://$flakeB;
+ inputs.b.inputs.a.follows = "/";
+
+ outputs = { self, b }: {
+ foo = 123 + b.bar;
+ xyzzy = 1000;
+ };
+}
+EOF
+
+git -C $flakeA add flake.nix
+
+cat > $flakeB/flake.nix <<EOF
+{
+ inputs.a.url = git+file://$flakeA;
+
+ outputs = { self, a }: {
+ bar = 456 + a.xyzzy;
+ };
+}
+EOF
+
+git -C $flakeB add flake.nix
+git -C $flakeB commit -a -m 'Foo'
+
+[[ $(nix eval $flakeA#foo) = 1579 ]]
+[[ $(nix eval $flakeA#foo) = 1579 ]]
+
+sed -i $flakeB/flake.nix -e 's/456/789/'
+git -C $flakeB commit -a -m 'Foo'
+
+[[ $(nix eval --update-input b $flakeA#foo) = 1912 ]]
+
+# Test list-inputs with circular dependencies
+nix flake metadata $flakeA
+
diff --git a/tests/flakes/common.sh b/tests/flakes/common.sh
new file mode 100644
index 000000000..c333733c2
--- /dev/null
+++ b/tests/flakes/common.sh
@@ -0,0 +1,73 @@
+source ../common.sh
+
+registry=$TEST_ROOT/registry.json
+
+requireGit() {
+ if [[ -z $(type -p git) ]]; then
+ echo "Git not installed; skipping flake tests"
+ exit 99
+ fi
+}
+
+writeSimpleFlake() {
+ local flakeDir="$1"
+ cat > $flakeDir/flake.nix <<EOF
+{
+ description = "Bla bla";
+
+ outputs = inputs: rec {
+ packages.$system = rec {
+ foo = import ./simple.nix;
+ default = foo;
+ };
+
+ # To test "nix flake init".
+ legacyPackages.x86_64-linux.hello = import ./simple.nix;
+ };
+}
+EOF
+
+ cp ../simple.nix ../simple.builder.sh ../config.nix $flakeDir/
+}
+
+createSimpleGitFlake() {
+ local flakeDir="$1"
+ writeSimpleFlake $flakeDir
+ git -C $flakeDir add flake.nix simple.nix simple.builder.sh config.nix
+ git -C $flakeDir commit -m 'Initial'
+}
+
+writeDependentFlake() {
+ local flakeDir="$1"
+ cat > $flakeDir/flake.nix <<EOF
+{
+ outputs = { self, flake1 }: {
+ packages.$system.default = flake1.packages.$system.default;
+ expr = assert builtins.pathExists ./flake.lock; 123;
+ };
+}
+EOF
+}
+
+writeTrivialFlake() {
+ local flakeDir="$1"
+ cat > $flakeDir/flake.nix <<EOF
+{
+ outputs = { self }: {
+ expr = 123;
+ };
+}
+EOF
+}
+
+createGitRepo() {
+ local repo="$1"
+ local extraArgs="${2:-}"
+
+ rm -rf $repo $repo.tmp
+ mkdir -p $repo
+
+ git -C $repo init $extraArgs
+ git -C $repo config user.email "foobar@example.com"
+ git -C $repo config user.name "Foobar"
+}
diff --git a/tests/flake-local-settings.sh b/tests/flakes/config.sh
index e92c16f87..d1941a6be 100644
--- a/tests/flake-local-settings.sh
+++ b/tests/flakes/config.sh
@@ -1,9 +1,6 @@
source common.sh
-clearStore
-rm -rf $TEST_HOME/.cache $TEST_HOME/.config $TEST_HOME/.local
-
-cp ./simple.nix ./simple.builder.sh ./config.nix $TEST_HOME
+cp ../simple.nix ../simple.builder.sh ../config.nix $TEST_HOME
cd $TEST_HOME
diff --git a/tests/flakes.sh b/tests/flakes/flakes.sh
index 36bffcf3b..267e2cd6f 100644
--- a/tests/flakes.sh
+++ b/tests/flakes/flakes.sh
@@ -1,67 +1,30 @@
-source common.sh
+source ./common.sh
-if [[ -z $(type -p git) ]]; then
- echo "Git not installed; skipping flake tests"
- exit 99
-fi
+requireGit
clearStore
rm -rf $TEST_HOME/.cache $TEST_HOME/.config
-registry=$TEST_ROOT/registry.json
-
flake1Dir=$TEST_ROOT/flake1
flake2Dir=$TEST_ROOT/flake2
flake3Dir=$TEST_ROOT/flake3
flake5Dir=$TEST_ROOT/flake5
-flake6Dir=$TEST_ROOT/flake6
flake7Dir=$TEST_ROOT/flake7
-templatesDir=$TEST_ROOT/templates
nonFlakeDir=$TEST_ROOT/nonFlake
badFlakeDir=$TEST_ROOT/badFlake
-flakeA=$TEST_ROOT/flakeA
-flakeB=$TEST_ROOT/flakeB
flakeGitBare=$TEST_ROOT/flakeGitBare
-flakeFollowsA=$TEST_ROOT/follows/flakeA
-flakeFollowsB=$TEST_ROOT/follows/flakeA/flakeB
-flakeFollowsC=$TEST_ROOT/follows/flakeA/flakeB/flakeC
-flakeFollowsD=$TEST_ROOT/follows/flakeA/flakeD
-flakeFollowsE=$TEST_ROOT/follows/flakeA/flakeE
-
-for repo in $flake1Dir $flake2Dir $flake3Dir $flake7Dir $templatesDir $nonFlakeDir $flakeA $flakeB $flakeFollowsA; do
- rm -rf $repo $repo.tmp
- mkdir -p $repo
+for repo in $flake1Dir $flake2Dir $flake3Dir $flake7Dir $nonFlakeDir; do
# Give one repo a non-main initial branch.
extraArgs=
if [[ $repo == $flake2Dir ]]; then
extraArgs="--initial-branch=main"
fi
- git -C $repo init $extraArgs
- git -C $repo config user.email "foobar@example.com"
- git -C $repo config user.name "Foobar"
+ createGitRepo "$repo" "$extraArgs"
done
-cat > $flake1Dir/flake.nix <<EOF
-{
- description = "Bla bla";
-
- outputs = inputs: rec {
- packages.$system = rec {
- foo = import ./simple.nix;
- default = foo;
- };
-
- # To test "nix flake init".
- legacyPackages.x86_64-linux.hello = import ./simple.nix;
- };
-}
-EOF
-
-cp ./simple.nix ./simple.builder.sh ./config.nix $flake1Dir/
-git -C $flake1Dir add flake.nix simple.nix simple.builder.sh config.nix
-git -C $flake1Dir commit -m 'Initial'
+createSimpleGitFlake $flake1Dir
cat > $flake2Dir/flake.nix <<EOF
{
@@ -105,12 +68,10 @@ nix registry add --registry $registry flake1 git+file://$flake1Dir
nix registry add --registry $registry flake2 git+file://$flake2Dir
nix registry add --registry $registry flake3 git+file://$flake3Dir
nix registry add --registry $registry flake4 flake3
-nix registry add --registry $registry flake5 hg+file://$flake5Dir
nix registry add --registry $registry nixpkgs flake1
-nix registry add --registry $registry templates git+file://$templatesDir
# Test 'nix flake list'.
-[[ $(nix registry list | wc -l) == 7 ]]
+[[ $(nix registry list | wc -l) == 5 ]]
# Test 'nix flake metadata'.
nix flake metadata flake1
@@ -291,7 +252,7 @@ cat > $flake3Dir/flake.nix <<EOF
}
EOF
-cp ./config.nix $flake3Dir
+cp ../config.nix $flake3Dir
git -C $flake3Dir add flake.nix config.nix
git -C $flake3Dir commit -m 'Add nonFlakeInputs'
@@ -366,161 +327,19 @@ nix build -o $TEST_ROOT/result flake4/removeXyzzy#sth
# Testing the nix CLI
nix registry add flake1 flake3
-[[ $(nix registry list | wc -l) == 8 ]]
+[[ $(nix registry list | wc -l) == 6 ]]
nix registry pin flake1
-[[ $(nix registry list | wc -l) == 8 ]]
+[[ $(nix registry list | wc -l) == 6 ]]
nix registry pin flake1 flake3
-[[ $(nix registry list | wc -l) == 8 ]]
+[[ $(nix registry list | wc -l) == 6 ]]
nix registry remove flake1
-[[ $(nix registry list | wc -l) == 7 ]]
-
-# Test 'nix flake init'.
-cat > $templatesDir/flake.nix <<EOF
-{
- description = "Some templates";
-
- outputs = { self }: {
- templates = rec {
- trivial = {
- path = ./trivial;
- description = "A trivial flake";
- welcomeText = ''
- Welcome to my trivial flake
- '';
- };
- default = trivial;
- };
- };
-}
-EOF
-
-mkdir $templatesDir/trivial
-
-cat > $templatesDir/trivial/flake.nix <<EOF
-{
- description = "A flake for building Hello World";
-
- outputs = { self, nixpkgs }: {
- packages.x86_64-linux = rec {
- hello = nixpkgs.legacyPackages.x86_64-linux.hello;
- default = hello;
- };
- };
-}
-EOF
-
-git -C $templatesDir add flake.nix trivial/flake.nix
-git -C $templatesDir commit -m 'Initial'
-
-nix flake check templates
-nix flake show templates
-nix flake show templates --json | jq
-
-(cd $flake7Dir && nix flake init)
-(cd $flake7Dir && nix flake init) # check idempotence
-git -C $flake7Dir add flake.nix
-nix flake check $flake7Dir
-nix flake show $flake7Dir
-nix flake show $flake7Dir --json | jq
-git -C $flake7Dir commit -a -m 'Initial'
-
-# Test 'nix flake new'.
-rm -rf $flake6Dir
-nix flake new -t templates#trivial $flake6Dir
-nix flake new -t templates#trivial $flake6Dir # check idempotence
-nix flake check $flake6Dir
+[[ $(nix registry list | wc -l) == 5 ]]
# Test 'nix flake clone'.
rm -rf $TEST_ROOT/flake1-v2
nix flake clone flake1 --dest $TEST_ROOT/flake1-v2
[ -e $TEST_ROOT/flake1-v2/flake.nix ]
-# More 'nix flake check' tests.
-cat > $flake3Dir/flake.nix <<EOF
-{
- outputs = { flake1, self }: {
- overlay = final: prev: {
- };
- };
-}
-EOF
-
-nix flake check $flake3Dir
-
-cat > $flake3Dir/flake.nix <<EOF
-{
- outputs = { flake1, self }: {
- overlay = finalll: prev: {
- };
- };
-}
-EOF
-
-(! nix flake check $flake3Dir)
-
-cat > $flake3Dir/flake.nix <<EOF
-{
- outputs = { flake1, self }: {
- nixosModules.foo = {
- a.b.c = 123;
- foo = true;
- };
- };
-}
-EOF
-
-nix flake check $flake3Dir
-
-cat > $flake3Dir/flake.nix <<EOF
-{
- outputs = { flake1, self }: {
- nixosModules.foo = {
- a.b.c = 123;
- foo = assert false; true;
- };
- };
-}
-EOF
-
-(! nix flake check $flake3Dir)
-
-cat > $flake3Dir/flake.nix <<EOF
-{
- outputs = { flake1, self }: {
- nixosModule = { config, pkgs, ... }: {
- a.b.c = 123;
- };
- };
-}
-EOF
-
-nix flake check $flake3Dir
-
-cat > $flake3Dir/flake.nix <<EOF
-{
- outputs = { flake1, self }: {
- nixosModule = { config, pkgs }: {
- a.b.c = 123;
- };
- };
-}
-EOF
-
-(! nix flake check $flake3Dir)
-
-cat > $flake3Dir/flake.nix <<EOF
-{
- outputs = { flake1, self }: {
- packages.system-1.default = "foo";
- packages.system-2.default = "bar";
- };
-}
-EOF
-
-checkRes=$(nix flake check --keep-going $flake3Dir 2>&1 && fail "nix flake check should have failed" || true)
-echo "$checkRes" | grep -q "packages.system-1.default"
-echo "$checkRes" | grep -q "packages.system-2.default"
-
# Test 'follows' inputs.
cat > $flake3Dir/flake.nix <<EOF
{
@@ -563,6 +382,10 @@ nix flake lock $flake3Dir
[[ $(jq -c .nodes.root.inputs.bar $flake3Dir/flake.lock) = '["flake2"]' ]]
# Test overriding inputs of inputs.
+writeTrivialFlake $flake7Dir
+git -C $flake7Dir add flake.nix
+git -C $flake7Dir commit -m 'Initial'
+
cat > $flake3Dir/flake.nix <<EOF
{
inputs.flake2.inputs.flake1 = {
@@ -597,50 +420,9 @@ rm -rf $flakeGitBare
git clone --bare $flake1Dir $flakeGitBare
nix build -o $TEST_ROOT/result git+file://$flakeGitBare
-# Test Mercurial flakes.
-rm -rf $flake5Dir
-mkdir $flake5Dir
-
-cat > $flake5Dir/flake.nix <<EOF
-{
- outputs = { self, flake1 }: {
- packages.$system.default = flake1.packages.$system.default;
- expr = assert builtins.pathExists ./flake.lock; 123;
- };
-}
-EOF
-
-if [[ -n $(type -p hg) ]]; then
- hg init $flake5Dir
-
- hg add $flake5Dir/flake.nix
- hg commit --config ui.username=foobar@example.org $flake5Dir -m 'Initial commit'
-
- nix build -o $TEST_ROOT/result hg+file://$flake5Dir
- [[ -e $TEST_ROOT/result/hello ]]
-
- (! nix flake metadata --json hg+file://$flake5Dir | jq -e -r .revision)
-
- nix eval hg+file://$flake5Dir#expr
-
- nix eval hg+file://$flake5Dir#expr
-
- (! nix eval hg+file://$flake5Dir#expr --no-allow-dirty)
-
- (! nix flake metadata --json hg+file://$flake5Dir | jq -e -r .revision)
-
- hg commit --config ui.username=foobar@example.org $flake5Dir -m 'Add lock file'
-
- nix flake metadata --json hg+file://$flake5Dir --refresh | jq -e -r .revision
- nix flake metadata --json hg+file://$flake5Dir
- [[ $(nix flake metadata --json hg+file://$flake5Dir | jq -e -r .revCount) = 1 ]]
-
- nix build -o $TEST_ROOT/result hg+file://$flake5Dir --no-registries --no-allow-dirty
- nix build -o $TEST_ROOT/result hg+file://$flake5Dir --no-use-registries --no-allow-dirty
-fi
-
# Test path flakes.
-rm -rf $flake5Dir/.hg $flake5Dir/flake.lock
+mkdir -p $flake5Dir
+writeDependentFlake $flake5Dir
nix flake lock path://$flake5Dir
# Test tarball flakes.
@@ -678,165 +460,6 @@ nix flake lock $flake3Dir --update-input flake2/flake1
# Test 'nix flake metadata --json'.
nix flake metadata $flake3Dir --json | jq .
-# Test circular flake dependencies.
-cat > $flakeA/flake.nix <<EOF
-{
- inputs.b.url = git+file://$flakeB;
- inputs.b.inputs.a.follows = "/";
-
- outputs = { self, nixpkgs, b }: {
- foo = 123 + b.bar;
- xyzzy = 1000;
- };
-}
-EOF
-
-git -C $flakeA add flake.nix
-
-cat > $flakeB/flake.nix <<EOF
-{
- inputs.a.url = git+file://$flakeA;
-
- outputs = { self, nixpkgs, a }: {
- bar = 456 + a.xyzzy;
- };
-}
-EOF
-
-git -C $flakeB add flake.nix
-git -C $flakeB commit -a -m 'Foo'
-
-[[ $(nix eval $flakeA#foo) = 1579 ]]
-[[ $(nix eval $flakeA#foo) = 1579 ]]
-
-sed -i $flakeB/flake.nix -e 's/456/789/'
-git -C $flakeB commit -a -m 'Foo'
-
-[[ $(nix eval --update-input b $flakeA#foo) = 1912 ]]
-
-# Test list-inputs with circular dependencies
-nix flake metadata $flakeA
-
-# Test flake follow paths
-mkdir -p $flakeFollowsB
-mkdir -p $flakeFollowsC
-mkdir -p $flakeFollowsD
-mkdir -p $flakeFollowsE
-
-cat > $flakeFollowsA/flake.nix <<EOF
-{
- description = "Flake A";
- inputs = {
- B = {
- url = "path:./flakeB";
- inputs.foobar.follows = "foobar";
- };
-
- foobar.url = "path:$flakeFollowsA/flakeE";
- };
- outputs = { ... }: {};
-}
-EOF
-
-cat > $flakeFollowsB/flake.nix <<EOF
-{
- description = "Flake B";
- inputs = {
- foobar.url = "path:$flakeFollowsA/flakeE";
- goodoo.follows = "C/goodoo";
- C = {
- url = "path:./flakeC";
- inputs.foobar.follows = "foobar";
- };
- };
- outputs = { ... }: {};
-}
-EOF
-
-cat > $flakeFollowsC/flake.nix <<EOF
-{
- description = "Flake C";
- inputs = {
- foobar.url = "path:$flakeFollowsA/flakeE";
- goodoo.follows = "foobar";
- };
- outputs = { ... }: {};
-}
-EOF
-
-cat > $flakeFollowsD/flake.nix <<EOF
-{
- description = "Flake D";
- inputs = {};
- outputs = { ... }: {};
-}
-EOF
-
-cat > $flakeFollowsE/flake.nix <<EOF
-{
- description = "Flake E";
- inputs = {};
- outputs = { ... }: {};
-}
-EOF
-
-git -C $flakeFollowsA add flake.nix flakeB/flake.nix \
- flakeB/flakeC/flake.nix flakeD/flake.nix flakeE/flake.nix
-
-nix flake metadata $flakeFollowsA
-
-nix flake update $flakeFollowsA
-
-oldLock="$(cat "$flakeFollowsA/flake.lock")"
-
-# Ensure that locking twice doesn't change anything
-
-nix flake lock $flakeFollowsA
-
-newLock="$(cat "$flakeFollowsA/flake.lock")"
-
-diff <(echo "$newLock") <(echo "$oldLock")
-
-[[ $(jq -c .nodes.B.inputs.C $flakeFollowsA/flake.lock) = '"C"' ]]
-[[ $(jq -c .nodes.B.inputs.foobar $flakeFollowsA/flake.lock) = '["foobar"]' ]]
-[[ $(jq -c .nodes.C.inputs.foobar $flakeFollowsA/flake.lock) = '["B","foobar"]' ]]
-
-# Ensure removing follows from flake.nix removes them from the lockfile
-
-cat > $flakeFollowsA/flake.nix <<EOF
-{
- description = "Flake A";
- inputs = {
- B = {
- url = "path:./flakeB";
- inputs.nonFlake.follows = "D";
- };
- D.url = "path:./flakeD";
- };
- outputs = { ... }: {};
-}
-EOF
-
-nix flake lock $flakeFollowsA
-
-[[ $(jq -c .nodes.B.inputs.foobar $flakeFollowsA/flake.lock) = '"foobar"' ]]
-jq -r -c '.nodes | keys | .[]' $flakeFollowsA/flake.lock | grep "^foobar$"
-
-# Ensure a relative path is not allowed to go outside the store path
-cat > $flakeFollowsA/flake.nix <<EOF
-{
- description = "Flake A";
- inputs = {
- B.url = "path:../flakeB";
- };
- outputs = { ... }: {};
-}
-EOF
-
-git -C $flakeFollowsA add flake.nix
-
-nix flake lock $flakeFollowsA 2>&1 | grep 'points outside'
-
# Test flake in store does not evaluate
rm -rf $badFlakeDir
mkdir $badFlakeDir
diff --git a/tests/flakes/follow-paths.sh b/tests/flakes/follow-paths.sh
new file mode 100644
index 000000000..19cc1bafa
--- /dev/null
+++ b/tests/flakes/follow-paths.sh
@@ -0,0 +1,150 @@
+source ./common.sh
+
+requireGit
+
+flakeFollowsA=$TEST_ROOT/follows/flakeA
+flakeFollowsB=$TEST_ROOT/follows/flakeA/flakeB
+flakeFollowsC=$TEST_ROOT/follows/flakeA/flakeB/flakeC
+flakeFollowsD=$TEST_ROOT/follows/flakeA/flakeD
+flakeFollowsE=$TEST_ROOT/follows/flakeA/flakeE
+
+# Test following path flakerefs.
+createGitRepo $flakeFollowsA
+mkdir -p $flakeFollowsB
+mkdir -p $flakeFollowsC
+mkdir -p $flakeFollowsD
+mkdir -p $flakeFollowsE
+
+cat > $flakeFollowsA/flake.nix <<EOF
+{
+ description = "Flake A";
+ inputs = {
+ B = {
+ url = "path:./flakeB";
+ inputs.foobar.follows = "foobar";
+ };
+
+ foobar.url = "path:$flakeFollowsA/flakeE";
+ };
+ outputs = { ... }: {};
+}
+EOF
+
+cat > $flakeFollowsB/flake.nix <<EOF
+{
+ description = "Flake B";
+ inputs = {
+ foobar.url = "path:$flakeFollowsA/flakeE";
+ goodoo.follows = "C/goodoo";
+ C = {
+ url = "path:./flakeC";
+ inputs.foobar.follows = "foobar";
+ };
+ };
+ outputs = { ... }: {};
+}
+EOF
+
+cat > $flakeFollowsC/flake.nix <<EOF
+{
+ description = "Flake C";
+ inputs = {
+ foobar.url = "path:$flakeFollowsA/flakeE";
+ goodoo.follows = "foobar";
+ };
+ outputs = { ... }: {};
+}
+EOF
+
+cat > $flakeFollowsD/flake.nix <<EOF
+{
+ description = "Flake D";
+ inputs = {};
+ outputs = { ... }: {};
+}
+EOF
+
+cat > $flakeFollowsE/flake.nix <<EOF
+{
+ description = "Flake E";
+ inputs = {};
+ outputs = { ... }: {};
+}
+EOF
+
+git -C $flakeFollowsA add flake.nix flakeB/flake.nix \
+ flakeB/flakeC/flake.nix flakeD/flake.nix flakeE/flake.nix
+
+nix flake metadata $flakeFollowsA
+
+nix flake update $flakeFollowsA
+
+nix flake lock $flakeFollowsA
+
+oldLock="$(cat "$flakeFollowsA/flake.lock")"
+
+# Ensure that locking twice doesn't change anything
+
+nix flake lock $flakeFollowsA
+
+newLock="$(cat "$flakeFollowsA/flake.lock")"
+
+diff <(echo "$newLock") <(echo "$oldLock")
+
+[[ $(jq -c .nodes.B.inputs.C $flakeFollowsA/flake.lock) = '"C"' ]]
+[[ $(jq -c .nodes.B.inputs.foobar $flakeFollowsA/flake.lock) = '["foobar"]' ]]
+[[ $(jq -c .nodes.C.inputs.foobar $flakeFollowsA/flake.lock) = '["B","foobar"]' ]]
+
+# Ensure removing follows from flake.nix removes them from the lockfile
+
+cat > $flakeFollowsA/flake.nix <<EOF
+{
+ description = "Flake A";
+ inputs = {
+ B = {
+ url = "path:./flakeB";
+ };
+ D.url = "path:./flakeD";
+ };
+ outputs = { ... }: {};
+}
+EOF
+
+nix flake lock $flakeFollowsA
+
+[[ $(jq -c .nodes.B.inputs.foobar $flakeFollowsA/flake.lock) = '"foobar"' ]]
+jq -r -c '.nodes | keys | .[]' $flakeFollowsA/flake.lock | grep "^foobar$"
+
+# Ensure a relative path is not allowed to go outside the store path
+cat > $flakeFollowsA/flake.nix <<EOF
+{
+ description = "Flake A";
+ inputs = {
+ B.url = "path:../flakeB";
+ };
+ outputs = { ... }: {};
+}
+EOF
+
+git -C $flakeFollowsA add flake.nix
+
+nix flake lock $flakeFollowsA 2>&1 | grep 'points outside'
+
+# Non-existent follows should print a warning.
+cat >$flakeFollowsA/flake.nix <<EOF
+{
+ description = "Flake A";
+ inputs.B = {
+ url = "path:./flakeB";
+ inputs.invalid.follows = "D";
+ inputs.invalid2.url = "path:./flakeD";
+ };
+ inputs.D.url = "path:./flakeD";
+ outputs = { ... }: {};
+}
+EOF
+
+git -C $flakeFollowsA add flake.nix
+
+nix flake lock $flakeFollowsA 2>&1 | grep "warning: input 'B' has an override for a non-existent input 'invalid'"
+nix flake lock $flakeFollowsA 2>&1 | grep "warning: input 'B' has an override for a non-existent input 'invalid2'"
diff --git a/tests/flakes/init.sh b/tests/flakes/init.sh
new file mode 100644
index 000000000..36cb9956a
--- /dev/null
+++ b/tests/flakes/init.sh
@@ -0,0 +1,87 @@
+source ./common.sh
+
+requireGit
+
+templatesDir=$TEST_ROOT/templates
+flakeDir=$TEST_ROOT/flake
+nixpkgsDir=$TEST_ROOT/nixpkgs
+
+nix registry add --registry $registry templates git+file://$templatesDir
+nix registry add --registry $registry nixpkgs git+file://$nixpkgsDir
+
+createGitRepo $nixpkgsDir
+createSimpleGitFlake $nixpkgsDir
+
+# Test 'nix flake init'.
+createGitRepo $templatesDir
+
+cat > $templatesDir/flake.nix <<EOF
+{
+ description = "Some templates";
+
+ outputs = { self }: {
+ templates = rec {
+ trivial = {
+ path = ./trivial;
+ description = "A trivial flake";
+ welcomeText = ''
+ Welcome to my trivial flake
+ '';
+ };
+ default = trivial;
+ };
+ };
+}
+EOF
+
+mkdir $templatesDir/trivial
+
+cat > $templatesDir/trivial/flake.nix <<EOF
+{
+ description = "A flake for building Hello World";
+
+ outputs = { self, nixpkgs }: {
+ packages.x86_64-linux = rec {
+ hello = nixpkgs.legacyPackages.x86_64-linux.hello;
+ default = hello;
+ };
+ };
+}
+EOF
+echo a > $templatesDir/trivial/a
+echo b > $templatesDir/trivial/b
+
+git -C $templatesDir add flake.nix trivial/
+git -C $templatesDir commit -m 'Initial'
+
+nix flake check templates
+nix flake show templates
+nix flake show templates --json | jq
+
+createGitRepo $flakeDir
+(cd $flakeDir && nix flake init)
+(cd $flakeDir && nix flake init) # check idempotence
+git -C $flakeDir add flake.nix
+nix flake check $flakeDir
+nix flake show $flakeDir
+nix flake show $flakeDir --json | jq
+git -C $flakeDir commit -a -m 'Initial'
+
+# Test 'nix flake init' with benign conflicts
+createGitRepo "$flakeDir"
+echo a > $flakeDir/a
+(cd $flakeDir && nix flake init) # check idempotence
+
+# Test 'nix flake init' with conflicts
+createGitRepo "$flakeDir"
+echo b > $flakeDir/a
+pushd $flakeDir
+(! nix flake init) |& grep "refusing to overwrite existing file '$flakeDir/a'"
+popd
+git -C $flakeDir commit -a -m 'Changed'
+
+# Test 'nix flake new'.
+rm -rf $flakeDir
+nix flake new -t templates#trivial $flakeDir
+nix flake new -t templates#trivial $flakeDir # check idempotence
+nix flake check $flakeDir
diff --git a/tests/flakes/mercurial.sh b/tests/flakes/mercurial.sh
new file mode 100644
index 000000000..2614006c8
--- /dev/null
+++ b/tests/flakes/mercurial.sh
@@ -0,0 +1,46 @@
+source ./common.sh
+
+if [[ -z $(type -p hg) ]]; then
+ echo "Mercurial not installed; skipping"
+ exit 99
+fi
+
+flake1Dir=$TEST_ROOT/flake-hg1
+mkdir -p $flake1Dir
+writeSimpleFlake $flake1Dir
+hg init $flake1Dir
+
+nix registry add --registry $registry flake1 hg+file://$flake1Dir
+
+flake2Dir=$TEST_ROOT/flake-hg2
+mkdir -p $flake2Dir
+writeDependentFlake $flake2Dir
+hg init $flake2Dir
+
+hg add $flake1Dir/*
+hg commit --config ui.username=foobar@example.org $flake1Dir -m 'Initial commit'
+
+hg add $flake2Dir/flake.nix
+hg commit --config ui.username=foobar@example.org $flake2Dir -m 'Initial commit'
+
+nix build -o $TEST_ROOT/result hg+file://$flake2Dir
+[[ -e $TEST_ROOT/result/hello ]]
+
+(! nix flake metadata --json hg+file://$flake2Dir | jq -e -r .revision)
+
+nix eval hg+file://$flake2Dir#expr
+
+nix eval hg+file://$flake2Dir#expr
+
+(! nix eval hg+file://$flake2Dir#expr --no-allow-dirty)
+
+(! nix flake metadata --json hg+file://$flake2Dir | jq -e -r .revision)
+
+hg commit --config ui.username=foobar@example.org $flake2Dir -m 'Add lock file'
+
+nix flake metadata --json hg+file://$flake2Dir --refresh | jq -e -r .revision
+nix flake metadata --json hg+file://$flake2Dir
+[[ $(nix flake metadata --json hg+file://$flake2Dir | jq -e -r .revCount) = 1 ]]
+
+nix build -o $TEST_ROOT/result hg+file://$flake2Dir --no-registries --no-allow-dirty
+nix build -o $TEST_ROOT/result hg+file://$flake2Dir --no-use-registries --no-allow-dirty
diff --git a/tests/flakes-run.sh b/tests/flakes/run.sh
index 88fc3e628..9fa51d1c7 100644
--- a/tests/flakes-run.sh
+++ b/tests/flakes/run.sh
@@ -1,8 +1,8 @@
-source common.sh
+source ../common.sh
clearStore
rm -rf $TEST_HOME/.cache $TEST_HOME/.config $TEST_HOME/.local
-cp ./shell-hello.nix ./config.nix $TEST_HOME
+cp ../shell-hello.nix ../config.nix $TEST_HOME
cd $TEST_HOME
cat <<EOF > flake.nix
diff --git a/tests/flake-searching.sh b/tests/flakes/search-root.sh
index db241f6d2..d8586dc8a 100644
--- a/tests/flake-searching.sh
+++ b/tests/flakes/search-root.sh
@@ -1,15 +1,11 @@
source common.sh
-if [[ -z $(type -p git) ]]; then
- echo "Git not installed; skipping flake search tests"
- exit 99
-fi
-
clearStore
-cp ./simple.nix ./simple.builder.sh ./config.nix $TEST_HOME
+writeSimpleFlake $TEST_HOME
cd $TEST_HOME
mkdir -p foo/subdir
+
echo '{ outputs = _: {}; }' > foo/flake.nix
cat <<EOF > flake.nix
{
@@ -43,10 +39,12 @@ nix build --override-input foo . || fail "flake should search up directories whe
sed "s,$PWD/foo,$PWD/foo/subdir,g" -i flake.nix
! nix build || fail "flake should not search upwards when part of inputs"
-pushd subdir
-git init
-for i in "${success[@]}" "${failure[@]}"; do
- ! nix build $i || fail "flake should not search past a git repository"
-done
-rm -rf .git
-popd
+if [[ -n $(type -p git) ]]; then
+ pushd subdir
+ git init
+ for i in "${success[@]}" "${failure[@]}"; do
+ ! nix build $i || fail "flake should not search past a git repository"
+ done
+ rm -rf .git
+ popd
+fi
diff --git a/tests/fmt.sh b/tests/fmt.sh
index bc05118ff..254681ca2 100644
--- a/tests/fmt.sh
+++ b/tests/fmt.sh
@@ -18,7 +18,12 @@ cat << EOF > flake.nix
with import ./config.nix;
mkDerivation {
name = "formatter";
- buildCommand = "mkdir -p \$out/bin; cp \${./fmt.simple.sh} \$out/bin/formatter";
+ buildCommand = ''
+ mkdir -p \$out/bin
+ echo "#! ${shell}" > \$out/bin/formatter
+ cat \${./fmt.simple.sh} >> \$out/bin/formatter
+ chmod +x \$out/bin/formatter
+ '';
};
};
}
diff --git a/tests/github-flakes.nix b/tests/github-flakes.nix
index ddae6a21c..fc481c7e3 100644
--- a/tests/github-flakes.nix
+++ b/tests/github-flakes.nix
@@ -7,7 +7,7 @@ with import (nixpkgs + "/nixos/lib/testing-python.nix") {
let
- # Generate a fake root CA and a fake github.com certificate.
+ # Generate a fake root CA and a fake api.github.com / channels.nixos.org certificate.
cert = pkgs.runCommand "cert" { buildInputs = [ pkgs.openssl ]; }
''
mkdir -p $out
@@ -18,7 +18,7 @@ let
openssl req -newkey rsa:2048 -nodes -keyout $out/server.key \
-subj "/C=CN/ST=Denial/L=Springfield/O=Dis/CN=github.com" -out server.csr
- openssl x509 -req -extfile <(printf "subjectAltName=DNS:api.github.com,DNS:github.com,DNS:raw.githubusercontent.com") \
+ openssl x509 -req -extfile <(printf "subjectAltName=DNS:api.github.com,DNS:channels.nixos.org") \
-days 36500 -in server.csr -CA $out/ca.crt -CAkey ca.key -CAcreateserial -out $out/server.crt
'';
@@ -67,7 +67,7 @@ makeTest (
name = "github-flakes";
nodes =
- { # Impersonate github.com and api.github.com.
+ {
github =
{ config, pkgs, ... }:
{ networking.firewall.allowedTCPPorts = [ 80 443 ];
@@ -77,12 +77,12 @@ makeTest (
services.httpd.extraConfig = ''
ErrorLog syslog:local6
'';
- services.httpd.virtualHosts."github.com" =
+ services.httpd.virtualHosts."channels.nixos.org" =
{ forceSSL = true;
sslServerKey = "${cert}/server.key";
sslServerCert = "${cert}/server.crt";
servedDirs =
- [ { urlPath = "/NixOS/flake-registry/raw/master";
+ [ { urlPath = "/";
dir = registry;
}
];
@@ -109,7 +109,7 @@ makeTest (
nix.extraOptions = "experimental-features = nix-command flakes";
environment.systemPackages = [ pkgs.jq ];
networking.hosts.${(builtins.head nodes.github.config.networking.interfaces.eth1.ipv4.addresses).address} =
- [ "github.com" "api.github.com" "raw.githubusercontent.com" ];
+ [ "channels.nixos.org" "api.github.com" ];
security.pki.certificateFiles = [ "${cert}/ca.crt" ];
};
};
@@ -123,7 +123,7 @@ makeTest (
github.wait_for_unit("httpd.service")
- client.succeed("curl -v https://github.com/ >&2")
+ client.succeed("curl -v https://api.github.com/ >&2")
client.succeed("nix registry list | grep nixpkgs")
rev = client.succeed("nix flake info nixpkgs --json | jq -r .revision")
diff --git a/tests/installer/default.nix b/tests/installer/default.nix
new file mode 100644
index 000000000..32aa7889a
--- /dev/null
+++ b/tests/installer/default.nix
@@ -0,0 +1,220 @@
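+# Test the Nix binary-tarball installer on several Linux distributions:
+# each test boots a Vagrant box image in qemu, copies the tarball over SSH,
+# runs the install script, and then checks that the installed Nix works.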
+{ binaryTarballs
+, nixpkgsFor
+}:
+
+let
+
+ installScripts = {
+ install-default = {
+ script = ''
+ tar -xf ./nix.tar.xz
+ mv ./nix-* nix
+ ./nix/install --no-channel-add
+ '';
+ };
+
+ install-force-no-daemon = {
+ script = ''
+ tar -xf ./nix.tar.xz
+ mv ./nix-* nix
+ ./nix/install --no-daemon
+ '';
+ };
+
+ install-force-daemon = {
+ script = ''
+ tar -xf ./nix.tar.xz
+ mv ./nix-* nix
+ ./nix/install --daemon --no-channel-add
+ '';
+ };
+ };
+
+ disableSELinux = "sudo setenforce 0";
+
+ images = {
+
+ /*
+ "ubuntu-14-04" = {
+ image = import <nix/fetchurl.nix> {
+ url = "https://app.vagrantup.com/ubuntu/boxes/trusty64/versions/20190514.0.0/providers/virtualbox.box";
+ hash = "sha256-iUUXyRY8iW7DGirb0zwGgf1fRbLA7wimTJKgP7l/OQ8=";
+ };
+ rootDisk = "box-disk1.vmdk";
+ system = "x86_64-linux";
+ };
+ */
+
+ "ubuntu-16-04" = {
+ image = import <nix/fetchurl.nix> {
+ url = "https://app.vagrantup.com/generic/boxes/ubuntu1604/versions/4.1.12/providers/libvirt.box";
+ hash = "sha256-lO4oYQR2tCh5auxAYe6bPOgEqOgv3Y3GC1QM1tEEEU8=";
+ };
+ rootDisk = "box.img";
+ system = "x86_64-linux";
+ };
+
+ "ubuntu-22-04" = {
+ image = import <nix/fetchurl.nix> {
+ url = "https://app.vagrantup.com/generic/boxes/ubuntu2204/versions/4.1.12/providers/libvirt.box";
+ hash = "sha256-HNll0Qikw/xGIcogni5lz01vUv+R3o8xowP2EtqjuUQ=";
+ };
+ rootDisk = "box.img";
+ system = "x86_64-linux";
+ };
+
+ "fedora-36" = {
+ image = import <nix/fetchurl.nix> {
+ url = "https://app.vagrantup.com/generic/boxes/fedora36/versions/4.1.12/providers/libvirt.box";
+ hash = "sha256-rxPgnDnFkTDwvdqn2CV3ZUo3re9AdPtSZ9SvOHNvaks=";
+ };
+ rootDisk = "box.img";
+ system = "x86_64-linux";
+ postBoot = disableSELinux;
+ };
+
+ # Currently fails with 'error while loading shared libraries:
+ # libsodium.so.23: cannot stat shared object: Invalid argument'.
+ /*
+ "rhel-6" = {
+ image = import <nix/fetchurl.nix> {
+ url = "https://app.vagrantup.com/generic/boxes/rhel6/versions/4.1.12/providers/libvirt.box";
+ hash = "sha256-QwzbvRoRRGqUCQptM7X/InRWFSP2sqwRt2HaaO6zBGM=";
+ };
+ rootDisk = "box.img";
+ system = "x86_64-linux";
+ };
+ */
+
+ "rhel-7" = {
+ image = import <nix/fetchurl.nix> {
+ url = "https://app.vagrantup.com/generic/boxes/rhel7/versions/4.1.12/providers/libvirt.box";
+ hash = "sha256-b4afnqKCO9oWXgYHb9DeQ2berSwOjS27rSd9TxXDc/U=";
+ };
+ rootDisk = "box.img";
+ system = "x86_64-linux";
+ };
+
+ "rhel-8" = {
+ image = import <nix/fetchurl.nix> {
+ url = "https://app.vagrantup.com/generic/boxes/rhel8/versions/4.1.12/providers/libvirt.box";
+ hash = "sha256-zFOPjSputy1dPgrQRixBXmlyN88cAKjJ21VvjSWUCUY=";
+ };
+ rootDisk = "box.img";
+ system = "x86_64-linux";
+ postBoot = disableSELinux;
+ };
+
+ "rhel-9" = {
+ image = import <nix/fetchurl.nix> {
+ url = "https://app.vagrantup.com/generic/boxes/rhel9/versions/4.1.12/providers/libvirt.box";
+ hash = "sha256-vL/FbB3kK1rcSaR627nWmScYGKGk4seSmAdq6N5diMg=";
+ };
+ rootDisk = "box.img";
+ system = "x86_64-linux";
+ postBoot = disableSELinux;
+ extraQemuOpts = "-cpu Westmere-v2";
+ };
+
+ };
+
+ makeTest = imageName: testName:
+ let image = images.${imageName}; in
+ with nixpkgsFor.${image.system};
+ runCommand
+ "installer-test-${imageName}-${testName}"
+ { buildInputs = [ qemu_kvm openssh ];
+ image = image.image;
+ postBoot = image.postBoot or "";
+ installScript = installScripts.${testName}.script;
+ binaryTarball = binaryTarballs.${system};
+ }
+ ''
+ shopt -s nullglob
+
+ echo "Unpacking Vagrant box $image..."
+ tar xvf $image
+
+ image_type=$(qemu-img info ${image.rootDisk} | sed 's/file format: \(.*\)/\1/; t; d')
+
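+ # Create a copy-on-write qcow2 overlay so the original Vagrant root disk stays untouched.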
+ qemu-img create -b ./${image.rootDisk} -F "$image_type" -f qcow2 ./disk.qcow2
+
+ extra_qemu_opts="${image.extraQemuOpts or ""}"
+
+ # Add the config disk, required by the Ubuntu images.
+ config_drive=$(echo *configdrive.vmdk || true)
+ if [[ -n $config_drive ]]; then
+ extra_qemu_opts+=" -drive id=disk2,file=$config_drive,if=virtio"
+ fi
+
+ echo "Starting qemu..."
+ qemu-kvm -m 4096 -nographic \
+ -drive id=disk1,file=./disk.qcow2,if=virtio \
+ -netdev user,id=net0,restrict=yes,hostfwd=tcp::20022-:22 -device virtio-net-pci,netdev=net0 \
+ $extra_qemu_opts &
+ qemu_pid=$!
+ trap "kill $qemu_pid" EXIT
+
+ if ! [ -e ./vagrant_insecure_key ]; then
+ cp ${./vagrant_insecure_key} vagrant_insecure_key
+ fi
+
+ chmod 0400 ./vagrant_insecure_key
+
+ ssh_opts="-o StrictHostKeyChecking=no -o HostKeyAlgorithms=+ssh-rsa -o PubkeyAcceptedKeyTypes=+ssh-rsa -i ./vagrant_insecure_key"
+ ssh="ssh -p 20022 -q $ssh_opts vagrant@localhost"
+
+ echo "Waiting for SSH..."
+ for ((i = 0; i < 120; i++)); do
+ echo "[ssh] Trying to connect..."
+ if $ssh -- true; then
+ echo "[ssh] Connected!"
+ break
+ fi
+ if ! kill -0 $qemu_pid; then
+ echo "qemu died unexpectedly"
+ exit 1
+ fi
+ sleep 1
+ done
+
+ if [[ -n $postBoot ]]; then
+ echo "Running post-boot commands..."
+ $ssh "set -ex; $postBoot"
+ fi
+
+ echo "Copying installer..."
+ scp -P 20022 $ssh_opts $binaryTarball/nix-*.tar.xz vagrant@localhost:nix.tar.xz
+
+ echo "Running installer..."
+ $ssh "set -eux; $installScript"
+
+ echo "Testing Nix installation..."
+ $ssh <<EOF
+ set -ex
+
+ # FIXME: get rid of this; ideally ssh should just work.
+ source ~/.bash_profile || true
+ source ~/.bash_login || true
+ source ~/.profile || true
+ source /etc/bashrc || true
+
+ nix-env --version
+ nix --extra-experimental-features nix-command store ping
+
+ out=\$(nix-build --no-substitute -E 'derivation { name = "foo"; system = "x86_64-linux"; builder = "/bin/sh"; args = ["-c" "echo foobar > \$out"]; }')
+ [[ \$(cat \$out) = foobar ]]
+ EOF
+
+ echo "Done!"
+ touch $out
+ '';
+
+in
+
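+# Result shape: { <imageName>.<system>.<testName> = <test derivation>; }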
+builtins.mapAttrs (imageName: image:
+ { ${image.system} = builtins.mapAttrs (testName: test:
+ makeTest imageName testName
+ ) installScripts;
+ }
+) images
diff --git a/tests/installer/vagrant_insecure_key b/tests/installer/vagrant_insecure_key
new file mode 100644
index 000000000..7d6a08390
--- /dev/null
+++ b/tests/installer/vagrant_insecure_key
@@ -0,0 +1,27 @@
+-----BEGIN RSA PRIVATE KEY-----
+MIIEogIBAAKCAQEA6NF8iallvQVp22WDkTkyrtvp9eWW6A8YVr+kz4TjGYe7gHzI
+w+niNltGEFHzD8+v1I2YJ6oXevct1YeS0o9HZyN1Q9qgCgzUFtdOKLv6IedplqoP
+kcmF0aYet2PkEDo3MlTBckFXPITAMzF8dJSIFo9D8HfdOV0IAdx4O7PtixWKn5y2
+hMNG0zQPyUecp4pzC6kivAIhyfHilFR61RGL+GPXQ2MWZWFYbAGjyiYJnAmCP3NO
+Td0jMZEnDkbUvxhMmBYSdETk1rRgm+R4LOzFUGaHqHDLKLX+FIPKcF96hrucXzcW
+yLbIbEgE98OHlnVYCzRdK8jlqm8tehUc9c9WhQIBIwKCAQEA4iqWPJXtzZA68mKd
+ELs4jJsdyky+ewdZeNds5tjcnHU5zUYE25K+ffJED9qUWICcLZDc81TGWjHyAqD1
+Bw7XpgUwFgeUJwUlzQurAv+/ySnxiwuaGJfhFM1CaQHzfXphgVml+fZUvnJUTvzf
+TK2Lg6EdbUE9TarUlBf/xPfuEhMSlIE5keb/Zz3/LUlRg8yDqz5w+QWVJ4utnKnK
+iqwZN0mwpwU7YSyJhlT4YV1F3n4YjLswM5wJs2oqm0jssQu/BT0tyEXNDYBLEF4A
+sClaWuSJ2kjq7KhrrYXzagqhnSei9ODYFShJu8UWVec3Ihb5ZXlzO6vdNQ1J9Xsf
+4m+2ywKBgQD6qFxx/Rv9CNN96l/4rb14HKirC2o/orApiHmHDsURs5rUKDx0f9iP
+cXN7S1uePXuJRK/5hsubaOCx3Owd2u9gD6Oq0CsMkE4CUSiJcYrMANtx54cGH7Rk
+EjFZxK8xAv1ldELEyxrFqkbE4BKd8QOt414qjvTGyAK+OLD3M2QdCQKBgQDtx8pN
+CAxR7yhHbIWT1AH66+XWN8bXq7l3RO/ukeaci98JfkbkxURZhtxV/HHuvUhnPLdX
+3TwygPBYZFNo4pzVEhzWoTtnEtrFueKxyc3+LjZpuo+mBlQ6ORtfgkr9gBVphXZG
+YEzkCD3lVdl8L4cw9BVpKrJCs1c5taGjDgdInQKBgHm/fVvv96bJxc9x1tffXAcj
+3OVdUN0UgXNCSaf/3A/phbeBQe9xS+3mpc4r6qvx+iy69mNBeNZ0xOitIjpjBo2+
+dBEjSBwLk5q5tJqHmy/jKMJL4n9ROlx93XS+njxgibTvU6Fp9w+NOFD/HvxB3Tcz
+6+jJF85D5BNAG3DBMKBjAoGBAOAxZvgsKN+JuENXsST7F89Tck2iTcQIT8g5rwWC
+P9Vt74yboe2kDT531w8+egz7nAmRBKNM751U/95P9t88EDacDI/Z2OwnuFQHCPDF
+llYOUI+SpLJ6/vURRbHSnnn8a/XG+nzedGH5JGqEJNQsz+xT2axM0/W/CRknmGaJ
+kda/AoGANWrLCz708y7VYgAtW2Uf1DPOIYMdvo6fxIB5i9ZfISgcJ/bbCUkFrhoH
++vq/5CIWxCPp0f85R4qxxQ5ihxJ0YDQT9Jpx4TMss4PSavPaBH3RXow5Ohe+bYoQ
+NE5OgEXk2wVfZczCZpigBKbKZHNYcelXtTt/nP3rsCuGcM4h53s=
+-----END RSA PRIVATE KEY-----
diff --git a/tests/lang.sh b/tests/lang.sh
index f09eaeb31..c0b0fc58c 100644
--- a/tests/lang.sh
+++ b/tests/lang.sh
@@ -5,6 +5,8 @@ export NIX_REMOTE=dummy://
nix-instantiate --eval -E 'builtins.trace "Hello" 123' 2>&1 | grep -q Hello
nix-instantiate --eval -E 'builtins.addErrorContext "Hello" 123' 2>&1
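+# 'builtins.traceVerbose' only prints its message when the 'trace-verbose' setting is enabled.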
+nix-instantiate --trace-verbose --eval -E 'builtins.traceVerbose "Hello" 123' 2>&1 | grep -q Hello
+(! nix-instantiate --eval -E 'builtins.traceVerbose "Hello" 123' 2>&1 | grep -q Hello)
(! nix-instantiate --show-trace --eval -E 'builtins.addErrorContext "Hello" 123' 2>&1 | grep -q Hello)
nix-instantiate --show-trace --eval -E 'builtins.addErrorContext "Hello" (throw "Foo")' 2>&1 | grep -q Hello
diff --git a/tests/local.mk b/tests/local.mk
index 2932d2b13..5e48ceae1 100644
--- a/tests/local.mk
+++ b/tests/local.mk
@@ -1,6 +1,12 @@
nix_tests = \
- flakes.sh \
- flakes-run.sh \
+ flakes/flakes.sh \
+ flakes/run.sh \
+ flakes/mercurial.sh \
+ flakes/circular.sh \
+ flakes/init.sh \
+ flakes/follow-paths.sh \
+ flakes/bundle.sh \
+ flakes/check.sh \
ca/gc.sh \
gc.sh \
remote-store.sh \
@@ -44,7 +50,7 @@ nix_tests = \
secure-drv-outputs.sh \
restricted.sh \
fetchGitSubmodules.sh \
- flake-searching.sh \
+ flakes/search-root.sh \
ca/duplicate-realisation-in-closure.sh \
readfile-context.sh \
nix-channel.sh \
@@ -80,7 +86,7 @@ nix_tests = \
nix-copy-ssh.sh \
post-hook.sh \
function-trace.sh \
- flake-local-settings.sh \
+ flakes/config.sh \
fmt.sh \
eval-store.sh \
why-depends.sh \
@@ -102,6 +108,7 @@ nix_tests = \
suggestions.sh \
store-ping.sh \
fetchClosure.sh \
+ completions.sh \
impure-derivations.sh
ifeq ($(HAVE_LIBCPUID), 1)
@@ -114,4 +121,8 @@ tests-environment = NIX_REMOTE= $(bash) -e
clean-files += $(d)/common.sh $(d)/config.nix $(d)/ca/config.nix
-test-deps += tests/common.sh tests/config.nix tests/ca/config.nix tests/plugins/libplugintest.$(SO_EXT)
+test-deps += tests/common.sh tests/config.nix tests/ca/config.nix
+
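+# The test plugin can only be built when Nix itself is built with shared libraries.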
+ifeq ($(BUILD_SHARED_LIBS), 1)
+ test-deps += tests/plugins/libplugintest.$(SO_EXT)
+endif
diff --git a/tests/nix-shell.sh b/tests/nix-shell.sh
index 3241d7a0f..f291c6f79 100644
--- a/tests/nix-shell.sh
+++ b/tests/nix-shell.sh
@@ -102,3 +102,11 @@ source <(nix print-dev-env -f "$shellDotNix" shellDrv)
[[ ${arr2[1]} = $'\n' ]]
[[ ${arr2[2]} = $'x\ny' ]]
[[ $(fun) = blabla ]]
+
+# Test nix-shell with ellipsis and no `inNixShell` argument (for backwards compat with old nixpkgs)
+cat >$TEST_ROOT/shell-ellipsis.nix <<EOF
+{ system ? "x86_64-linux", ... }@args:
+assert (!(args ? inNixShell));
+(import $shellDotNix { }).shellDrv
+EOF
+nix-shell $TEST_ROOT/shell-ellipsis.nix --run "true"
diff --git a/tests/plugins.sh b/tests/plugins.sh
index e22bf4408..6e278ad9d 100644
--- a/tests/plugins.sh
+++ b/tests/plugins.sh
@@ -2,6 +2,11 @@ source common.sh
set -o pipefail
+if [[ $BUILD_SHARED_LIBS != 1 ]]; then
+ echo "plugins are not supported"
+ exit 99
+fi
+
res=$(nix --option setting-set true --option plugin-files $PWD/plugins/libplugintest* eval --expr builtins.anotherNull)
[ "$res"x = "nullx" ]
diff --git a/tests/pure-eval.sh b/tests/pure-eval.sh
index 1a4568ea6..b83ab8afe 100644
--- a/tests/pure-eval.sh
+++ b/tests/pure-eval.sh
@@ -30,3 +30,5 @@ nix eval --store dummy:// --write-to $TEST_ROOT/eval-out --expr '{ x = "foo" + "
rm -rf $TEST_ROOT/eval-out
(! nix eval --store dummy:// --write-to $TEST_ROOT/eval-out --expr '{ "." = "bla"; }')
+
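+# Home-relative paths ('~/...') should be rejected, since 'nix eval' is pure by default.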
+(! nix eval --expr '~/foo')
diff --git a/tests/repl.sh b/tests/repl.sh
index b6937b9e9..c555560cc 100644
--- a/tests/repl.sh
+++ b/tests/repl.sh
@@ -42,6 +42,11 @@ testRepl () {
echo "$replOutput"
echo "$replOutput" | grep -qs "while evaluating the file" \
|| fail "nix repl --show-trace doesn't show the trace"
+
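+ # 'builtins.currentSystem' is unavailable under pure evaluation, but should match the local system otherwise.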
+ nix repl "${nixArgs[@]}" --option pure-eval true 2>&1 <<< "builtins.currentSystem" \
+ | grep "attribute 'currentSystem' missing"
+ nix repl "${nixArgs[@]}" 2>&1 <<< "builtins.currentSystem" \
+ | grep "$(nix-instantiate --eval -E 'builtins.currentSystem')"
}
# Simple test, try building a drv
@@ -50,15 +55,17 @@ testRepl
testRepl --store "$TEST_ROOT/store?real=$NIX_STORE_DIR"
testReplResponse () {
- local response="$(nix repl <<< "$1")"
- echo "$response" | grep -qs "$2" \
+ local commands="$1"; shift
+ local expectedResponse="$1"; shift
+ local response="$(nix repl "$@" <<< "$commands")"
+ echo "$response" | grep -qs "$expectedResponse" \
|| fail "repl command set:
-$1
+$commands
does not respond with:
-$2
+$expectedResponse
but with:
@@ -71,3 +78,48 @@ testReplResponse '
:a { a = "2"; }
"result: ${a}"
' "result: 2"
+
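+# Loading simple.nix into the repl (positionally or via --file) should make 'drvPath' point at its .drv.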
+testReplResponse '
+drvPath
+' '".*-simple.drv"' \
+$testDir/simple.nix
+
+testReplResponse '
+drvPath
+' '".*-simple.drv"' \
+--file $testDir/simple.nix --experimental-features 'ca-derivations'
+
+testReplResponse '
+drvPath
+' '".*-simple.drv"' \
+--file $testDir/simple.nix --extra-experimental-features 'repl-flake ca-derivations'
+
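+# Test loading a flake (and a specific flake output attribute) into the repl.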
+mkdir -p flake && cat <<EOF > flake/flake.nix
+{
+ outputs = { self }: {
+ foo = 1;
+ bar.baz = 2;
+
+ changingThing = "beforeChange";
+ };
+}
+EOF
+testReplResponse '
+foo + baz
+' "3" \
+ ./flake ./flake\#bar --experimental-features 'flakes repl-flake'
+
+# Test the `:reload` mechanism with flakes:
+# - Eval `./flake#changingThing`
+# - Modify the flake
+# - Re-eval it
+# - Check that the result has changed
+replResult=$( (
+echo "changingThing"
+sleep 1 # Give the repl time to evaluate 'changingThing'
+sed -i 's/beforeChange/afterChange/' flake/flake.nix
+echo ":reload"
+echo "changingThing"
+) | nix repl ./flake --experimental-features 'flakes repl-flake')
+echo "$replResult" | grep -qs beforeChange
+echo "$replResult" | grep -qs afterChange
diff --git a/tests/search.sh b/tests/search.sh
index 41b706ac6..1a98f5b49 100644
--- a/tests/search.sh
+++ b/tests/search.sh
@@ -43,3 +43,4 @@ e=$'\x1b' # grep doesn't support \e, \033 or even \x1b
(( $(nix search -f search.nix foo --exclude 'foo|bar' | grep -Ec 'foo|bar') == 0 ))
(( $(nix search -f search.nix foo -e foo --exclude bar | grep -Ec 'foo|bar') == 0 ))
+[[ $(nix search -f search.nix -e bar --json | jq -c 'keys') == '["foo","hello"]' ]]
diff --git a/tests/signing.sh b/tests/signing.sh
index 6aafbeb91..9b673c609 100644
--- a/tests/signing.sh
+++ b/tests/signing.sh
@@ -81,7 +81,7 @@ info=$(nix path-info --store file://$cacheDir --json $outPath2)
[[ $info =~ 'cache1.example.org' ]]
[[ $info =~ 'cache2.example.org' ]]
-# Copying to a diverted store should fail due to a lack of valid signatures.
+# Copying to a diverted store should fail due to a lack of signatures by trusted keys.
chmod -R u+w $TEST_ROOT/store0 || true
rm -rf $TEST_ROOT/store0
(! nix copy --to $TEST_ROOT/store0 $outPath)
diff --git a/tests/sourcehut-flakes.nix b/tests/sourcehut-flakes.nix
index aadab9bb5..daa259dd6 100644
--- a/tests/sourcehut-flakes.nix
+++ b/tests/sourcehut-flakes.nix
@@ -59,7 +59,7 @@ let
echo 'ref: refs/heads/master' > $out/HEAD
mkdir -p $out/info
- echo -e '${nixpkgs.rev}\trefs/heads/master' > $out/info/refs
+ echo -e '${nixpkgs.rev}\trefs/heads/master\n${nixpkgs.rev}\trefs/tags/foo-bar' > $out/info/refs
'';
in
@@ -132,6 +132,17 @@ makeTest (
client.succeed("curl -v https://git.sr.ht/ >&2")
client.succeed("nix registry list | grep nixpkgs")
+ # Test that it resolves HEAD
+ rev = client.succeed("nix flake info sourcehut:~NixOS/nixpkgs --json | jq -r .revision")
+ assert rev.strip() == "${nixpkgs.rev}", "revision mismatch"
+ # Test that it resolves branches
+ rev = client.succeed("nix flake info sourcehut:~NixOS/nixpkgs/master --json | jq -r .revision")
+ assert rev.strip() == "${nixpkgs.rev}", "revision mismatch"
+ # Test that it resolves tags
+ rev = client.succeed("nix flake info sourcehut:~NixOS/nixpkgs/foo-bar --json | jq -r .revision")
+ assert rev.strip() == "${nixpkgs.rev}", "revision mismatch"
+
+ # Registry and pinning test
rev = client.succeed("nix flake info nixpkgs --json | jq -r .revision")
assert rev.strip() == "${nixpkgs.rev}", "revision mismatch"