author     Guillaume Maudoux <guillaume.maudoux@tweag.io>  2022-09-07 00:34:03 +0200
committer  Guillaume Maudoux <guillaume.maudoux@tweag.io>  2022-09-07 00:34:03 +0200
commit     eb460a9529dd79995b6b788d59322fbc8f989214 (patch)
tree       2dec54ef6b3096d787cb49cb0f2f6b7041b1f6c1
parent     9ff892aad4ee13532ea84cb6e4a2a53d70945efe (diff)
parent     96001098796c9011d1670cc8a7acd00ef49b2d7a (diff)
WIP: broken merge but need a git checkpoint
-rw-r--r--  .github/stale.yml | 9
-rw-r--r--  .github/workflows/backport.yml | 8
-rw-r--r--  .github/workflows/ci.yml | 41
-rw-r--r--  .github/workflows/hydra_status.yml | 4
-rw-r--r--  .gitignore | 3
-rw-r--r--  .version | 2
-rw-r--r--  Makefile | 4
-rw-r--r--  Makefile.config.in | 6
-rw-r--r--  README.md | 2
-rw-r--r--  boehmgc-coroutine-sp-fallback.diff | 32
-rw-r--r--  configure.ac | 42
-rwxr-xr-x  doc/manual/anchors.jq | 31
-rw-r--r--  doc/manual/book.toml | 5
-rw-r--r--  doc/manual/generate-manpage.nix | 6
-rw-r--r--  doc/manual/local.mk | 16
-rw-r--r--  doc/manual/redirects.js | 330
-rw-r--r--  doc/manual/src/SUMMARY.md.in | 34
-rw-r--r--  doc/manual/src/advanced-topics/diff-hook.md | 2
-rw-r--r--  doc/manual/src/advanced-topics/distributed-builds.md | 4
-rw-r--r--  doc/manual/src/architecture/architecture.md | 79
-rw-r--r--  doc/manual/src/architecture/store/fso.md | 69
-rw-r--r--  doc/manual/src/architecture/store/path.md | 105
-rw-r--r--  doc/manual/src/architecture/store/store.md | 151
-rw-r--r--  doc/manual/src/architecture/store/store/build-system-terminology.md | 32
-rw-r--r--  doc/manual/src/architecture/store/store/closure.md | 29
-rw-r--r--  doc/manual/src/command-ref/env-common.md | 32
-rw-r--r--  doc/manual/src/command-ref/nix-build.md | 12
-rw-r--r--  doc/manual/src/command-ref/nix-env.md | 6
-rw-r--r--  doc/manual/src/command-ref/nix-instantiate.md | 4
-rw-r--r--  doc/manual/src/command-ref/nix-shell.md | 6
-rw-r--r--  doc/manual/src/command-ref/nix-store.md | 4
-rw-r--r--  doc/manual/src/command-ref/opt-common.md | 46
-rw-r--r--  doc/manual/src/contributing/hacking.md | 38
-rw-r--r--  doc/manual/src/expressions/arguments-variables.md | 80
-rw-r--r--  doc/manual/src/expressions/build-script.md | 70
-rw-r--r--  doc/manual/src/expressions/expression-language.md | 12
-rw-r--r--  doc/manual/src/expressions/expression-syntax.md | 93
-rw-r--r--  doc/manual/src/expressions/generic-builder.md | 66
-rw-r--r--  doc/manual/src/expressions/language-values.md | 251
-rw-r--r--  doc/manual/src/expressions/simple-building-testing.md | 61
-rw-r--r--  doc/manual/src/expressions/simple-expression.md | 23
-rw-r--r--  doc/manual/src/expressions/writing-nix-expressions.md | 12
-rw-r--r--  doc/manual/src/glossary.md | 38
-rw-r--r--  doc/manual/src/installation/installing-binary.md | 35
-rw-r--r--  doc/manual/src/language/advanced-attributes.md (renamed from doc/manual/src/expressions/advanced-attributes.md) | 22
-rw-r--r--  doc/manual/src/language/builtin-constants.md (renamed from doc/manual/src/expressions/builtin-constants.md) | 2
-rw-r--r--  doc/manual/src/language/builtins-prefix.md (renamed from doc/manual/src/expressions/builtins-prefix.md) | 0
-rw-r--r--  doc/manual/src/language/builtins-suffix.md (renamed from doc/manual/src/expressions/builtins-suffix.md) | 0
-rw-r--r--  doc/manual/src/language/constructs.md (renamed from doc/manual/src/expressions/language-constructs.md) | 0
-rw-r--r--  doc/manual/src/language/derivations.md (renamed from doc/manual/src/expressions/derivations.md) | 2
-rw-r--r--  doc/manual/src/language/index.md | 33
-rw-r--r--  doc/manual/src/language/operators.md (renamed from doc/manual/src/expressions/language-operators.md) | 2
-rw-r--r--  doc/manual/src/language/values.md | 261
-rw-r--r--  doc/manual/src/package-management/package-management.md | 3
-rw-r--r--  doc/manual/src/release-notes/rl-2.10.md | 31
-rw-r--r--  doc/manual/src/release-notes/rl-2.11.md | 5
-rw-r--r--  doc/manual/src/release-notes/rl-2.9.md | 47
-rw-r--r--  doc/manual/src/release-notes/rl-next.md | 15
-rw-r--r--  docker.nix | 27
-rw-r--r--  flake.lock | 18
-rw-r--r--  flake.nix | 145
-rw-r--r--  misc/systemd/nix-daemon.service.in | 1
-rw-r--r--  misc/zsh/completion.zsh | 11
-rw-r--r--  mk/libraries.mk | 6
-rw-r--r--  mk/programs.mk | 4
-rwxr-xr-x  scripts/create-darwin-volume.sh | 19
-rw-r--r--  scripts/install-darwin-multi-user.sh | 2
-rw-r--r--  scripts/install-multi-user.sh | 75
-rw-r--r--  scripts/install-nix-from-closure.sh | 4
-rw-r--r--  src/libcmd/command.cc | 12
-rw-r--r--  src/libcmd/command.hh | 24
-rw-r--r--  src/libcmd/installables.cc | 160
-rw-r--r--  src/libcmd/installables.hh | 7
-rw-r--r--  src/libcmd/local.mk | 4
-rw-r--r--  src/libcmd/markdown.cc | 6
-rw-r--r--  src/libcmd/repl.cc (renamed from src/nix/repl.cc) | 309
-rw-r--r--  src/libexpr/eval-cache.cc | 126
-rw-r--r--  src/libexpr/eval-cache.hh | 11
-rw-r--r--  src/libexpr/eval-inline.hh | 1
-rw-r--r--  src/libexpr/eval.cc | 432
-rw-r--r--  src/libexpr/eval.hh | 246
-rw-r--r--  src/libexpr/flake/config.cc | 13
-rw-r--r--  src/libexpr/flake/flake.cc | 35
-rw-r--r--  src/libexpr/flake/flakeref.cc | 17
-rw-r--r--  src/libexpr/flake/flakeref.hh | 10
-rw-r--r--  src/libexpr/get-drvs.cc | 39
-rw-r--r--  src/libexpr/get-drvs.hh | 2
-rw-r--r--  src/libexpr/lexer.l | 2
-rw-r--r--  src/libexpr/nixexpr.cc | 132
-rw-r--r--  src/libexpr/nixexpr.hh | 49
-rw-r--r--  src/libexpr/parser.y | 30
-rw-r--r--  src/libexpr/primops.cc | 280
-rw-r--r--  src/libexpr/primops/fetchTree.cc | 40
-rw-r--r--  src/libexpr/tests/json.cc | 68
-rw-r--r--  src/libexpr/tests/libexprtests.hh | 136
-rw-r--r--  src/libexpr/tests/local.mk | 15
-rw-r--r--  src/libexpr/tests/primops.cc | 839
-rw-r--r--  src/libexpr/tests/trivial.cc | 196
-rw-r--r--  src/libexpr/value-to-json.cc | 24
-rw-r--r--  src/libexpr/value-to-json.hh | 4
-rw-r--r--  src/libexpr/value.hh | 8
-rw-r--r--  src/libfetchers/fetch-settings.hh | 2
-rw-r--r--  src/libfetchers/git.cc | 380
-rw-r--r--  src/libfetchers/github.cc | 45
-rw-r--r--  src/libfetchers/tarball.cc | 90
-rw-r--r--  src/libmain/loggers.cc | 7
-rw-r--r--  src/libmain/progress-bar.cc | 66
-rw-r--r--  src/libmain/progress-bar.hh | 4
-rw-r--r--  src/libmain/shared.cc | 20
-rw-r--r--  src/libstore/build/derivation-goal.cc | 49
-rw-r--r--  src/libstore/build/hook-instance.cc | 25
-rw-r--r--  src/libstore/build/local-derivation-goal.cc | 183
-rw-r--r--  src/libstore/build/substitution-goal.cc | 2
-rw-r--r--  src/libstore/build/worker.cc | 7
-rw-r--r--  src/libstore/builtins/buildenv.cc | 3
-rw-r--r--  src/libstore/builtins/fetchurl.cc | 2
-rw-r--r--  src/libstore/builtins/unpack-channel.cc | 3
-rw-r--r--  src/libstore/derivations.cc | 13
-rw-r--r--  src/libstore/derived-path.cc | 23
-rw-r--r--  src/libstore/filetransfer.cc | 11
-rw-r--r--  src/libstore/gc.cc | 6
-rw-r--r--  src/libstore/globals.cc | 14
-rw-r--r--  src/libstore/globals.hh | 14
-rw-r--r--  src/libstore/http-binary-cache-store.cc | 7
-rw-r--r--  src/libstore/local-binary-cache-store.cc | 6
-rw-r--r--  src/libstore/local-store.cc | 12
-rw-r--r--  src/libstore/local.mk | 11
-rw-r--r--  src/libstore/lock.cc | 23
-rw-r--r--  src/libstore/misc.cc | 9
-rw-r--r--  src/libstore/nar-info-disk-cache.cc | 7
-rw-r--r--  src/libstore/nar-info.cc | 5
-rw-r--r--  src/libstore/nar-info.hh | 1
-rw-r--r--  src/libstore/optimise-store.cc | 6
-rw-r--r--  src/libstore/path-with-outputs.cc | 56
-rw-r--r--  src/libstore/path-with-outputs.hh | 22
-rw-r--r--  src/libstore/remote-store.cc | 86
-rw-r--r--  src/libstore/remote-store.hh | 6
-rw-r--r--  src/libstore/s3.hh | 1
-rw-r--r--  src/libstore/sandbox-defaults.sb | 6
-rw-r--r--  src/libstore/sandbox-network.sb | 4
-rw-r--r--  src/libstore/schema.sql | 2
-rw-r--r--  src/libstore/store-api.cc | 242
-rw-r--r--  src/libstore/store-api.hh | 11
-rw-r--r--  src/libstore/tests/path-with-outputs.cc | 46
-rw-r--r--  src/libutil/args.cc | 10
-rw-r--r--  src/libutil/args.hh | 9
-rw-r--r--  src/libutil/error.hh | 19
-rw-r--r--  src/libutil/experimental-features.cc | 11
-rw-r--r--  src/libutil/experimental-features.hh | 5
-rw-r--r--  src/libutil/filesystem.cc | 172
-rw-r--r--  src/libutil/git.cc | 25
-rw-r--r--  src/libutil/git.hh | 40
-rw-r--r--  src/libutil/hilite.cc | 4
-rw-r--r--  src/libutil/json-utils.hh | 21
-rw-r--r--  src/libutil/json.cc | 28
-rw-r--r--  src/libutil/json.hh | 11
-rw-r--r--  src/libutil/logging.hh | 3
-rw-r--r--  src/libutil/ref.hh | 2
-rw-r--r--  src/libutil/serialise.cc | 20
-rw-r--r--  src/libutil/serialise.hh | 4
-rw-r--r--  src/libutil/tests/git.cc | 33
-rw-r--r--  src/libutil/tests/json.cc | 4
-rw-r--r--  src/libutil/tests/tests.cc | 20
-rw-r--r--  src/libutil/url.cc | 18
-rw-r--r--  src/libutil/url.hh | 15
-rw-r--r--  src/libutil/util.cc | 186
-rw-r--r--  src/libutil/util.hh | 59
-rw-r--r-- [-rwxr-xr-x]  src/nix-build/nix-build.cc | 42
-rw-r--r--  src/nix-collect-garbage/nix-collect-garbage.cc | 1
-rw-r--r--  src/nix-env/nix-env.cc | 8
-rw-r--r--  src/nix-instantiate/nix-instantiate.cc | 5
-rw-r--r--  src/nix-store/nix-store.cc | 6
-rw-r--r--  src/nix/app.cc | 2
-rw-r--r--  src/nix/bundle.cc | 4
-rw-r--r--  src/nix/bundle.md | 2
-rw-r--r--  src/nix/develop.cc | 40
-rw-r--r--  src/nix/develop.md | 4
-rw-r--r--  src/nix/eval.cc | 3
-rw-r--r--  src/nix/flake-update.md | 2
-rw-r--r--  src/nix/flake.cc | 55
-rw-r--r--  src/nix/flake.md | 28
-rw-r--r--  src/nix/fmt.cc | 3
-rw-r--r--  src/nix/get-env.sh | 5
-rw-r--r--  src/nix/key-generate-secret.md | 2
-rw-r--r--  src/nix/main.cc | 19
-rw-r--r--  src/nix/nix.md | 45
-rw-r--r--  src/nix/profile-install.md | 7
-rw-r--r--  src/nix/profile.cc | 59
-rw-r--r--  src/nix/profile.md | 2
-rw-r--r--  src/nix/registry.md | 2
-rw-r--r--  src/nix/repl.md | 30
-rw-r--r--  src/nix/run.cc | 2
-rw-r--r--  src/nix/search.cc | 37
-rw-r--r--  src/nix/search.md | 13
-rw-r--r--  src/nix/upgrade-nix.cc | 2
-rw-r--r--  src/nix/upgrade-nix.md | 9
-rw-r--r--  src/resolve-system-dependencies/resolve-system-dependencies.cc | 2
-rw-r--r--  tests/build-remote.sh | 3
-rw-r--r--  tests/build.sh | 57
-rw-r--r--  tests/ca-shell.nix | 2
-rw-r--r--  tests/ca/content-addressed.nix | 4
-rw-r--r--  tests/ca/substitute.sh | 3
-rw-r--r--  tests/check.sh | 14
-rw-r--r--  tests/common.sh.in | 32
-rw-r--r--  tests/completions.sh | 62
-rw-r--r--  tests/fetchClosure.sh | 2
-rw-r--r--  tests/fetchGit.sh | 8
-rw-r--r--  tests/fetchTree-file.sh | 105
-rw-r--r--  tests/flakes/bundle.sh (renamed from tests/flake-bundler.sh) | 9
-rw-r--r--  tests/flakes/check.sh | 89
-rw-r--r--  tests/flakes/circular.sh | 49
-rw-r--r--  tests/flakes/common.sh | 73
-rw-r--r--  tests/flakes/config.sh (renamed from tests/flake-local-settings.sh) | 5
-rw-r--r--  tests/flakes/flakes.sh (renamed from tests/flakes.sh) | 429
-rw-r--r--  tests/flakes/follow-paths.sh | 150
-rw-r--r--  tests/flakes/init.sh | 87
-rw-r--r--  tests/flakes/mercurial.sh | 46
-rw-r--r--  tests/flakes/run.sh (renamed from tests/flakes-run.sh) | 4
-rw-r--r--  tests/flakes/search-root.sh (renamed from tests/flake-searching.sh) | 24
-rw-r--r--  tests/fmt.sh | 7
-rw-r--r--  tests/github-flakes.nix | 16
-rw-r--r--  tests/lang.sh | 13
-rw-r--r--  tests/lang/eval-okay-fromjson.nix | 49
-rw-r--r--  tests/lang/parse-fail-eof-in-string.nix | 3
-rw-r--r--  tests/local.mk | 22
-rw-r--r--  tests/multiple-outputs.nix | 7
-rw-r--r--  tests/nix-copy-closure.nix | 4
-rw-r--r--  tests/nix-profile.sh | 47
-rw-r--r--  tests/nix-shell.sh | 8
-rw-r--r--  tests/nss-preload.nix | 70
-rw-r--r--  tests/plugins.sh | 5
-rw-r--r--  tests/post-hook.sh | 6
-rw-r--r--  tests/pure-eval.sh | 2
-rwxr-xr-x  tests/push-to-store.sh | 4
-rw-r--r--  tests/remote-builds.nix | 2
-rw-r--r--  tests/repl.sh | 60
-rw-r--r--  tests/search.sh | 14
-rw-r--r--  tests/setuid.nix | 4
-rw-r--r--  tests/shell-hello.nix | 11
-rw-r--r--  tests/shell.sh | 4
-rw-r--r--  tests/sourcehut-flakes.nix | 15
241 files changed, 7584 insertions, 2691 deletions
diff --git a/.github/stale.yml b/.github/stale.yml
index fe24942f4..ee831135a 100644
--- a/.github/stale.yml
+++ b/.github/stale.yml
@@ -1,10 +1,9 @@
# Configuration for probot-stale - https://github.com/probot/stale
daysUntilStale: 180
-daysUntilClose: 365
+daysUntilClose: false
exemptLabels:
- "critical"
+ - "never-stale"
staleLabel: "stale"
-markComment: |
- I marked this as stale due to inactivity. &rarr; [More info](https://github.com/NixOS/nix/blob/master/.github/STALE-BOT.md)
-closeComment: |
- I closed this issue due to inactivity. &rarr; [More info](https://github.com/NixOS/nix/blob/master/.github/STALE-BOT.md)
+markComment: false
+closeComment: false
diff --git a/.github/workflows/backport.yml b/.github/workflows/backport.yml
index dd481160f..75be788ef 100644
--- a/.github/workflows/backport.yml
+++ b/.github/workflows/backport.yml
@@ -2,9 +2,15 @@ name: Backport
on:
pull_request_target:
types: [closed, labeled]
+permissions:
+ contents: read
jobs:
backport:
name: Backport Pull Request
+ permissions:
+ # for zeebe-io/backport-action
+ contents: write
+ pull-requests: write
if: github.repository_owner == 'NixOS' && github.event.pull_request.merged == true && (github.event_name != 'labeled' || startsWith('backport', github.event.label.name))
runs-on: ubuntu-latest
steps:
@@ -15,7 +21,7 @@ jobs:
fetch-depth: 0
- name: Create backport PRs
# should be kept in sync with `version`
- uses: zeebe-io/backport-action@v0.0.7
+ uses: zeebe-io/backport-action@v0.0.8
with:
# Config README: https://github.com/zeebe-io/backport-action#backport-action
github_token: ${{ secrets.GITHUB_TOKEN }}
diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index d01ef4768..86b5dfd2e 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -4,10 +4,12 @@ on:
pull_request:
push:
+permissions: read-all
+
jobs:
tests:
- needs: [check_cachix]
+ needs: [check_secrets]
strategy:
matrix:
os: [ubuntu-latest, macos-latest]
@@ -20,28 +22,34 @@ jobs:
- uses: cachix/install-nix-action@v17
- run: echo CACHIX_NAME="$(echo $GITHUB_REPOSITORY-install-tests | tr "[A-Z]/" "[a-z]-")" >> $GITHUB_ENV
- uses: cachix/cachix-action@v10
- if: needs.check_cachix.outputs.secret == 'true'
+ if: needs.check_secrets.outputs.cachix == 'true'
with:
name: '${{ env.CACHIX_NAME }}'
signingKey: '${{ secrets.CACHIX_SIGNING_KEY }}'
authToken: '${{ secrets.CACHIX_AUTH_TOKEN }}'
- run: nix --experimental-features 'nix-command flakes' flake check -L
- check_cachix:
- name: Cachix secret present for installer tests
+ check_secrets:
+ permissions:
+ contents: none
+ name: Check Cachix and Docker secrets present for installer tests
runs-on: ubuntu-latest
outputs:
- secret: ${{ steps.secret.outputs.secret }}
+ cachix: ${{ steps.secret.outputs.cachix }}
+ docker: ${{ steps.secret.outputs.docker }}
steps:
- - name: Check for Cachix secret
+ - name: Check for secrets
id: secret
env:
_CACHIX_SECRETS: ${{ secrets.CACHIX_SIGNING_KEY }}${{ secrets.CACHIX_AUTH_TOKEN }}
- run: echo "::set-output name=secret::${{ env._CACHIX_SECRETS != '' }}"
+ _DOCKER_SECRETS: ${{ secrets.DOCKERHUB_USERNAME }}${{ secrets.DOCKERHUB_TOKEN }}
+ run: |
+ echo "::set-output name=cachix::${{ env._CACHIX_SECRETS != '' }}"
+ echo "::set-output name=docker::${{ env._DOCKER_SECRETS != '' }}"
installer:
- needs: [tests, check_cachix]
- if: github.event_name == 'push' && needs.check_cachix.outputs.secret == 'true'
+ needs: [tests, check_secrets]
+ if: github.event_name == 'push' && needs.check_secrets.outputs.cachix == 'true'
runs-on: ubuntu-latest
outputs:
installerURL: ${{ steps.prepare-installer.outputs.installerURL }}
@@ -60,8 +68,8 @@ jobs:
run: scripts/prepare-installer-for-github-actions
installer_test:
- needs: [installer, check_cachix]
- if: github.event_name == 'push' && needs.check_cachix.outputs.secret == 'true'
+ needs: [installer, check_secrets]
+ if: github.event_name == 'push' && needs.check_secrets.outputs.cachix == 'true'
strategy:
matrix:
os: [ubuntu-latest, macos-latest]
@@ -76,11 +84,12 @@ jobs:
- run: nix-instantiate -E 'builtins.currentTime' --eval
docker_push_image:
- needs: [check_cachix, tests]
+ needs: [check_secrets, tests]
if: >-
github.event_name == 'push' &&
github.ref_name == 'master' &&
- needs.check_cachix.outputs.secret == 'true'
+ needs.check_secrets.outputs.cachix == 'true' &&
+ needs.check_secrets.outputs.docker == 'true'
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
@@ -88,9 +97,9 @@ jobs:
fetch-depth: 0
- uses: cachix/install-nix-action@v17
- run: echo CACHIX_NAME="$(echo $GITHUB_REPOSITORY-install-tests | tr "[A-Z]/" "[a-z]-")" >> $GITHUB_ENV
- - run: echo NIX_VERSION="$(nix-instantiate --eval -E '(import ./default.nix).defaultPackage.${builtins.currentSystem}.version' | tr -d \")" >> $GITHUB_ENV
+ - run: echo NIX_VERSION="$(nix --experimental-features 'nix-command flakes' eval .\#default.version | tr -d \")" >> $GITHUB_ENV
- uses: cachix/cachix-action@v10
- if: needs.check_cachix.outputs.secret == 'true'
+ if: needs.check_secrets.outputs.cachix == 'true'
with:
name: '${{ env.CACHIX_NAME }}'
signingKey: '${{ secrets.CACHIX_SIGNING_KEY }}'
@@ -100,7 +109,7 @@ jobs:
- run: docker tag nix:$NIX_VERSION nixos/nix:$NIX_VERSION
- run: docker tag nix:$NIX_VERSION nixos/nix:master
- name: Login to Docker Hub
- uses: docker/login-action@v1
+ uses: docker/login-action@v2
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
diff --git a/.github/workflows/hydra_status.yml b/.github/workflows/hydra_status.yml
index 53e69cb2d..38a9c0877 100644
--- a/.github/workflows/hydra_status.yml
+++ b/.github/workflows/hydra_status.yml
@@ -1,8 +1,12 @@
name: Hydra status
+
+permissions: read-all
+
on:
schedule:
- cron: "12,42 * * * *"
workflow_dispatch:
+
jobs:
check_hydra_status:
name: Check Hydra status
diff --git a/.gitignore b/.gitignore
index db363aefd..0c1b89ace 100644
--- a/.gitignore
+++ b/.gitignore
@@ -22,7 +22,7 @@ perl/Makefile.config
/doc/manual/src/SUMMARY.md
/doc/manual/src/command-ref/new-cli
/doc/manual/src/command-ref/conf-file.md
-/doc/manual/src/expressions/builtins.md
+/doc/manual/src/language/builtins.md
# /scripts/
/scripts/nix-profile.sh
@@ -35,6 +35,7 @@ perl/Makefile.config
/src/libexpr/parser-tab.hh
/src/libexpr/parser-tab.output
/src/libexpr/nix.tbl
+/src/libexpr/tests/libexpr-tests
# /src/libstore/
*.gen.*
diff --git a/.version b/.version
index f3ac133c5..3ca2c9b2c 100644
--- a/.version
+++ b/.version
@@ -1 +1 @@
-2.9.0
\ No newline at end of file
+2.12.0
\ No newline at end of file
diff --git a/Makefile b/Makefile
index 5040d2884..c1a1ce2c7 100644
--- a/Makefile
+++ b/Makefile
@@ -8,6 +8,7 @@ makefiles = \
src/libfetchers/local.mk \
src/libmain/local.mk \
src/libexpr/local.mk \
+ src/libexpr/tests/local.mk \
src/libcmd/local.mk \
src/nix/local.mk \
src/resolve-system-dependencies/local.mk \
@@ -27,7 +28,8 @@ makefiles = \
OPTIMIZE = 1
ifeq ($(OPTIMIZE), 1)
- GLOBAL_CXXFLAGS += -O3
+ GLOBAL_CXXFLAGS += -O3 $(CXXLTO)
+ GLOBAL_LDFLAGS += $(CXXLTO)
else
GLOBAL_CXXFLAGS += -O0 -U_FORTIFY_SOURCE
endif
diff --git a/Makefile.config.in b/Makefile.config.in
index 3505f337e..1c5405c6d 100644
--- a/Makefile.config.in
+++ b/Makefile.config.in
@@ -1,4 +1,3 @@
-HOST_OS = @host_os@
AR = @AR@
BDW_GC_LIBS = @BDW_GC_LIBS@
BOOST_LDFLAGS = @BOOST_LDFLAGS@
@@ -7,18 +6,20 @@ CC = @CC@
CFLAGS = @CFLAGS@
CXX = @CXX@
CXXFLAGS = @CXXFLAGS@
+CXXLTO = @CXXLTO@
EDITLINE_LIBS = @EDITLINE_LIBS@
ENABLE_S3 = @ENABLE_S3@
GTEST_LIBS = @GTEST_LIBS@
HAVE_LIBCPUID = @HAVE_LIBCPUID@
HAVE_SECCOMP = @HAVE_SECCOMP@
+HOST_OS = @host_os@
LDFLAGS = @LDFLAGS@
LIBARCHIVE_LIBS = @LIBARCHIVE_LIBS@
LIBBROTLI_LIBS = @LIBBROTLI_LIBS@
LIBCURL_LIBS = @LIBCURL_LIBS@
+LIBSECCOMP_LIBS = @LIBSECCOMP_LIBS@
LOWDOWN_LIBS = @LOWDOWN_LIBS@
OPENSSL_LIBS = @OPENSSL_LIBS@
-LIBSECCOMP_LIBS = @LIBSECCOMP_LIBS@
PACKAGE_NAME = @PACKAGE_NAME@
PACKAGE_VERSION = @PACKAGE_VERSION@
SHELL = @bash@
@@ -30,6 +31,7 @@ datadir = @datadir@
datarootdir = @datarootdir@
doc_generate = @doc_generate@
docdir = @docdir@
+embedded_sandbox_shell = @embedded_sandbox_shell@
exec_prefix = @exec_prefix@
includedir = @includedir@
libdir = @libdir@
diff --git a/README.md b/README.md
index 80d6f128c..8a02c4c75 100644
--- a/README.md
+++ b/README.md
@@ -20,7 +20,7 @@ Information on additional installation methods is available on the [Nix download
## Building And Developing
-See our [Hacking guide](https://hydra.nixos.org/job/nix/master/build.x86_64-linux/latest/download-by-type/doc/manual/contributing/hacking.html) in our manual for instruction on how to
+See our [Hacking guide](https://nixos.org/manual/nix/stable/contributing/hacking.html) in our manual for instruction on how to
build nix from source with nix-build or how to get a development environment.
## Additional Resources
diff --git a/boehmgc-coroutine-sp-fallback.diff b/boehmgc-coroutine-sp-fallback.diff
index e659bf470..8fdafbecb 100644
--- a/boehmgc-coroutine-sp-fallback.diff
+++ b/boehmgc-coroutine-sp-fallback.diff
@@ -1,3 +1,35 @@
+diff --git a/darwin_stop_world.c b/darwin_stop_world.c
+index 3dbaa3fb..36a1d1f7 100644
+--- a/darwin_stop_world.c
++++ b/darwin_stop_world.c
+@@ -352,6 +352,7 @@ GC_INNER void GC_push_all_stacks(void)
+ int nthreads = 0;
+ word total_size = 0;
+ mach_msg_type_number_t listcount = (mach_msg_type_number_t)THREAD_TABLE_SZ;
++ size_t stack_limit;
+ if (!EXPECT(GC_thr_initialized, TRUE))
+ GC_thr_init();
+
+@@ -407,6 +408,19 @@ GC_INNER void GC_push_all_stacks(void)
+ GC_push_all_stack_sections(lo, hi, p->traced_stack_sect);
+ }
+ if (altstack_lo) {
++ // When a thread goes into a coroutine, we lose its original sp until
++ // control flow returns to the thread.
++ // While in the coroutine, the sp points outside the thread stack,
++ // so we can detect this and push the entire thread stack instead,
++ // as an approximation.
++ // We assume that the coroutine has similarly added its entire stack.
++ // This could be made accurate by cooperating with the application
++ // via new functions and/or callbacks.
++ stack_limit = pthread_get_stacksize_np(p->id);
++ if (altstack_lo >= altstack_hi || altstack_lo < altstack_hi - stack_limit) { // sp outside stack
++ altstack_lo = altstack_hi - stack_limit;
++ }
++
+ total_size += altstack_hi - altstack_lo;
+ GC_push_all_stack(altstack_lo, altstack_hi);
+ }
diff --git a/pthread_stop_world.c b/pthread_stop_world.c
index 4b2c429..1fb4c52 100644
--- a/pthread_stop_world.c
diff --git a/configure.ac b/configure.ac
index 8a01c33ec..64fa12fc7 100644
--- a/configure.ac
+++ b/configure.ac
@@ -147,6 +147,20 @@ if test "x$GCC_ATOMIC_BUILTINS_NEED_LIBATOMIC" = xyes; then
LDFLAGS="-latomic $LDFLAGS"
fi
+# LTO is currently broken with clang for unknown reasons; ld segfaults in the llvm plugin
+AC_ARG_ENABLE(lto, AS_HELP_STRING([--enable-lto],[Enable LTO (only supported with GCC) [default=no]]),
+ lto=$enableval, lto=no)
+if test "$lto" = yes; then
+ if $CXX --version | grep -q GCC; then
+ AC_SUBST(CXXLTO, [-flto=jobserver])
+ else
+ echo "error: LTO is only supported with GCC at the moment" >&2
+ exit 1
+ fi
+else
+ AC_SUBST(CXXLTO, [""])
+fi
+
PKG_PROG_PKG_CONFIG
AC_ARG_ENABLE(shared, AS_HELP_STRING([--enable-shared],[Build shared libraries for Nix [default=yes]]),
@@ -282,18 +296,28 @@ AC_CHECK_FUNCS([setresuid setreuid lchown])
AC_CHECK_FUNCS([strsignal posix_fallocate sysconf])
-# This is needed if bzip2 is a static library, and the Nix libraries
-# are dynamic.
-case "${host_os}" in
- darwin*)
- LDFLAGS="-all_load $LDFLAGS"
- ;;
-esac
-
-
AC_ARG_WITH(sandbox-shell, AS_HELP_STRING([--with-sandbox-shell=PATH],[path of a statically-linked shell to use as /bin/sh in sandboxes]),
sandbox_shell=$withval)
AC_SUBST(sandbox_shell)
+if test ${cross_compiling:-no} = no && ! test -z ${sandbox_shell+x}; then
+ AC_MSG_CHECKING([whether sandbox-shell has the standalone feature])
+ # busybox shell sometimes allows executing other busybox applets,
+ # even if they are not in the path, breaking our sandbox
+ if PATH= $sandbox_shell -c "busybox" 2>&1 | grep -qv "not found"; then
+ AC_MSG_RESULT(enabled)
+ AC_MSG_ERROR([Please disable busybox FEATURE_SH_STANDALONE])
+ else
+ AC_MSG_RESULT(disabled)
+ fi
+fi
+
+AC_ARG_ENABLE(embedded-sandbox-shell, AS_HELP_STRING([--enable-embedded-sandbox-shell],[include the sandbox shell in the Nix binary [default=no]]),
+ embedded_sandbox_shell=$enableval, embedded_sandbox_shell=no)
+AC_SUBST(embedded_sandbox_shell)
+if test "$embedded_sandbox_shell" = yes; then
+ AC_DEFINE(HAVE_EMBEDDED_SANDBOX_SHELL, 1, [Include the sandbox shell in the Nix binary.])
+fi
+
# Expand all variables in config.status.
test "$prefix" = NONE && prefix=$ac_default_prefix
diff --git a/doc/manual/anchors.jq b/doc/manual/anchors.jq
new file mode 100755
index 000000000..72309779c
--- /dev/null
+++ b/doc/manual/anchors.jq
@@ -0,0 +1,31 @@
+"\\[\\]\\{#(?<anchor>[^\\}]+?)\\}" as $empty_anchor_regex |
+"\\[(?<text>[^\\]]+?)\\]\\{#(?<anchor>[^\\}]+?)\\}" as $anchor_regex |
+
+
+def transform_anchors_html:
+ . | gsub($empty_anchor_regex; "<a name=\"" + .anchor + "\"></a>")
+ | gsub($anchor_regex; "<a href=\"#" + .anchor + "\" id=\"" + .anchor + "\">" + .text + "</a>");
+
+
+def transform_anchors_strip:
+ . | gsub($empty_anchor_regex; "")
+ | gsub($anchor_regex; .text);
+
+
+def map_contents_recursively(transformer):
+ . + {
+ Chapter: (.Chapter + {
+ content: .Chapter.content | transformer,
+ sub_items: .Chapter.sub_items | map(map_contents_recursively(transformer)),
+ }),
+ };
+
+
+def process_command:
+ .[0] as $context |
+ .[1] as $body |
+ $body + {
+ sections: $body.sections | map(map_contents_recursively(if $context.renderer == "html" then transform_anchors_html else transform_anchors_strip end)),
+ };
+
+process_command
diff --git a/doc/manual/book.toml b/doc/manual/book.toml
index fee41dfb3..5f78a7614 100644
--- a/doc/manual/book.toml
+++ b/doc/manual/book.toml
@@ -1,2 +1,7 @@
[output.html]
additional-css = ["custom.css"]
+additional-js = ["redirects.js"]
+
+[preprocessor.anchors]
+renderers = ["html"]
+command = "jq --from-file doc/manual/anchors.jq"
diff --git a/doc/manual/generate-manpage.nix b/doc/manual/generate-manpage.nix
index 244cfa0c2..17701c3a3 100644
--- a/doc/manual/generate-manpage.nix
+++ b/doc/manual/generate-manpage.nix
@@ -1,4 +1,4 @@
-{ command, renderLinks ? false }:
+{ command }:
with builtins;
with import ./utils.nix;
@@ -21,9 +21,7 @@ let
listCommands = cmds:
concatStrings (map (name:
"* "
- + (if renderLinks
- then "[`${command} ${name}`](./${appendName filename name}.md)"
- else "`${command} ${name}`")
+ + "[`${command} ${name}`](./${appendName filename name}.md)"
+ " - ${cmds.${name}.description}\n")
(attrNames cmds));
in
diff --git a/doc/manual/local.mk b/doc/manual/local.mk
index c1ce8aaeb..364e02967 100644
--- a/doc/manual/local.mk
+++ b/doc/manual/local.mk
@@ -1,5 +1,9 @@
ifeq ($(doc_generate),yes)
+MANUAL_SRCS := \
+ $(call rwildcard, $(d)/src, *.md) \
+ $(call rwildcard, $(d)/src, */*.md)
+
# Generate man pages.
man-pages := $(foreach n, \
nix-env.1 nix-build.1 nix-shell.1 nix-store.1 nix-instantiate.1 \
@@ -46,7 +50,7 @@ $(d)/src/SUMMARY.md: $(d)/src/SUMMARY.md.in $(d)/src/command-ref/new-cli
$(d)/src/command-ref/new-cli: $(d)/nix.json $(d)/generate-manpage.nix $(bindir)/nix
@rm -rf $@
- $(trace-gen) $(nix-eval) --write-to $@ --expr 'import doc/manual/generate-manpage.nix { command = builtins.readFile $<; renderLinks = true; }'
+ $(trace-gen) $(nix-eval) --write-to $@ --expr 'import doc/manual/generate-manpage.nix { command = builtins.readFile $<; }'
$(d)/src/command-ref/conf-file.md: $(d)/conf-file.json $(d)/generate-options.nix $(d)/src/command-ref/conf-file-prefix.md $(bindir)/nix
@cat doc/manual/src/command-ref/conf-file-prefix.md > $@.tmp
@@ -61,10 +65,10 @@ $(d)/conf-file.json: $(bindir)/nix
$(trace-gen) $(dummy-env) $(bindir)/nix show-config --json --experimental-features nix-command > $@.tmp
@mv $@.tmp $@
-$(d)/src/expressions/builtins.md: $(d)/builtins.json $(d)/generate-builtins.nix $(d)/src/expressions/builtins-prefix.md $(bindir)/nix
- @cat doc/manual/src/expressions/builtins-prefix.md > $@.tmp
+$(d)/src/language/builtins.md: $(d)/builtins.json $(d)/generate-builtins.nix $(d)/src/language/builtins-prefix.md $(bindir)/nix
+ @cat doc/manual/src/language/builtins-prefix.md > $@.tmp
$(trace-gen) $(nix-eval) --expr 'import doc/manual/generate-builtins.nix (builtins.fromJSON (builtins.readFile $<))' >> $@.tmp
- @cat doc/manual/src/expressions/builtins-suffix.md >> $@.tmp
+ @cat doc/manual/src/language/builtins-suffix.md >> $@.tmp
@mv $@.tmp $@
$(d)/builtins.json: $(bindir)/nix
@@ -92,12 +96,12 @@ doc/manual/generated/man1/nix3-manpages: $(d)/src/command-ref/new-cli
if [[ $$name = SUMMARY ]]; then continue; fi; \
printf "Title: %s\n\n" "$$name" > $$tmpFile; \
cat $$i >> $$tmpFile; \
- lowdown -sT man -M section=1 $$tmpFile -o $(DESTDIR)$$(dirname $@)/$$name.1; \
+ lowdown -sT man --nroff-nolinks -M section=1 $$tmpFile -o $(DESTDIR)$$(dirname $@)/$$name.1; \
rm $$tmpFile; \
done
@touch $@
-$(docdir)/manual/index.html: $(MANUAL_SRCS) $(d)/book.toml $(d)/custom.css $(d)/src/SUMMARY.md $(d)/src/command-ref/new-cli $(d)/src/command-ref/conf-file.md $(d)/src/expressions/builtins.md $(call rwildcard, $(d)/src, *.md)
+$(docdir)/manual/index.html: $(MANUAL_SRCS) $(d)/book.toml $(d)/anchors.jq $(d)/custom.css $(d)/src/SUMMARY.md $(d)/src/command-ref/new-cli $(d)/src/command-ref/conf-file.md $(d)/src/language/builtins.md
$(trace-gen) RUST_LOG=warn mdbook build doc/manual -d $(DESTDIR)$(docdir)/manual
endif
diff --git a/doc/manual/redirects.js b/doc/manual/redirects.js
new file mode 100644
index 000000000..167e221b8
--- /dev/null
+++ b/doc/manual/redirects.js
@@ -0,0 +1,330 @@
+// Redirects from old DocBook manual.
+var redirects = {
+ "#part-advanced-topics": "advanced-topics/advanced-topics.html",
+ "#chap-tuning-cores-and-jobs": "advanced-topics/cores-vs-jobs.html",
+ "#chap-diff-hook": "advanced-topics/diff-hook.html",
+ "#check-dirs-are-unregistered": "advanced-topics/diff-hook.html#check-dirs-are-unregistered",
+ "#chap-distributed-builds": "advanced-topics/distributed-builds.html",
+ "#chap-post-build-hook": "advanced-topics/post-build-hook.html",
+ "#chap-post-build-hook-caveats": "advanced-topics/post-build-hook.html#implementation-caveats",
+ "#part-command-ref": "command-ref/command-ref.html",
+ "#conf-allow-import-from-derivation": "command-ref/conf-file.html#conf-allow-import-from-derivation",
+ "#conf-allow-new-privileges": "command-ref/conf-file.html#conf-allow-new-privileges",
+ "#conf-allowed-uris": "command-ref/conf-file.html#conf-allowed-uris",
+ "#conf-allowed-users": "command-ref/conf-file.html#conf-allowed-users",
+ "#conf-auto-optimise-store": "command-ref/conf-file.html#conf-auto-optimise-store",
+ "#conf-binary-cache-public-keys": "command-ref/conf-file.html#conf-binary-cache-public-keys",
+ "#conf-binary-caches": "command-ref/conf-file.html#conf-binary-caches",
+ "#conf-build-compress-log": "command-ref/conf-file.html#conf-build-compress-log",
+ "#conf-build-cores": "command-ref/conf-file.html#conf-build-cores",
+ "#conf-build-extra-chroot-dirs": "command-ref/conf-file.html#conf-build-extra-chroot-dirs",
+ "#conf-build-extra-sandbox-paths": "command-ref/conf-file.html#conf-build-extra-sandbox-paths",
+ "#conf-build-fallback": "command-ref/conf-file.html#conf-build-fallback",
+ "#conf-build-max-jobs": "command-ref/conf-file.html#conf-build-max-jobs",
+ "#conf-build-max-log-size": "command-ref/conf-file.html#conf-build-max-log-size",
+ "#conf-build-max-silent-time": "command-ref/conf-file.html#conf-build-max-silent-time",
+ "#conf-build-repeat": "command-ref/conf-file.html#conf-build-repeat",
+ "#conf-build-timeout": "command-ref/conf-file.html#conf-build-timeout",
+ "#conf-build-use-chroot": "command-ref/conf-file.html#conf-build-use-chroot",
+ "#conf-build-use-sandbox": "command-ref/conf-file.html#conf-build-use-sandbox",
+ "#conf-build-use-substitutes": "command-ref/conf-file.html#conf-build-use-substitutes",
+ "#conf-build-users-group": "command-ref/conf-file.html#conf-build-users-group",
+ "#conf-builders": "command-ref/conf-file.html#conf-builders",
+ "#conf-builders-use-substitutes": "command-ref/conf-file.html#conf-builders-use-substitutes",
+ "#conf-compress-build-log": "command-ref/conf-file.html#conf-compress-build-log",
+ "#conf-connect-timeout": "command-ref/conf-file.html#conf-connect-timeout",
+ "#conf-cores": "command-ref/conf-file.html#conf-cores",
+ "#conf-diff-hook": "command-ref/conf-file.html#conf-diff-hook",
+ "#conf-enforce-determinism": "command-ref/conf-file.html#conf-enforce-determinism",
+ "#conf-env-keep-derivations": "command-ref/conf-file.html#conf-env-keep-derivations",
+ "#conf-extra-binary-caches": "command-ref/conf-file.html#conf-extra-binary-caches",
+ "#conf-extra-platforms": "command-ref/conf-file.html#conf-extra-platforms",
+ "#conf-extra-sandbox-paths": "command-ref/conf-file.html#conf-extra-sandbox-paths",
+ "#conf-extra-substituters": "command-ref/conf-file.html#conf-extra-substituters",
+ "#conf-fallback": "command-ref/conf-file.html#conf-fallback",
+ "#conf-fsync-metadata": "command-ref/conf-file.html#conf-fsync-metadata",
+ "#conf-gc-keep-derivations": "command-ref/conf-file.html#conf-gc-keep-derivations",
+ "#conf-gc-keep-outputs": "command-ref/conf-file.html#conf-gc-keep-outputs",
+ "#conf-hashed-mirrors": "command-ref/conf-file.html#conf-hashed-mirrors",
+ "#conf-http-connections": "command-ref/conf-file.html#conf-http-connections",
+ "#conf-keep-build-log": "command-ref/conf-file.html#conf-keep-build-log",
+ "#conf-keep-derivations": "command-ref/conf-file.html#conf-keep-derivations",
+ "#conf-keep-env-derivations": "command-ref/conf-file.html#conf-keep-env-derivations",
+ "#conf-keep-outputs": "command-ref/conf-file.html#conf-keep-outputs",
+ "#conf-max-build-log-size": "command-ref/conf-file.html#conf-max-build-log-size",
+ "#conf-max-free": "command-ref/conf-file.html#conf-max-free",
+ "#conf-max-jobs": "command-ref/conf-file.html#conf-max-jobs",
+ "#conf-max-silent-time": "command-ref/conf-file.html#conf-max-silent-time",
+ "#conf-min-free": "command-ref/conf-file.html#conf-min-free",
+ "#conf-narinfo-cache-negative-ttl": "command-ref/conf-file.html#conf-narinfo-cache-negative-ttl",
+ "#conf-narinfo-cache-positive-ttl": "command-ref/conf-file.html#conf-narinfo-cache-positive-ttl",
+ "#conf-netrc-file": "command-ref/conf-file.html#conf-netrc-file",
+ "#conf-plugin-files": "command-ref/conf-file.html#conf-plugin-files",
+ "#conf-post-build-hook": "command-ref/conf-file.html#conf-post-build-hook",
+ "#conf-pre-build-hook": "command-ref/conf-file.html#conf-pre-build-hook",
+ "#conf-repeat": "command-ref/conf-file.html#conf-repeat",
+ "#conf-require-sigs": "command-ref/conf-file.html#conf-require-sigs",
+ "#conf-restrict-eval": "command-ref/conf-file.html#conf-restrict-eval",
+ "#conf-run-diff-hook": "command-ref/conf-file.html#conf-run-diff-hook",
+ "#conf-sandbox": "command-ref/conf-file.html#conf-sandbox",
+ "#conf-sandbox-dev-shm-size": "command-ref/conf-file.html#conf-sandbox-dev-shm-size",
+ "#conf-sandbox-paths": "command-ref/conf-file.html#conf-sandbox-paths",
+ "#conf-secret-key-files": "command-ref/conf-file.html#conf-secret-key-files",
+ "#conf-show-trace": "command-ref/conf-file.html#conf-show-trace",
+ "#conf-stalled-download-timeout": "command-ref/conf-file.html#conf-stalled-download-timeout",
+ "#conf-substitute": "command-ref/conf-file.html#conf-substitute",
+ "#conf-substituters": "command-ref/conf-file.html#conf-substituters",
+ "#conf-system": "command-ref/conf-file.html#conf-system",
+ "#conf-system-features": "command-ref/conf-file.html#conf-system-features",
+ "#conf-tarball-ttl": "command-ref/conf-file.html#conf-tarball-ttl",
+ "#conf-timeout": "command-ref/conf-file.html#conf-timeout",
+ "#conf-trace-function-calls": "command-ref/conf-file.html#conf-trace-function-calls",
+ "#conf-trusted-binary-caches": "command-ref/conf-file.html#conf-trusted-binary-caches",
+ "#conf-trusted-public-keys": "command-ref/conf-file.html#conf-trusted-public-keys",
+ "#conf-trusted-substituters": "command-ref/conf-file.html#conf-trusted-substituters",
+ "#conf-trusted-users": "command-ref/conf-file.html#conf-trusted-users",
+ "#extra-sandbox-paths": "command-ref/conf-file.html#extra-sandbox-paths",
+ "#sec-conf-file": "command-ref/conf-file.html",
+ "#env-NIX_PATH": "command-ref/env-common.html#env-NIX_PATH",
+ "#env-common": "command-ref/env-common.html",
+ "#envar-remote": "command-ref/env-common.html#env-NIX_REMOTE",
+ "#sec-common-env": "command-ref/env-common.html",
+ "#ch-files": "command-ref/files.html",
+ "#ch-main-commands": "command-ref/main-commands.html",
+ "#opt-out-link": "command-ref/nix-build.html#opt-out-link",
+ "#sec-nix-build": "command-ref/nix-build.html",
+ "#sec-nix-channel": "command-ref/nix-channel.html",
+ "#sec-nix-collect-garbage": "command-ref/nix-collect-garbage.html",
+ "#sec-nix-copy-closure": "command-ref/nix-copy-closure.html",
+ "#sec-nix-daemon": "command-ref/nix-daemon.html",
+ "#refsec-nix-env-install-examples": "command-ref/nix-env.html#examples",
+ "#rsec-nix-env-install": "command-ref/nix-env.html#operation---install",
+ "#rsec-nix-env-set": "command-ref/nix-env.html#operation---set",
+ "#rsec-nix-env-set-flag": "command-ref/nix-env.html#operation---set-flag",
+ "#rsec-nix-env-upgrade": "command-ref/nix-env.html#operation---upgrade",
+ "#sec-nix-env": "command-ref/nix-env.html",
+ "#ssec-version-comparisons": "command-ref/nix-env.html#versions",
+ "#sec-nix-hash": "command-ref/nix-hash.html",
+ "#sec-nix-instantiate": "command-ref/nix-instantiate.html",
+ "#sec-nix-prefetch-url": "command-ref/nix-prefetch-url.html",
+ "#sec-nix-shell": "command-ref/nix-shell.html",
+ "#ssec-nix-shell-shebang": "command-ref/nix-shell.html#use-as-a--interpreter",
+ "#nixref-queries": "command-ref/nix-store.html#queries",
+ "#opt-add-root": "command-ref/nix-store.html#opt-add-root",
+ "#refsec-nix-store-dump": "command-ref/nix-store.html#operation---dump",
+ "#refsec-nix-store-export": "command-ref/nix-store.html#operation---export",
+ "#refsec-nix-store-import": "command-ref/nix-store.html#operation---import",
+ "#refsec-nix-store-query": "command-ref/nix-store.html#operation---query",
+ "#refsec-nix-store-verify": "command-ref/nix-store.html#operation---verify",
+ "#rsec-nix-store-gc": "command-ref/nix-store.html#operation---gc",
+ "#rsec-nix-store-generate-binary-cache-key": "command-ref/nix-store.html#operation---generate-binary-cache-key",
+ "#rsec-nix-store-realise": "command-ref/nix-store.html#operation---realise",
+ "#rsec-nix-store-serve": "command-ref/nix-store.html#operation---serve",
+ "#sec-nix-store": "command-ref/nix-store.html",
+ "#opt-I": "command-ref/opt-common.html#opt-I",
+ "#opt-attr": "command-ref/opt-common.html#opt-attr",
+ "#opt-common": "command-ref/opt-common.html",
+ "#opt-cores": "command-ref/opt-common.html#opt-cores",
+ "#opt-log-format": "command-ref/opt-common.html#opt-log-format",
+ "#opt-max-jobs": "command-ref/opt-common.html#opt-max-jobs",
+ "#opt-max-silent-time": "command-ref/opt-common.html#opt-max-silent-time",
+ "#opt-timeout": "command-ref/opt-common.html#opt-timeout",
+ "#sec-common-options": "command-ref/opt-common.html",
+ "#ch-utilities": "command-ref/utilities.html",
+ "#chap-hacking": "contributing/hacking.html",
+ "#adv-attr-allowSubstitutes": "language/advanced-attributes.html#adv-attr-allowSubstitutes",
+ "#adv-attr-allowedReferences": "language/advanced-attributes.html#adv-attr-allowedReferences",
+ "#adv-attr-allowedRequisites": "language/advanced-attributes.html#adv-attr-allowedRequisites",
+ "#adv-attr-disallowedReferences": "language/advanced-attributes.html#adv-attr-disallowedReferences",
+ "#adv-attr-disallowedRequisites": "language/advanced-attributes.html#adv-attr-disallowedRequisites",
+ "#adv-attr-exportReferencesGraph": "language/advanced-attributes.html#adv-attr-exportReferencesGraph",
+ "#adv-attr-impureEnvVars": "language/advanced-attributes.html#adv-attr-impureEnvVars",
+ "#adv-attr-outputHash": "language/advanced-attributes.html#adv-attr-outputHash",
+ "#adv-attr-outputHashAlgo": "language/advanced-attributes.html#adv-attr-outputHashAlgo",
+ "#adv-attr-outputHashMode": "language/advanced-attributes.html#adv-attr-outputHashMode",
+ "#adv-attr-passAsFile": "language/advanced-attributes.html#adv-attr-passAsFile",
+ "#adv-attr-preferLocalBuild": "language/advanced-attributes.html#adv-attr-preferLocalBuild",
+ "#fixed-output-drvs": "language/advanced-attributes.html#adv-attr-outputHash",
+ "#sec-advanced-attributes": "language/advanced-attributes.html",
+ "#builtin-abort": "language/builtins.html#builtins-abort",
+ "#builtin-add": "language/builtins.html#builtins-add",
+ "#builtin-all": "language/builtins.html#builtins-all",
+ "#builtin-any": "language/builtins.html#builtins-any",
+ "#builtin-attrNames": "language/builtins.html#builtins-attrNames",
+ "#builtin-attrValues": "language/builtins.html#builtins-attrValues",
+ "#builtin-baseNameOf": "language/builtins.html#builtins-baseNameOf",
+ "#builtin-bitAnd": "language/builtins.html#builtins-bitAnd",
+ "#builtin-bitOr": "language/builtins.html#builtins-bitOr",
+ "#builtin-bitXor": "language/builtins.html#builtins-bitXor",
+ "#builtin-builtins": "language/builtins.html#builtins-builtins",
+ "#builtin-compareVersions": "language/builtins.html#builtins-compareVersions",
+ "#builtin-concatLists": "language/builtins.html#builtins-concatLists",
+ "#builtin-concatStringsSep": "language/builtins.html#builtins-concatStringsSep",
+ "#builtin-currentSystem": "language/builtins.html#builtins-currentSystem",
+ "#builtin-deepSeq": "language/builtins.html#builtins-deepSeq",
+ "#builtin-derivation": "language/builtins.html#builtins-derivation",
+ "#builtin-dirOf": "language/builtins.html#builtins-dirOf",
+ "#builtin-div": "language/builtins.html#builtins-div",
+ "#builtin-elem": "language/builtins.html#builtins-elem",
+ "#builtin-elemAt": "language/builtins.html#builtins-elemAt",
+ "#builtin-fetchGit": "language/builtins.html#builtins-fetchGit",
+ "#builtin-fetchTarball": "language/builtins.html#builtins-fetchTarball",
+ "#builtin-fetchurl": "language/builtins.html#builtins-fetchurl",
+ "#builtin-filterSource": "language/builtins.html#builtins-filterSource",
+ "#builtin-foldl-prime": "language/builtins.html#builtins-foldl-prime",
+ "#builtin-fromJSON": "language/builtins.html#builtins-fromJSON",
+ "#builtin-functionArgs": "language/builtins.html#builtins-functionArgs",
+ "#builtin-genList": "language/builtins.html#builtins-genList",
+ "#builtin-getAttr": "language/builtins.html#builtins-getAttr",
+ "#builtin-getEnv": "language/builtins.html#builtins-getEnv",
+ "#builtin-hasAttr": "language/builtins.html#builtins-hasAttr",
+ "#builtin-hashFile": "language/builtins.html#builtins-hashFile",
+ "#builtin-hashString": "language/builtins.html#builtins-hashString",
+ "#builtin-head": "language/builtins.html#builtins-head",
+ "#builtin-import": "language/builtins.html#builtins-import",
+ "#builtin-intersectAttrs": "language/builtins.html#builtins-intersectAttrs",
+ "#builtin-isAttrs": "language/builtins.html#builtins-isAttrs",
+ "#builtin-isBool": "language/builtins.html#builtins-isBool",
+ "#builtin-isFloat": "language/builtins.html#builtins-isFloat",
+ "#builtin-isFunction": "language/builtins.html#builtins-isFunction",
+ "#builtin-isInt": "language/builtins.html#builtins-isInt",
+ "#builtin-isList": "language/builtins.html#builtins-isList",
+ "#builtin-isNull": "language/builtins.html#builtins-isNull",
+ "#builtin-isString": "language/builtins.html#builtins-isString",
+ "#builtin-length": "language/builtins.html#builtins-length",
+ "#builtin-lessThan": "language/builtins.html#builtins-lessThan",
+ "#builtin-listToAttrs": "language/builtins.html#builtins-listToAttrs",
+ "#builtin-map": "language/builtins.html#builtins-map",
+ "#builtin-match": "language/builtins.html#builtins-match",
+ "#builtin-mul": "language/builtins.html#builtins-mul",
+ "#builtin-parseDrvName": "language/builtins.html#builtins-parseDrvName",
+ "#builtin-path": "language/builtins.html#builtins-path",
+ "#builtin-pathExists": "language/builtins.html#builtins-pathExists",
+ "#builtin-placeholder": "language/builtins.html#builtins-placeholder",
+ "#builtin-readDir": "language/builtins.html#builtins-readDir",
+ "#builtin-readFile": "language/builtins.html#builtins-readFile",
+ "#builtin-removeAttrs": "language/builtins.html#builtins-removeAttrs",
+ "#builtin-replaceStrings": "language/builtins.html#builtins-replaceStrings",
+ "#builtin-seq": "language/builtins.html#builtins-seq",
+ "#builtin-sort": "language/builtins.html#builtins-sort",
+ "#builtin-split": "language/builtins.html#builtins-split",
+ "#builtin-splitVersion": "language/builtins.html#builtins-splitVersion",
+ "#builtin-stringLength": "language/builtins.html#builtins-stringLength",
+ "#builtin-sub": "language/builtins.html#builtins-sub",
+ "#builtin-substring": "language/builtins.html#builtins-substring",
+ "#builtin-tail": "language/builtins.html#builtins-tail",
+ "#builtin-throw": "language/builtins.html#builtins-throw",
+ "#builtin-toFile": "language/builtins.html#builtins-toFile",
+ "#builtin-toJSON": "language/builtins.html#builtins-toJSON",
+ "#builtin-toPath": "language/builtins.html#builtins-toPath",
+ "#builtin-toString": "language/builtins.html#builtins-toString",
+ "#builtin-toXML": "language/builtins.html#builtins-toXML",
+ "#builtin-trace": "language/builtins.html#builtins-trace",
+ "#builtin-tryEval": "language/builtins.html#builtins-tryEval",
+ "#builtin-typeOf": "language/builtins.html#builtins-typeOf",
+ "#ssec-builtins": "language/builtins.html",
+ "#attr-system": "language/derivations.html#attr-system",
+ "#ssec-derivation": "language/derivations.html",
+ "#ch-expression-language": "language/index.html",
+ "#sec-constructs": "language/constructs.html",
+ "#sect-let-language": "language/constructs.html#let-language",
+ "#ss-functions": "language/constructs.html#functions",
+ "#sec-language-operators": "language/operators.html",
+ "#table-operators": "language/operators.html",
+ "#ssec-values": "language/values.html",
+ "#gloss-closure": "glossary.html#gloss-closure",
+ "#gloss-derivation": "glossary.html#gloss-derivation",
+ "#gloss-deriver": "glossary.html#gloss-deriver",
+ "#gloss-nar": "glossary.html#gloss-nar",
+ "#gloss-output-path": "glossary.html#gloss-output-path",
+ "#gloss-profile": "glossary.html#gloss-profile",
+ "#gloss-reachable": "glossary.html#gloss-reachable",
+ "#gloss-reference": "glossary.html#gloss-reference",
+ "#gloss-substitute": "glossary.html#gloss-substitute",
+ "#gloss-user-env": "glossary.html#gloss-user-env",
+ "#gloss-validity": "glossary.html#gloss-validity",
+ "#part-glossary": "glossary.html",
+ "#sec-building-source": "installation/building-source.html",
+ "#ch-env-variables": "installation/env-variables.html",
+ "#sec-installer-proxy-settings": "installation/env-variables.html#proxy-environment-variables",
+ "#sec-nix-ssl-cert-file": "installation/env-variables.html#nix_ssl_cert_file",
+ "#sec-nix-ssl-cert-file-with-nix-daemon-and-macos": "installation/env-variables.html#nix_ssl_cert_file-with-macos-and-the-nix-daemon",
+ "#chap-installation": "installation/installation.html",
+ "#ch-installing-binary": "installation/installing-binary.html",
+ "#sect-macos-installation": "installation/installing-binary.html#macos-installation",
+ "#sect-macos-installation-change-store-prefix": "installation/installing-binary.html#macos-installation",
+ "#sect-macos-installation-encrypted-volume": "installation/installing-binary.html#macos-installation",
+ "#sect-macos-installation-recommended-notes": "installation/installing-binary.html#macos-installation",
+ "#sect-macos-installation-symlink": "installation/installing-binary.html#macos-installation",
+ "#sect-multi-user-installation": "installation/installing-binary.html#multi-user-installation",
+ "#sect-nix-install-binary-tarball": "installation/installing-binary.html#installing-from-a-binary-tarball",
+ "#sect-nix-install-pinned-version-url": "installation/installing-binary.html#installing-a-pinned-nix-version-from-a-url",
+ "#sect-single-user-installation": "installation/installing-binary.html#single-user-installation",
+ "#ch-installing-source": "installation/installing-source.html",
+ "#ssec-multi-user": "installation/multi-user.html",
+ "#ch-nix-security": "installation/nix-security.html",
+ "#sec-obtaining-source": "installation/obtaining-source.html",
+ "#sec-prerequisites-source": "installation/prerequisites-source.html",
+ "#sec-single-user": "installation/single-user.html",
+ "#ch-supported-platforms": "installation/supported-platforms.html",
+ "#ch-upgrading-nix": "installation/upgrading.html",
+ "#ch-about-nix": "introduction.html",
+ "#chap-introduction": "introduction.html",
+ "#ch-basic-package-mgmt": "package-management/basic-package-mgmt.html",
+ "#ssec-binary-cache-substituter": "package-management/binary-cache-substituter.html",
+ "#sec-channels": "package-management/channels.html",
+ "#ssec-copy-closure": "package-management/copy-closure.html",
+ "#sec-garbage-collection": "package-management/garbage-collection.html",
+ "#ssec-gc-roots": "package-management/garbage-collector-roots.html",
+ "#chap-package-management": "package-management/package-management.html",
+ "#sec-profiles": "package-management/profiles.html",
+ "#ssec-s3-substituter": "package-management/s3-substituter.html",
+ "#ssec-s3-substituter-anonymous-reads": "package-management/s3-substituter.html#anonymous-reads-to-your-s3-compatible-binary-cache",
+ "#ssec-s3-substituter-authenticated-reads": "package-management/s3-substituter.html#authenticated-reads-to-your-s3-binary-cache",
+ "#ssec-s3-substituter-authenticated-writes": "package-management/s3-substituter.html#authenticated-writes-to-your-s3-compatible-binary-cache",
+ "#sec-sharing-packages": "package-management/sharing-packages.html",
+ "#ssec-ssh-substituter": "package-management/ssh-substituter.html",
+ "#chap-quick-start": "quick-start.html",
+ "#sec-relnotes": "release-notes/release-notes.html",
+ "#ch-relnotes-0.10.1": "release-notes/rl-0.10.1.html",
+ "#ch-relnotes-0.10": "release-notes/rl-0.10.html",
+ "#ssec-relnotes-0.11": "release-notes/rl-0.11.html",
+ "#ssec-relnotes-0.12": "release-notes/rl-0.12.html",
+ "#ssec-relnotes-0.13": "release-notes/rl-0.13.html",
+ "#ssec-relnotes-0.14": "release-notes/rl-0.14.html",
+ "#ssec-relnotes-0.15": "release-notes/rl-0.15.html",
+ "#ssec-relnotes-0.16": "release-notes/rl-0.16.html",
+ "#ch-relnotes-0.5": "release-notes/rl-0.5.html",
+ "#ch-relnotes-0.6": "release-notes/rl-0.6.html",
+ "#ch-relnotes-0.7": "release-notes/rl-0.7.html",
+ "#ch-relnotes-0.8.1": "release-notes/rl-0.8.1.html",
+ "#ch-relnotes-0.8": "release-notes/rl-0.8.html",
+ "#ch-relnotes-0.9.1": "release-notes/rl-0.9.1.html",
+ "#ch-relnotes-0.9.2": "release-notes/rl-0.9.2.html",
+ "#ch-relnotes-0.9": "release-notes/rl-0.9.html",
+ "#ssec-relnotes-1.0": "release-notes/rl-1.0.html",
+ "#ssec-relnotes-1.1": "release-notes/rl-1.1.html",
+ "#ssec-relnotes-1.10": "release-notes/rl-1.10.html",
+ "#ssec-relnotes-1.11.10": "release-notes/rl-1.11.10.html",
+ "#ssec-relnotes-1.11": "release-notes/rl-1.11.html",
+ "#ssec-relnotes-1.2": "release-notes/rl-1.2.html",
+ "#ssec-relnotes-1.3": "release-notes/rl-1.3.html",
+ "#ssec-relnotes-1.4": "release-notes/rl-1.4.html",
+ "#ssec-relnotes-1.5.1": "release-notes/rl-1.5.1.html",
+ "#ssec-relnotes-1.5.2": "release-notes/rl-1.5.2.html",
+ "#ssec-relnotes-1.5": "release-notes/rl-1.5.html",
+ "#ssec-relnotes-1.6.1": "release-notes/rl-1.6.1.html",
+ "#ssec-relnotes-1.6.0": "release-notes/rl-1.6.html",
+ "#ssec-relnotes-1.7": "release-notes/rl-1.7.html",
+ "#ssec-relnotes-1.8": "release-notes/rl-1.8.html",
+ "#ssec-relnotes-1.9": "release-notes/rl-1.9.html",
+ "#ssec-relnotes-2.0": "release-notes/rl-2.0.html",
+ "#ssec-relnotes-2.1": "release-notes/rl-2.1.html",
+ "#ssec-relnotes-2.2": "release-notes/rl-2.2.html",
+ "#ssec-relnotes-2.3": "release-notes/rl-2.3.html"
+};
+
+var isRoot = (document.location.pathname.endsWith('/') || document.location.pathname.endsWith('/index.html')) && path_to_root === '';
+if (isRoot && redirects[document.location.hash]) {
+ document.location.href = path_to_root + redirects[document.location.hash];
+}
diff --git a/doc/manual/src/SUMMARY.md.in b/doc/manual/src/SUMMARY.md.in
index 860222337..9b66ec3db 100644
--- a/doc/manual/src/SUMMARY.md.in
+++ b/doc/manual/src/SUMMARY.md.in
@@ -26,21 +26,14 @@
- [Copying Closures via SSH](package-management/copy-closure.md)
- [Serving a Nix store via SSH](package-management/ssh-substituter.md)
- [Serving a Nix store via S3](package-management/s3-substituter.md)
-- [Writing Nix Expressions](expressions/writing-nix-expressions.md)
- - [A Simple Nix Expression](expressions/simple-expression.md)
- - [Expression Syntax](expressions/expression-syntax.md)
- - [Build Script](expressions/build-script.md)
- - [Arguments and Variables](expressions/arguments-variables.md)
- - [Building and Testing](expressions/simple-building-testing.md)
- - [Generic Builder Syntax](expressions/generic-builder.md)
- - [Writing Nix Expressions](expressions/expression-language.md)
- - [Values](expressions/language-values.md)
- - [Language Constructs](expressions/language-constructs.md)
- - [Operators](expressions/language-operators.md)
- - [Derivations](expressions/derivations.md)
- - [Advanced Attributes](expressions/advanced-attributes.md)
- - [Built-in Constants](expressions/builtin-constants.md)
- - [Built-in Functions](expressions/builtins.md)
+- [Nix Language](language/index.md)
+ - [Data Types](language/values.md)
+ - [Language Constructs](language/constructs.md)
+ - [Operators](language/operators.md)
+ - [Derivations](language/derivations.md)
+ - [Advanced Attributes](language/advanced-attributes.md)
+ - [Built-in Constants](language/builtin-constants.md)
+ - [Built-in Functions](language/builtins.md)
- [Advanced Topics](advanced-topics/advanced-topics.md)
- [Remote Builds](advanced-topics/distributed-builds.md)
- [Tuning Cores and Jobs](advanced-topics/cores-vs-jobs.md)
@@ -66,12 +59,23 @@
@manpages@
- [Files](command-ref/files.md)
- [nix.conf](command-ref/conf-file.md)
+<!--
+- [Architecture](architecture/architecture.md)
+ - [Store](architecture/store/store.md)
+ - [Closure](architecture/store/store/closure.md)
+ - [Build system terminology](architecture/store/store/build-system-terminology.md)
+ - [Store Path](architecture/store/path.md)
+ - [File System Object](architecture/store/fso.md)
+-->
- [Glossary](glossary.md)
- [Contributing](contributing/contributing.md)
- [Hacking](contributing/hacking.md)
- [CLI guideline](contributing/cli-guideline.md)
- [Release Notes](release-notes/release-notes.md)
- [Release X.Y (202?-??-??)](release-notes/rl-next.md)
+ - [Release 2.11 (2022-08-25)](release-notes/rl-2.11.md)
+ - [Release 2.10 (2022-07-11)](release-notes/rl-2.10.md)
+ - [Release 2.9 (2022-05-30)](release-notes/rl-2.9.md)
- [Release 2.8 (2022-04-19)](release-notes/rl-2.8.md)
- [Release 2.7 (2022-03-07)](release-notes/rl-2.7.md)
- [Release 2.6 (2022-01-24)](release-notes/rl-2.6.md)
diff --git a/doc/manual/src/advanced-topics/diff-hook.md b/doc/manual/src/advanced-topics/diff-hook.md
index 7a2622b3d..161e64b2a 100644
--- a/doc/manual/src/advanced-topics/diff-hook.md
+++ b/doc/manual/src/advanced-topics/diff-hook.md
@@ -101,7 +101,7 @@ In particular, notice the
`/nix/store/krpqk0l9ib0ibi1d2w52z293zw455cap-unstable.check` output. Nix
has copied the build results to that directory where you can examine it.
-> **Note**
+> []{#check-dirs-are-unregistered} **Note**
>
> Check paths are not protected against garbage collection, and this
> path will be deleted on the next garbage collection.
diff --git a/doc/manual/src/advanced-topics/distributed-builds.md b/doc/manual/src/advanced-topics/distributed-builds.md
index b0d7fbf1a..fefd10100 100644
--- a/doc/manual/src/advanced-topics/distributed-builds.md
+++ b/doc/manual/src/advanced-topics/distributed-builds.md
@@ -12,14 +12,14 @@ machine is accessible via SSH and that it has Nix installed. You can
test whether connecting to the remote Nix instance works, e.g.
```console
-$ nix ping-store --store ssh://mac
+$ nix store ping --store ssh://mac
```
will try to connect to the machine named `mac`. It is possible to
specify an SSH identity file as part of the remote store URI, e.g.
```console
-$ nix ping-store --store ssh://mac?ssh-key=/home/alice/my-key
+$ nix store ping --store ssh://mac?ssh-key=/home/alice/my-key
```
Since builds should be non-interactive, the key should not have a
diff --git a/doc/manual/src/architecture/architecture.md b/doc/manual/src/architecture/architecture.md
new file mode 100644
index 000000000..41deb07af
--- /dev/null
+++ b/doc/manual/src/architecture/architecture.md
@@ -0,0 +1,79 @@
+# Architecture
+
+*(This chapter is unstable and a work in progress. Incoming links may rot.)*
+
+This chapter describes how Nix works.
+It should help users understand why Nix behaves as it does, and it should help developers understand how to modify Nix and how to write similar tools.
+
+## Overview
+
+Nix consists of [hierarchical layers][layer-architecture].
+
+```
++-----------------------------------------------------------------+
+| Nix |
+| [ command line interface ]------, |
+| | | |
+| evaluates | |
+| | manages |
+| V | |
+| [ configuration language ] | |
+| | | |
+| +-----------------------------|-------------------V-----------+ |
+| | store evaluates to | |
+| | | | |
+| | referenced by V builds | |
+| | [ build input ] ---> [ build plan ] ---> [ build result ] | |
+| | | |
+| +-------------------------------------------------------------+ |
++-----------------------------------------------------------------+
+```
+
+At the top is the [command line interface](../command-ref/command-ref.md), translating from invocations of Nix executables to interactions with the underlying layers.
+
+Below that is the [Nix expression language](../expressions/expression-language.md), a [purely functional][purely-functional-programming] configuration language.
+It is used to compose expressions which ultimately evaluate to self-contained *build plans*, used to derive *build results* from referenced *build inputs*.
+
+The command line and Nix language are what users interact with most.
+
+> **Note**
+> The Nix language itself does not have a notion of *packages* or *configurations*.
+> As far as we are concerned here, the inputs and results of a build plan are just data.
+
+Underlying these is the [Nix store](./store/store.md), a mechanism to keep track of build plans, data, and references between them.
+It can also execute build plans to produce new data.
+
+A build plan is a series of *build tasks*.
+Each build task has a special build input which is used as *build instructions*.
+The result of a build task can be input to another build task.
+
+```
++-----------------------------------------------------------------------------------------+
+| store |
+| ................................................. |
+| : build plan : |
+| : : |
+| [ build input ]-----instructions-, : |
+| : | : |
+| : v : |
+| [ build input ]----------->[ build task ]--instructions-, : |
+| : | : |
+| : | : |
+| : v : |
+| : [ build task ]----->[ build result ] |
+| [ build input ]-----instructions-, ^ : |
+| : | | : |
+| : v | : |
+| [ build input ]----------->[ build task ]---------------' : |
+| : ^ : |
+| : | : |
+| [ build input ]------------------' : |
+| : : |
+| : : |
+| :...............................................: |
+| |
++-----------------------------------------------------------------------------------------+
+```
+
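+As a rough illustration only (this is not Nix's actual data model; the type names here are made up), such a plan can be modelled as tasks whose inputs are either plain data or the results of other tasks:
+
+```python
+from dataclasses import dataclass
+
+
+@dataclass(frozen=True)
+class BuildInput:
+    """Opaque data consumed by a build task."""
+    data: bytes
+
+
+@dataclass
+class BuildTask:
+    """One step of a build plan: a special input used as instructions, plus further inputs."""
+    instructions: BuildInput
+    inputs: list  # BuildInput values, or BuildTask values whose results are consumed
+
+
+@dataclass
+class BuildPlan:
+    """A collection of build tasks whose results may feed other tasks."""
+    tasks: list  # BuildTask values
+```
+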
+[layer-architecture]: https://en.m.wikipedia.org/wiki/Multitier_architecture#Layers
+[purely-functional-programming]: https://en.m.wikipedia.org/wiki/Purely_functional_programming
diff --git a/doc/manual/src/architecture/store/fso.md b/doc/manual/src/architecture/store/fso.md
new file mode 100644
index 000000000..e0eb69f60
--- /dev/null
+++ b/doc/manual/src/architecture/store/fso.md
@@ -0,0 +1,69 @@
+# File System Object
+
+The Nix store uses a simple file system model for the data it holds in [store objects](store.md#store-object).
+
+Every file system object is one of the following:
+
+ - File: an executable flag, and arbitrary data for contents
+ - Directory: a mapping of names to child file system objects
+ - [Symbolic link][symlink]: may point anywhere
+
+We call a store object's outermost file system object the *root*.
+
+ data FileSystemObject
+ = File { isExecutable :: Bool, contents :: Bytes }
+ | Directory { entries :: Map FileName FileSystemObject }
+ | SymLink { target :: Path }
+
+Examples:
+
+- a directory with contents
+
+ /nix/store/<hash>-hello-2.10
+ ├── bin
+ │   └── hello
+ └── share
+ ├── info
+ │   └── hello.info
+ └── man
+ └── man1
+ └── hello.1.gz
+
+- a directory with relative symlink and other contents
+
+ /nix/store/<hash>-go-1.16.9
+ ├── bin -> share/go/bin
+ ├── nix-support/
+ └── share/
+
+- a directory with absolute symlink
+
+ /nix/store/d3k...-nodejs
+ └── nix_node -> /nix/store/f20...-nodejs-10.24.
+
+A bare file or symlink can be a root file system object.
+Examples:
+
+ /nix/store/<hash>-hello-2.10.tar.gz
+
+ /nix/store/4j5...-pkg-config-wrapper-0.29.2-doc -> /nix/store/i99...-pkg-config-0.29.2-doc
+
+Symlinks pointing outside of their own root or to a store object without a matching reference are allowed, but might not function as intended.
+Examples:
+
+- an arbitrarily symlinked file may change or not exist at all
+
+ /nix/store/<hash>-foo
+ └── foo -> /home/foo
+
+- if a symlink to a store path was not automatically created by Nix, it may be invalid or get invalidated when the store object is deleted
+
+ /nix/store/<hash>-bar
+ └── bar -> /nix/store/abc...-foo
+
+Nix file system objects do not support [hard links][hardlink]:
+each file system object which is not the root has exactly one parent and one name.
+However, as store objects are immutable, an underlying file system can use hard links for optimization.
+
+[symlink]: https://en.m.wikipedia.org/wiki/Symbolic_link
+[hardlink]: https://en.m.wikipedia.org/wiki/Hard_link
diff --git a/doc/manual/src/architecture/store/path.md b/doc/manual/src/architecture/store/path.md
new file mode 100644
index 000000000..663f04f46
--- /dev/null
+++ b/doc/manual/src/architecture/store/path.md
@@ -0,0 +1,105 @@
+# Store Path
+
+Nix implements [references](store.md#reference) to [store objects](store.md#store-object) as *store paths*.
+
+Store paths are pairs of
+
+- a 20-byte [digest](#digest) for identification
+- a symbolic name for people to read.
+
+Example:
+
+- digest: `b6gvzjyb2pg0kjfwrjmg1vfhh54ad73z`
+- name: `firefox-33.1`
+
+It is rendered to a file system path as the concatenation of
+
+ - [store directory](#store-directory)
+ - path-separator (`/`)
+ - [digest](#digest) rendered in a custom variant of [base-32](https://en.m.wikipedia.org/wiki/Base32) (20 arbitrary bytes become 32 ASCII characters)
+ - hyphen (`-`)
+ - name
+
+Example:
+
+ /nix/store/b6gvzjyb2pg0kjfwrjmg1vfhh54ad73z-firefox-33.1
+ |--------| |------------------------------| |----------|
+ store directory digest name
+
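+As a sketch of the rendering rule, using the example values above (the digest is shown here already in its base-32 form):
+
+```python
+def render_store_path(store_dir: str, digest_base32: str, name: str) -> str:
+    """Concatenate store directory, '/', base-32 digest, '-', and name."""
+    return f"{store_dir}/{digest_base32}-{name}"
+
+
+print(render_store_path("/nix/store", "b6gvzjyb2pg0kjfwrjmg1vfhh54ad73z", "firefox-33.1"))
+# /nix/store/b6gvzjyb2pg0kjfwrjmg1vfhh54ad73z-firefox-33.1
+```
+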
+## Store Directory
+
+Every [store](./store.md) has a store directory.
+
+If the store has a [file system representation](./store.md#files-and-processes), this directory contains the store’s [file system objects](fso.md), which can be addressed by [store paths](#store-path).
+
+This means a store path is not just derived from the referenced store object itself, but depends on the store the store object is in.
+
+> **Note**
+> The store directory defaults to `/nix/store`, but is in principle arbitrary.
+
+It is important which store a given store object belongs to:
+Files in the store object can contain store paths, and processes may read these paths.
+Nix can only guarantee [referential integrity](store/closure.md) if store paths do not cross store boundaries.
+
+Therefore one can only copy store objects to a different store if
+
+- the source and target stores' directories match
+
+ or
+
+- the store object in question has no references, that is, contains no store paths.
+
+One cannot copy a store object to a store with a different store directory.
+Instead, it has to be rebuilt, together with all its dependencies.
+It is in general not enough to replace the store directory string in file contents, as this may render executables unusable by invalidating their internal offsets or checksums.
+
+## Digest
+
+In a [store path](#store-path), the [digest][digest] is the output of a [cryptographic hash function][hash] of either all *inputs* involved in building the referenced store object or its actual *contents*.
+
+Store objects are therefore said to be either [input-addressed](#input-addressing) or [content-addressed](#content-addressing).
+
+> **Historical Note**
+> The restriction to 20 bytes is historical: originally, digests were [SHA-1][sha-1] hashes.
+> Nix now uses [SHA-256][sha-256], and the longer hashes are still reduced to 20 bytes for compatibility.
+
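+The following sketch shows one way a longer hash can be folded down to 20 bytes, by XOR-ing its bytes into a 20-byte buffer; it is for illustration only, and the exact folding Nix performs may differ in detail.
+
+```python
+import hashlib
+
+
+def reduce_to_20_bytes(data: bytes) -> bytes:
+    """Fold a 32-byte SHA-256 digest into 20 bytes by cyclic XOR."""
+    full = hashlib.sha256(data).digest()
+    out = bytearray(20)
+    for i, byte in enumerate(full):
+        out[i % 20] ^= byte
+    return bytes(out)
+
+
+print(reduce_to_20_bytes(b"example input").hex())
+```
+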
+[digest]: https://en.m.wiktionary.org/wiki/digest#Noun
+[hash]: https://en.m.wikipedia.org/wiki/Cryptographic_hash_function
+[sha-1]: https://en.m.wikipedia.org/wiki/SHA-1
+[sha-256]: https://en.m.wikipedia.org/wiki/SHA-256
+
+### Reference scanning
+
+When a new store object is built, Nix scans its file contents for store paths to construct its set of references.
+
+The special format of a store path's [digest](#digest) allows reliably detecting it among arbitrary data.
+Nix uses the [closure](store.md#closure) of build inputs to derive the list of allowed store paths, to avoid false positives.
+
+This way, scanning files captures run time dependencies without the user having to declare them explicitly.
+Doing it at build time and persisting references in the store object avoids repeating this time-consuming operation.
+
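+The following sketch illustrates the idea, assuming the digests of all allowed store paths are known in advance; it glosses over how Nix traverses the file system objects of the new store object.
+
+```python
+def scan_references(file_contents: bytes, allowed_store_paths) -> set:
+    """Return the allowed store paths whose digest occurs in the given file contents."""
+    found = set()
+    for path in allowed_store_paths:
+        # e.g. "/nix/store/b6gvzjyb2pg0kjfwrjmg1vfhh54ad73z-firefox-33.1"
+        digest = path.split("/")[-1].split("-")[0]
+        if digest.encode() in file_contents:
+            found.add(path)
+    return found
+```
+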
+> **Note**
+> In practice, it is sometimes still necessary for users to declare certain dependencies explicitly, if they are to be preserved in the build result's closure.
+> This depends on the specifics of the software to build and run.
+>
+> For example, Java programs are compressed after compilation, which obfuscates any store paths they may refer to and prevents Nix from automatically detecting them.
+
+## Input Addressing
+
+Input addressing means that the digest derives from how the store object was produced, namely its build inputs and build plan.
+
+To compute the hash of a store object one needs a deterministic serialisation, i.e., a binary string representation which only changes if the store object changes.
+
+Nix has a custom serialisation format called Nix Archive (NAR).
+
+Store object references of this sort can *not* be validated from the content of the store object.
+Rather, a cryptographic signature has to be used to indicate that someone is vouching for the store object really being produced from a build plan with that digest.
+
+## Content Addressing
+
+Content addressing means that the digest derives from the store object's contents, namely its file system objects and references.
+If one knows content addressing was used, one can recalculate the reference and thus verify the store object.
+
+Content addressing is currently only used for the special cases of source files and "fixed-output derivations", where the contents of a store object are known in advance.
+Content addressing of build results is still an [experimental feature subject to some restrictions](https://github.com/tweag/rfcs/blob/cas-rfc/rfcs/0062-content-addressed-paths.md).
+
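+Purely as a conceptual contrast, and not Nix's actual computation (which involves the derivation serialisation and further fingerprint data), the two addressing modes can be sketched as follows:
+
+```python
+import hashlib
+
+
+def digest(data: bytes) -> bytes:
+    """Stand-in for Nix's hashing and reduction to 20 bytes."""
+    return hashlib.sha256(data).digest()[:20]
+
+
+def input_addressed_digest(serialized_build_task: bytes) -> bytes:
+    # Depends only on the build plan and its inputs,
+    # so it can be computed before the build result exists.
+    return digest(serialized_build_task)
+
+
+def content_addressed_digest(serialized_contents: bytes) -> bytes:
+    # Depends on the produced contents,
+    # so it can be recomputed from the store object and verified.
+    return digest(serialized_contents)
+```
+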
diff --git a/doc/manual/src/architecture/store/store.md b/doc/manual/src/architecture/store/store.md
new file mode 100644
index 000000000..08b6701d5
--- /dev/null
+++ b/doc/manual/src/architecture/store/store.md
@@ -0,0 +1,151 @@
+# Store
+
+A Nix store is a collection of *store objects* with references between them.
+It supports operations to manipulate that collection.
+
+The following concept map is a graphical outline of this chapter.
+Arrows indicate suggested reading order.
+
+```
+ ,--------------[ store ]----------------,
+ | | |
+ v v v
+ [ store object ] [ closure ]--, [ operations ]
+ | | | | | |
+ v | | v v |
+ [ files and processes ] | | [ garbage collection ] |
+ / \ | | |
+ v v | v v
+[ file system object ] [ store path ] | [ derivation ]--->[ building ]
+ | ^ | | |
+ v | v v |
+ [ digest ]----' [ reference scanning ]<------------'
+ / \
+ v v
+[ input addressing ] [ content addressing ]
+```
+
+## Store Object
+
+A store object can hold
+
+- arbitrary *data*
+- *references* to other store objects.
+
+Store objects can be build inputs, build results, or build tasks.
+
+Store objects are [immutable][immutable-object]: once created, they do not change until they are deleted.
+
+## Reference
+
+A store object reference is an [opaque][opaque-data-type], [unique identifier][unique-identifier]:
+The only way to obtain references is by adding or building store objects.
+A reference will always point to exactly one store object.
+
+## Operations
+
+A Nix store can *add*, *retrieve*, and *delete* store objects.
+
+ [ data ]
+ |
+ V
+ [ store ] ---> add ----> [ store' ]
+ |
+ V
+ [ reference ]
+
+<!-- -->
+
+ [ reference ]
+ |
+ V
+ [ store ] ---> get
+ |
+ V
+ [ store object ]
+
+<!-- -->
+
+ [ reference ]
+ |
+ V
+ [ store ] --> delete --> [ store' ]
+
+
+It can *perform builds*, that is, create new store objects by transforming build inputs into build outputs, using instructions from the build tasks.
+
+
+ [ reference ]
+ |
+ V
+ [ store ] --> build --(maybe)--> [ store' ]
+ |
+ V
+ [ reference ]
+
+
+As it keeps track of references, it can [garbage-collect][garbage-collection] unused store objects.
+
+
+ [ store ] --> collect garbage --> [ store' ]
+
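+These operations can be summarised as an interface; the following is a minimal in-memory sketch, for illustration only, since real stores persist objects, enforce referential integrity, and also implement building and garbage collection.
+
+```python
+import hashlib
+
+
+class Store:
+    """Toy store: maps opaque references to (data, references) pairs."""
+
+    def __init__(self):
+        self._objects = {}
+
+    def add(self, data: bytes, references: frozenset = frozenset()) -> str:
+        """Add a store object and return a reference to it."""
+        ref = hashlib.sha256(data).hexdigest()[:32]
+        self._objects[ref] = (data, references)
+        return ref
+
+    def get(self, ref: str):
+        """Retrieve the store object behind a reference."""
+        return self._objects[ref]
+
+    def delete(self, ref: str) -> None:
+        """Delete a store object (no liveness check in this sketch)."""
+        del self._objects[ref]
+```
+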
+## Files and Processes
+
+Nix maps between its store model and the [Unix paradigm][unix-paradigm] of [files and processes][file-descriptor], by encoding immutable store objects and opaque identifiers as file system primitives: files and directories, and paths.
+That allows processes to resolve references contained in files and thus access the contents of store objects.
+
+Store objects are therefore implemented as the pair of
+
+ - a [file system object](fso.md) for data
+ - a set of [store paths](path.md) for references.
+
+[unix-paradigm]: https://en.m.wikipedia.org/wiki/Everything_is_a_file
+[file-descriptor]: https://en.m.wikipedia.org/wiki/File_descriptor
+
+The following diagram shows a radical simplification of how Nix interacts with the operating system:
+It uses files as build inputs, and build outputs are files again.
+On the operating system, files can be run as processes, which in turn operate on files.
+A build function also amounts to an operating system process (not depicted).
+
+```
++-----------------------------------------------------------------+
+| Nix |
+| [ commmand line interface ]------, |
+| | | |
+| evaluates | |
+| | manages |
+| V | |
+| [ configuration language ] | |
+| | | |
+| +-----------------------------|-------------------V-----------+ |
+| | store evaluates to | |
+| | | | |
+| | referenced by V builds | |
+| | [ build input ] ---> [ build plan ] ---> [ build result ] | |
+| | ^ | | |
+| +---------|----------------------------------------|----------+ |
++-----------|----------------------------------------|------------+
+ | |
+ file system object store path
+ | |
++-----------|----------------------------------------|------------+
+| operating system +------------+ | |
+| '------------ | | <-----------' |
+| | file | |
+| ,-- | | <-, |
+| | +------------+ | |
+| execute as | | read, write, execute |
+| | +------------+ | |
+| '-> | process | --' |
+| +------------+ |
++-----------------------------------------------------------------+
+```
+
+There exist different types of stores, which all follow this model.
+Examples:
+- store on the local file system
+- remote store accessible via SSH
+- binary cache store accessible via HTTP
+
+To make store objects accessible to processes, stores ultimately have to expose store objects through the file system.
+
+[immutable-object]: https://en.m.wikipedia.org/wiki/Immutable_object
+[opaque-data-type]: https://en.m.wikipedia.org/wiki/Opaque_data_type
+[unique-identifier]: https://en.m.wikipedia.org/wiki/Unique_identifier
+[garbage-collection]: https://en.m.wikipedia.org/wiki/Garbage_collection_(computer_science)
+
diff --git a/doc/manual/src/architecture/store/store/build-system-terminology.md b/doc/manual/src/architecture/store/store/build-system-terminology.md
new file mode 100644
index 000000000..eefbaa630
--- /dev/null
+++ b/doc/manual/src/architecture/store/store/build-system-terminology.md
@@ -0,0 +1,32 @@
+# A [Rosetta stone][rosetta-stone] for build system terminology
+
+The Nix store's design is comparable to other build systems.
+Usage of terms is, for historic reasons, not entirely consistent within the Nix ecosystem, and is still subject to gradual change.
+
+The following translation table points out similarities and equivalent terms, to help clarify their meaning and inform consistent use in the future.
+
+| generic build system | Nix | [Bazel][bazel] | [Build Systems à la Carte][bsalc] | programming language |
+| -------------------------------- | ---------------- | -------------------------------------------------------------------- | --------------------------------- | ------------------------ |
+| data (build input, build result) | store object | [artifact][bazel-artifact] | value | value |
+| build instructions | builder | ([depends on action type][bazel-actions]) | function | function |
+| build task | derivation | [action][bazel-action] | `Task` | [thunk][thunk] |
+| build plan | derivation graph | [action graph][bazel-action-graph], [build graph][bazel-build-graph] | `Tasks` | [call graph][call-graph] |
+| build | build | build | application of `Build` | evaluation |
+| persistence layer | store | [action cache][bazel-action-cache] | `Store` | heap |
+
+All of these systems share features of [declarative programming][declarative-programming] languages, a key insight first put forward by Eelco Dolstra et al. in [Imposing a Memory Management Discipline on Software Deployment][immdsd] (2004), elaborated in his PhD thesis [The Purely Functional Software Deployment Model][phd-thesis] (2006), and further refined by Andrey Mokhov et al. in [Build Systems à la Carte][bsalc] (2018).
+
+[rosetta-stone]: https://en.m.wikipedia.org/wiki/Rosetta_Stone
+[bazel]: https://bazel.build/start/bazel-intro
+[bazel-artifact]: https://bazel.build/reference/glossary#artifact
+[bazel-actions]: https://docs.bazel.build/versions/main/skylark/lib/actions.html
+[bazel-action]: https://bazel.build/reference/glossary#action
+[bazel-action-graph]: https://bazel.build/reference/glossary#action-graph
+[bazel-build-graph]: https://bazel.build/reference/glossary#build-graph
+[bazel-action-cache]: https://bazel.build/reference/glossary#action-cache
+[thunk]: https://en.m.wikipedia.org/wiki/Thunk
+[call-graph]: https://en.m.wikipedia.org/wiki/Call_graph
+[declarative-programming]: https://en.m.wikipedia.org/wiki/Declarative_programming
+[immdsd]: https://edolstra.github.io/pubs/immdsd-icse2004-final.pdf
+[phd-thesis]: https://edolstra.github.io/pubs/phd-thesis.pdf
+[bsalc]: https://www.microsoft.com/en-us/research/uploads/prod/2018/03/build-systems.pdf
diff --git a/doc/manual/src/architecture/store/store/closure.md b/doc/manual/src/architecture/store/store/closure.md
new file mode 100644
index 000000000..065b95ffc
--- /dev/null
+++ b/doc/manual/src/architecture/store/store/closure.md
@@ -0,0 +1,29 @@
+# Closure
+
+Nix stores ensure [referential integrity][referential-integrity]: for each store object in the store, all the store objects it references must also be in the store.
+
+The set of all store objects reachable by following references from a given initial set of store objects is called a *closure*.
+
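+Computing a closure amounts to following references until no new store objects are found; a minimal sketch, assuming a function `references_of` that yields the references held by a given store object (the name is hypothetical):
+
+```python
+def closure(roots: set, references_of) -> set:
+    """Return all store objects reachable from the given roots by following references."""
+    seen = set()
+    todo = list(roots)
+    while todo:
+        ref = todo.pop()
+        if ref in seen:
+            continue
+        seen.add(ref)
+        todo.extend(references_of(ref))
+    return seen
+```
+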
+Adding, building, copying and deleting store objects must be done in a way that preserves referential integrity:
+
+- A newly added store object cannot have references, unless it is a build task.
+
+- Build results must only refer to store objects in the closure of the build inputs.
+
+ Building a store object will add appropriate references, according to the build task.
+
+- Store objects being copied must refer to objects already in the destination store.
+
+ Recursive copying must either proceed in dependency order or be atomic.
+
+- We can only safely delete store objects which are not reachable from any reference still in use.
+
+ <!-- more details in section on garbage collection, link to it once it exists -->
+
+[referential-integrity]: https://en.m.wikipedia.org/wiki/Referential_integrity
+[garbage-collection]: https://en.m.wikipedia.org/wiki/Garbage_collection_(computer_science)
+[immutable-object]: https://en.m.wikipedia.org/wiki/Immutable_object
+[opaque-data-type]: https://en.m.wikipedia.org/wiki/Opaque_data_type
+[unique-identifier]: https://en.m.wikipedia.org/wiki/Unique_identifier
+
+
diff --git a/doc/manual/src/command-ref/env-common.md b/doc/manual/src/command-ref/env-common.md
index 6e2403461..3f3eb6915 100644
--- a/doc/manual/src/command-ref/env-common.md
+++ b/doc/manual/src/command-ref/env-common.md
@@ -2,11 +2,11 @@
Most Nix commands interpret the following environment variables:
- - `IN_NIX_SHELL`\
+ - [`IN_NIX_SHELL`]{#env-IN_NIX_SHELL}\
Indicator that tells if the current environment was set up by
- `nix-shell`. Since Nix 2.0 the values are `"pure"` and `"impure"`
+ `nix-shell`. It can have the values `pure` or `impure`.
- - `NIX_PATH`\
+ - [`NIX_PATH`]{#env-NIX_PATH}\
A colon-separated list of directories used to look up Nix
expressions enclosed in angle brackets (i.e., `<path>`). For
instance, the value
@@ -44,7 +44,7 @@ Most Nix commands interpret the following environment variables:
The Nix search path can also be extended using the `-I` option to
many Nix commands, which takes precedence over `NIX_PATH`.
- - `NIX_IGNORE_SYMLINK_STORE`\
+ - [`NIX_IGNORE_SYMLINK_STORE`]{#env-NIX_IGNORE_SYMLINK_STORE}\
Normally, the Nix store directory (typically `/nix/store`) is not
allowed to contain any symlink components. This is to prevent
“impure” builds. Builders sometimes “canonicalise” paths by
@@ -66,41 +66,41 @@ Most Nix commands interpret the following environment variables:
Consult the mount 8 manual page for details.
- - `NIX_STORE_DIR`\
+ - [`NIX_STORE_DIR`]{#env-NIX_STORE_DIR}\
Overrides the location of the Nix store (default `prefix/store`).
- - `NIX_DATA_DIR`\
+ - [`NIX_DATA_DIR`]{#env-NIX_DATA_DIR}\
Overrides the location of the Nix static data directory (default
`prefix/share`).
- - `NIX_LOG_DIR`\
+ - [`NIX_LOG_DIR`]{#env-NIX_LOG_DIR}\
Overrides the location of the Nix log directory (default
`prefix/var/log/nix`).
- - `NIX_STATE_DIR`\
+ - [`NIX_STATE_DIR`]{#env-NIX_STATE_DIR}\
Overrides the location of the Nix state directory (default
`prefix/var/nix`).
- - `NIX_CONF_DIR`\
+ - [`NIX_CONF_DIR`]{#env-NIX_CONF_DIR}\
Overrides the location of the system Nix configuration directory
(default `prefix/etc/nix`).
- - `NIX_CONFIG`\
+ - [`NIX_CONFIG`]{#env-NIX_CONFIG}\
Applies settings from Nix configuration from the environment.
The content is treated as if it was read from a Nix configuration file.
Settings are separated by the newline character.
- - `NIX_USER_CONF_FILES`\
+ - [`NIX_USER_CONF_FILES`]{#env-NIX_USER_CONF_FILES}\
Overrides the location of the user Nix configuration files to load
from (defaults to the XDG spec locations). The variable is treated
as a list separated by the `:` token.
- - `TMPDIR`\
+ - [`TMPDIR`]{#env-TMPDIR}\
Use the specified directory to store temporary files. In particular,
this includes temporary build directories; these can take up
substantial amounts of disk space. The default is `/tmp`.
- - `NIX_REMOTE`\
+ - [`NIX_REMOTE`]{#env-NIX_REMOTE}\
This variable should be set to `daemon` if you want to use the Nix
daemon to execute Nix operations. This is necessary in [multi-user
Nix installations](../installation/multi-user.md). If the Nix
@@ -108,16 +108,16 @@ Most Nix commands interpret the following environment variables:
should be set to `unix://path/to/socket`. Otherwise, it should be
left unset.
- - `NIX_SHOW_STATS`\
+ - [`NIX_SHOW_STATS`]{#env-NIX_SHOW_STATS}\
If set to `1`, Nix will print some evaluation statistics, such as
the number of values allocated.
- - `NIX_COUNT_CALLS`\
+ - [`NIX_COUNT_CALLS`]{#env-NIX_COUNT_CALLS}\
If set to `1`, Nix will print how often functions were called during
Nix expression evaluation. This is useful for profiling your Nix
expressions.
- - `GC_INITIAL_HEAP_SIZE`\
+ - [`GC_INITIAL_HEAP_SIZE`]{#env-GC_INITIAL_HEAP_SIZE}\
If Nix has been configured to use the Boehm garbage collector, this
variable sets the initial size of the heap in bytes. It defaults to
384 MiB. Setting it to a low value reduces memory consumption, but
diff --git a/doc/manual/src/command-ref/nix-build.md b/doc/manual/src/command-ref/nix-build.md
index 43de7a6e6..49c6f3f55 100644
--- a/doc/manual/src/command-ref/nix-build.md
+++ b/doc/manual/src/command-ref/nix-build.md
@@ -12,6 +12,12 @@
[`--dry-run`]
[{`--out-link` | `-o`} *outlink*]
+# Disambiguation
+
+This man page describes the command `nix-build`, which is distinct from `nix
+build`. For documentation on the latter, run `nix build --help` or see `man
+nix3-build`.
+
# Description
The `nix-build` command builds the derivations described by the Nix
@@ -47,16 +53,16 @@ All options not listed here are passed to `nix-store
--realise`, except for `--arg` and `--attr` / `-A` which are passed to
`nix-instantiate`.
- - `--no-out-link`\
+ - [`--no-out-link`]{#opt-no-out-link}\
Do not create a symlink to the output path. Note that as a result
the output does not become a root of the garbage collector, and so
might be deleted by `nix-store
--gc`.
- - `--dry-run`\
+ - [`--dry-run`]{#opt-dry-run}\
Show what store paths would be built or downloaded.
- - `--out-link` / `-o` *outlink*\
+ - [`--out-link`]{#opt-out-link} / `-o` *outlink*\
Change the name of the symlink to the output path created from
`result` to *outlink*.
diff --git a/doc/manual/src/command-ref/nix-env.md b/doc/manual/src/command-ref/nix-env.md
index 8d6abaf52..a5df35d77 100644
--- a/doc/manual/src/command-ref/nix-env.md
+++ b/doc/manual/src/command-ref/nix-env.md
@@ -31,7 +31,7 @@ subcommand to be performed. These are documented below.
Several commands, such as `nix-env -q` and `nix-env -i`, take a list of
arguments that specify the packages on which to operate. These are
extended regular expressions that must match the entire name of the
-package. (For details on regular expressions, see regex7.) The match is
+package. (For details on regular expressions, see **regex**(7).) The match is
case-sensitive. The regular expression can optionally be followed by a
dash and a version number; if omitted, any version of the package will
match. Here are some examples:
@@ -198,7 +198,7 @@ a number of possible ways:
another.
- If `--from-expression` is given, *args* are Nix
- [functions](../expressions/language-constructs.md#functions)
+ [functions](../language/constructs.md#functions)
that are called with the active Nix expression as their single
argument. The derivations returned by those function calls are
installed. This allows derivations to be specified in an
@@ -412,7 +412,7 @@ The upgrade operation determines whether a derivation `y` is an upgrade
of a derivation `x` by looking at their respective `name` attributes.
The names (e.g., `gcc-3.3.1` are split into two parts: the package name
(`gcc`), and the version (`3.3.1`). The version part starts after the
-first dash not followed by a letter. `x` is considered an upgrade of `y`
+first dash not followed by a letter. `y` is considered an upgrade of `x`
if their package names match, and the version of `y` is higher than that
of `x`.
diff --git a/doc/manual/src/command-ref/nix-instantiate.md b/doc/manual/src/command-ref/nix-instantiate.md
index 2e198daed..8f143729e 100644
--- a/doc/manual/src/command-ref/nix-instantiate.md
+++ b/doc/manual/src/command-ref/nix-instantiate.md
@@ -51,7 +51,7 @@ standard input.
- `--strict`\
When used with `--eval`, recursively evaluate list elements and
attributes. Normally, such sub-expressions are left unevaluated
- (since the Nix expression language is lazy).
+ (since the Nix language is lazy).
> **Warning**
>
@@ -66,7 +66,7 @@ standard input.
When used with `--eval`, print the resulting value as an XML
representation of the abstract syntax tree rather than as an ATerm.
The schema is the same as that used by the [`toXML`
- built-in](../expressions/builtins.md).
+ built-in](../language/builtins.md).
- `--read-write-mode`\
When used with `--eval`, perform evaluation in read/write mode so
diff --git a/doc/manual/src/command-ref/nix-shell.md b/doc/manual/src/command-ref/nix-shell.md
index a2b6d8a8e..840bccd25 100644
--- a/doc/manual/src/command-ref/nix-shell.md
+++ b/doc/manual/src/command-ref/nix-shell.md
@@ -15,6 +15,12 @@
[`--keep` *name*]
{{`--packages` | `-p`} {*packages* | *expressions*} … | [*path*]}
+# Disambiguation
+
+This man page describes the command `nix-shell`, which is distinct from `nix
+shell`. For documentation on the latter, run `nix shell --help` or see `man
+nix3-shell`.
+
# Description
The command `nix-shell` will build the dependencies of the specified
diff --git a/doc/manual/src/command-ref/nix-store.md b/doc/manual/src/command-ref/nix-store.md
index 7db9f0c1c..ecd838e8d 100644
--- a/doc/manual/src/command-ref/nix-store.md
+++ b/doc/manual/src/command-ref/nix-store.md
@@ -22,7 +22,7 @@ This section lists the options that are common to all operations. These
options are allowed for every subcommand, though they may not always
have an effect.
- - `--add-root` *path*\
+ - [`--add-root`]{#opt-add-root} *path*\
Causes the result of a realisation (`--realise` and
`--force-realise`) to be registered as a root of the garbage
collector. *path* will be created as a symlink to the resulting
@@ -121,7 +121,7 @@ Special exit codes:
- `102`\
Hash mismatch, the build output was rejected because it does not
match the [`outputHash` attribute of the
- derivation](../expressions/advanced-attributes.md).
+ derivation](../language/advanced-attributes.md).
- `104`\
Not deterministic, the build succeeded in check mode but the
diff --git a/doc/manual/src/command-ref/opt-common.md b/doc/manual/src/command-ref/opt-common.md
index 7ee1a26bc..e612c416f 100644
--- a/doc/manual/src/command-ref/opt-common.md
+++ b/doc/manual/src/command-ref/opt-common.md
@@ -2,13 +2,13 @@
Most Nix commands accept the following command-line options:
- - `--help`\
+ - [`--help`]{#opt-help}\
Prints out a summary of the command syntax and exits.
- - `--version`\
+ - [`--version`]{#opt-version}\
Prints out the Nix version number on standard output and exits.
- - `--verbose` / `-v`\
+ - [`--verbose`]{#opt-verbose} / `-v`\
Increases the level of verbosity of diagnostic messages printed on
standard error. For each Nix operation, the information printed on
standard output is well-defined; any diagnostic information is
@@ -37,14 +37,14 @@ Most Nix commands accept the following command-line options:
- 5\
“Vomit”: print vast amounts of debug information.
- - `--quiet`\
+ - [`--quiet`]{#opt-quiet}\
Decreases the level of verbosity of diagnostic messages printed on
standard error. This is the inverse option to `-v` / `--verbose`.
This option may be specified repeatedly. See the previous verbosity
levels list.
- - `--log-format` *format*\
+ - [`--log-format`]{#opt-log-format} *format*\
This option can be used to change the output of the log format, with
*format* being one of:
@@ -66,14 +66,14 @@ Most Nix commands accept the following command-line options:
- bar-with-logs\
Display the raw logs, with the progress bar at the bottom.
- - `--no-build-output` / `-Q`\
+ - [`--no-build-output`]{#opt-no-build-output} / `-Q`\
By default, output written by builders to standard output and
standard error is echoed to the Nix command's standard error. This
option suppresses this behaviour. Note that the builder's standard
output and error are always written to a log file in
`prefix/nix/var/log/nix`.
- - `--max-jobs` / `-j` *number*\
+ - [`--max-jobs`]{#opt-max-jobs} / `-j` *number*\
Sets the maximum number of build jobs that Nix will perform in
parallel to the specified number. Specify `auto` to use the number
of CPUs in the system. The default is specified by the `max-jobs`
@@ -83,7 +83,7 @@ Most Nix commands accept the following command-line options:
Setting it to `0` disallows building on the local machine, which is
useful when you want builds to happen only on remote builders.
- - `--cores`\
+ - [`--cores`]{#opt-cores}\
Sets the value of the `NIX_BUILD_CORES` environment variable in
the invocation of builders. Builders can use this variable at
their discretion to control the maximum amount of parallelism. For
@@ -94,18 +94,18 @@ Most Nix commands accept the following command-line options:
means that the builder should use all available CPU cores in the
system.
- - `--max-silent-time`\
+ - [`--max-silent-time`]{#opt-max-silent-time}\
Sets the maximum number of seconds that a builder can go without
producing any data on standard output or standard error. The
default is specified by the `max-silent-time` configuration
setting. `0` means no time-out.
- - `--timeout`\
+ - [`--timeout`]{#opt-timeout}\
Sets the maximum number of seconds that a builder can run. The
default is specified by the `timeout` configuration setting. `0`
means no timeout.
- - `--keep-going` / `-k`\
+ - [`--keep-going`]{#opt-keep-going} / `-k`\
Keep going in case of failed builds, to the greatest extent
possible. That is, if building an input of some derivation fails,
Nix will still build the other inputs, but not the derivation
@@ -113,13 +113,13 @@ Most Nix commands accept the following command-line options:
for builds of substitutes), possibly killing builds in progress (in
case of parallel or distributed builds).
- - `--keep-failed` / `-K`\
+ - [`--keep-failed`]{#opt-keep-failed} / `-K`\
Specifies that in case of a build failure, the temporary directory
(usually in `/tmp`) in which the build takes place should not be
deleted. The path of the build directory is printed as an
informational message.
- - `--fallback`\
+ - [`--fallback`]{#opt-fallback}\
Whenever Nix attempts to build a derivation for which substitutes
are known for each output path, but realising the output paths
through the substitutes fails, fall back on building the derivation.
@@ -134,18 +134,18 @@ Most Nix commands accept the following command-line options:
failure in obtaining the substitutes to lead to a full build from
source (with the related consumption of resources).
- - `--readonly-mode`\
+ - [`--readonly-mode`]{#opt-readonly-mode}\
When this option is used, no attempt is made to open the Nix
database. Most Nix operations do need database access, so those
operations will fail.
- - `--arg` *name* *value*\
+ - [`--arg`]{#opt-arg} *name* *value*\
This option is accepted by `nix-env`, `nix-instantiate`,
`nix-shell` and `nix-build`. When evaluating Nix expressions, the
expression evaluator will automatically try to call functions that
it encounters. It can automatically call functions for which every
argument has a [default
- value](../expressions/language-constructs.md#functions) (e.g.,
+ value](../language/constructs.md#functions) (e.g.,
`{ argName ? defaultValue }: ...`). With `--arg`, you can also
call functions that have arguments without a default value (or
override a default value). That is, if the evaluator encounters a
@@ -164,19 +164,19 @@ Most Nix commands accept the following command-line options:
So if you call this Nix expression (e.g., when you do `nix-env -iA
pkgname`), the function will be called automatically using the
- value [`builtins.currentSystem`](../expressions/builtins.md) for
+ value [`builtins.currentSystem`](../language/builtins.md) for
the `system` argument. You can override this using `--arg`, e.g.,
`nix-env -iA pkgname --arg system \"i686-freebsd\"`. (Note that
since the argument is a Nix string literal, you have to escape the
quotes.)
- - `--argstr` *name* *value*\
+ - [`--argstr`]{#opt-argstr} *name* *value*\
This option is like `--arg`, only the value is not a Nix
expression but a string. So instead of `--arg system
\"i686-linux\"` (the outer quotes are to keep the shell happy) you
can say `--argstr system i686-linux`.
- - `--attr` / `-A` *attrPath*\
+ - [`--attr`]{#opt-attr} / `-A` *attrPath*\
Select an attribute from the top-level Nix expression being
evaluated. (`nix-env`, `nix-instantiate`, `nix-build` and
`nix-shell` only.) The *attribute path* *attrPath* is a sequence
@@ -191,7 +191,7 @@ Most Nix commands accept the following command-line options:
attribute of the fourth element of the array in the `foo` attribute
of the top-level expression.
- - `--expr` / `-E`\
+ - [`--expr`]{#opt-expr} / `-E`\
Interpret the command line arguments as a list of Nix expressions to
be parsed and evaluated, rather than as a list of file names of Nix
expressions. (`nix-instantiate`, `nix-build` and `nix-shell` only.)
@@ -202,17 +202,17 @@ Most Nix commands accept the following command-line options:
use, give your expression to the `nix-shell -p` convenience flag
instead.
- - `-I` *path*\
+ - [`-I`]{#opt-I} *path*\
Add a path to the Nix expression search path. This option may be
given multiple times. See the `NIX_PATH` environment variable for
information on the semantics of the Nix search path. Paths added
through `-I` take precedence over `NIX_PATH`.
- - `--option` *name* *value*\
+ - [`--option`]{#opt-option} *name* *value*\
Set the Nix configuration option *name* to *value*. This overrides
settings in the Nix configuration file (see nix.conf5).
- - `--repair`\
+ - [`--repair`]{#opt-repair}\
Fix corrupted or missing store paths by redownloading or rebuilding
them. Note that this is slow because it requires computing a
cryptographic hash of the contents of every path in the closure of
diff --git a/doc/manual/src/contributing/hacking.md b/doc/manual/src/contributing/hacking.md
index 90a8f1f94..59ce5cac7 100644
--- a/doc/manual/src/contributing/hacking.md
+++ b/doc/manual/src/contributing/hacking.md
@@ -71,18 +71,6 @@ To install it in `$(pwd)/outputs` and test it:
nix (Nix) 3.0
```
-To run a functional test:
-
-```console
-make tests/test-name-should-auto-complete.sh.test
-```
-
-To run the unit-tests for C++ code:
-
-```
-make check
-```
-
If you have a flakes-enabled Nix you can replace:
```console
@@ -94,3 +82,29 @@ by:
```console
$ nix develop
```
+
+## Testing
+
+Nix comes with three different flavors of tests: unit, functional and integration.
+
+### Unit-tests
+
+The unit-tests for each Nix library (`libexpr`, `libstore`, etc.) are defined
+under `src/{library_name}/tests` using the
+[googletest](https://google.github.io/googletest/) framework.
+
+You can run the whole testsuite with `make check`, or the tests for a specific component with `make libfoo-tests_RUN`. Finer-grained filtering is also possible using the [--gtest_filter](https://google.github.io/googletest/advanced.html#running-a-subset-of-the-tests) command-line option.
+
+### Functional tests
+
+The functional tests reside under the `tests` directory and are listed in `tests/local.mk`.
+The whole testsuite can be run with `make install && make installcheck`.
+Individual tests can be run with `make tests/{testName}.sh.test`.
+
+### Integration tests
+
+The integration tests are defined in the Nix flake under the `hydraJobs.tests` attribute.
+These tests include everything that needs to interact with external services or run Nix in a non-trivial distributed setup.
+Because these tests are expensive and require more than what the standard GitHub Actions setup provides, they only run on the master branch (on <https://hydra.nixos.org/jobset/nix/master>).
+
+You can run them manually with `nix build .#hydraJobs.tests.{testName}` or `nix-build -A hydraJobs.tests.{testName}`.
diff --git a/doc/manual/src/expressions/arguments-variables.md b/doc/manual/src/expressions/arguments-variables.md
deleted file mode 100644
index 12198c879..000000000
--- a/doc/manual/src/expressions/arguments-variables.md
+++ /dev/null
@@ -1,80 +0,0 @@
-# Arguments and Variables
-
-The [Nix expression for GNU Hello](expression-syntax.md) is a
-function; it is missing some arguments that have to be filled in
-somewhere. In the Nix Packages collection this is done in the file
-`pkgs/top-level/all-packages.nix`, where all Nix expressions for
-packages are imported and called with the appropriate arguments. Here
-are some fragments of `all-packages.nix`, with annotations of what
-they mean:
-
-```nix
-...
-
-rec { ①
-
- hello = import ../applications/misc/hello/ex-1 ② { ③
- inherit fetchurl stdenv perl;
- };
-
- perl = import ../development/interpreters/perl { ④
- inherit fetchurl stdenv;
- };
-
- fetchurl = import ../build-support/fetchurl {
- inherit stdenv; ...
- };
-
- stdenv = ...;
-
-}
-```
-
-1. This file defines a set of attributes, all of which are concrete
- derivations (i.e., not functions). In fact, we define a *mutually
- recursive* set of attributes. That is, the attributes can refer to
- each other. This is precisely what we want since we want to “plug”
- the various packages into each other.
-
-2. Here we *import* the Nix expression for GNU Hello. The import
- operation just loads and returns the specified Nix expression. In
- fact, we could just have put the contents of the Nix expression
- for GNU Hello in `all-packages.nix` at this point. That would be
- completely equivalent, but it would make `all-packages.nix` rather
- bulky.
-
- Note that we refer to `../applications/misc/hello/ex-1`, not
- `../applications/misc/hello/ex-1/default.nix`. When you try to
- import a directory, Nix automatically appends `/default.nix` to the
- file name.
-
-3. This is where the actual composition takes place. Here we *call* the
- function imported from `../applications/misc/hello/ex-1` with a set
- containing the things that the function expects, namely `fetchurl`,
- `stdenv`, and `perl`. We use inherit again to use the attributes
- defined in the surrounding scope (we could also have written
- `fetchurl = fetchurl;`, etc.).
-
- The result of this function call is an actual derivation that can be
- built by Nix (since when we fill in the arguments of the function,
- what we get is its body, which is the call to `stdenv.mkDerivation`
- in the [Nix expression for GNU Hello](expression-syntax.md)).
-
- > **Note**
- >
- > Nixpkgs has a convenience function `callPackage` that imports and
- > calls a function, filling in any missing arguments by passing the
- > corresponding attribute from the Nixpkgs set, like this:
- >
- > ```nix
- > hello = callPackage ../applications/misc/hello/ex-1 { };
- > ```
- >
- > If necessary, you can set or override arguments:
- >
- > ```nix
- > hello = callPackage ../applications/misc/hello/ex-1 { stdenv = myStdenv; };
- > ```
-
-4. Likewise, we have to instantiate Perl, `fetchurl`, and the standard
- environment.
diff --git a/doc/manual/src/expressions/build-script.md b/doc/manual/src/expressions/build-script.md
deleted file mode 100644
index b1eacae88..000000000
--- a/doc/manual/src/expressions/build-script.md
+++ /dev/null
@@ -1,70 +0,0 @@
-# Build Script
-
-Here is the builder referenced from Hello's Nix expression (stored in
-`pkgs/applications/misc/hello/ex-1/builder.sh`):
-
-```bash
-source $stdenv/setup ①
-
-PATH=$perl/bin:$PATH ②
-
-tar xvfz $src ③
-cd hello-*
-./configure --prefix=$out ④
-make ⑤
-make install
-```
-
-The builder can actually be made a lot shorter by using the *generic
-builder* functions provided by `stdenv`, but here we write out the build
-steps to elucidate what a builder does. It performs the following steps:
-
-1. When Nix runs a builder, it initially completely clears the
- environment (except for the attributes declared in the derivation).
- This is done to prevent undeclared inputs from being used in the
- build process. If for example the `PATH` contained `/usr/bin`, then
- you might accidentally use `/usr/bin/gcc`.
-
- So the first step is to set up the environment. This is done by
- calling the `setup` script of the standard environment. The
- environment variable `stdenv` points to the location of the
- standard environment being used. (It wasn't specified explicitly
- as an attribute in Hello's Nix expression, but `mkDerivation` adds
- it automatically.)
-
-2. Since Hello needs Perl, we have to make sure that Perl is in the
- `PATH`. The `perl` environment variable points to the location of
- the Perl package (since it was passed in as an attribute to the
- derivation), so `$perl/bin` is the directory containing the Perl
- interpreter.
-
-3. Now we have to unpack the sources. The `src` attribute was bound to
- the result of fetching the Hello source tarball from the network, so
- the `src` environment variable points to the location in the Nix
- store to which the tarball was downloaded. After unpacking, we `cd`
- to the resulting source directory.
-
- The whole build is performed in a temporary directory created in
- `/tmp`, by the way. This directory is removed after the builder
- finishes, so there is no need to clean up the sources afterwards.
- Also, the temporary directory is always newly created, so you don't
- have to worry about files from previous builds interfering with the
- current build.
-
-4. GNU Hello is a typical Autoconf-based package, so we first have to
- run its `configure` script. In Nix every package is stored in a
- separate location in the Nix store, for instance
- `/nix/store/9a54ba97fb71b65fda531012d0443ce2-hello-2.1.1`. Nix
- computes this path by cryptographically hashing all attributes of
- the derivation. The path is passed to the builder through the `out`
- environment variable. So here we give `configure` the parameter
- `--prefix=$out` to cause Hello to be installed in the expected
- location.
-
-5. Finally we build Hello (`make`) and install it into the location
- specified by `out` (`make install`).
-
-If you are wondering about the absence of error checking on the result
-of various commands called in the builder: this is because the shell
-script is evaluated with Bash's `-e` option, which causes the script to
-be aborted if any command fails without an error check.
diff --git a/doc/manual/src/expressions/expression-language.md b/doc/manual/src/expressions/expression-language.md
deleted file mode 100644
index 267fcb983..000000000
--- a/doc/manual/src/expressions/expression-language.md
+++ /dev/null
@@ -1,12 +0,0 @@
-# Nix Expression Language
-
-The Nix expression language is a pure, lazy, functional language. Purity
-means that operations in the language don't have side-effects (for
-instance, there is no variable assignment). Laziness means that
-arguments to functions are evaluated only when they are needed.
-Functional means that functions are “normal” values that can be passed
-around and manipulated in interesting ways. The language is not a
-full-featured, general purpose language. Its main job is to describe
-packages, compositions of packages, and the variability within packages.
-
-This section presents the various features of the language.
diff --git a/doc/manual/src/expressions/expression-syntax.md b/doc/manual/src/expressions/expression-syntax.md
deleted file mode 100644
index 6b93e692c..000000000
--- a/doc/manual/src/expressions/expression-syntax.md
+++ /dev/null
@@ -1,93 +0,0 @@
-# Expression Syntax
-
-Here is a Nix expression for GNU Hello:
-
-```nix
-{ stdenv, fetchurl, perl }: ①
-
-stdenv.mkDerivation { ②
- name = "hello-2.1.1"; ③
- builder = ./builder.sh; ④
- src = fetchurl { ⑤
- url = "ftp://ftp.nluug.nl/pub/gnu/hello/hello-2.1.1.tar.gz";
- sha256 = "1md7jsfd8pa45z73bz1kszpp01yw6x5ljkjk2hx7wl800any6465";
- };
- inherit perl; ⑥
-}
-```
-
-This file is actually already in the Nix Packages collection in
-`pkgs/applications/misc/hello/ex-1/default.nix`. It is customary to
-place each package in a separate directory and call the single Nix
-expression in that directory `default.nix`. The file has the following
-elements (referenced from the figure by number):
-
-1. This states that the expression is a *function* that expects to be
- called with three arguments: `stdenv`, `fetchurl`, and `perl`. They
- are needed to build Hello, but we don't know how to build them here;
- that's why they are function arguments. `stdenv` is a package that
- is used by almost all Nix Packages; it provides a
- “standard” environment consisting of the things you would expect
- in a basic Unix environment: a C/C++ compiler (GCC, to be precise),
- the Bash shell, fundamental Unix tools such as `cp`, `grep`, `tar`,
- etc. `fetchurl` is a function that downloads files. `perl` is the
- Perl interpreter.
-
- Nix functions generally have the form `{ x, y, ..., z }: e` where
- `x`, `y`, etc. are the names of the expected arguments, and where
- *e* is the body of the function. So here, the entire remainder of
- the file is the body of the function; when given the required
- arguments, the body should describe how to build an instance of
- the Hello package.
-
-2. So we have to build a package. Building something from other stuff
- is called a *derivation* in Nix (as opposed to sources, which are
- built by humans instead of computers). We perform a derivation by
- calling `stdenv.mkDerivation`. `mkDerivation` is a function
- provided by `stdenv` that builds a package from a set of
- *attributes*. A set is just a list of key/value pairs where each
- key is a string and each value is an arbitrary Nix
- expression. They take the general form `{ name1 = expr1; ...
- nameN = exprN; }`.
-
-3. The attribute `name` specifies the symbolic name and version of
- the package. Nix doesn't really care about these things, but they
- are used by for instance `nix-env -q` to show a “human-readable”
- name for packages. This attribute is required by `mkDerivation`.
-
-4. The attribute `builder` specifies the builder. This attribute can
- sometimes be omitted, in which case `mkDerivation` will fill in a
- default builder (which does a `configure; make; make install`, in
- essence). Hello is sufficiently simple that the default builder
- would suffice, but in this case, we will show an actual builder
- for educational purposes. The value `./builder.sh` refers to the
- shell script shown in the [next section](build-script.md),
- discussed below.
-
-5. The builder has to know what the sources of the package are. Here,
- the attribute `src` is bound to the result of a call to the
- `fetchurl` function. Given a URL and a SHA-256 hash of the expected
- contents of the file at that URL, this function builds a derivation
- that downloads the file and checks its hash. So the sources are a
- dependency that like all other dependencies is built before Hello
- itself is built.
-
- Instead of `src` any other name could have been used, and in fact
- there can be any number of sources (bound to different attributes).
- However, `src` is customary, and it's also expected by the default
- builder (which we don't use in this example).
-
-6. Since the derivation requires Perl, we have to pass the value of the
- `perl` function argument to the builder. All attributes in the set
- are actually passed as environment variables to the builder, so
- declaring an attribute
-
- ```nix
- perl = perl;
- ```
-
- will do the trick: it binds an attribute `perl` to the function
- argument which also happens to be called `perl`. However, it looks a
- bit silly, so there is a shorter syntax. The `inherit` keyword
- causes the specified attributes to be bound to whatever variables
- with the same name happen to be in scope.
diff --git a/doc/manual/src/expressions/generic-builder.md b/doc/manual/src/expressions/generic-builder.md
deleted file mode 100644
index cf26b5f82..000000000
--- a/doc/manual/src/expressions/generic-builder.md
+++ /dev/null
@@ -1,66 +0,0 @@
-# Generic Builder Syntax
-
-Recall that the [build script for GNU Hello](build-script.md) looked
-something like this:
-
-```bash
-PATH=$perl/bin:$PATH
-tar xvfz $src
-cd hello-*
-./configure --prefix=$out
-make
-make install
-```
-
-The builders for almost all Unix packages look like this — set up some
-environment variables, unpack the sources, configure, build, and
-install. For this reason the standard environment provides some Bash
-functions that automate the build process. Here is what a builder using
-the generic build facilities looks like:
-
-```bash
-buildInputs="$perl" ①
-
-source $stdenv/setup ②
-
-genericBuild ③
-```
-
-Here is what each line means:
-
-1. The `buildInputs` variable tells `setup` to use the indicated
- packages as “inputs”. This means that if a package provides a `bin`
- subdirectory, it's added to `PATH`; if it has a `include`
- subdirectory, it's added to GCC's header search path; and so on.
- (This is implemented in a modular way: `setup` tries to source the
- file `pkg/nix-support/setup-hook` of all dependencies. These “setup
- hooks” can then set up whatever environment variables they want; for
- instance, the setup hook for Perl sets the `PERL5LIB` environment
- variable to contain the `lib/site_perl` directories of all inputs.)
-
-2. The function `genericBuild` is defined in the file `$stdenv/setup`.
-
-3. The final step calls the shell function `genericBuild`, which
- performs the steps that were done explicitly in the previous build
- script. The generic builder is smart enough to figure out whether
- to unpack the sources using `gzip`, `bzip2`, etc. It can be
- customised in many ways; see the Nixpkgs manual for details.
-
-Discerning readers will note that the `buildInputs` could just as well
-have been set in the Nix expression, like this:
-
-```nix
- buildInputs = [ perl ];
-```
-
-The `perl` attribute can then be removed, and the builder becomes even
-shorter:
-
-```bash
-source $stdenv/setup
-genericBuild
-```
-
-In fact, `mkDerivation` provides a default builder that looks exactly
-like that, so it is actually possible to omit the builder for Hello
-entirely.
diff --git a/doc/manual/src/expressions/language-values.md b/doc/manual/src/expressions/language-values.md
deleted file mode 100644
index 75ae9f2eb..000000000
--- a/doc/manual/src/expressions/language-values.md
+++ /dev/null
@@ -1,251 +0,0 @@
-# Values
-
-## Simple Values
-
-Nix has the following basic data types:
-
- - *Strings* can be written in three ways.
-
- The most common way is to enclose the string between double quotes,
- e.g., `"foo bar"`. Strings can span multiple lines. The special
- characters `"` and `\` and the character sequence `${` must be
- escaped by prefixing them with a backslash (`\`). Newlines, carriage
- returns and tabs can be written as `\n`, `\r` and `\t`,
- respectively.
-
- You can include the result of an expression into a string by
- enclosing it in `${...}`, a feature known as *antiquotation*. The
- enclosed expression must evaluate to something that can be coerced
- into a string (meaning that it must be a string, a path, or a
- derivation). For instance, rather than writing
-
- ```nix
- "--with-freetype2-library=" + freetype + "/lib"
- ```
-
- (where `freetype` is a derivation), you can instead write the more
- natural
-
- ```nix
- "--with-freetype2-library=${freetype}/lib"
- ```
-
- The latter is automatically translated to the former. A more
- complicated example (from the Nix expression for
- [Qt](http://www.trolltech.com/products/qt)):
-
- ```nix
- configureFlags = "
- -system-zlib -system-libpng -system-libjpeg
- ${if openglSupport then "-dlopen-opengl
- -L${mesa}/lib -I${mesa}/include
- -L${libXmu}/lib -I${libXmu}/include" else ""}
- ${if threadSupport then "-thread" else "-no-thread"}
- ";
- ```
-
- Note that Nix expressions and strings can be arbitrarily nested; in
- this case the outer string contains various antiquotations that
- themselves contain strings (e.g., `"-thread"`), some of which in
- turn contain expressions (e.g., `${mesa}`).
-
- The second way to write string literals is as an *indented string*,
- which is enclosed between pairs of *double single-quotes*, like so:
-
- ```nix
- ''
- This is the first line.
- This is the second line.
- This is the third line.
- ''
- ```
-
- This kind of string literal intelligently strips indentation from
- the start of each line. To be precise, it strips from each line a
- number of spaces equal to the minimal indentation of the string as a
- whole (disregarding the indentation of empty lines). For instance,
- the first and second line are indented two spaces, while the third
- line is indented four spaces. Thus, two spaces are stripped from
- each line, so the resulting string is
-
- ```nix
- "This is the first line.\nThis is the second line.\n This is the third line.\n"
- ```
-
- Note that the whitespace and newline following the opening `''` is
- ignored if there is no non-whitespace text on the initial line.
-
- Antiquotation (`${expr}`) is supported in indented strings.
-
- Since `${` and `''` have special meaning in indented strings, you
- need a way to quote them. `$` can be escaped by prefixing it with
- `''` (that is, two single quotes), i.e., `''$`. `''` can be escaped
- by prefixing it with `'`, i.e., `'''`. `$` removes any special
- meaning from the following `$`. Linefeed, carriage-return and tab
- characters can be written as `''\n`, `''\r`, `''\t`, and `''\`
- escapes any other character.
-
- Indented strings are primarily useful in that they allow multi-line
- string literals to follow the indentation of the enclosing Nix
- expression, and that less escaping is typically necessary for
- strings representing languages such as shell scripts and
- configuration files because `''` is much less common than `"`.
- Example:
-
- ```nix
- stdenv.mkDerivation {
- ...
- postInstall =
- ''
- mkdir $out/bin $out/etc
- cp foo $out/bin
- echo "Hello World" > $out/etc/foo.conf
- ${if enableBar then "cp bar $out/bin" else ""}
- '';
- ...
- }
- ```
-
- Finally, as a convenience, *URIs* as defined in appendix B of
- [RFC 2396](http://www.ietf.org/rfc/rfc2396.txt) can be written *as
- is*, without quotes. For instance, the string
- `"http://example.org/foo.tar.bz2"` can also be written as
- `http://example.org/foo.tar.bz2`.
-
- - Numbers, which can be *integers* (like `123`) or *floating point*
- (like `123.43` or `.27e13`).
-
- Numbers are type-compatible: pure integer operations will always
- return integers, whereas any operation involving at least one
- floating point number will have a floating point number as a result.
-
- - *Paths*, e.g., `/bin/sh` or `./builder.sh`. A path must contain at
- least one slash to be recognised as such. For instance, `builder.sh`
- is not a path: it's parsed as an expression that selects the
- attribute `sh` from the variable `builder`. If the file name is
- relative, i.e., if it does not begin with a slash, it is made
- absolute at parse time relative to the directory of the Nix
- expression that contained it. For instance, if a Nix expression in
- `/foo/bar/bla.nix` refers to `../xyzzy/fnord.nix`, the absolute path
- is `/foo/xyzzy/fnord.nix`.
-
- If the first component of a path is a `~`, it is interpreted as if
- the rest of the path were relative to the user's home directory.
- e.g. `~/foo` would be equivalent to `/home/edolstra/foo` for a user
- whose home directory is `/home/edolstra`.
-
- Paths can also be specified between angle brackets, e.g.
- `<nixpkgs>`. This means that the directories listed in the
- environment variable `NIX_PATH` will be searched for the given file
- or directory name.
-
- Antiquotation is supported in any paths except those in angle brackets.
- `./${foo}-${bar}.nix` is a more convenient way of writing
- `./. + "/" + foo + "-" + bar + ".nix"` or `./. + "/${foo}-${bar}.nix"`. At
- least one slash must appear *before* any antiquotations for this to be
- recognized as a path. `a.${foo}/b.${bar}` is a syntactically valid division
- operation. `./a.${foo}/b.${bar}` is a path.
-
- - *Booleans* with values `true` and `false`.
-
- - The null value, denoted as `null`.
-
-## Lists
-
-Lists are formed by enclosing a whitespace-separated list of values
-between square brackets. For example,
-
-```nix
-[ 123 ./foo.nix "abc" (f { x = y; }) ]
-```
-
-defines a list of four elements, the last being the result of a call to
-the function `f`. Note that function calls have to be enclosed in
-parentheses. If they had been omitted, e.g.,
-
-```nix
-[ 123 ./foo.nix "abc" f { x = y; } ]
-```
-
-the result would be a list of five elements, the fourth one being a
-function and the fifth being a set.
-
-Note that lists are only lazy in values, and they are strict in length.
-
-## Sets
-
-Sets are really the core of the language, since ultimately the Nix
-language is all about creating derivations, which are really just sets
-of attributes to be passed to build scripts.
-
-Sets are just a list of name/value pairs (called *attributes*) enclosed
-in curly brackets, where each value is an arbitrary expression
-terminated by a semicolon. For example:
-
-```nix
-{ x = 123;
- text = "Hello";
- y = f { bla = 456; };
-}
-```
-
-This defines a set with attributes named `x`, `text`, `y`. The order of
-the attributes is irrelevant. An attribute name may only occur once.
-
-Attributes can be selected from a set using the `.` operator. For
-instance,
-
-```nix
-{ a = "Foo"; b = "Bar"; }.a
-```
-
-evaluates to `"Foo"`. It is possible to provide a default value in an
-attribute selection using the `or` keyword. For example,
-
-```nix
-{ a = "Foo"; b = "Bar"; }.c or "Xyzzy"
-```
-
-will evaluate to `"Xyzzy"` because there is no `c` attribute in the set.
-
-You can use arbitrary double-quoted strings as attribute names:
-
-```nix
-{ "foo ${bar}" = 123; "nix-1.0" = 456; }."foo ${bar}"
-```
-
-This will evaluate to `123` (Assuming `bar` is antiquotable). In the
-case where an attribute name is just a single antiquotation, the quotes
-can be dropped:
-
-```nix
-{ foo = 123; }.${bar} or 456
-```
-
-This will evaluate to `123` if `bar` evaluates to `"foo"` when coerced
-to a string and `456` otherwise (again assuming `bar` is antiquotable).
-
-In the special case where an attribute name inside of a set declaration
-evaluates to `null` (which is normally an error, as `null` is not
-antiquotable), that attribute is simply not added to the set:
-
-```nix
-{ ${if foo then "bar" else null} = true; }
-```
-
-This will evaluate to `{}` if `foo` evaluates to `false`.
-
-A set that has a `__functor` attribute whose value is callable (i.e. is
-itself a function or a set with a `__functor` attribute whose value is
-callable) can be applied as if it were a function, with the set itself
-passed in first , e.g.,
-
-```nix
-let add = { __functor = self: x: x + self.x; };
- inc = add // { x = 1; };
-in inc 1
-```
-
-evaluates to `2`. This can be used to attach metadata to a function
-without the caller needing to treat it specially, or to implement a form
-of object-oriented programming, for example.
diff --git a/doc/manual/src/expressions/simple-building-testing.md b/doc/manual/src/expressions/simple-building-testing.md
deleted file mode 100644
index 7f0d8f841..000000000
--- a/doc/manual/src/expressions/simple-building-testing.md
+++ /dev/null
@@ -1,61 +0,0 @@
-# Building and Testing
-
-You can now try to build Hello. Of course, you could do `nix-env -f . -iA
-hello`, but you may not want to install a possibly broken package just
-yet. The best way to test the package is by using the command
-`nix-build`, which builds a Nix expression and creates a symlink named
-`result` in the current directory:
-
-```console
-$ nix-build -A hello
-building path `/nix/store/632d2b22514d...-hello-2.1.1'
-hello-2.1.1/
-hello-2.1.1/intl/
-hello-2.1.1/intl/ChangeLog
-...
-
-$ ls -l result
-lrwxrwxrwx ... 2006-09-29 10:43 result -> /nix/store/632d2b22514d...-hello-2.1.1
-
-$ ./result/bin/hello
-Hello, world!
-```
-
-The `-A` option selects the `hello` attribute. This is faster than
-using the symbolic package name specified by the `name` attribute
-(which also happens to be `hello`) and is unambiguous (there can be
-multiple packages with the symbolic name `hello`, but there can be
-only one attribute in a set named `hello`).
-
-`nix-build` registers the `./result` symlink as a garbage collection
-root, so unless and until you delete the `./result` symlink, the output
-of the build will be safely kept on your system. You can use
-`nix-build`’s `-o` switch to give the symlink another name.
-
-Nix has transactional semantics. Once a build finishes successfully, Nix
-makes a note of this in its database: it registers that the path denoted
-by `out` is now “valid”. If you try to build the derivation again, Nix
-will see that the path is already valid and finish immediately. If a
-build fails, either because it returns a non-zero exit code, because Nix
-or the builder are killed, or because the machine crashes, then the
-output paths will not be registered as valid. If you try to build the
-derivation again, Nix will remove the output paths if they exist (e.g.,
-because the builder died half-way through `make
-install`) and try again. Note that there is no “negative caching”: Nix
-doesn't remember that a build failed, and so a failed build can always
-be repeated. This is because Nix cannot distinguish between permanent
-failures (e.g., a compiler error due to a syntax error in the source)
-and transient failures (e.g., a disk full condition).
-
-Nix also performs locking. If you run multiple Nix builds
-simultaneously, and they try to build the same derivation, the first Nix
-instance that gets there will perform the build, while the others block
-(or perform other derivations if available) until the build finishes:
-
-```console
-$ nix-build -A hello
-waiting for lock on `/nix/store/0h5b7hp8d4hqfrw8igvx97x1xawrjnac-hello-2.1.1x'
-```
-
-So it is always safe to run multiple instances of Nix in parallel (which
-isn’t the case with, say, `make`).
diff --git a/doc/manual/src/expressions/simple-expression.md b/doc/manual/src/expressions/simple-expression.md
deleted file mode 100644
index 857f71b9b..000000000
--- a/doc/manual/src/expressions/simple-expression.md
+++ /dev/null
@@ -1,23 +0,0 @@
-# A Simple Nix Expression
-
-This section shows how to add and test the [GNU Hello
-package](http://www.gnu.org/software/hello/hello.html) to the Nix
-Packages collection. Hello is a program that prints out the text “Hello,
-world\!”.
-
-To add a package to the Nix Packages collection, you generally need to
-do three things:
-
-1. Write a Nix expression for the package. This is a file that
- describes all the inputs involved in building the package, such as
- dependencies, sources, and so on.
-
-2. Write a *builder*. This is a shell script that builds the package
- from the inputs. (In fact, it can be written in any language, but
- typically it's a `bash` shell script.)
-
-3. Add the package to the file `pkgs/top-level/all-packages.nix`. The
- Nix expression written in the first step is a *function*; it
- requires other packages in order to build it. In this step you put
- it all together, i.e., you call the function with the right
- arguments to build the actual package.
diff --git a/doc/manual/src/expressions/writing-nix-expressions.md b/doc/manual/src/expressions/writing-nix-expressions.md
deleted file mode 100644
index 5664108e7..000000000
--- a/doc/manual/src/expressions/writing-nix-expressions.md
+++ /dev/null
@@ -1,12 +0,0 @@
-This chapter shows you how to write Nix expressions, which instruct Nix
-how to build packages. It starts with a simple example (a Nix expression
-for GNU Hello), and then moves on to a more in-depth look at the Nix
-expression language.
-
-> **Note**
->
-> This chapter is mostly about the Nix expression language. For more
-> extensive information on adding packages to the Nix Packages
-> collection (such as functions in the standard environment and coding
-> conventions), please consult [its
-> manual](http://nixos.org/nixpkgs/manual/).
diff --git a/doc/manual/src/glossary.md b/doc/manual/src/glossary.md
index 71ff13275..aa0ac78cb 100644
--- a/doc/manual/src/glossary.md
+++ b/doc/manual/src/glossary.md
@@ -1,48 +1,48 @@
# Glossary
- - derivation\
+ - [derivation]{#gloss-derivation}\
A description of a build action. The result of a derivation is a
store object. Derivations are typically specified in Nix expressions
- using the [`derivation` primitive](expressions/derivations.md). These are
+ using the [`derivation` primitive](language/derivations.md). These are
translated into low-level *store derivations* (implicitly by
`nix-env` and `nix-build`, or explicitly by `nix-instantiate`).
- - store\
+ - [store]{#gloss-store}\
The location in the file system where store objects live. Typically
`/nix/store`.
- - store path\
+ - [store path]{#gloss-store-path}\
The location in the file system of a store object, i.e., an
immediate child of the Nix store directory.
- - store object\
+ - [store object]{#gloss-store-object}\
A file that is an immediate child of the Nix store directory. These
can be regular files, but also entire directory trees. Store objects
can be sources (objects copied from outside of the store),
derivation outputs (objects produced by running a build action), or
derivations (files describing a build action).
- - substitute\
+ - [substitute]{#gloss-substitute}\
A substitute is a command invocation stored in the Nix database that
describes how to build a store object, bypassing the normal build
mechanism (i.e., derivations). Typically, the substitute builds the
store object by downloading a pre-built version of the store object
from some server.
- - purity\
+ - [purity]{#gloss-purity}\
The assumption that equal Nix derivations when run always produce
the same output. This cannot be guaranteed in general (e.g., a
builder can rely on external inputs such as the network or the
system time) but the Nix model assumes it.
- - Nix expression\
+ - [Nix expression]{#gloss-nix-expression}\
A high-level description of software packages and compositions
thereof. Deploying software using Nix entails writing Nix
expressions for your packages. Nix expressions are translated to
derivations that are stored in the Nix store. These derivations can
then be built.
- - reference\
+ - [reference]{#gloss-reference}\
A store path `P` is said to have a reference to a store path `Q` if
the store object at `P` contains the path `Q` somewhere. The
*references* of a store path are the set of store paths to which it
@@ -52,11 +52,11 @@
output paths), whereas an output path only references other output
paths.
- - reachable\
+ - [reachable]{#gloss-reachable}\
A store path `Q` is reachable from another store path `P` if `Q`
is in the *closure* of the *references* relation.
- - closure\
+ - [closure]{#gloss-closure}\
The closure of a store path is the set of store paths that are
directly or indirectly “reachable” from that store path; that is,
it’s the closure of the path under the *references* relation. For
@@ -71,34 +71,34 @@
to path `Q`, then `Q` is in the closure of `P`. Further, if `Q`
references `R` then `R` is also in the closure of `P`.
- - output path\
+ - [output path]{#gloss-output-path}\
A store path produced by a derivation.
- - deriver\
+ - [deriver]{#gloss-deriver}\
The deriver of an *output path* is the store
derivation that built it.
- - validity\
+ - [validity]{#gloss-validity}\
A store path is considered *valid* if it exists in the file system,
is listed in the Nix database as being valid, and if all paths in
its closure are also valid.
- - user environment\
+ - [user environment]{#gloss-user-env}\
An automatically generated store object that consists of a set of
symlinks to “active” applications, i.e., other store paths. These
are generated automatically by
[`nix-env`](command-ref/nix-env.md). See *profiles*.
- - profile\
+ - [profile]{#gloss-profile}\
A symlink to the current *user environment* of a user, e.g.,
`/nix/var/nix/profiles/default`.
- - NAR\
+ - [NAR]{#gloss-nar}\
A *N*ix *AR*chive. This is a serialisation of a path in the Nix
store. It can contain regular files, directories and symbolic
links. NARs are generated and unpacked using `nix-store --dump`
and `nix-store --restore`.
- - `∅` \
+ - [`∅`]{#gloss-empty-set}\
The empty set symbol. In the context of profile history, this denotes a package is not present in a particular version of the profile.
- - `ε` \
+ - [`ε`]{#gloss-epsilon}\
The epsilon symbol. In the context of a package, this means the version is empty. More precisely, the derivation does not have a version attribute.
diff --git a/doc/manual/src/installation/installing-binary.md b/doc/manual/src/installation/installing-binary.md
index e5fb50088..2d007ca1b 100644
--- a/doc/manual/src/installation/installing-binary.md
+++ b/doc/manual/src/installation/installing-binary.md
@@ -13,7 +13,7 @@ for your platform:
- multi-user on macOS
> **Notes on read-only filesystem root in macOS 10.15 Catalina +**
- >
+ >
> - It took some time to support this cleanly. You may see posts,
> examples, and tutorials using obsolete workarounds.
> - Supporting it cleanly made macOS installs too complex to qualify
@@ -31,8 +31,8 @@ $ sh <(curl -L https://nixos.org/nix/install) --no-daemon
```
This will perform a single-user installation of Nix, meaning that `/nix`
-is owned by the invoking user. You should run this under your usual user
-account, *not* as root. The script will invoke `sudo` to create `/nix`
+is owned by the invoking user. You can run this under your usual user
+account or root. The script will invoke `sudo` to create `/nix`
if it doesn’t already exist. If you don’t have `sudo`, you should
manually create `/nix` first as root, e.g.:
@@ -71,11 +71,11 @@ $ sh <(curl -L https://nixos.org/nix/install) --daemon
The multi-user installation of Nix will create build users between the
user IDs 30001 and 30032, and a group with the group ID 30000. You
-should run this under your usual user account, *not* as root. The script
+can run this under your usual user account or root. The script
will invoke `sudo` as needed.
> **Note**
->
+>
> If you need Nix to use a different group ID or user ID set, you will
> have to download the tarball manually and [edit the install
> script](#installing-from-a-binary-tarball).
@@ -148,7 +148,8 @@ and `/etc/zshrc` which you may remove.
This will remove all the build users that no longer serve a purpose.
4. Edit fstab using `sudo vifs` to remove the line mounting the Nix Store
- volume on `/nix`, which looks like this,
+ volume on `/nix`, which looks like
+ `UUID=<uuid> /nix apfs rw,noauto,nobrowse,suid,owners` or
`LABEL=Nix\040Store /nix apfs rw,nobrowse`. This will prevent automatic
mounting of the Nix Store volume.
@@ -167,7 +168,7 @@ and `/etc/zshrc` which you may remove.
removed next.
7. Remove the Nix Store volume:
-
+
```console
sudo diskutil apfs deleteVolume /nix
```
@@ -175,8 +176,20 @@ and `/etc/zshrc` which you may remove.
This will remove the Nix Store volume and everything that was added to the
store.
+ If the output indicates that the command couldn't remove the volume, you should
+ make sure you don't have an _unmounted_ Nix Store volume. Look for a
+ "Nix Store" volume in the output of the following command:
+
+ ```console
+ diskutil list
+ ```
+
+ If you _do_ see a "Nix Store" volume, delete it by re-running the diskutil
+ deleteVolume command, but replace `/nix` with the store volume's `diskXsY`
+ identifier.
+
> **Note**
->
+>
> After you complete the steps here, you will still have an empty `/nix`
> directory. This is an expected sign of a successful uninstall. The empty
> `/nix` directory will disappear the next time you reboot.
@@ -186,12 +199,12 @@ and `/etc/zshrc` which you may remove.
> read-only root will prevent you from manually deleting the empty `/nix`
> mountpoint.
-# macOS Installation <a name="sect-macos-installation-change-store-prefix"></a><a name="sect-macos-installation-encrypted-volume"></a><a name="sect-macos-installation-symlink"></a><a name="sect-macos-installation-recommended-notes"></a>
+# macOS Installation
+[]{#sect-macos-installation-change-store-prefix}[]{#sect-macos-installation-encrypted-volume}[]{#sect-macos-installation-symlink}[]{#sect-macos-installation-recommended-notes}
<!-- Note: anchors above to catch permalinks to old explanations -->
We believe we have ironed out how to cleanly support the read-only root
-on modern macOS. New installs will do this automatically, and you can
-also re-run a new installer to convert your existing setup.
+on modern macOS. New installs will do this automatically.
This section previously detailed the situation, options, and trade-offs,
but it now only outlines what the installer does. You don't need to know
diff --git a/doc/manual/src/expressions/advanced-attributes.md b/doc/manual/src/language/advanced-attributes.md
index 000595815..2e7e80ed0 100644
--- a/doc/manual/src/expressions/advanced-attributes.md
+++ b/doc/manual/src/language/advanced-attributes.md
@@ -2,7 +2,7 @@
Derivations can declare some infrequently used optional attributes.
- - `allowedReferences`\
+ - [`allowedReferences`]{#adv-attr-allowedReferences}\
The optional attribute `allowedReferences` specifies a list of legal
references (dependencies) of the output of the builder. For example,
@@ -17,7 +17,7 @@ Derivations can declare some infrequently used optional attributes.
booting Linux don’t have accidental dependencies on other paths in
the Nix store.
- - `allowedRequisites`\
+ - [`allowedRequisites`]{#adv-attr-allowedRequisites}\
This attribute is similar to `allowedReferences`, but it specifies
the legal requisites of the whole closure, so all the dependencies
recursively. For example,
@@ -30,7 +30,7 @@ Derivations can declare some infrequently used optional attributes.
runtime dependency than `foobar`, and in addition it enforces that
`foobar` itself doesn't introduce any other dependency itself.
- - `disallowedReferences`\
+ - [`disallowedReferences`]{#adv-attr-disallowedReferences}\
The optional attribute `disallowedReferences` specifies a list of
illegal references (dependencies) of the output of the builder. For
example,
@@ -42,7 +42,7 @@ Derivations can declare some infrequently used optional attributes.
enforces that the output of a derivation cannot have a direct
runtime dependencies on the derivation `foo`.
- - `disallowedRequisites`\
+ - [`disallowedRequisites`]{#adv-attr-disallowedRequisites}\
This attribute is similar to `disallowedReferences`, but it
specifies illegal requisites for the whole closure, so all the
dependencies recursively. For example,
@@ -55,7 +55,7 @@ Derivations can declare some infrequently used optional attributes.
dependency on `foobar` or any other derivation depending recursively
on `foobar`.
- - `exportReferencesGraph`\
+ - [`exportReferencesGraph`]{#adv-attr-exportReferencesGraph}\
This attribute allows builders access to the references graph of
their inputs. The attribute is a list of inputs in the Nix store
whose references graph the builder needs to know. The value of
@@ -84,7 +84,7 @@ Derivations can declare some infrequently used optional attributes.
with a Nix store containing the closure of a bootable NixOS
configuration).
- - `impureEnvVars`\
+ - [`impureEnvVars`]{#adv-attr-impureEnvVars}\
This attribute allows you to specify a list of environment variables
that should be passed from the environment of the calling user to
the builder. Usually, the environment is cleared completely when the
@@ -112,7 +112,7 @@ Derivations can declare some infrequently used optional attributes.
> environmental variables come from the environment of the
> `nix-build`.
- - `outputHash`; `outputHashAlgo`; `outputHashMode`\
+ - [`outputHash`]{#adv-attr-outputHash}; [`outputHashAlgo`]{#adv-attr-outputHashAlgo}; [`outputHashMode`]{#adv-attr-outputHashMode}\
These attributes declare that the derivation is a so-called
*fixed-output derivation*, which means that a cryptographic hash of
the output is already known in advance. When the build of a
@@ -208,7 +208,7 @@ Derivations can declare some infrequently used optional attributes.
[`nix-hash` command](../command-ref/nix-hash.md) for information
about converting to and from base-32 notation.)
- - `__contentAddressed`
+ - [`__contentAddressed`]{#adv-attr-__contentAddressed}
If this **experimental** attribute is set to true, then the derivation
outputs will be stored in a content-addressed location rather than the
traditional input-addressed one.
@@ -216,7 +216,7 @@ Derivations can declare some infrequently used optional attributes.
Setting this attribute also requires setting `outputHashMode` and `outputHashAlgo` like for *fixed-output derivations* (see above).
- - `passAsFile`\
+ - [`passAsFile`]{#adv-attr-passAsFile}\
A list of names of attributes that should be passed via files rather
than environment variables. For example, if you have
@@ -234,7 +234,7 @@ Derivations can declare some infrequently used optional attributes.
builder, since most operating systems impose a limit on the size
of the environment (typically, a few hundred kilobyte).
- - `preferLocalBuild`\
+ - [`preferLocalBuild`]{#adv-attr-preferLocalBuild}\
If this attribute is set to `true` and [distributed building is
enabled](../advanced-topics/distributed-builds.md), then, if
possible, the derivation will be built locally instead of forwarded
@@ -242,7 +242,7 @@ Derivations can declare some infrequently used optional attributes.
where the cost of doing a download or remote build would exceed
the cost of building locally.
- - `allowSubstitutes`\
+ - [`allowSubstitutes`]{#adv-attr-allowSubstitutes}\
If this attribute is set to `false`, then Nix will always build this
derivation; it will not try to substitute its outputs. This is
useful for very trivial derivations (such as `writeText` in Nixpkgs)
diff --git a/doc/manual/src/expressions/builtin-constants.md b/doc/manual/src/language/builtin-constants.md
index 1404289e5..78d066a82 100644
--- a/doc/manual/src/expressions/builtin-constants.md
+++ b/doc/manual/src/language/builtin-constants.md
@@ -14,7 +14,7 @@ Here are the constants built into the Nix expression evaluator:
This allows a Nix expression to fall back gracefully on older Nix
installations that don’t have the desired built-in function.
- - `builtins.currentSystem`\
+ - [`builtins.currentSystem`]{#builtins-currentSystem}\
The built-in value `currentSystem` evaluates to the Nix platform
identifier for the Nix installation on which the expression is being
evaluated, such as `"i686-linux"` or `"x86_64-darwin"`.
diff --git a/doc/manual/src/expressions/builtins-prefix.md b/doc/manual/src/language/builtins-prefix.md
index c631a8453..c631a8453 100644
--- a/doc/manual/src/expressions/builtins-prefix.md
+++ b/doc/manual/src/language/builtins-prefix.md
diff --git a/doc/manual/src/expressions/builtins-suffix.md b/doc/manual/src/language/builtins-suffix.md
index a74db2857..a74db2857 100644
--- a/doc/manual/src/expressions/builtins-suffix.md
+++ b/doc/manual/src/language/builtins-suffix.md
diff --git a/doc/manual/src/expressions/language-constructs.md b/doc/manual/src/language/constructs.md
index 1c01f2cc7..1c01f2cc7 100644
--- a/doc/manual/src/expressions/language-constructs.md
+++ b/doc/manual/src/language/constructs.md
diff --git a/doc/manual/src/expressions/derivations.md b/doc/manual/src/language/derivations.md
index d26a33b7f..3391ec0d8 100644
--- a/doc/manual/src/expressions/derivations.md
+++ b/doc/manual/src/language/derivations.md
@@ -4,7 +4,7 @@ The most important built-in function is `derivation`, which is used to
describe a single derivation (a build action). It takes as input a set,
the attributes of which specify the inputs of the build.
- - There must be an attribute named `system` whose value must be a
+ - There must be an attribute named [`system`]{#attr-system} whose value must be a
string specifying a Nix system type, such as `"i686-linux"` or
`"x86_64-darwin"`. (To figure out your system type, run `nix -vv
--version`.) The build can only be performed on a machine and
diff --git a/doc/manual/src/language/index.md b/doc/manual/src/language/index.md
new file mode 100644
index 000000000..a4b402f8b
--- /dev/null
+++ b/doc/manual/src/language/index.md
@@ -0,0 +1,33 @@
+# Nix Language
+
+The Nix language is
+
+- *domain-specific*
+
+ It only exists for the Nix package manager:
+ to describe packages and configurations as well as their variants and compositions.
+ It is not intended for general purpose use.
+
+- *declarative*
+
+ There is no notion of executing sequential steps.
+ Dependencies between operations are established only through data.
+
+- *pure*
+
+ Values cannot change during computation.
+ Functions always produce the same output if their input does not change.
+
+- *functional*
+
+ Functions are like any other value.
+ Functions can be assigned to names, taken as arguments, or returned by functions.
+
+- *lazy*
+
+ Expressions are only evaluated when their value is needed.
+
+- *dynamically typed*
+
+ Type errors are only detected when expressions are evaluated.
+
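+For instance (a small illustrative sketch, not part of the original page), laziness
+and dynamic typing combine in the following expression, which evaluates without
+error because the ill-typed binding is never needed:
+
+```nix
+let x = 1 + "not a number";   # a type error, but only if x is ever evaluated
+in "x is never needed, so this string is the result"
+```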
diff --git a/doc/manual/src/expressions/language-operators.md b/doc/manual/src/language/operators.md
index 268b44f4c..32398189d 100644
--- a/doc/manual/src/expressions/language-operators.md
+++ b/doc/manual/src/language/operators.md
@@ -1,6 +1,6 @@
# Operators
-The table below lists the operators in the Nix expression language, in
+The table below lists the operators in the Nix language, in
order of precedence (from strongest to weakest binding).
| Name | Syntax | Associativity | Description | Precedence |
diff --git a/doc/manual/src/language/values.md b/doc/manual/src/language/values.md
new file mode 100644
index 000000000..f09400d02
--- /dev/null
+++ b/doc/manual/src/language/values.md
@@ -0,0 +1,261 @@
+# Data Types
+
+## Primitives
+
+- <a id="type-string" href="#type-string">String</a>
+
+ *Strings* can be written in three ways.
+
+ The most common way is to enclose the string between double quotes,
+ e.g., `"foo bar"`. Strings can span multiple lines. The special
+ characters `"` and `\` and the character sequence `${` must be
+ escaped by prefixing them with a backslash (`\`). Newlines, carriage
+ returns and tabs can be written as `\n`, `\r` and `\t`,
+ respectively.
+
+ You can include the result of an expression into a string by
+ enclosing it in `${...}`, a feature known as *antiquotation*. The
+ enclosed expression must evaluate to something that can be coerced
+ into a string (meaning that it must be a string, a path, or a
+ derivation). For instance, rather than writing
+
+ ```nix
+ "--with-freetype2-library=" + freetype + "/lib"
+ ```
+
+ (where `freetype` is a derivation), you can instead write the more
+ natural
+
+ ```nix
+ "--with-freetype2-library=${freetype}/lib"
+ ```
+
+ The latter is automatically translated to the former. A more
+ complicated example (from the Nix expression for
+ [Qt](http://www.trolltech.com/products/qt)):
+
+ ```nix
+ configureFlags = "
+ -system-zlib -system-libpng -system-libjpeg
+ ${if openglSupport then "-dlopen-opengl
+ -L${mesa}/lib -I${mesa}/include
+ -L${libXmu}/lib -I${libXmu}/include" else ""}
+ ${if threadSupport then "-thread" else "-no-thread"}
+ ";
+ ```
+
+ Note that Nix expressions and strings can be arbitrarily nested; in
+ this case the outer string contains various antiquotations that
+ themselves contain strings (e.g., `"-thread"`), some of which in
+ turn contain expressions (e.g., `${mesa}`).
+
+ The second way to write string literals is as an *indented string*,
+ which is enclosed between pairs of *double single-quotes*, like so:
+
+ ```nix
+ ''
+ This is the first line.
+ This is the second line.
+ This is the third line.
+ ''
+ ```
+
+ This kind of string literal intelligently strips indentation from
+ the start of each line. To be precise, it strips from each line a
+ number of spaces equal to the minimal indentation of the string as a
+  whole (disregarding the indentation of empty lines). For instance, in
+  the example above, the first and second lines are indented two spaces,
+  while the third line is indented four spaces. Thus, two spaces are
+  stripped from
+ each line, so the resulting string is
+
+ ```nix
+ "This is the first line.\nThis is the second line.\n This is the third line.\n"
+ ```
+
+  Note that the whitespace and newline following the opening `''` are
+  ignored if there is no non-whitespace text on the initial line.
+
+ Antiquotation (`${expr}`) is supported in indented strings.
+
+  Since `${` and `''` have special meaning in indented strings, you
+  need a way to quote them. `$` can be escaped by prefixing it with
+  `''` (that is, two single quotes): `''$` stands for a literal `$`
+  and removes any special meaning from a `${` that follows. `''`
+  itself can be escaped by prefixing it with `'`, i.e., `'''` stands
+  for a literal `''`. Linefeed, carriage-return and tab characters can
+  be written as `''\n`, `''\r` and `''\t`, and `''\` escapes any other
+  character.
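+
+  For illustration (a sketch not taken from the original text), an indented
+  string using each of these escapes could look like:
+
+  ```nix
+  ''
+    dollar-curly: ''${not-antiquoted}
+    two single quotes: '''
+    a tab:''\tdone
+  ''
+  ```
+
+  which would evaluate to
+  `"dollar-curly: ${not-antiquoted}\ntwo single quotes: ''\na tab:\tdone\n"`.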
+
+  Indented strings are primarily useful because they allow multi-line
+  string literals to follow the indentation of the enclosing Nix
+  expression, and because less escaping is typically necessary for
+  strings representing languages such as shell scripts and
+  configuration files, since `''` is much less common than `"`.
+ Example:
+
+ ```nix
+ stdenv.mkDerivation {
+ ...
+ postInstall =
+ ''
+ mkdir $out/bin $out/etc
+ cp foo $out/bin
+ echo "Hello World" > $out/etc/foo.conf
+ ${if enableBar then "cp bar $out/bin" else ""}
+ '';
+ ...
+ }
+ ```
+
+ Finally, as a convenience, *URIs* as defined in appendix B of
+ [RFC 2396](http://www.ietf.org/rfc/rfc2396.txt) can be written *as
+ is*, without quotes. For instance, the string
+ `"http://example.org/foo.tar.bz2"` can also be written as
+ `http://example.org/foo.tar.bz2`.
+
+- <a id="type-number" href="#type-number">Number</a>
+
+ Numbers, which can be *integers* (like `123`) or *floating point*
+ (like `123.43` or `.27e13`).
+
+ Numbers are type-compatible: pure integer operations will always
+ return integers, whereas any operation involving at least one
+ floating point number will have a floating point number as a result.
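+
+  For instance (a small illustrative sketch, not part of the original text):
+
+  ```nix
+  [
+    (1 + 2)      # pure integer operation: the integer 3
+    (1 + 2.0)    # mixed operation: a floating point result
+    (5 / 2)      # pure integer operation: the integer 2
+  ]
+  ```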
+
+- <a id="type-path" href="#type-path">Path</a>
+
+ *Paths*, e.g., `/bin/sh` or `./builder.sh`. A path must contain at
+ least one slash to be recognised as such. For instance, `builder.sh`
+ is not a path: it's parsed as an expression that selects the
+ attribute `sh` from the variable `builder`. If the file name is
+ relative, i.e., if it does not begin with a slash, it is made
+ absolute at parse time relative to the directory of the Nix
+ expression that contained it. For instance, if a Nix expression in
+ `/foo/bar/bla.nix` refers to `../xyzzy/fnord.nix`, the absolute path
+ is `/foo/xyzzy/fnord.nix`.
+
+  If the first component of a path is a `~`, it is interpreted as if
+  the rest of the path were relative to the user's home directory.
+  For example, `~/foo` is equivalent to `/home/edolstra/foo` for a user
+  whose home directory is `/home/edolstra`.
+
+ Paths can also be specified between angle brackets, e.g.
+ `<nixpkgs>`. This means that the directories listed in the
+ environment variable `NIX_PATH` will be searched for the given file
+ or directory name.
+
+  Antiquotation is supported in paths, except for those in angle brackets.
+ `./${foo}-${bar}.nix` is a more convenient way of writing
+ `./. + "/" + foo + "-" + bar + ".nix"` or `./. + "/${foo}-${bar}.nix"`. At
+ least one slash must appear *before* any antiquotations for this to be
+ recognized as a path. `a.${foo}/b.${bar}` is a syntactically valid division
+ operation. `./a.${foo}/b.${bar}` is a path.
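+
+  As a sketch (binding `foo` and `bar` to arbitrary example values), both
+  spellings above denote the same path:
+
+  ```nix
+  let
+    foo = "config";
+    bar = "prod";
+  in
+    ./${foo}-${bar}.nix == (./. + "/${foo}-${bar}.nix")   # evaluates to true
+  ```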
+
+- <a id="type-boolean" href="#type-boolean">Boolean</a>
+
+ *Booleans* with values `true` and `false`.
+
+- <a id="type-null" href="#type-null">Null</a>
+
+ The null value, denoted as `null`.
+
+## List
+
+Lists are formed by enclosing a whitespace-separated list of values
+between square brackets. For example,
+
+```nix
+[ 123 ./foo.nix "abc" (f { x = y; }) ]
+```
+
+defines a list of four elements, the last being the result of a call to
+the function `f`. Note that function calls have to be enclosed in
+parentheses. If they had been omitted, e.g.,
+
+```nix
+[ 123 ./foo.nix "abc" f { x = y; } ]
+```
+
+the result would be a list of five elements, the fourth one being a
+function and the fifth being a set.
+
+Note that lists are only lazy in values, and they are strict in length.
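+
+As a small sketch (not from the original text) of what this laziness means in practice:
+
+```nix
+let xs = [ (throw "never evaluated") 2 3 ];
+in builtins.length xs   # evaluates to 3; the first element is never forced
+```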
+
+## Attribute Set
+
+An attribute set is a collection of name-value pairs (called *attributes*) enclosed in curly brackets (`{ }`).
+
+Names and values are separated by an equal sign (`=`).
+Each value is an arbitrary expression terminated by a semicolon (`;`).
+
+Attributes can appear in any order.
+An attribute name may only occur once.
+
+Example:
+
+```nix
+{
+ x = 123;
+ text = "Hello";
+ y = f { bla = 456; };
+}
+```
+
+This defines a set with attributes named `x`, `text`, and `y`.
+
+Attributes can be selected from a set using the `.` operator. For
+instance,
+
+```nix
+{ a = "Foo"; b = "Bar"; }.a
+```
+
+evaluates to `"Foo"`. It is possible to provide a default value in an
+attribute selection using the `or` keyword. For example,
+
+```nix
+{ a = "Foo"; b = "Bar"; }.c or "Xyzzy"
+```
+
+will evaluate to `"Xyzzy"` because there is no `c` attribute in the set.
+
+You can use arbitrary double-quoted strings as attribute names:
+
+```nix
+{ "foo ${bar}" = 123; "nix-1.0" = 456; }."foo ${bar}"
+```
+
+This will evaluate to `123` (assuming `bar` is antiquotable). In the
+case where an attribute name is just a single antiquotation, the quotes
+can be dropped:
+
+```nix
+{ foo = 123; }.${bar} or 456
+```
+
+This will evaluate to `123` if `bar` evaluates to `"foo"` when coerced
+to a string and `456` otherwise (again assuming `bar` is antiquotable).
+
+In the special case where an attribute name inside of a set declaration
+evaluates to `null` (which is normally an error, as `null` is not
+antiquotable), that attribute is simply not added to the set:
+
+```nix
+{ ${if foo then "bar" else null} = true; }
+```
+
+This will evaluate to `{}` if `foo` evaluates to `false`.
+
+A set that has a `__functor` attribute whose value is callable (i.e. is
+itself a function or a set with a `__functor` attribute whose value is
+callable) can be applied as if it were a function, with the set itself
+passed in first, e.g.,
+
+```nix
+let add = { __functor = self: x: x + self.x; };
+ inc = add // { x = 1; };
+in inc 1
+```
+
+evaluates to `2`. This can be used to attach metadata to a function
+without the caller needing to treat it specially, or to implement a form
+of object-oriented programming, for example.
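+
+As a sketch of the metadata use case (the attribute names below are invented
+for illustration):
+
+```nix
+let
+  greet = {
+    __functor = self: name: "${self.greeting}, ${name}!";
+    greeting = "Hello";
+    description = "greets someone by name";   # metadata; callers can ignore it
+  };
+in greet "world"   # evaluates to "Hello, world!"
+```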
diff --git a/doc/manual/src/package-management/package-management.md b/doc/manual/src/package-management/package-management.md
index bd26a09ab..d528112e2 100644
--- a/doc/manual/src/package-management/package-management.md
+++ b/doc/manual/src/package-management/package-management.md
@@ -1,5 +1,4 @@
This chapter discusses how to do package management with Nix, i.e.,
how to obtain, install, upgrade, and erase packages. This is the
“user’s” perspective of the Nix system — people who want to *create*
-packages should consult the [chapter on writing Nix
-expressions](../expressions/writing-nix-expressions.md).
+packages should consult the chapter on the [Nix language](../language/index.md).
diff --git a/doc/manual/src/release-notes/rl-2.10.md b/doc/manual/src/release-notes/rl-2.10.md
new file mode 100644
index 000000000..b99dbeef0
--- /dev/null
+++ b/doc/manual/src/release-notes/rl-2.10.md
@@ -0,0 +1,31 @@
+# Release 2.10 (2022-07-11)
+
+* `nix repl` now takes installables on the command line, unifying the usage
+  with other commands that use `--file` and `--expr`. The primary breaking change
+  is for the common usage of `nix repl '<nixpkgs>'`, which can be recovered with
+ `nix repl --file '<nixpkgs>'` or `nix repl --expr 'import <nixpkgs>{}'`.
+
+ This is currently guarded by the `repl-flake` experimental feature.
+
+* A new function `builtins.traceVerbose` is available. It is similar
+ to `builtins.trace` if the `trace-verbose` setting is set to true,
+ and it is a no-op otherwise.
+
+* `nix search` has a new flag `--exclude` to filter out packages.
+
+* On Linux, if `/nix` doesn't exist and cannot be created and you're
+ not running as root, Nix will automatically use
+ `~/.local/share/nix/root` as a chroot store. This enables non-root
+ users to download the statically linked Nix binary and have it work
+ out of the box, e.g.
+
+ ```
+ # ~/nix run nixpkgs#hello
+ warning: '/nix' does not exists, so Nix will use '/home/ubuntu/.local/share/nix/root' as a chroot store
+ Hello, world!
+ ```
+
+* `flake-registry.json` is now fetched from `channels.nixos.org`.
+
+* Nix can now be built with LTO by passing `--enable-lto` to `configure`.
+ LTO is currently only supported when building with GCC.
diff --git a/doc/manual/src/release-notes/rl-2.11.md b/doc/manual/src/release-notes/rl-2.11.md
new file mode 100644
index 000000000..b322a4e5e
--- /dev/null
+++ b/doc/manual/src/release-notes/rl-2.11.md
@@ -0,0 +1,5 @@
+# Release 2.11 (2022-08-24)
+
+* `nix copy` now copies the store paths in parallel as much as possible (again).
+  This doesn't apply to the `daemon` and `ssh-ng` stores, which copy everything
+  in one batch to avoid latency issues.
diff --git a/doc/manual/src/release-notes/rl-2.9.md b/doc/manual/src/release-notes/rl-2.9.md
new file mode 100644
index 000000000..98cc4235d
--- /dev/null
+++ b/doc/manual/src/release-notes/rl-2.9.md
@@ -0,0 +1,47 @@
+# Release 2.9 (2022-05-30)
+
+* Running Nix with the new `--debugger` flag will cause it to start a
+ repl session if an exception is thrown during evaluation, or if
+ `builtins.break` is called. From there you can inspect the values
+ of variables and evaluate Nix expressions. In debug mode, the
+ following new repl commands are available:
+
+ ```
+ :env Show env stack
+ :bt Show trace stack
+ :st Show current trace
+ :st <idx> Change to another trace in the stack
+ :c Go until end of program, exception, or builtins.break().
+ :s Go one step
+ ```
+
+ Read more about the debugger
+ [here](https://www.zknotes.com/note/5970).
+
+* Nix now provides better integration with zsh's `run-help`
+ feature. It is now included in the Nix installation in the form of
+ an autoloadable shell function, `run-help-nix`. It picks up Nix
+ subcommands from the currently typed in command and directs the user
+ to the associated man pages.
+
+* `nix repl` has a new build-and-link (`:bl`) command that builds a
+ derivation while creating GC root symlinks.
+
+* The path produced by `builtins.toFile` is now allowed to be imported
+ or read even with restricted evaluation. Note that this will not
+ work with a read-only store.
+
+* `nix build` has a new `--print-out-paths` flag to print the
+ resulting output paths. This matches the default behaviour of
+ `nix-build`.
+
+* You can now specify which outputs of a derivation `nix` should
+ operate on using the syntax `installable^outputs`,
+ e.g. `nixpkgs#glibc^dev,static` or `nixpkgs#glibc^*`. By default,
+ `nix` will use the outputs specified by the derivation's
+ `meta.outputsToInstall` attribute if it exists, or all outputs
+ otherwise.
+
+* `builtins.fetchTree` (and flake inputs) can now be used to fetch
+ plain files over the `http(s)` and `file` protocols in addition to
+ directory tarballs.
diff --git a/doc/manual/src/release-notes/rl-next.md b/doc/manual/src/release-notes/rl-next.md
index f312f2f86..f25fce758 100644
--- a/doc/manual/src/release-notes/rl-next.md
+++ b/doc/manual/src/release-notes/rl-next.md
@@ -1,20 +1,5 @@
# Release X.Y (202?-??-??)
-* Nix now provides better integration with zsh's run-help feature. It is now
- included in the Nix installation in the form of an autoloadable shell
- function, run-help-nix. It picks up Nix subcommands from the currently typed
- in command and directs the user to the associated man pages.
-
-* `nix repl` has a new build-'n-link (`:bl`) command that builds a derivation
- while creating GC root symlinks.
-
-* The path produced by `builtins.toFile` is now allowed to be imported or read
- even with restricted evaluation. Note that this will not work with a
- read-only store.
-
-* `nix build` has a new `--print-out-paths` flag to print the resulting output paths.
- This matches the default behaviour of `nix-build`.
-
* Error traces have been reworked to provide detailed explanations and more
accurate error locations. A short excerpt of the trace is now shown by
default when an error occurs.
diff --git a/docker.nix b/docker.nix
index 0cd64856f..e95caf274 100644
--- a/docker.nix
+++ b/docker.nix
@@ -2,8 +2,12 @@
, lib ? pkgs.lib
, name ? "nix"
, tag ? "latest"
+, bundleNixpkgs ? true
, channelName ? "nixpkgs"
, channelURL ? "https://nixos.org/channels/nixpkgs-unstable"
+, extraPkgs ? []
+, maxLayers ? 100
+, nixConf ? {}
}:
let
defaultPkgs = with pkgs; [
@@ -23,7 +27,7 @@ let
iana-etc
git
openssh
- ];
+ ] ++ extraPkgs;
users = {
@@ -121,20 +125,27 @@ let
(lib.attrValues (lib.mapAttrs groupToGroup groups))
);
- nixConf = {
+ defaultNixConf = {
sandbox = "false";
build-users-group = "nixbld";
- trusted-public-keys = "cache.nixos.org-1:6NCHdD59X431o0gWypbMrAURkbJ16ZPMQFGspcDShjY=";
+ trusted-public-keys = [ "cache.nixos.org-1:6NCHdD59X431o0gWypbMrAURkbJ16ZPMQFGspcDShjY=" ];
};
- nixConfContents = (lib.concatStringsSep "\n" (lib.mapAttrsFlatten (n: v: "${n} = ${v}") nixConf)) + "\n";
+
+ nixConfContents = (lib.concatStringsSep "\n" (lib.mapAttrsFlatten (n: v:
+ let
+ vStr = if builtins.isList v then lib.concatStringsSep " " v else v;
+ in
+ "${n} = ${vStr}") (defaultNixConf // nixConf))) + "\n";
baseSystem =
let
nixpkgs = pkgs.path;
- channel = pkgs.runCommand "channel-nixos" { } ''
+ channel = pkgs.runCommand "channel-nixos" { inherit bundleNixpkgs; } ''
mkdir $out
- ln -s ${nixpkgs} $out/nixpkgs
- echo "[]" > $out/manifest.nix
+ if [ "$bundleNixpkgs" ]; then
+ ln -s ${nixpkgs} $out/nixpkgs
+ echo "[]" > $out/manifest.nix
+ fi
'';
rootEnv = pkgs.buildPackages.buildEnv {
name = "root-profile-env";
@@ -229,7 +240,7 @@ let
in
pkgs.dockerTools.buildLayeredImageWithNixDb {
- inherit name tag;
+ inherit name tag maxLayers;
contents = [ baseSystem ];
diff --git a/flake.lock b/flake.lock
index cd79fa85e..a66c9cb1b 100644
--- a/flake.lock
+++ b/flake.lock
@@ -18,17 +18,18 @@
},
"nixpkgs": {
"locked": {
- "lastModified": 1645296114,
- "narHash": "sha256-y53N7TyIkXsjMpOG7RhvqJFGDacLs9HlyHeSTBioqYU=",
+ "lastModified": 1657693803,
+ "narHash": "sha256-G++2CJ9u0E7NNTAi9n5G8TdDmGJXcIjkJ3NF8cetQB8=",
"owner": "NixOS",
"repo": "nixpkgs",
- "rev": "530a53dcbc9437363471167a5e4762c5fcfa34a1",
+ "rev": "365e1b3a859281cf11b94f87231adeabbdd878a2",
"type": "github"
},
"original": {
- "id": "nixpkgs",
- "ref": "nixos-21.05-small",
- "type": "indirect"
+ "owner": "NixOS",
+ "ref": "nixos-22.05-small",
+ "repo": "nixpkgs",
+ "type": "github"
}
},
"nixpkgs-regression": {
@@ -41,9 +42,10 @@
"type": "github"
},
"original": {
- "id": "nixpkgs",
+ "owner": "NixOS",
+ "repo": "nixpkgs",
"rev": "215d4d0fd80ca5163643b03a33fde804a29cc1e2",
- "type": "indirect"
+ "type": "github"
}
},
"root": {
diff --git a/flake.nix b/flake.nix
index 87b00edf4..cdb81179a 100644
--- a/flake.nix
+++ b/flake.nix
@@ -1,8 +1,8 @@
{
description = "The purely functional package manager";
- inputs.nixpkgs.url = "nixpkgs/nixos-21.05-small";
- inputs.nixpkgs-regression.url = "nixpkgs/215d4d0fd80ca5163643b03a33fde804a29cc1e2";
+ inputs.nixpkgs.url = "github:NixOS/nixpkgs/nixos-22.05-small";
+ inputs.nixpkgs-regression.url = "github:NixOS/nixpkgs/215d4d0fd80ca5163643b03a33fde804a29cc1e2";
inputs.lowdown-src = { url = "github:kristapsdz/lowdown"; flake = false; };
outputs = { self, nixpkgs, nixpkgs-regression, lowdown-src }:
@@ -23,7 +23,7 @@
crossSystems = [ "armv6l-linux" "armv7l-linux" ];
- stdenvs = [ "gccStdenv" "clangStdenv" "clang11Stdenv" "stdenv" ];
+ stdenvs = [ "gccStdenv" "clangStdenv" "clang11Stdenv" "stdenv" "libcxxStdenv" ];
forAllSystems = f: nixpkgs.lib.genAttrs systems (system: f system);
forAllSystemsAndStdenvs = f: forAllSystems (system:
@@ -36,7 +36,7 @@
)
);
- forAllStdenvs = stdenvs: f: nixpkgs.lib.genAttrs stdenvs (stdenv: f stdenv);
+ forAllStdenvs = f: nixpkgs.lib.genAttrs stdenvs (stdenv: f stdenv);
# Memoize nixpkgs for different platforms for efficiency.
nixpkgsFor =
@@ -54,7 +54,7 @@
# we want most of the time and for backwards compatibility
forAllSystems (system: stdenvsPackages.${system} // stdenvsPackages.${system}.stdenvPackages);
- commonDeps = pkgs: with pkgs; rec {
+ commonDeps = { pkgs, isStatic ? false }: with pkgs; rec {
# Use "busybox-sandbox-shell" if present,
# if not (legacy) fallback and hope it's sufficient.
sh = pkgs.busybox-sandbox-shell or (busybox.override {
@@ -85,10 +85,11 @@
lib.optionals stdenv.isLinux [
"--with-boost=${boost}/lib"
"--with-sandbox-shell=${sh}/bin/busybox"
+ ]
+ ++ lib.optionals (stdenv.isLinux && !(isStatic && stdenv.system == "aarch64-linux")) [
"LDFLAGS=-fuse-ld=gold"
];
-
nativeBuildDeps =
[
buildPackages.bison
@@ -102,12 +103,12 @@
# Tests
buildPackages.git
buildPackages.mercurial # FIXME: remove? only needed for tests
- buildPackages.jq
+ buildPackages.jq # Also for custom mdBook preprocessor.
]
++ lib.optionals stdenv.hostPlatform.isLinux [(buildPackages.util-linuxMinimal or buildPackages.utillinuxMinimal)];
buildDeps =
- [ curl
+ [ (curl.override { patchNetrcRegression = true; })
bzip2 xz brotli editline
openssl sqlite
libarchive
@@ -135,11 +136,6 @@
}))
nlohmann_json
];
-
- perlDeps =
- [ perl
- perlPackages.DBDSQLite
- ];
};
installScriptFor = systems:
@@ -176,7 +172,7 @@
echo "file installer $out/install" >> $out/nix-support/hydra-build-products
'';
- testNixVersions = pkgs: client: daemon: with commonDeps pkgs; with pkgs.lib; pkgs.stdenv.mkDerivation {
+ testNixVersions = pkgs: client: daemon: with commonDeps { inherit pkgs; }; with pkgs.lib; pkgs.stdenv.mkDerivation {
NIX_DAEMON_PACKAGE = daemon;
NIX_CLIENT_PACKAGE = client;
name =
@@ -264,6 +260,7 @@
echo "file binary-dist $fn" >> $out/nix-support/hydra-build-products
tar cvfJ $fn \
--owner=0 --group=0 --mode=u+rw,uga+r \
+ --mtime='1970-01-01' \
--absolute-names \
--hard-dereference \
--transform "s,$TMPDIR/install,$dir/install," \
@@ -287,7 +284,7 @@
# Forward from the previous stage as we don’t want it to pick the lowdown override
nixUnstable = prev.nixUnstable;
- nix = with final; with commonDeps pkgs; currentStdenv.mkDerivation {
+ nix = with final; with commonDeps { inherit pkgs; }; currentStdenv.mkDerivation {
name = "nix-${version}";
inherit version;
@@ -319,6 +316,7 @@
for LIB in $out/lib/*.dylib; do
chmod u+w $LIB
install_name_tool -id $LIB $LIB
+ install_name_tool -delete_rpath ${boost}/lib/ $LIB || true
done
install_name_tool -change ${boost}/lib/libboost_system.dylib $out/lib/libboost_system.dylib $out/lib/libboost_thread.dylib
''}
@@ -353,7 +351,7 @@
strictDeps = true;
- passthru.perl-bindings = with final; currentStdenv.mkDerivation {
+ passthru.perl-bindings = with final; perl.pkgs.toPerlModule (currentStdenv.mkDerivation {
name = "nix-perl-${version}";
src = self;
@@ -366,7 +364,7 @@
buildInputs =
[ nix
- curl
+ (curl.override { patchNetrcRegression = true; })
bzip2
xz
pkgs.perl
@@ -375,16 +373,17 @@
++ lib.optional (currentStdenv.isLinux || currentStdenv.isDarwin) libsodium
++ lib.optional currentStdenv.isDarwin darwin.apple_sdk.frameworks.Security;
- configureFlags = ''
- --with-dbi=${perlPackages.DBI}/${pkgs.perl.libPrefix}
- --with-dbd-sqlite=${perlPackages.DBDSQLite}/${pkgs.perl.libPrefix}
- '';
+ configureFlags = [
+ "--with-dbi=${perlPackages.DBI}/${pkgs.perl.libPrefix}"
+ "--with-dbd-sqlite=${perlPackages.DBDSQLite}/${pkgs.perl.libPrefix}"
+ ];
enableParallelBuilding = true;
postUnpack = "sourceRoot=$sourceRoot/perl";
- };
+ });
+ meta.platforms = systems;
};
lowdown-nix = with final; currentStdenv.mkDerivation rec {
@@ -409,7 +408,7 @@
# A Nixpkgs overlay that overrides the 'nix' and
# 'nix.perl-bindings' packages.
- overlay = overlayFor (p: p.stdenv);
+ overlays.default = overlayFor (p: p.stdenv);
hydraJobs = {
@@ -434,7 +433,7 @@
value = let
nixpkgsCross = import nixpkgs {
inherit system crossSystem;
- overlays = [ self.overlay ];
+ overlays = [ self.overlays.default ];
};
in binaryTarball nixpkgsFor.${system} self.packages.${system}."nix-${crossSystem}" nixpkgsCross;
}) crossSystems));
@@ -452,7 +451,7 @@
# Line coverage analysis.
coverage =
with nixpkgsFor.x86_64-linux;
- with commonDeps pkgs;
+ with commonDeps { inherit pkgs; };
releaseTools.coverageAnalysis {
name = "nix-coverage-${version}";
@@ -480,31 +479,31 @@
tests.remoteBuilds = import ./tests/remote-builds.nix {
system = "x86_64-linux";
inherit nixpkgs;
- inherit (self) overlay;
+ overlay = self.overlays.default;
};
tests.nix-copy-closure = import ./tests/nix-copy-closure.nix {
system = "x86_64-linux";
inherit nixpkgs;
- inherit (self) overlay;
+ overlay = self.overlays.default;
};
tests.nssPreload = (import ./tests/nss-preload.nix rec {
system = "x86_64-linux";
inherit nixpkgs;
- inherit (self) overlay;
+ overlay = self.overlays.default;
});
tests.githubFlakes = (import ./tests/github-flakes.nix rec {
system = "x86_64-linux";
inherit nixpkgs;
- inherit (self) overlay;
+ overlay = self.overlays.default;
});
tests.sourcehutFlakes = (import ./tests/sourcehut-flakes.nix rec {
system = "x86_64-linux";
inherit nixpkgs;
- inherit (self) overlay;
+ overlay = self.overlays.default;
});
tests.setuid = nixpkgs.lib.genAttrs
@@ -512,7 +511,7 @@
(system:
import ./tests/setuid.nix rec {
inherit nixpkgs system;
- inherit (self) overlay;
+ overlay = self.overlays.default;
});
# Make sure that nix-env still produces the exact same result
@@ -557,12 +556,13 @@
dockerImage = self.hydraJobs.dockerImage.${system};
});
- packages = forAllSystems (system: {
+ packages = forAllSystems (system: rec {
inherit (nixpkgsFor.${system}) nix;
+ default = nix;
} // (nixpkgs.lib.optionalAttrs (builtins.elem system linux64BitSystems) {
nix-static = let
nixpkgs = nixpkgsFor.${system}.pkgsStatic;
- in with commonDeps nixpkgs; nixpkgs.stdenv.mkDerivation {
+ in with commonDeps { pkgs = nixpkgs; isStatic = true; }; nixpkgs.stdenv.mkDerivation {
name = "nix-${version}";
src = self;
@@ -574,14 +574,24 @@
nativeBuildInputs = nativeBuildDeps;
buildInputs = buildDeps ++ propagatedDeps;
- configureFlags = [ "--sysconfdir=/etc" ];
+ # Work around pkgsStatic disabling all tests.
+ # Remove in NixOS 22.11, see https://github.com/NixOS/nixpkgs/pull/140271.
+ preHook =
+ ''
+ doCheck=1
+ doInstallCheck=1
+ '';
+
+ configureFlags =
+ configureFlags ++
+ [ "--sysconfdir=/etc"
+ "--enable-embedded-sandbox-shell"
+ ];
enableParallelBuilding = true;
makeFlags = "profiledir=$(out)/etc/profile.d";
- doCheck = true;
-
installFlags = "sysconfdir=$(out)/etc";
postInstall = ''
@@ -591,7 +601,6 @@
echo "file binary-dist $out/bin/nix" >> $out/nix-support/hydra-build-products
'';
- doInstallCheck = true;
installCheckFlags = "sysconfdir=$(out)/etc";
stripAllList = ["bin"];
@@ -600,6 +609,7 @@
hardeningDisable = [ "pie" ];
};
+
dockerImage =
let
pkgs = nixpkgsFor.${system};
@@ -614,14 +624,16 @@
ln -s ${image} $image
echo "file binary-dist $image" >> $out/nix-support/hydra-build-products
'';
- } // builtins.listToAttrs (map (crossSystem: {
+ }
+
+ // builtins.listToAttrs (map (crossSystem: {
name = "nix-${crossSystem}";
value = let
nixpkgsCross = import nixpkgs {
inherit system crossSystem;
- overlays = [ self.overlay ];
+ overlays = [ self.overlays.default ];
};
- in with commonDeps nixpkgsCross; nixpkgsCross.stdenv.mkDerivation {
+ in with commonDeps { pkgs = nixpkgsCross; }; nixpkgsCross.stdenv.mkDerivation {
name = "nix-${version}";
src = self;
@@ -653,44 +665,45 @@
doInstallCheck = true;
installCheckFlags = "sysconfdir=$(out)/etc";
};
- }) crossSystems)) // (builtins.listToAttrs (map (stdenvName:
+ }) (if system == "x86_64-linux" then crossSystems else [])))
+
+ // (builtins.listToAttrs (map (stdenvName:
nixpkgsFor.${system}.lib.nameValuePair
"nix-${stdenvName}"
nixpkgsFor.${system}."${stdenvName}Packages".nix
) stdenvs)));
- defaultPackage = forAllSystems (system: self.packages.${system}.nix);
-
- devShell = forAllSystems (system: self.devShells.${system}.stdenvPackages);
+ devShells = forAllSystems (system:
+ forAllStdenvs (stdenv:
+ with nixpkgsFor.${system};
+ with commonDeps { inherit pkgs; };
+ nixpkgsFor.${system}.${stdenv}.mkDerivation {
+ name = "nix";
- devShells = forAllSystemsAndStdenvs (system: stdenv:
- with nixpkgsFor.${system};
- with commonDeps pkgs;
-
- nixpkgsFor.${system}.${stdenv}.mkDerivation {
- name = "nix";
-
- outputs = [ "out" "dev" "doc" ];
+ outputs = [ "out" "dev" "doc" ];
- nativeBuildInputs = nativeBuildDeps;
- buildInputs = buildDeps ++ propagatedDeps ++ awsDeps ++ perlDeps;
+ nativeBuildInputs = nativeBuildDeps;
+ buildInputs = buildDeps ++ propagatedDeps ++ awsDeps;
- inherit configureFlags;
+ inherit configureFlags;
- enableParallelBuilding = true;
+ enableParallelBuilding = true;
- installFlags = "sysconfdir=$(out)/etc";
+ installFlags = "sysconfdir=$(out)/etc";
- shellHook =
- ''
- PATH=$prefix/bin:$PATH
- unset PYTHONPATH
- export MANPATH=$out/share/man:$MANPATH
+ shellHook =
+ ''
+ PATH=$prefix/bin:$PATH
+ unset PYTHONPATH
+ export MANPATH=$out/share/man:$MANPATH
- # Make bash completion work.
- XDG_DATA_DIRS+=:$out/share
- '';
- });
+ # Make bash completion work.
+ XDG_DATA_DIRS+=:$out/share
+ '';
+ }
+ )
+ // { default = self.devShells.${system}.stdenv; }
+ );
};
}
diff --git a/misc/systemd/nix-daemon.service.in b/misc/systemd/nix-daemon.service.in
index 24d894898..e3ac42beb 100644
--- a/misc/systemd/nix-daemon.service.in
+++ b/misc/systemd/nix-daemon.service.in
@@ -9,6 +9,7 @@ ConditionPathIsReadWrite=@localstatedir@/nix/daemon-socket
[Service]
ExecStart=@@bindir@/nix-daemon nix-daemon --daemon
KillMode=process
+LimitNOFILE=4096
[Install]
WantedBy=multi-user.target
diff --git a/misc/zsh/completion.zsh b/misc/zsh/completion.zsh
index e702c721e..f9b3dca74 100644
--- a/misc/zsh/completion.zsh
+++ b/misc/zsh/completion.zsh
@@ -10,14 +10,15 @@ function _nix() {
local -a suggestions
declare -a suggestions
for suggestion in ${res:1}; do
- # FIXME: This doesn't work properly if the suggestion word contains a `:`
- # itself
- suggestions+="${suggestion/ /:}"
+ suggestions+=("${suggestion%% *}")
done
+ local -a args
if [[ "$tpe" == filenames ]]; then
- compadd -f
+ args+=('-f')
+ elif [[ "$tpe" == attrs ]]; then
+ args+=('-S' '')
fi
- _describe 'nix' suggestions
+ compadd -J nix "${args[@]}" -a suggestions
}
_nix "$@"
diff --git a/mk/libraries.mk b/mk/libraries.mk
index ffd7b5610..6541775f3 100644
--- a/mk/libraries.mk
+++ b/mk/libraries.mk
@@ -91,7 +91,7 @@ define build-library
$(1)_PATH := $$(_d)/$$($(1)_NAME).$(SO_EXT)
$$($(1)_PATH): $$($(1)_OBJS) $$(_libs) | $$(_d)/
- $$(trace-ld) $(CXX) -o $$(abspath $$@) -shared $$(LDFLAGS) $$(GLOBAL_LDFLAGS) $$($(1)_OBJS) $$($(1)_LDFLAGS) $$($(1)_LDFLAGS_PROPAGATED) $$(foreach lib, $$($(1)_LIBS), $$($$(lib)_LDFLAGS_USE)) $$($(1)_LDFLAGS_UNINSTALLED)
+ +$$(trace-ld) $(CXX) -o $$(abspath $$@) -shared $$(LDFLAGS) $$(GLOBAL_LDFLAGS) $$($(1)_OBJS) $$($(1)_LDFLAGS) $$($(1)_LDFLAGS_PROPAGATED) $$(foreach lib, $$($(1)_LIBS), $$($$(lib)_LDFLAGS_USE)) $$($(1)_LDFLAGS_UNINSTALLED)
ifndef HOST_DARWIN
$(1)_LDFLAGS_USE += -Wl,-rpath,$$(abspath $$(_d))
@@ -105,7 +105,7 @@ define build-library
$$(eval $$(call create-dir, $$($(1)_INSTALL_DIR)))
$$($(1)_INSTALL_PATH): $$($(1)_OBJS) $$(_libs_final) | $(DESTDIR)$$($(1)_INSTALL_DIR)/
- $$(trace-ld) $(CXX) -o $$@ -shared $$(LDFLAGS) $$(GLOBAL_LDFLAGS) $$($(1)_OBJS) $$($(1)_LDFLAGS) $$($(1)_LDFLAGS_PROPAGATED) $$(foreach lib, $$($(1)_LIBS), $$($$(lib)_LDFLAGS_USE_INSTALLED))
+ +$$(trace-ld) $(CXX) -o $$@ -shared $$(LDFLAGS) $$(GLOBAL_LDFLAGS) $$($(1)_OBJS) $$($(1)_LDFLAGS) $$($(1)_LDFLAGS_PROPAGATED) $$(foreach lib, $$($(1)_LIBS), $$($$(lib)_LDFLAGS_USE_INSTALLED))
$(1)_LDFLAGS_USE_INSTALLED += -L$$(DESTDIR)$$($(1)_INSTALL_DIR) -l$$(patsubst lib%,%,$$(strip $$($(1)_NAME)))
ifndef HOST_DARWIN
@@ -125,7 +125,7 @@ define build-library
$(1)_PATH := $$(_d)/$$($(1)_NAME).a
$$($(1)_PATH): $$($(1)_OBJS) | $$(_d)/
- $$(trace-ld) $(LD) -Ur -o $$(_d)/$$($(1)_NAME).o $$?
+ +$$(trace-ld) $(LD) -Ur -o $$(_d)/$$($(1)_NAME).o $$^
$$(trace-ar) $(AR) crs $$@ $$(_d)/$$($(1)_NAME).o
$(1)_LDFLAGS_USE += $$($(1)_PATH) $$($(1)_LDFLAGS)
diff --git a/mk/programs.mk b/mk/programs.mk
index d0cf5baf0..0fc1990f7 100644
--- a/mk/programs.mk
+++ b/mk/programs.mk
@@ -32,7 +32,7 @@ define build-program
$$(eval $$(call create-dir, $$(_d)))
$$($(1)_PATH): $$($(1)_OBJS) $$(_libs) | $$(_d)/
- $$(trace-ld) $(CXX) -o $$@ $$(LDFLAGS) $$(GLOBAL_LDFLAGS) $$($(1)_OBJS) $$($(1)_LDFLAGS) $$(foreach lib, $$($(1)_LIBS), $$($$(lib)_LDFLAGS_USE))
+ +$$(trace-ld) $(CXX) -o $$@ $$(LDFLAGS) $$(GLOBAL_LDFLAGS) $$($(1)_OBJS) $$($(1)_LDFLAGS) $$(foreach lib, $$($(1)_LIBS), $$($$(lib)_LDFLAGS_USE))
$(1)_INSTALL_DIR ?= $$(bindir)
@@ -49,7 +49,7 @@ define build-program
_libs_final := $$(foreach lib, $$($(1)_LIBS), $$($$(lib)_INSTALL_PATH))
$(DESTDIR)$$($(1)_INSTALL_PATH): $$($(1)_OBJS) $$(_libs_final) | $(DESTDIR)$$($(1)_INSTALL_DIR)/
- $$(trace-ld) $(CXX) -o $$@ $$(LDFLAGS) $$(GLOBAL_LDFLAGS) $$($(1)_OBJS) $$($(1)_LDFLAGS) $$(foreach lib, $$($(1)_LIBS), $$($$(lib)_LDFLAGS_USE_INSTALLED))
+ +$$(trace-ld) $(CXX) -o $$@ $$(LDFLAGS) $$(GLOBAL_LDFLAGS) $$($(1)_OBJS) $$($(1)_LDFLAGS) $$(foreach lib, $$($(1)_LIBS), $$($$(lib)_LDFLAGS_USE_INSTALLED))
else
diff --git a/scripts/create-darwin-volume.sh b/scripts/create-darwin-volume.sh
index 4bac4b7ba..103e1e391 100755
--- a/scripts/create-darwin-volume.sh
+++ b/scripts/create-darwin-volume.sh
@@ -442,9 +442,14 @@ add_nix_vol_fstab_line() {
local escaped_mountpoint="${NIX_ROOT/ /'\\\'040}"
shift
- # wrap `ex` to work around a problem with vim plugins breaking exit codes;
- # (see https://github.com/NixOS/nix/issues/5468)
- # we'd prefer EDITOR="/usr/bin/ex --noplugin" but vifs doesn't word-split
+ # wrap `ex` to work around problems w/ vim features breaking exit codes
+ # - plugins (see github.com/NixOS/nix/issues/5468): -u NONE
+ # - swap file: -n
+ #
+ # the first draft used `--noplugin`, but github.com/NixOS/nix/issues/6462
+ # suggests we need the less-semantic `-u NONE`
+ #
+ # we'd prefer EDITOR="/usr/bin/ex -u NONE" but vifs doesn't word-split
# the EDITOR env.
#
# TODO: at some point we should switch to `--clean`, but it wasn't added
@@ -452,7 +457,7 @@ add_nix_vol_fstab_line() {
# minver 10.12.6 seems to have released with vim 7.4
cat > "$SCRATCH/ex_cleanroom_wrapper" <<EOF
#!/bin/sh
-/usr/bin/ex --noplugin "\$@"
+/usr/bin/ex -u NONE -n "\$@"
EOF
chmod 755 "$SCRATCH/ex_cleanroom_wrapper"
@@ -646,8 +651,9 @@ EOF
task "Configuring /etc/synthetic.conf to make a mount-point at $NIX_ROOT" >&2
# technically /etc/synthetic.d/nix is supported in Big Sur+
# but handling both takes even more code...
+ # See earlier note; `-u NONE` disables vim plugins/rc, `-n` skips swapfile
_sudo "to add Nix to /etc/synthetic.conf" \
- /usr/bin/ex --noplugin /etc/synthetic.conf <<EOF
+ /usr/bin/ex -u NONE -n /etc/synthetic.conf <<EOF
:a
${NIX_ROOT:1}
.
@@ -815,7 +821,8 @@ setup_volume_daemon() {
local volume_uuid="$2"
if ! test_voldaemon; then
task "Configuring LaunchDaemon to mount '$NIX_VOLUME_LABEL'" >&2
- _sudo "to install the Nix volume mounter" /usr/bin/ex --noplugin "$NIX_VOLUME_MOUNTD_DEST" <<EOF
+ # See earlier note; `-u NONE` disables vim plugins/rc, `-n` skips swapfile
+ _sudo "to install the Nix volume mounter" /usr/bin/ex -u NONE -n "$NIX_VOLUME_MOUNTD_DEST" <<EOF
:a
$(generate_mount_daemon "$cmd_type" "$volume_uuid")
.
diff --git a/scripts/install-darwin-multi-user.sh b/scripts/install-darwin-multi-user.sh
index afaa6783b..5111a5dde 100644
--- a/scripts/install-darwin-multi-user.sh
+++ b/scripts/install-darwin-multi-user.sh
@@ -167,7 +167,7 @@ poly_user_shell_get() {
}
poly_user_shell_set() {
- _sudo "in order to give $1 a safe home directory" \
+ _sudo "in order to give $1 a safe shell" \
/usr/bin/dscl . -create "/Users/$1" "UserShell" "$2"
}
diff --git a/scripts/install-multi-user.sh b/scripts/install-multi-user.sh
index b79a9c23a..105b84af6 100644
--- a/scripts/install-multi-user.sh
+++ b/scripts/install-multi-user.sh
@@ -59,6 +59,30 @@ headless() {
fi
}
+is_root() {
+ if [ "$EUID" -eq 0 ]; then
+ return 0
+ else
+ return 1
+ fi
+}
+
+is_os_linux() {
+ if [ "$(uname -s)" = "Linux" ]; then
+ return 0
+ else
+ return 1
+ fi
+}
+
+is_os_darwin() {
+ if [ "$(uname -s)" = "Darwin" ]; then
+ return 0
+ else
+ return 1
+ fi
+}
+
contact_us() {
echo "You can open an issue at https://github.com/nixos/nix/issues"
echo ""
@@ -313,14 +337,23 @@ __sudo() {
_sudo() {
local expl="$1"
shift
- if ! headless; then
+ if ! headless || is_root; then
__sudo "$expl" "$*" >&2
fi
- sudo "$@"
+
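+ # if we are already root, run the command directly (via env) instead of going through sudo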
+ if is_root; then
+ env "$@"
+ else
+ sudo "$@"
+ fi
}
+# Ensure that $TMPDIR exists if defined.
+if [[ -n "${TMPDIR:-}" ]] && [[ ! -d "${TMPDIR:-}" ]]; then
+ mkdir -m 0700 -p "${TMPDIR:-}"
+fi
-readonly SCRATCH=$(mktemp -d "${TMPDIR:-/tmp/}tmp.XXXXXXXXXX")
+readonly SCRATCH=$(mktemp -d)
finish_cleanup() {
rm -rf "$SCRATCH"
}
@@ -423,7 +456,7 @@ EOF
fi
done
- if [ "$(uname -s)" = "Linux" ] && [ ! -e /run/systemd/system ]; then
+ if is_os_linux && [ ! -e /run/systemd/system ]; then
warning <<EOF
We did not detect systemd on your system. With a multi-user install
without systemd you will have to manually configure your init system to
@@ -638,6 +671,17 @@ place_channel_configuration() {
fi
}
+check_selinux() {
+ if command -v getenforce > /dev/null 2>&1; then
+ if [ "$(getenforce)" = "Enforcing" ]; then
+ failure <<EOF
+Nix does not work with SELinux enabled yet!
+See https://github.com/NixOS/nix/issues/2374
+EOF
+ fi
+ fi
+}
+
welcome_to_nix() {
ok "Welcome to the Multi-User Nix Installation"
@@ -766,7 +810,7 @@ EOF
fi
_sudo "to load data for the first time in to the Nix Database" \
- "$NIX_INSTALLED_NIX/bin/nix-store" --load-db < ./.reginfo
+ HOME="$ROOT_HOME" "$NIX_INSTALLED_NIX/bin/nix-store" --load-db < ./.reginfo
echo " Just finished getting the nix database ready."
)
@@ -854,22 +898,14 @@ EOF
install -m 0664 "$SCRATCH/nix.conf" /etc/nix/nix.conf
}
+
main() {
- # TODO: I've moved this out of validate_starting_assumptions so we
- # can fail faster in this case. Sourcing install-darwin... now runs
- # `touch /` to detect Read-only root, but it could update times on
- # pre-Catalina macOS if run as root user.
- if [ "$EUID" -eq 0 ]; then
- failure <<EOF
-Please do not run this script with root privileges. I will call sudo
-when I need to.
-EOF
- fi
+ check_selinux
- if [ "$(uname -s)" = "Darwin" ]; then
+ if is_os_darwin; then
# shellcheck source=./install-darwin-multi-user.sh
. "$EXTRACTED_NIX_PATH/install-darwin-multi-user.sh"
- elif [ "$(uname -s)" = "Linux" ]; then
+ elif is_os_linux; then
# shellcheck source=./install-systemd-multi-user.sh
. "$EXTRACTED_NIX_PATH/install-systemd-multi-user.sh" # most of this works on non-systemd distros also
else
@@ -877,7 +913,10 @@ EOF
fi
welcome_to_nix
- chat_about_sudo
+
+ if ! is_root; then
+ chat_about_sudo
+ fi
cure_artifacts
# TODO: there's a tension between cure and validate. I moved the
diff --git a/scripts/install-nix-from-closure.sh b/scripts/install-nix-from-closure.sh
index d543b4463..cd3cf6670 100644
--- a/scripts/install-nix-from-closure.sh
+++ b/scripts/install-nix-from-closure.sh
@@ -148,7 +148,9 @@ if ! [ -w "$dest" ]; then
exit 1
fi
-mkdir -p "$dest/store"
+# The auto-chroot code in openFromNonUri() checks for the
+# non-existence of /nix/var/nix, so we need to create it here.
+mkdir -p "$dest/store" "$dest/var/nix"
printf "copying Nix to %s..." "${dest}/store" >&2
# Insert a newline if no progress is shown.
diff --git a/src/libcmd/command.cc b/src/libcmd/command.cc
index f28cfe5de..14bb27936 100644
--- a/src/libcmd/command.cc
+++ b/src/libcmd/command.cc
@@ -86,6 +86,11 @@ ref<Store> CopyCommand::getDstStore()
EvalCommand::EvalCommand()
{
+ addFlag({
+ .longName = "debugger",
+ .description = "start an interactive environment if evaluation fails",
+ .handler = {&startReplOnEvalErrors, true},
+ });
}
EvalCommand::~EvalCommand()
@@ -103,7 +108,7 @@ ref<Store> EvalCommand::getEvalStore()
ref<EvalState> EvalCommand::getEvalState()
{
- if (!evalState)
+ if (!evalState) {
evalState =
#if HAVE_BOEHMGC
std::allocate_shared<EvalState>(traceable_allocator<EvalState>(),
@@ -113,6 +118,11 @@ ref<EvalState> EvalCommand::getEvalState()
searchPath, getEvalStore(), getStore())
#endif
;
+
+ if (startReplOnEvalErrors) {
+ evalState->debugRepl = &runRepl;
+ };
+ }
return ref<EvalState>(evalState);
}
diff --git a/src/libcmd/command.hh b/src/libcmd/command.hh
index 078e2a2ce..3b4b40981 100644
--- a/src/libcmd/command.hh
+++ b/src/libcmd/command.hh
@@ -57,6 +57,9 @@ struct CopyCommand : virtual StoreCommand
struct EvalCommand : virtual StoreCommand, MixEvalArgs
{
+ bool startReplOnEvalErrors = false;
+ bool ignoreExceptionsDuringTry = false;
+
EvalCommand();
~EvalCommand();
@@ -75,10 +78,16 @@ struct MixFlakeOptions : virtual Args, EvalCommand
{
flake::LockFlags lockFlags;
+ std::optional<std::string> needsFlakeInputCompletion = {};
+
MixFlakeOptions();
- virtual std::optional<FlakeRef> getFlakeRefForCompletion()
+ virtual std::vector<std::string> getFlakesForCompletion()
{ return {}; }
+
+ void completeFlakeInput(std::string_view prefix);
+
+ void completionHook() override;
};
struct SourceExprCommand : virtual Args, MixFlakeOptions
@@ -114,12 +123,13 @@ struct InstallablesCommand : virtual Args, SourceExprCommand
InstallablesCommand();
void prepare() override;
+ Installables load();
virtual bool useDefaultInstallables() { return true; }
- std::optional<FlakeRef> getFlakeRefForCompletion() override;
+ std::vector<std::string> getFlakesForCompletion() override;
-private:
+protected:
std::vector<std::string> _installables;
};
@@ -133,9 +143,9 @@ struct InstallableCommand : virtual Args, SourceExprCommand
void prepare() override;
- std::optional<FlakeRef> getFlakeRefForCompletion() override
+ std::vector<std::string> getFlakesForCompletion() override
{
- return parseFlakeRefWithFragment(_installable, absPath(".")).first;
+ return {_installable};
}
private:
@@ -270,4 +280,8 @@ void printClosureDiff(
const StorePath & afterPath,
std::string_view indent);
+
+void runRepl(
+ ref<EvalState> evalState,
+ const ValMap & extraEnv);
}
diff --git a/src/libcmd/installables.cc b/src/libcmd/installables.cc
index 9f5eec2b5..00a7a33b7 100644
--- a/src/libcmd/installables.cc
+++ b/src/libcmd/installables.cc
@@ -23,17 +23,6 @@
namespace nix {
-void completeFlakeInputPath(
- ref<EvalState> evalState,
- const FlakeRef & flakeRef,
- std::string_view prefix)
-{
- auto flake = flake::getFlake(*evalState, flakeRef, true);
- for (auto & input : flake.inputs)
- if (hasPrefix(input.first, prefix))
- completions->add(input.first);
-}
-
MixFlakeOptions::MixFlakeOptions()
{
auto category = "Common flake-related options";
@@ -86,8 +75,7 @@ MixFlakeOptions::MixFlakeOptions()
lockFlags.inputUpdates.insert(flake::parseInputPath(s));
}},
.completer = {[&](size_t, std::string_view prefix) {
- if (auto flakeRef = getFlakeRefForCompletion())
- completeFlakeInputPath(getEvalState(), *flakeRef, prefix);
+ needsFlakeInputCompletion = {std::string(prefix)};
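+ // the actual completion happens later, in completionHook() below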
}}
});
@@ -103,12 +91,10 @@ MixFlakeOptions::MixFlakeOptions()
parseFlakeRef(flakeRef, absPath("."), true));
}},
.completer = {[&](size_t n, std::string_view prefix) {
- if (n == 0) {
- if (auto flakeRef = getFlakeRefForCompletion())
- completeFlakeInputPath(getEvalState(), *flakeRef, prefix);
- } else if (n == 1) {
+ if (n == 0)
+ needsFlakeInputCompletion = {std::string(prefix)};
+ else if (n == 1)
completeFlakeRef(getEvalState()->store, prefix);
- }
}}
});
@@ -139,6 +125,24 @@ MixFlakeOptions::MixFlakeOptions()
});
}
+void MixFlakeOptions::completeFlakeInput(std::string_view prefix)
+{
+ auto evalState = getEvalState();
+ for (auto & flakeRefS : getFlakesForCompletion()) {
+ auto flakeRef = parseFlakeRefWithFragment(expandTilde(flakeRefS), absPath(".")).first;
+ auto flake = flake::getFlake(*evalState, flakeRef, true);
+ for (auto & input : flake.inputs)
+ if (hasPrefix(input.first, prefix))
+ completions->add(input.first);
+ }
+}
+
+void MixFlakeOptions::completionHook()
+{
+ if (auto & prefix = needsFlakeInputCompletion)
+ completeFlakeInput(*prefix);
+}
+
SourceExprCommand::SourceExprCommand(bool supportReadOnlyMode)
{
addFlag({
@@ -146,7 +150,8 @@ SourceExprCommand::SourceExprCommand(bool supportReadOnlyMode)
.shortName = 'f',
.description =
"Interpret installables as attribute paths relative to the Nix expression stored in *file*. "
- "If *file* is the character -, then a Nix expression will be read from standard input.",
+ "If *file* is the character -, then a Nix expression will be read from standard input. "
+ "Implies `--impure`.",
.category = installablesCategory,
.labels = {"file"},
.handler = {&file},
@@ -440,10 +445,8 @@ DerivedPaths InstallableValue::toDerivedPaths()
// Group by derivation, helps with .all in particular
for (auto & drv : toDerivations()) {
- auto outputName = drv.outputName;
- if (outputName == "")
- throw Error("derivation '%s' lacks an 'outputName' attribute", state->store->printStorePath(drv.drvPath));
- drvsToOutputs[drv.drvPath].insert(outputName);
+ for (auto & outputName : drv.outputsToInstall)
+ drvsToOutputs[drv.drvPath].insert(outputName);
drvsToCopy.insert(drv.drvPath);
}
@@ -466,9 +469,19 @@ struct InstallableAttrPath : InstallableValue
SourceExprCommand & cmd;
RootValue v;
std::string attrPath;
-
- InstallableAttrPath(ref<EvalState> state, SourceExprCommand & cmd, Value * v, const std::string & attrPath)
- : InstallableValue(state), cmd(cmd), v(allocRootValue(v)), attrPath(attrPath)
+ OutputsSpec outputsSpec;
+
+ InstallableAttrPath(
+ ref<EvalState> state,
+ SourceExprCommand & cmd,
+ Value * v,
+ const std::string & attrPath,
+ OutputsSpec outputsSpec)
+ : InstallableValue(state)
+ , cmd(cmd)
+ , v(allocRootValue(v))
+ , attrPath(attrPath)
+ , outputsSpec(std::move(outputsSpec))
{ }
std::string what() const override { return attrPath; }
@@ -497,7 +510,19 @@ std::vector<InstallableValue::DerivationInfo> InstallableAttrPath::toDerivations
auto drvPath = drvInfo.queryDrvPath();
if (!drvPath)
throw Error("'%s' is not a derivation", what());
- res.push_back({ *drvPath, drvInfo.queryOutputName() });
+
+ std::set<std::string> outputsToInstall;
+
+ if (auto outputNames = std::get_if<OutputNames>(&outputsSpec))
+ outputsToInstall = *outputNames;
+ else
+ for (auto & output : drvInfo.queryOutputs(false, std::get_if<DefaultOutputs>(&outputsSpec)))
+ outputsToInstall.insert(output.first);
+
+ res.push_back(DerivationInfo {
+ .drvPath = *drvPath,
+ .outputsToInstall = std::move(outputsToInstall)
+ });
}
return res;
@@ -574,6 +599,7 @@ InstallableFlake::InstallableFlake(
ref<EvalState> state,
FlakeRef && flakeRef,
std::string_view fragment,
+ OutputsSpec outputsSpec,
Strings attrPaths,
Strings prefixes,
const flake::LockFlags & lockFlags)
@@ -581,6 +607,7 @@ InstallableFlake::InstallableFlake(
flakeRef(flakeRef),
attrPaths(fragment == "" ? attrPaths : Strings{(std::string) fragment}),
prefixes(fragment == "" ? Strings{} : prefixes),
+ outputsSpec(std::move(outputsSpec)),
lockFlags(lockFlags)
{
if (cmd && cmd->getAutoArgs(*state)->size())
@@ -589,6 +616,8 @@ InstallableFlake::InstallableFlake(
std::tuple<std::string, FlakeRef, InstallableValue::DerivationInfo> InstallableFlake::toDerivation()
{
+ Activity act(*logger, lvlTalkative, actUnknown, fmt("evaluating derivation '%s'", what()));
+
auto attr = getCursor(*state);
auto attrPath = attr->getAttrPathStr();
@@ -598,9 +627,41 @@ std::tuple<std::string, FlakeRef, InstallableValue::DerivationInfo> InstallableF
auto drvPath = attr->forceDerivation();
+ std::set<std::string> outputsToInstall;
+ std::optional<NixInt> priority;
+
+ if (auto aOutputSpecified = attr->maybeGetAttr(state->sOutputSpecified)) {
+ if (aOutputSpecified->getBool()) {
+ if (auto aOutputName = attr->maybeGetAttr("outputName"))
+ outputsToInstall = { aOutputName->getString() };
+ }
+ }
+
+ else if (auto aMeta = attr->maybeGetAttr(state->sMeta)) {
+ if (auto aOutputsToInstall = aMeta->maybeGetAttr("outputsToInstall"))
+ for (auto & s : aOutputsToInstall->getListOfStrings())
+ outputsToInstall.insert(s);
+ if (auto aPriority = aMeta->maybeGetAttr("priority"))
+ priority = aPriority->getInt();
+ }
+
+ if (outputsToInstall.empty() || std::get_if<AllOutputs>(&outputsSpec)) {
+ outputsToInstall.clear();
+ if (auto aOutputs = attr->maybeGetAttr(state->sOutputs))
+ for (auto & s : aOutputs->getListOfStrings())
+ outputsToInstall.insert(s);
+ }
+
+ if (outputsToInstall.empty())
+ outputsToInstall.insert("out");
+
+ if (auto outputNames = std::get_if<OutputNames>(&outputsSpec))
+ outputsToInstall = *outputNames;
+
auto drvInfo = DerivationInfo {
- std::move(drvPath),
- attr->getAttr(state->sOutputName)->getString()
+ .drvPath = std::move(drvPath),
+ .outputsToInstall = std::move(outputsToInstall),
+ .priority = priority,
};
return {attrPath, getLockedFlake()->flake.lockedRef, std::move(drvInfo)};
@@ -723,8 +784,14 @@ std::vector<std::shared_ptr<Installable>> SourceExprCommand::parseInstallables(
state->eval(e, *vFile);
}
- for (auto & s : ss)
- result.push_back(std::make_shared<InstallableAttrPath>(state, *this, vFile, s == "." ? "" : s));
+ for (auto & s : ss) {
+ auto [prefix, outputsSpec] = parseOutputsSpec(s);
+ result.push_back(
+ std::make_shared<InstallableAttrPath>(
+ state, *this, vFile,
+ prefix == "." ? "" : prefix,
+ outputsSpec));
+ }
} else {
@@ -743,12 +810,13 @@ std::vector<std::shared_ptr<Installable>> SourceExprCommand::parseInstallables(
}
try {
- auto [flakeRef, fragment] = parseFlakeRefWithFragment(s, absPath("."));
+ auto [flakeRef, fragment, outputsSpec] = parseFlakeRefWithFragmentAndOutputsSpec(s, absPath("."));
result.push_back(std::make_shared<InstallableFlake>(
this,
getEvalState(),
std::move(flakeRef),
fragment,
+ outputsSpec,
getDefaultFlakeAttrPaths(),
getDefaultFlakeAttrPathPrefixes(),
lockFlags));
@@ -822,12 +890,13 @@ std::vector<std::pair<std::shared_ptr<Installable>, BuiltPath>> Installable::bui
auto outputHashes = staticOutputHashes(*evalStore, drv); // FIXME: expensive
auto drvOutputs = drv.outputsAndOptPaths(*store);
for (auto & output : bfd.outputs) {
- if (!outputHashes.count(output))
+ auto outputHash = get(outputHashes, output);
+ if (!outputHash)
throw Error(
"the derivation '%s' doesn't have an output named '%s'",
store->printStorePath(bfd.drvPath), output);
if (settings.isExperimentalFeatureEnabled(Xp::CaDerivations)) {
- DrvOutput outputId { outputHashes.at(output), output };
+ DrvOutput outputId { *outputHash, output };
auto realisation = store->queryRealisation(outputId);
if (!realisation)
throw Error(
@@ -838,10 +907,11 @@ std::vector<std::pair<std::shared_ptr<Installable>, BuiltPath>> Installable::bui
} else {
// If ca-derivations isn't enabled, assume that
// the output path is statically known.
- assert(drvOutputs.count(output));
- assert(drvOutputs.at(output).second);
+ auto drvOutput = get(drvOutputs, output);
+ assert(drvOutput);
+ assert(drvOutput->second);
outputs.insert_or_assign(
- output, *drvOutputs.at(output).second);
+ output, *drvOutput->second);
}
}
res.push_back({installable, BuiltPath::Built { bfd.drvPath, outputs }});
@@ -856,6 +926,9 @@ std::vector<std::pair<std::shared_ptr<Installable>, BuiltPath>> Installable::bui
break;
case Realise::Outputs: {
+ if (settings.printMissing)
+ printMissing(store, pathsToBuild, lvlInfo);
+
for (auto & buildResult : store->buildPathsWithResults(pathsToBuild, bMode, evalStore)) {
if (!buildResult.success())
buildResult.rethrow();
@@ -969,21 +1042,26 @@ InstallablesCommand::InstallablesCommand()
void InstallablesCommand::prepare()
{
+ installables = load();
+}
+
+Installables InstallablesCommand::load() {
+ Installables installables;
if (_installables.empty() && useDefaultInstallables())
// FIXME: commands like "nix profile install" should not have a
// default, probably.
_installables.push_back(".");
- installables = parseInstallables(getStore(), _installables);
+ return parseInstallables(getStore(), _installables);
}
-std::optional<FlakeRef> InstallablesCommand::getFlakeRefForCompletion()
+std::vector<std::string> InstallablesCommand::getFlakesForCompletion()
{
if (_installables.empty()) {
if (useDefaultInstallables())
- return parseFlakeRefWithFragment(".", absPath(".")).first;
+ return {"."};
return {};
}
- return parseFlakeRefWithFragment(_installables.front(), absPath(".")).first;
+ return _installables;
}
InstallableCommand::InstallableCommand(bool supportReadOnlyMode)
diff --git a/src/libcmd/installables.hh b/src/libcmd/installables.hh
index de8b08525..948f78919 100644
--- a/src/libcmd/installables.hh
+++ b/src/libcmd/installables.hh
@@ -132,6 +132,8 @@ struct Installable
const std::vector<std::shared_ptr<Installable>> & installables);
};
+typedef std::vector<std::shared_ptr<Installable>> Installables;
+
struct InstallableValue : Installable
{
ref<EvalState> state;
@@ -141,7 +143,8 @@ struct InstallableValue : Installable
struct DerivationInfo
{
StorePath drvPath;
- std::string outputName;
+ std::set<std::string> outputsToInstall;
+ std::optional<NixInt> priority;
};
virtual std::vector<DerivationInfo> toDerivations() = 0;
@@ -156,6 +159,7 @@ struct InstallableFlake : InstallableValue
FlakeRef flakeRef;
Strings attrPaths;
Strings prefixes;
+ OutputsSpec outputsSpec;
const flake::LockFlags & lockFlags;
mutable std::shared_ptr<flake::LockedFlake> _lockedFlake;
@@ -164,6 +168,7 @@ struct InstallableFlake : InstallableValue
ref<EvalState> state,
FlakeRef && flakeRef,
std::string_view fragment,
+ OutputsSpec outputsSpec,
Strings attrPaths,
Strings prefixes,
const flake::LockFlags & lockFlags);
diff --git a/src/libcmd/local.mk b/src/libcmd/local.mk
index 7a2f83cc7..3a4de6bcb 100644
--- a/src/libcmd/local.mk
+++ b/src/libcmd/local.mk
@@ -6,9 +6,9 @@ libcmd_DIR := $(d)
libcmd_SOURCES := $(wildcard $(d)/*.cc)
-libcmd_CXXFLAGS += -I src/libutil -I src/libstore -I src/libexpr -I src/libmain -I src/libfetchers
+libcmd_CXXFLAGS += -I src/libutil -I src/libstore -I src/libexpr -I src/libmain -I src/libfetchers -I src/nix
-libcmd_LDFLAGS += $(LOWDOWN_LIBS) -pthread
+libcmd_LDFLAGS = $(EDITLINE_LIBS) -llowdown -pthread
libcmd_LIBS = libstore libutil libexpr libmain libfetchers
diff --git a/src/libcmd/markdown.cc b/src/libcmd/markdown.cc
index 29bb4d31e..668a07763 100644
--- a/src/libcmd/markdown.cc
+++ b/src/libcmd/markdown.cc
@@ -9,14 +9,16 @@ namespace nix {
std::string renderMarkdownToTerminal(std::string_view markdown)
{
+ int windowWidth = getWindowSize().second;
+
struct lowdown_opts opts {
.type = LOWDOWN_TERM,
.maxdepth = 20,
- .cols = std::max(getWindowSize().second, (unsigned short) 80),
+ .cols = (size_t) std::max(windowWidth - 5, 60),
.hmargin = 0,
.vmargin = 0,
.feat = LOWDOWN_COMMONMARK | LOWDOWN_FENCED | LOWDOWN_DEFLIST | LOWDOWN_TABLES,
- .oflags = 0,
+ .oflags = LOWDOWN_TERM_NOLINK,
};
auto doc = lowdown_doc_new(&opts);
diff --git a/src/nix/repl.cc b/src/libcmd/repl.cc
index cdf253401..a8de6b80b 100644
--- a/src/nix/repl.cc
+++ b/src/libcmd/repl.cc
@@ -22,6 +22,7 @@ extern "C" {
#include "ansicolor.hh"
#include "shared.hh"
#include "eval.hh"
+#include "eval-cache.hh"
#include "eval-inline.hh"
#include "attr-path.hh"
#include "store-api.hh"
@@ -34,6 +35,7 @@ extern "C" {
#include "finally.hh"
#include "markdown.hh"
#include "local-fs-store.hh"
+#include "progress-bar.hh"
#if HAVE_BOEHMGC
#define GC_INCLUDE_NEW
@@ -48,38 +50,46 @@ struct NixRepl
#endif
{
std::string curDir;
- std::unique_ptr<EvalState> state;
+ ref<EvalState> state;
Bindings * autoArgs;
+ size_t debugTraceIndex;
+
Strings loadedFiles;
+ typedef std::vector<std::pair<Value*,std::string>> AnnotatedValues;
+ std::function<AnnotatedValues()> getValues;
const static int envSize = 32768;
- StaticEnv staticEnv;
+ std::shared_ptr<StaticEnv> staticEnv;
Env * env;
int displ;
StringSet varNames;
const Path historyFile;
- NixRepl(const Strings & searchPath, nix::ref<Store> store);
+ NixRepl(const Strings & searchPath, nix::ref<Store> store, ref<EvalState> state,
+ std::function<AnnotatedValues()> getValues);
~NixRepl();
- void mainLoop(const std::vector<std::string> & files);
+ void mainLoop();
StringSet completePrefix(const std::string & prefix);
- bool getLine(std::string & input, const std::string &prompt);
+ bool getLine(std::string & input, const std::string & prompt);
StorePath getDerivationPath(Value & v);
bool processLine(std::string line);
+
void loadFile(const Path & path);
void loadFlake(const std::string & flakeRef);
void initEnv();
+ void loadFiles();
void reloadFiles();
void addAttrsToScope(Value & attrs);
void addVarToScope(const Symbol name, Value & v);
Expr * parseString(std::string s);
void evalString(std::string s, Value & v);
+ void loadDebugTraceEnv(DebugTrace & dt);
typedef std::set<Value *> ValuesSeen;
- std::ostream & printValue(std::ostream & str, Value & v, unsigned int maxDepth);
- std::ostream & printValue(std::ostream & str, Value & v, unsigned int maxDepth, ValuesSeen & seen);
+ std::ostream & printValue(std::ostream & str, Value & v, unsigned int maxDepth);
+ std::ostream & printValue(std::ostream & str, Value & v, unsigned int maxDepth, ValuesSeen & seen);
};
@@ -92,9 +102,12 @@ std::string removeWhitespace(std::string s)
}
-NixRepl::NixRepl(const Strings & searchPath, nix::ref<Store> store)
- : state(std::make_unique<EvalState>(searchPath, store))
- , staticEnv(false, &state->staticBaseEnv)
+NixRepl::NixRepl(const Strings & searchPath, nix::ref<Store> store, ref<EvalState> state,
+ std::function<NixRepl::AnnotatedValues()> getValues)
+ : state(state)
+ , debugTraceIndex(0)
+ , getValues(getValues)
+ , staticEnv(new StaticEnv(false, state->staticBaseEnv.get()))
, historyFile(getDataDir() + "/nix/repl-history")
{
curDir = absPath(".");
@@ -106,23 +119,20 @@ NixRepl::~NixRepl()
write_history(historyFile.c_str());
}
-std::string runNix(Path program, const Strings & args,
+void runNix(Path program, const Strings & args,
const std::optional<std::string> & input = {})
{
auto subprocessEnv = getEnv();
subprocessEnv["NIX_CONFIG"] = globalConfig.toKeyValue();
- auto res = runProgram(RunOptions {
+ runProgram2(RunOptions {
.program = settings.nixBinDir+ "/" + program,
.args = args,
.environment = subprocessEnv,
.input = input,
});
- if (!statusOk(res.first))
- throw ExecError(res.first, "program '%1%' %2%", program, statusToString(res.first));
-
- return res.second;
+ return;
}
static NixRepl * curRepl; // ugly
@@ -198,16 +208,37 @@ namespace {
}
}
-void NixRepl::mainLoop(const std::vector<std::string> & files)
+static std::ostream & showDebugTrace(std::ostream & out, const PosTable & positions, const DebugTrace & dt)
+{
+ if (dt.isError)
+ out << ANSI_RED "error: " << ANSI_NORMAL;
+ out << dt.hint.str() << "\n";
+
+ // prefer direct pos, but if noPos then try the expr.
+ auto pos = *dt.pos
+ ? *dt.pos
+ : positions[dt.expr.getPos() ? dt.expr.getPos() : noPos];
+
+ if (pos) {
+ printAtPos(pos, out);
+
+ auto loc = getCodeLines(pos);
+ if (loc.has_value()) {
+ out << "\n";
+ printCodeLines(out, "", pos, *loc);
+ out << "\n";
+ }
+ }
+
+ return out;
+}
+
+void NixRepl::mainLoop()
{
std::string error = ANSI_RED "error:" ANSI_NORMAL " ";
notice("Welcome to Nix " + nixVersion + ". Type :? for help.\n");
- for (auto & i : files)
- loadedFiles.push_back(i);
-
- reloadFiles();
- if (!loadedFiles.empty()) notice("");
+ loadFiles();
// Allow nix-repl specific settings in .inputrc
rl_readline_name = "nix-repl";
@@ -222,14 +253,21 @@ void NixRepl::mainLoop(const std::vector<std::string> & files)
rl_set_list_possib_func(listPossibleCallback);
#endif
+ /* Stop the progress bar because it interferes with the display of
+ the repl. */
+ stopProgressBar();
+
std::string input;
while (true) {
// When continuing input from previous lines, don't print a prompt, just align to the same
// number of chars as the prompt.
- if (!getLine(input, input.empty() ? "nix-repl> " : " "))
+ if (!getLine(input, input.empty() ? "nix-repl> " : " ")) {
+ // ctrl-D should exit the debugger.
+ state->debugStop = false;
+ state->debugQuit = true;
break;
-
+ }
try {
if (!removeWhitespace(input).empty() && !processLine(input)) return;
} catch (ParseError & e) {
@@ -240,6 +278,14 @@ void NixRepl::mainLoop(const std::vector<std::string> & files)
} else {
printMsg(lvlError, e.msg());
}
+ } catch (EvalError & e) {
+ // in debugger mode, an EvalError should trigger another repl session.
+ // when that session returns, the exception will land here. No need to show it again;
+ // show the error for this repl session instead.
+ if (state->debugRepl && !state->debugTraces.empty())
+ showDebugTrace(std::cout, state->positions, state->debugTraces.front());
+ else
+ printMsg(lvlError, e.msg());
} catch (Error & e) {
printMsg(lvlError, e.msg());
} catch (Interrupted & e) {
@@ -394,6 +440,19 @@ StorePath NixRepl::getDerivationPath(Value & v) {
return *drvPath;
}
+void NixRepl::loadDebugTraceEnv(DebugTrace & dt)
+{
+ initEnv();
+
+ auto se = state->getStaticEnv(dt.expr);
+ if (se) {
+ auto vm = mapStaticEnvBindings(state->symbols, *se.get(), dt.env);
+
+ // add staticenv vars.
+ for (auto & [name, value] : *(vm.get()))
+ addVarToScope(state->symbols.create(name), *value);
+ }
+}
bool NixRepl::processLine(std::string line)
{
@@ -429,12 +488,72 @@ bool NixRepl::processLine(std::string line)
<< " :p <expr> Evaluate and print expression recursively\n"
<< " :q Exit nix-repl\n"
<< " :r Reload all files\n"
- << " :s <expr> Build dependencies of derivation, then start nix-shell\n"
+ << " :sh <expr> Build dependencies of derivation, then start nix-shell\n"
<< " :t <expr> Describe result of evaluation\n"
<< " :u <expr> Build derivation, then start nix-shell\n"
<< " :doc <expr> Show documentation of a builtin function\n"
<< " :log <expr> Show logs for a derivation\n"
- << " :st [bool] Enable, disable or toggle showing traces for errors\n";
+ << " :te [bool] Enable, disable or toggle showing traces for errors\n"
+ ;
+ if (state->debugRepl) {
+ std::cout
+ << "\n"
+ << " Debug mode commands\n"
+ << " :env Show env stack\n"
+ << " :bt Show trace stack\n"
+ << " :st Show current trace\n"
+ << " :st <idx> Change to another trace in the stack\n"
+ << " :c Go until end of program, exception, or builtins.break\n"
+ << " :s Go one step\n"
+ ;
+ }
+
+ }
+
+ else if (state->debugRepl && (command == ":bt" || command == ":backtrace")) {
+ for (const auto & [idx, i] : enumerate(state->debugTraces)) {
+ std::cout << "\n" << ANSI_BLUE << idx << ANSI_NORMAL << ": ";
+ showDebugTrace(std::cout, state->positions, i);
+ }
+ }
+
+ else if (state->debugRepl && (command == ":env")) {
+ for (const auto & [idx, i] : enumerate(state->debugTraces)) {
+ if (idx == debugTraceIndex) {
+ printEnvBindings(*state, i.expr, i.env);
+ break;
+ }
+ }
+ }
+
+ else if (state->debugRepl && (command == ":st")) {
+ try {
+ // change the DebugTrace index.
+ debugTraceIndex = stoi(arg);
+ } catch (...) { }
+
+ for (const auto & [idx, i] : enumerate(state->debugTraces)) {
+ if (idx == debugTraceIndex) {
+ std::cout << "\n" << ANSI_BLUE << idx << ANSI_NORMAL << ": ";
+ showDebugTrace(std::cout, state->positions, i);
+ std::cout << std::endl;
+ printEnvBindings(*state, i.expr, i.env);
+ loadDebugTraceEnv(i);
+ break;
+ }
+ }
+ }
+
+ else if (state->debugRepl && (command == ":s" || command == ":step")) {
+ // set flag to stop at next DebugTrace; exit repl.
+ state->debugStop = true;
+ return false;
+ }
+
+ else if (state->debugRepl && (command == ":c" || command == ":continue")) {
+ // set flag to run to next breakpoint or end of program; exit repl.
+ state->debugStop = false;
+ return false;
}
else if (command == ":a" || command == ":add") {
@@ -506,7 +625,7 @@ bool NixRepl::processLine(std::string line)
runNix("nix-shell", {state->store->printStorePath(drvPath)});
}
- else if (command == ":b" || command == ":bl" || command == ":i" || command == ":s" || command == ":log") {
+ else if (command == ":b" || command == ":bl" || command == ":i" || command == ":sh" || command == ":log") {
Value v;
evalString(arg, v);
StorePath drvPath = getDerivationPath(v);
@@ -567,8 +686,11 @@ bool NixRepl::processLine(std::string line)
printValue(std::cout, v, 1000000000) << std::endl;
}
- else if (command == ":q" || command == ":quit")
+ else if (command == ":q" || command == ":quit") {
+ state->debugStop = false;
+ state->debugQuit = true;
return false;
+ }
else if (command == ":doc") {
Value v;
@@ -593,7 +715,7 @@ bool NixRepl::processLine(std::string line)
throw Error("value does not have documentation");
}
- else if (command == ":st" || command == ":show-trace") {
+ else if (command == ":te" || command == ":trace-enable") {
if (arg == "false" || (arg == "" && loggerSettings.showTrace)) {
std::cout << "not showing error traces\n";
loggerSettings.showTrace = false;
@@ -630,7 +752,6 @@ bool NixRepl::processLine(std::string line)
return true;
}
-
void NixRepl::loadFile(const Path & path)
{
loadedFiles.remove(path);
@@ -669,10 +790,10 @@ void NixRepl::initEnv()
env = &state->allocEnv(envSize);
env->up = &state->baseEnv;
displ = 0;
- staticEnv.vars.clear();
+ staticEnv->vars.clear();
varNames.clear();
- for (auto & i : state->staticBaseEnv.vars)
+ for (auto & i : state->staticBaseEnv->vars)
varNames.emplace(state->symbols[i.first]);
}
@@ -681,16 +802,24 @@ void NixRepl::reloadFiles()
{
initEnv();
+ loadFiles();
+}
+
+
+void NixRepl::loadFiles()
+{
Strings old = loadedFiles;
loadedFiles.clear();
- bool first = true;
for (auto & i : old) {
- if (!first) notice("");
- first = false;
notice("Loading '%1%'...", i);
loadFile(i);
}
+
+ for (auto & [i, what] : getValues()) {
+ notice("Loading installable '%1%'...", what);
+ addAttrsToScope(*i);
+ }
}
@@ -701,12 +830,12 @@ void NixRepl::addAttrsToScope(Value & attrs)
throw Error("environment full; cannot add more variables");
for (auto & i : *attrs.attrs) {
- staticEnv.vars.emplace_back(i.name, displ);
+ staticEnv->vars.emplace_back(i.name, displ);
env->values[displ++] = i.value;
varNames.emplace(state->symbols[i.name]);
}
- staticEnv.sort();
- staticEnv.deduplicate();
+ staticEnv->sort();
+ staticEnv->deduplicate();
notice("Added %1% variables.", attrs.attrs->size());
}
@@ -715,10 +844,10 @@ void NixRepl::addVarToScope(const Symbol name, Value & v)
{
if (displ >= envSize)
throw Error("environment full; cannot add more variables");
- if (auto oldVar = staticEnv.find(name); oldVar != staticEnv.vars.end())
- staticEnv.vars.erase(oldVar);
- staticEnv.vars.emplace_back(name, displ);
- staticEnv.sort();
+ if (auto oldVar = staticEnv->find(name); oldVar != staticEnv->vars.end())
+ staticEnv->vars.erase(oldVar);
+ staticEnv->vars.emplace_back(name, displ);
+ staticEnv->sort();
env->values[displ++] = &v;
varNames.emplace(state->symbols[name]);
}
@@ -886,17 +1015,66 @@ std::ostream & NixRepl::printValue(std::ostream & str, Value & v, unsigned int m
return str;
}
-struct CmdRepl : StoreCommand, MixEvalArgs
+void runRepl(
+ ref<EvalState> evalState,
+ const ValMap & extraEnv)
{
+ auto getValues = [&]()->NixRepl::AnnotatedValues{
+ NixRepl::AnnotatedValues values;
+ return values;
+ };
+ const Strings & searchPath = {};
+ auto repl = std::make_unique<NixRepl>(
+ searchPath,
+ openStore(),
+ evalState,
+ getValues
+ );
+
+ repl->initEnv();
+
+ // add 'extra' vars.
+ for (auto & [name, value] : extraEnv)
+ repl->addVarToScope(repl->state->symbols.create(name), *value);
+
+ repl->mainLoop();
+}
+
+struct CmdRepl : InstallablesCommand
+{
+ CmdRepl() {
+ evalSettings.pureEval = false;
+ }
+
+ void prepare()
+ {
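+ // without the repl-flake experimental feature, treat the first positional argument as a file to load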
+ if (!settings.isExperimentalFeatureEnabled(Xp::ReplFlake) && !(file) && this->_installables.size() >= 1) {
+ warn("future versions of Nix will require using `--file` to load a file");
+ if (this->_installables.size() > 1)
+ warn("more than one input file is not currently supported");
+ auto filePath = this->_installables[0].data();
+ file = std::optional(filePath);
+ _installables.front() = _installables.back();
+ _installables.pop_back();
+ }
+ installables = InstallablesCommand::load();
+ }
+
std::vector<std::string> files;
- CmdRepl()
+ Strings getDefaultFlakeAttrPaths() override
+ {
+ return {""};
+ }
+
+ bool useDefaultInstallables() override
{
- expectArgs({
- .label = "files",
- .handler = {&files},
- .completer = completePath
- });
+ return file.has_value() or expr.has_value();
+ }
+
+ bool forceImpureByDefault() override
+ {
+ return true;
}
std::string description() override
@@ -913,10 +1091,37 @@ struct CmdRepl : StoreCommand, MixEvalArgs
void run(ref<Store> store) override
{
- evalSettings.pureEval = false;
- auto repl = std::make_unique<NixRepl>(searchPath, openStore());
+ auto state = getEvalState();
+ auto getValues = [&]()->NixRepl::AnnotatedValues{
+ auto installables = load();
+ NixRepl::AnnotatedValues values;
+ for (auto & installable: installables){
+ auto what = installable->what();
+ if (file){
+ auto [val, pos] = installable->toValue(*state);
+ auto what = installable->what();
+ state->forceValue(*val, pos);
+ auto autoArgs = getAutoArgs(*state);
+ auto valPost = state->allocValue();
+ state->autoCallFunction(*autoArgs, *val, *valPost);
+ state->forceValue(*valPost, pos);
+ values.push_back( {valPost, what });
+ } else {
+ auto [val, pos] = installable->toValue(*state);
+ values.push_back( {val, what} );
+ }
+ }
+ return values;
+ };
+ auto repl = std::make_unique<NixRepl>(
+ searchPath,
+ openStore(),
+ state,
+ getValues
+ );
repl->autoArgs = getAutoArgs(*repl->state);
- repl->mainLoop(files);
+ repl->initEnv();
+ repl->mainLoop();
}
};
diff --git a/src/libexpr/eval-cache.cc b/src/libexpr/eval-cache.cc
index dc52b9fa0..1a37f87f3 100644
--- a/src/libexpr/eval-cache.cc
+++ b/src/libexpr/eval-cache.cc
@@ -47,7 +47,7 @@ struct AttrDb
{
auto state(_state->lock());
- Path cacheDir = getCacheDir() + "/nix/eval-cache-v2";
+ Path cacheDir = getCacheDir() + "/nix/eval-cache-v4";
createDirs(cacheDir);
Path dbPath = cacheDir + "/" + fingerprint.to_string(Base16, false) + ".sqlite";
@@ -175,6 +175,42 @@ struct AttrDb
});
}
+ AttrId setInt(
+ AttrKey key,
+ int n)
+ {
+ return doSQLite([&]()
+ {
+ auto state(_state->lock());
+
+ state->insertAttribute.use()
+ (key.first)
+ (symbols[key.second])
+ (AttrType::Int)
+ (n).exec();
+
+ return state->db.getLastInsertedRowId();
+ });
+ }
+
+ AttrId setListOfStrings(
+ AttrKey key,
+ const std::vector<std::string> & l)
+ {
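+ // the list is stored as a single tab-separated string and split again in getAttr()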
+ return doSQLite([&]()
+ {
+ auto state(_state->lock());
+
+ state->insertAttribute.use()
+ (key.first)
+ (symbols[key.second])
+ (AttrType::ListOfStrings)
+ (concatStringsSep("\t", l)).exec();
+
+ return state->db.getLastInsertedRowId();
+ });
+ }
+
AttrId setPlaceholder(AttrKey key)
{
return doSQLite([&]()
@@ -246,7 +282,7 @@ struct AttrDb
auto queryAttribute(state->queryAttribute.use()(key.first)(symbols[key.second]));
if (!queryAttribute.next()) return {};
- auto rowId = (AttrType) queryAttribute.getInt(0);
+ auto rowId = (AttrId) queryAttribute.getInt(0);
auto type = (AttrType) queryAttribute.getInt(1);
switch (type) {
@@ -269,6 +305,10 @@ struct AttrDb
}
case AttrType::Bool:
return {{rowId, queryAttribute.getInt(2) != 0}};
+ case AttrType::Int:
+ return {{rowId, int_t{queryAttribute.getInt(2)}}};
+ case AttrType::ListOfStrings:
+ return {{rowId, tokenizeString<std::vector<std::string>>(queryAttribute.getStr(2), "\t")}};
case AttrType::Missing:
return {{rowId, missing_t()}};
case AttrType::Misc:
@@ -385,7 +425,7 @@ std::string AttrCursor::getAttrPathStr(Symbol name) const
Value & AttrCursor::forceValue()
{
- debug("evaluating uncached attribute %s", getAttrPathStr());
+ debug("evaluating uncached attribute '%s'", getAttrPathStr());
auto & v = getValue();
@@ -406,6 +446,8 @@ Value & AttrCursor::forceValue()
cachedValue = {root->db->setString(getKey(), v.path), string_t{v.path, {}}};
else if (v.type() == nBool)
cachedValue = {root->db->setBool(getKey(), v.boolean), v.boolean};
+ else if (v.type() == nInt)
+ cachedValue = {root->db->setInt(getKey(), v.integer), int_t{v.integer}};
else if (v.type() == nAttrs)
; // FIXME: do something?
else
@@ -444,7 +486,7 @@ std::shared_ptr<AttrCursor> AttrCursor::maybeGetAttr(Symbol name, bool forceErro
return nullptr;
else if (std::get_if<failed_t>(&attr->second)) {
if (forceErrors)
- debug("reevaluating failed cached attribute '%s'");
+ debug("reevaluating failed cached attribute '%s'", getAttrPathStr(name));
else
throw CachedEvalError("cached failure of attribute '%s'", getAttrPathStr(name));
} else
@@ -465,11 +507,6 @@ std::shared_ptr<AttrCursor> AttrCursor::maybeGetAttr(Symbol name, bool forceErro
return nullptr;
//throw TypeError("'%s' is not an attribute set", getAttrPathStr());
- for (auto & attr : *v.attrs) {
- if (root->db)
- root->db->setPlaceholder({cachedValue->first, attr.name});
- }
-
auto attr = v.attrs->get(name);
if (!attr) {
@@ -534,14 +571,14 @@ std::string AttrCursor::getString()
debug("using cached string attribute '%s'", getAttrPathStr());
return s->first;
} else
- throw TypeError("'%s' is not a string", getAttrPathStr());
+ root->state.debugThrowLastTrace(TypeError("'%s' is not a string", getAttrPathStr()));
}
}
auto & v = forceValue();
if (v.type() != nString && v.type() != nPath)
- throw TypeError("'%s' is not a string but %s", getAttrPathStr(), showType(v.type()));
+ root->state.debugThrowLastTrace(TypeError("'%s' is not a string but %s", getAttrPathStr(), showType(v.type())));
return v.type() == nString ? v.string.s : v.path;
}
@@ -565,7 +602,7 @@ string_t AttrCursor::getStringWithContext()
return *s;
}
} else
- throw TypeError("'%s' is not a string", getAttrPathStr());
+ root->state.debugThrowLastTrace(TypeError("'%s' is not a string", getAttrPathStr()));
}
}
@@ -576,7 +613,7 @@ string_t AttrCursor::getStringWithContext()
else if (v.type() == nPath)
return {v.path, {}};
else
- throw TypeError("'%s' is not a string but %s", getAttrPathStr(), showType(v.type()));
+ root->state.debugThrowLastTrace(TypeError("'%s' is not a string but %s", getAttrPathStr(), showType(v.type())));
}
bool AttrCursor::getBool()
@@ -589,18 +626,73 @@ bool AttrCursor::getBool()
debug("using cached Boolean attribute '%s'", getAttrPathStr());
return *b;
} else
- throw TypeError("'%s' is not a Boolean", getAttrPathStr());
+ root->state.debugThrowLastTrace(TypeError("'%s' is not a Boolean", getAttrPathStr()));
}
}
auto & v = forceValue();
if (v.type() != nBool)
- throw TypeError("'%s' is not a Boolean", getAttrPathStr());
+ root->state.debugThrowLastTrace(TypeError("'%s' is not a Boolean", getAttrPathStr()));
return v.boolean;
}
+NixInt AttrCursor::getInt()
+{
+ if (root->db) {
+ if (!cachedValue)
+ cachedValue = root->db->getAttr(getKey());
+ if (cachedValue && !std::get_if<placeholder_t>(&cachedValue->second)) {
+ if (auto i = std::get_if<int_t>(&cachedValue->second)) {
+ debug("using cached Integer attribute '%s'", getAttrPathStr());
+ return i->x;
+ } else
+ throw TypeError("'%s' is not an Integer", getAttrPathStr());
+ }
+ }
+
+ auto & v = forceValue();
+
+ if (v.type() != nInt)
+ throw TypeError("'%s' is not an Integer", getAttrPathStr());
+
+ return v.integer;
+}
+
+std::vector<std::string> AttrCursor::getListOfStrings()
+{
+ if (root->db) {
+ if (!cachedValue)
+ cachedValue = root->db->getAttr(getKey());
+ if (cachedValue && !std::get_if<placeholder_t>(&cachedValue->second)) {
+ if (auto l = std::get_if<std::vector<std::string>>(&cachedValue->second)) {
+ debug("using cached list of strings attribute '%s'", getAttrPathStr());
+ return *l;
+ } else
+ throw TypeError("'%s' is not a list of strings", getAttrPathStr());
+ }
+ }
+
+ debug("evaluating uncached attribute '%s'", getAttrPathStr());
+
+ auto & v = getValue();
+ root->state.forceValue(v, noPos);
+
+ if (v.type() != nList)
+ throw TypeError("'%s' is not a list", getAttrPathStr());
+
+ std::vector<std::string> res;
+
+ for (auto & elem : v.listItems())
+ res.push_back(std::string(root->state.forceStringNoCtx(*elem)));
+
+ if (root->db)
+ cachedValue = {root->db->setListOfStrings(getKey(), res), res};
+
+ return res;
+}
+
std::vector<Symbol> AttrCursor::getAttrs()
{
if (root->db) {
@@ -611,14 +703,14 @@ std::vector<Symbol> AttrCursor::getAttrs()
debug("using cached attrset attribute '%s'", getAttrPathStr());
return *attrs;
} else
- throw TypeError("'%s' is not an attribute set", getAttrPathStr());
+ root->state.debugThrowLastTrace(TypeError("'%s' is not an attribute set", getAttrPathStr()));
}
}
auto & v = forceValue();
if (v.type() != nAttrs)
- throw TypeError("'%s' is not an attribute set", getAttrPathStr());
+ root->state.debugThrowLastTrace(TypeError("'%s' is not an attribute set", getAttrPathStr()));
std::vector<Symbol> attrs;
for (auto & attr : *getValue().attrs)
diff --git a/src/libexpr/eval-cache.hh b/src/libexpr/eval-cache.hh
index b0709ebc2..c93e55b93 100644
--- a/src/libexpr/eval-cache.hh
+++ b/src/libexpr/eval-cache.hh
@@ -44,12 +44,15 @@ enum AttrType {
Misc = 4,
Failed = 5,
Bool = 6,
+ ListOfStrings = 7,
+ Int = 8,
};
struct placeholder_t {};
struct missing_t {};
struct misc_t {};
struct failed_t {};
+struct int_t { NixInt x; };
typedef uint64_t AttrId;
typedef std::pair<AttrId, Symbol> AttrKey;
typedef std::pair<std::string, NixStringContext> string_t;
@@ -61,7 +64,9 @@ typedef std::variant<
missing_t,
misc_t,
failed_t,
- bool
+ bool,
+ int_t,
+ std::vector<std::string>
> AttrValue;
class AttrCursor : public std::enable_shared_from_this<AttrCursor>
@@ -114,6 +119,10 @@ public:
bool getBool();
+ NixInt getInt();
+
+ std::vector<std::string> getListOfStrings();
+
std::vector<Symbol> getAttrs();
bool isDerivation();
diff --git a/src/libexpr/eval-inline.hh b/src/libexpr/eval-inline.hh
index 30d0f1e9c..5997525e4 100644
--- a/src/libexpr/eval-inline.hh
+++ b/src/libexpr/eval-inline.hh
@@ -4,7 +4,6 @@
namespace nix {
-
/* Note: Various places expect the allocated memory to be zeroed. */
[[gnu::always_inline]]
inline void * allocBytes(size_t n)
diff --git a/src/libexpr/eval.cc b/src/libexpr/eval.cc
index 5523ba7bb..46e5dc41e 100644
--- a/src/libexpr/eval.cc
+++ b/src/libexpr/eval.cc
@@ -18,6 +18,7 @@
#include <sys/resource.h>
#include <iostream>
#include <fstream>
+#include <functional>
#include <sys/resource.h>
@@ -36,7 +37,6 @@
namespace nix {
-
static char * allocString(size_t size)
{
char * t;
@@ -147,7 +147,10 @@ void Value::print(const SymbolTable & symbols, std::ostream & str,
else {
str << "[ ";
for (auto v2 : listItems()) {
- v2->print(symbols, str, seen);
+ if (v2)
+ v2->print(symbols, str, seen);
+ else
+ str << "(nullptr)";
str << " ";
}
str << "]";
@@ -184,6 +187,11 @@ void Value::print(const SymbolTable & symbols, std::ostream & str, bool showRepe
print(symbols, str, showRepeated ? nullptr : &seen);
}
+// Pretty print types for assertion errors
+std::ostream & operator << (std::ostream & os, const ValueType t) {
+ os << showType(t);
+ return os;
+}
std::string printValue(const EvalState & state, const Value & v)
{
@@ -451,10 +459,15 @@ EvalState::EvalState(
, sKey(symbols.create("key"))
, sPath(symbols.create("path"))
, sPrefix(symbols.create("prefix"))
+ , sOutputSpecified(symbols.create("outputSpecified"))
, repair(NoRepair)
, emptyBindings(0)
, store(store)
, buildStore(buildStore ? buildStore : store)
+ , debugRepl(nullptr)
+ , debugStop(false)
+ , debugQuit(false)
+ , trylevel(0)
, regexCache(makeRegexCache())
#if HAVE_BOEHMGC
, valueAllocCache(std::allocate_shared<void *>(traceable_allocator<void *>(), nullptr))
@@ -464,7 +477,7 @@ EvalState::EvalState(
, env1AllocCache(std::make_shared<void *>(nullptr))
#endif
, baseEnv(allocEnv(128))
- , staticBaseEnv(false, 0)
+ , staticBaseEnv{std::make_shared<StaticEnv>(false, nullptr)}
{
countCalls = getEnv("NIX_COUNT_CALLS").value_or("0") != "0";
@@ -522,7 +535,7 @@ void EvalState::allowPath(const StorePath & storePath)
allowedPaths->insert(store->toRealPath(storePath));
}
-void EvalState::allowAndSetStorePathString(const StorePath &storePath, Value & v)
+void EvalState::allowAndSetStorePathString(const StorePath & storePath, Value & v)
{
allowPath(storePath);
@@ -630,7 +643,7 @@ Value * EvalState::addConstant(const std::string & name, Value & v)
void EvalState::addConstant(const std::string & name, Value * v)
{
- staticBaseEnv.vars.emplace_back(symbols.create(name), baseEnvDispl);
+ staticBaseEnv->vars.emplace_back(symbols.create(name), baseEnvDispl);
baseEnv.values[baseEnvDispl++] = v;
auto name2 = name.substr(0, 2) == "__" ? name.substr(2) : name;
baseEnv.values[0]->attrs->push_back(Attr(symbols.create(name2), v));
@@ -663,7 +676,7 @@ Value * EvalState::addPrimOp(PrimOp && primOp)
Value * v = allocValue();
v->mkPrimOp(new PrimOp(primOp));
- staticBaseEnv.vars.emplace_back(envName, baseEnvDispl);
+ staticBaseEnv->vars.emplace_back(envName, baseEnvDispl);
baseEnv.values[baseEnvDispl++] = v;
baseEnv.values[0]->attrs->push_back(Attr(symbols.create(primOp.name), v));
return v;
@@ -693,11 +706,137 @@ std::optional<EvalState::Doc> EvalState::getDoc(Value & v)
}
+// just for the current level of StaticEnv, not the whole chain.
+void printStaticEnvBindings(const SymbolTable & st, const StaticEnv & se)
+{
+ std::cout << ANSI_MAGENTA;
+ for (auto & i : se.vars)
+ std::cout << st[i.first] << " ";
+ std::cout << ANSI_NORMAL;
+ std::cout << std::endl;
+}
+
+// just for the current level of Env, not the whole chain.
+void printWithBindings(const SymbolTable & st, const Env & env)
+{
+ if (env.type == Env::HasWithAttrs) {
+ std::cout << "with: ";
+ std::cout << ANSI_MAGENTA;
+ Bindings::iterator j = env.values[0]->attrs->begin();
+ while (j != env.values[0]->attrs->end()) {
+ std::cout << st[j->name] << " ";
+ ++j;
+ }
+ std::cout << ANSI_NORMAL;
+ std::cout << std::endl;
+ }
+}
+
+void printEnvBindings(const SymbolTable & st, const StaticEnv & se, const Env & env, int lvl)
+{
+ std::cout << "Env level " << lvl << std::endl;
+
+ if (se.up && env.up) {
+ std::cout << "static: ";
+ printStaticEnvBindings(st, se);
+ printWithBindings(st, env);
+ std::cout << std::endl;
+ printEnvBindings(st, *se.up, *env.up, ++lvl);
+ } else {
+ std::cout << ANSI_MAGENTA;
+ // for the top level, don't print the double underscore ones;
+ // they are in builtins.
+ for (auto & i : se.vars)
+ if (!hasPrefix(st[i.first], "__"))
+ std::cout << st[i.first] << " ";
+ std::cout << ANSI_NORMAL;
+ std::cout << std::endl;
+ printWithBindings(st, env); // probably nothing there for the top level.
+ std::cout << std::endl;
+
+ }
+}
+
+void printEnvBindings(const EvalState &es, const Expr & expr, const Env & env)
+{
+ // just print the names for now
+ auto se = es.getStaticEnv(expr);
+ if (se)
+ printEnvBindings(es.symbols, *se, env, 0);
+}
+
+void mapStaticEnvBindings(const SymbolTable & st, const StaticEnv & se, const Env & env, ValMap & vm)
+{
+ // add bindings for the next level up first, so that the bindings for this level
+ // override the higher levels.
+ // The top level bindings (builtins) are skipped since they are added for us by initEnv()
+ if (env.up && se.up) {
+ mapStaticEnvBindings(st, *se.up, *env.up, vm);
+
+ if (env.type == Env::HasWithAttrs) {
+ // add 'with' bindings.
+ Bindings::iterator j = env.values[0]->attrs->begin();
+ while (j != env.values[0]->attrs->end()) {
+ vm[st[j->name]] = j->value;
+ ++j;
+ }
+ } else {
+ // iterate through staticenv bindings and add them.
+ for (auto & i : se.vars)
+ vm[st[i.first]] = env.values[i.second];
+ }
+ }
+}
+
+std::unique_ptr<ValMap> mapStaticEnvBindings(const SymbolTable & st, const StaticEnv & se, const Env & env)
+{
+ auto vm = std::make_unique<ValMap>();
+ mapStaticEnvBindings(st, se, env, *vm);
+ return vm;
+}
+
+void EvalState::runDebugRepl(const Error * error, const Env & env, const Expr & expr)
+{
+ // double check we've got the debugRepl function pointer.
+ if (!debugRepl)
+ return;
+
+ auto dts =
+ error && expr.getPos()
+ ? std::make_unique<DebugTraceStacker>(
+ *this,
+ DebugTrace {
+ .pos = error->info().errPos ? *error->info().errPos : positions[expr.getPos()],
+ .expr = expr,
+ .env = env,
+ .hint = error->info().msg,
+ .isError = true
+ })
+ : nullptr;
+
+ if (error)
+ {
+ printError("%s\n\n", error->what());
+
+ if (trylevel > 0 && error->info().level != lvlInfo)
+ printError("This exception occurred in a 'tryEval' call. Use " ANSI_GREEN "--ignore-try" ANSI_NORMAL " to skip these.\n");
+
+ printError(ANSI_BOLD "Starting REPL to allow you to inspect the current state of the evaluator.\n" ANSI_NORMAL);
+ }
+
+ auto se = getStaticEnv(expr);
+ if (se) {
+ auto vm = mapStaticEnvBindings(symbols, *se.get(), env);
+ (debugRepl)(ref<EvalState>(shared_from_this()), *vm);
+ }
+}
+
/* Every "format" object (even temporary) takes up a few hundred bytes
of stack space, which is a real killer in the recursive
evaluator. So here are some helper functions for throwing
exceptions. */
+// *WithTrace
void EvalState::throwTypeErrorWithTrace(
const PosIdx pos,
const char * s,
@@ -759,6 +898,17 @@ void EvalState::throwEvalErrorWithTrace(const char * s, const std::string_view s
throw e;
}
+
+// *WithoutTrace coerce-strings
+
+void EvalState::throwEvalError(const PosIdx pos, const char * s, Env & env, Expr & expr)
+{
+ debugThrow(EvalError({
+ .msg = hintfmt(s),
+ .errPos = positions[pos]
+ }), env, expr);
+}
+
void EvalState::throwEvalError(const PosIdx pos, const Suggestions & suggestions, const char * s, const std::string_view s2) const
{
throw EvalError(ErrorInfo {
@@ -848,6 +998,167 @@ void EvalState::throwMissingArgumentError(const PosIdx pos, const char * s, cons
});
}
+
+// master
+
+void EvalState::throwEvalError(const PosIdx pos, const char * s)
+{
+ debugThrowLastTrace(EvalError({
+ .msg = hintfmt(s),
+ .errPos = positions[pos]
+ }));
+}
+
+void EvalState::throwEvalError(const char * s, const std::string & s2)
+{
+ debugThrowLastTrace(EvalError(s, s2));
+}
+
+void EvalState::throwEvalError(const PosIdx pos, const Suggestions & suggestions, const char * s,
+ const std::string & s2, Env & env, Expr & expr)
+{
+ debugThrow(EvalError(ErrorInfo{
+ .msg = hintfmt(s, s2),
+ .errPos = positions[pos],
+ .suggestions = suggestions,
+ }), env, expr);
+}
+
+void EvalState::throwEvalError(const PosIdx pos, const char * s, const std::string & s2)
+{
+ debugThrowLastTrace(EvalError({
+ .msg = hintfmt(s, s2),
+ .errPos = positions[pos]
+ }));
+}
+
+void EvalState::throwEvalError(const PosIdx pos, const char * s, const std::string & s2, Env & env, Expr & expr)
+{
+ debugThrow(EvalError({
+ .msg = hintfmt(s, s2),
+ .errPos = positions[pos]
+ }), env, expr);
+}
+
+void EvalState::throwEvalError(const char * s, const std::string & s2,
+ const std::string & s3)
+{
+ debugThrowLastTrace(EvalError({
+ .msg = hintfmt(s, s2, s3),
+ .errPos = positions[noPos]
+ }));
+}
+
+void EvalState::throwEvalError(const PosIdx pos, const char * s, const std::string & s2,
+ const std::string & s3)
+{
+ debugThrowLastTrace(EvalError({
+ .msg = hintfmt(s, s2, s3),
+ .errPos = positions[pos]
+ }));
+}
+
+void EvalState::throwEvalError(const PosIdx pos, const char * s, const std::string & s2,
+ const std::string & s3, Env & env, Expr & expr)
+{
+ debugThrow(EvalError({
+ .msg = hintfmt(s, s2, s3),
+ .errPos = positions[pos]
+ }), env, expr);
+}
+
+void EvalState::throwEvalError(const PosIdx p1, const char * s, const Symbol sym, const PosIdx p2, Env & env, Expr & expr)
+{
+ // p1 is where the error occurred; p2 is a position mentioned in the message.
+ debugThrow(EvalError({
+ .msg = hintfmt(s, symbols[sym], positions[p2]),
+ .errPos = positions[p1]
+ }), env, expr);
+}
+
+void EvalState::throwTypeError(const PosIdx pos, const char * s, const Value & v)
+{
+ debugThrowLastTrace(TypeError({
+ .msg = hintfmt(s, showType(v)),
+ .errPos = positions[pos]
+ }));
+}
+
+void EvalState::throwTypeError(const PosIdx pos, const char * s, const Value & v, Env & env, Expr & expr)
+{
+ debugThrow(TypeError({
+ .msg = hintfmt(s, showType(v)),
+ .errPos = positions[pos]
+ }), env, expr);
+}
+
+void EvalState::throwTypeError(const PosIdx pos, const char * s)
+{
+ debugThrowLastTrace(TypeError({
+ .msg = hintfmt(s),
+ .errPos = positions[pos]
+ }));
+}
+
+void EvalState::throwTypeError(const PosIdx pos, const char * s, const ExprLambda & fun,
+ const Symbol s2, Env & env, Expr &expr)
+{
+ debugThrow(TypeError({
+ .msg = hintfmt(s, fun.showNamePos(*this), symbols[s2]),
+ .errPos = positions[pos]
+ }), env, expr);
+}
+
+void EvalState::throwTypeError(const PosIdx pos, const Suggestions & suggestions, const char * s,
+ const ExprLambda & fun, const Symbol s2, Env & env, Expr &expr)
+{
+ debugThrow(TypeError(ErrorInfo {
+ .msg = hintfmt(s, fun.showNamePos(*this), symbols[s2]),
+ .errPos = positions[pos],
+ .suggestions = suggestions,
+ }), env, expr);
+}
+
+void EvalState::throwTypeError(const char * s, const Value & v, Env & env, Expr &expr)
+{
+ debugThrow(TypeError({
+ .msg = hintfmt(s, showType(v)),
+ .errPos = positions[expr.getPos()],
+ }), env, expr);
+}
+
+void EvalState::throwAssertionError(const PosIdx pos, const char * s, const std::string & s1, Env & env, Expr &expr)
+{
+ debugThrow(AssertionError({
+ .msg = hintfmt(s, s1),
+ .errPos = positions[pos]
+ }), env, expr);
+}
+
+void EvalState::throwUndefinedVarError(const PosIdx pos, const char * s, const std::string & s1, Env & env, Expr &expr)
+{
+ debugThrow(UndefinedVarError({
+ .msg = hintfmt(s, s1),
+ .errPos = positions[pos]
+ }), env, expr);
+}
+
+void EvalState::throwMissingArgumentError(const PosIdx pos, const char * s, const std::string & s1, Env & env, Expr &expr)
+{
+ debugThrow(MissingArgumentError({
+ .msg = hintfmt(s, s1),
+ .errPos = positions[pos]
+ }), env, expr);
+}
+
void EvalState::addErrorTrace(Error & e, const char * s, const std::string & s2) const
{
e.addTrace(std::nullopt, s, s2);
@@ -858,6 +1169,32 @@ void EvalState::addErrorTrace(Error & e, const PosIdx pos, const char * s, const
e.addTrace(positions[pos], s, s2);
}
+static std::unique_ptr<DebugTraceStacker> makeDebugTraceStacker(
+ EvalState & state,
+ Expr & expr,
+ Env & env,
+ std::optional<ErrPos> pos,
+ const char * s,
+ const std::string & s2)
+{
+ return std::make_unique<DebugTraceStacker>(state,
+ DebugTrace {
+ .pos = pos,
+ .expr = expr,
+ .env = env,
+ .hint = hintfmt(s, s2),
+ .isError = false
+ });
+}
+
+DebugTraceStacker::DebugTraceStacker(EvalState & evalState, DebugTrace t)
+ : evalState(evalState)
+ , trace(std::move(t))
+{
+ evalState.debugTraces.push_front(trace);
+ if (evalState.debugStop && evalState.debugRepl)
+ evalState.runDebugRepl(nullptr, trace.env, trace.expr);
+}
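
The DebugTraceStacker above is a plain RAII helper: its constructor pushes a frame onto the evaluator's trace list and its destructor pops it again when the scope unwinds. A minimal standalone sketch of that idiom, using only standard C++ stand-ins (the names Trace, traces and the message text are illustrative, not Nix's types):

    #include <iostream>
    #include <list>
    #include <stdexcept>
    #include <string>
    #include <utility>

    struct Trace { std::string hint; };
    static std::list<Trace> traces;            // innermost frame at the front

    struct TraceStacker {
        TraceStacker(std::string hint) { traces.push_front(Trace{std::move(hint)}); }
        ~TraceStacker() { traces.pop_front(); }
    };

    static void failingEval() { throw std::runtime_error("boom"); }

    int main() {
        try {
            TraceStacker t("while evaluating the file 'example.nix'");
            failingEval();
        } catch (const std::exception & e) {
            // By the time we land here the destructor has already popped the
            // frame; that is why the patch runs the debugger hook at throw
            // time (debugThrow / runDebugRepl), while frames are still live.
            std::cout << e.what() << "\n";
        }
    }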
void Value::mkString(std::string_view s)
{
@@ -916,12 +1253,11 @@ inline Value * EvalState::lookupVar(Env * env, const ExprVar & var, bool noEval)
return j->value;
}
if (!env->prevWith)
- throwUndefinedVarError(var.pos, "undefined variable '%1%'", symbols[var.name]);
+ throwUndefinedVarError(var.pos, "undefined variable '%1%'", symbols[var.name], *env, const_cast<ExprVar&>(var));
for (size_t l = env->prevWith; l; --l, env = env->up) ;
}
}
-
void EvalState::mkList(Value & v, size_t size)
{
v.mkList(size);
@@ -1054,8 +1390,19 @@ void EvalState::cacheFile(
fileParseCache[resolvedPath] = e;
try {
- // Enforce that 'flake.nix' is a direct attrset, not a computation.
- if (mustBeTrivial && !(dynamic_cast<ExprAttrs *>(e)))
+ auto dts = debugRepl
+ ? makeDebugTraceStacker(
+ *this,
+ *e,
+ this->baseEnv,
+ e->getPos() ? std::optional(ErrPos(positions[e->getPos()])) : std::nullopt,
+ "while evaluating the file '%1%':", resolvedPath)
+ : nullptr;
+
+ // Enforce that 'flake.nix' is a direct attrset, not a
+ // computation.
+ if (mustBeTrivial &&
+ !(dynamic_cast<ExprAttrs *>(e)))
throwEvalError("file '%s' must be an attribute set", path);
eval(e, v);
} catch (Error & e) {
@@ -1094,7 +1441,7 @@ inline void EvalState::evalAttrs(Env & env, Expr * e, Value & v, const PosIdx po
try {
e->eval(*this, env, v);
if (v.type() != nAttrs)
- throwTypeError("value is %1% while a set was expected", v);
+ throwTypeError("value is %1% while a set was expected", v, env, *e);
} catch (Error & e) {
e.addTrace(positions[pos], errorCtx);
throw;
@@ -1203,7 +1550,7 @@ void ExprAttrs::eval(EvalState & state, Env & env, Value & v)
auto nameSym = state.symbols.create(nameVal.string.s);
Bindings::iterator j = v.attrs->find(nameSym);
if (j != v.attrs->end())
- state.throwEvalError(i.pos, "dynamic attribute '%1%' already defined at %2%", nameSym, j->pos);
+ state.throwEvalError(i.pos, "dynamic attribute '%1%' already defined at %2%", nameSym, j->pos, env, *this);
i.valueExpr->setName(nameSym);
/* Keep sorted order so find can catch duplicates */
@@ -1277,6 +1624,15 @@ void ExprSelect::eval(EvalState & state, Env & env, Value & v)
e->eval(state, env, vTmp);
try {
+ auto dts = state.debugRepl
+ ? makeDebugTraceStacker(
+ state,
+ *this,
+ env,
+ state.positions[pos2],
+ "while evaluating the attribute '%1%'",
+ showAttrPath(state, env, attrPath))
+ : nullptr;
for (auto & i : attrPath) {
state.nrLookups++;
@@ -1299,7 +1655,7 @@ void ExprSelect::eval(EvalState & state, Env & env, Value & v)
state.throwEvalError(
pos,
Suggestions::bestMatches(allAttrNames, state.symbols[name]),
- "attribute '%1%' missing", state.symbols[name]);
+ "attribute '%1%' missing", state.symbols[name], env, *this);
}
}
vAttrs = j->value;
@@ -1351,6 +1707,7 @@ void ExprLambda::eval(EvalState & state, Env & env, Value & v)
v.mkLambda(&env, this);
}
+
void EvalState::callFunction(Value & fun, size_t nrArgs, Value * * args, Value & vRes, const PosIdx pos)
{
auto trace = evalSettings.traceFunctionCalls
@@ -1389,7 +1746,6 @@ void EvalState::callFunction(Value & fun, size_t nrArgs, Value * * args, Value &
if (!lambda.hasFormals())
env2.values[displ++] = args[0];
-
else {
try {
forceAttrs(*args[0], lambda.pos, "while evaluating the value passed for the lambda argument");
@@ -1412,7 +1768,7 @@ void EvalState::callFunction(Value & fun, size_t nrArgs, Value * * args, Value &
throwTypeErrorWithTrace(lambda.pos,
"function '%1%' called without required argument '%2%'",
(lambda.name ? std::string(symbols[lambda.name]) : "anonymous lambda"),
- i.name, pos, "from call site");
+ i.name, pos, "from call site", *fun.lambda.env, lambda);
}
env2.values[displ++] = i.def->maybeThunk(*this, env2);
} else {
@@ -1446,6 +1802,15 @@ void EvalState::callFunction(Value & fun, size_t nrArgs, Value * * args, Value &
/* Evaluate the body. */
try {
+ auto dts = debugRepl
+ ? makeDebugTraceStacker(
+ *this, *lambda.body, env2, positions[lambda.pos],
+ "while evaluating %s",
+ lambda.name
+ ? concatStrings("'", symbols[lambda.name], "'")
+ : "anonymous lambda")
+ : nullptr;
+
lambda.body->eval(*this, env2, vCur);
} catch (Error & e) {
if (loggerSettings.showTrace.get()) {
@@ -1617,8 +1982,8 @@ void EvalState::autoCallFunction(Bindings & args, Value & fun, Value & res)
Nix attempted to evaluate a function as a top level expression; in
this case it must have its arguments supplied either by default
values, or passed explicitly with '--arg' or '--argstr'. See
-https://nixos.org/manual/nix/stable/#ss-functions.)", symbols[i.name]);
-
+https://nixos.org/manual/nix/stable/expressions/language-constructs.html#functions.)", symbols[i.name],
+ *fun.lambda.env, *fun.lambda.fun);
}
}
}
@@ -1641,7 +2006,7 @@ void ExprWith::eval(EvalState & state, Env & env, Value & v)
void ExprIf::eval(EvalState & state, Env & env, Value & v)
{
- // We cheat in the parser, an pass the position of the condition as the position of the if itself.
+ // We cheat in the parser, and pass the position of the condition as the position of the if itself.
(state.evalBool(env, cond, pos, "while evaluating a branch condition") ? then : else_)->eval(state, env, v);
}
@@ -1651,7 +2016,7 @@ void ExprAssert::eval(EvalState & state, Env & env, Value & v)
if (!state.evalBool(env, cond, pos, "in the condition of the assert statement")) {
std::ostringstream out;
cond->show(state.symbols, out);
- state.throwAssertionError(pos, "assertion '%1%' failed", out.str());
+ state.throwAssertionError(pos, "assertion '%1%' failed", out.str(), env, *this);
}
body->eval(state, env, v);
}
@@ -1828,14 +2193,14 @@ void ExprConcatStrings::eval(EvalState & state, Env & env, Value & v)
nf = n;
nf += vTmp.fpoint;
} else
- state.throwEvalError(i_pos, "cannot add %1% to an integer", showType(vTmp));
+ state.throwEvalError(i_pos, "cannot add %1% to an integer", showType(vTmp), env, *this);
} else if (firstType == nFloat) {
if (vTmp.type() == nInt) {
nf += vTmp.integer;
} else if (vTmp.type() == nFloat) {
nf += vTmp.fpoint;
} else
- state.throwEvalError(i_pos, "cannot add %1% to a float", showType(vTmp));
+ state.throwEvalError(i_pos, "cannot add %1% to a float", showType(vTmp), env, *this);
} else {
if (s.empty()) s.reserve(es->size());
/* skip canonization of first path, which would only be not
@@ -1855,7 +2220,7 @@ void ExprConcatStrings::eval(EvalState & state, Env & env, Value & v)
v.mkFloat(nf);
else if (firstType == nPath) {
if (!context.empty())
- state.throwEvalError(pos, "a string that refers to a store path cannot be appended to a path");
+ state.throwEvalError(pos, "a string that refers to a store path cannot be appended to a path", env, *this);
v.mkPath(canonPath(str()));
} else
v.mkStringMove(c_str(), context);
@@ -1882,6 +2247,12 @@ void EvalState::forceValueDeep(Value & v)
if (v.type() == nAttrs) {
for (auto & i : *v.attrs)
try {
+ // If the value is a thunk, we're evaling. Otherwise no trace necessary.
+ auto dts = debugRepl && i.value->isThunk()
+ ? makeDebugTraceStacker(*this, *i.value->thunk.expr, *i.value->thunk.env, positions[i.pos],
+ "while evaluating the attribute '%1%'", symbols[i.name])
+ : nullptr;
+
recurse(*i.value);
} catch (Error & e) {
addErrorTrace(e, i.pos, "while evaluating the attribute '%1%'", symbols[i.name]);
@@ -2092,7 +2463,7 @@ BackedStringView EvalState::coerceToString(const PosIdx pos, Value & v, PathSet
return std::move(*maybeString);
auto i = v.attrs->find(sOutPath);
if (i == v.attrs->end())
- throwTypeErrorWithTrace("cannot coerce %1% to a string", showType(v), pos, errorCtx);
+ throwTypeErrorWithTrace("cannot coerce a set to a string", pos, errorCtx);
return coerceToString(pos, *i->value, context, coerceMore, copyToStore, canonicalizePath, errorCtx);
}
@@ -2100,7 +2471,6 @@ BackedStringView EvalState::coerceToString(const PosIdx pos, Value & v, PathSet
return v.external->coerceToString(positions[pos], context, coerceMore, copyToStore, errorCtx);
if (coerceMore) {
-
/* Note that `false' is represented as an empty string for
shell scripting convenience, just like `null'. */
if (v.type() == nBool && v.boolean) return "1";
@@ -2334,18 +2704,18 @@ void EvalState::printStats()
}
{
auto list = topObj.list("functions");
- for (auto & i : functionCalls) {
+ for (auto & [fun, count] : functionCalls) {
auto obj = list.object();
- if (i.first->name)
- obj.attr("name", (const std::string &) i.first->name);
+ if (fun->name)
+ obj.attr("name", (std::string_view) symbols[fun->name]);
else
obj.attr("name", nullptr);
- if (auto pos = positions[i.first->pos]) {
- obj.attr("file", (const std::string &) pos.file);
+ if (auto pos = positions[fun->pos]) {
+ obj.attr("file", (std::string_view) pos.file);
obj.attr("line", pos.line);
obj.attr("column", pos.column);
}
- obj.attr("count", i.second);
+ obj.attr("count", count);
}
}
{
@@ -2372,7 +2742,7 @@ void EvalState::printStats()
std::string ExternalValueBase::coerceToString(const Pos & pos, PathSet & context, bool copyMore, bool copyToStore, std::string_view errorCtx) const
{
- auto e = TypeError(ErrorInfo {
+ auto e = TypeError({
.msg = hintfmt("cannot coerce %1% to a string", showType())
});
e.addTrace(pos, errorCtx);
diff --git a/src/libexpr/eval.hh b/src/libexpr/eval.hh
index 4c7d92dbc..5fe33025c 100644
--- a/src/libexpr/eval.hh
+++ b/src/libexpr/eval.hh
@@ -13,7 +13,6 @@
#include <unordered_map>
#include <mutex>
-
namespace nix {
@@ -25,7 +24,6 @@ enum RepairFlag : bool;
typedef void (* PrimOpFun) (EvalState & state, const PosIdx pos, Value * * args, Value & v);
-
struct PrimOp
{
PrimOpFun fun;
@@ -35,6 +33,11 @@ struct PrimOp
const char * doc = nullptr;
};
+#if HAVE_BOEHMGC
+ typedef std::map<std::string, Value *, std::less<std::string>, traceable_allocator<std::pair<const std::string, Value *> > > ValMap;
+#else
+ typedef std::map<std::string, Value *> ValMap;
+#endif
struct Env
{
@@ -44,6 +47,10 @@ struct Env
Value * values[0];
};
+void printEnvBindings(const EvalState &es, const Expr & expr, const Env & env);
+void printEnvBindings(const SymbolTable & st, const StaticEnv & se, const Env & env, int lvl = 0);
+
+std::unique_ptr<ValMap> mapStaticEnvBindings(const SymbolTable & st, const StaticEnv & se, const Env & env);
void copyContext(const Value & v, PathSet & context);
@@ -55,6 +62,7 @@ typedef std::map<Path, StorePath> SrcToStore;
std::ostream & printValue(const EvalState & state, std::ostream & str, const Value & v);
std::string printValue(const EvalState & state, const Value & v);
+std::ostream & operator << (std::ostream & os, const ValueType t);
typedef std::pair<std::string, std::string> SearchPathElem;
@@ -69,8 +77,17 @@ struct RegexCache;
std::shared_ptr<RegexCache> makeRegexCache();
+struct DebugTrace {
+ std::optional<ErrPos> pos;
+ const Expr & expr;
+ const Env & env;
+ hintformat hint;
+ bool isError;
+};
+
+void debugError(Error * e, Env & env, Expr & expr);
-class EvalState
+class EvalState : public std::enable_shared_from_this<EvalState>
{
public:
SymbolTable symbols;
@@ -86,7 +103,8 @@ public:
sOutputHash, sOutputHashAlgo, sOutputHashMode,
sRecurseForDerivations,
sDescription, sSelf, sEpsilon, sStartSet, sOperator, sKey, sPath,
- sPrefix;
+ sPrefix,
+ sOutputSpecified;
Symbol sDerivationNix;
/* If set, force copying files to the Nix store even if they
@@ -108,12 +126,56 @@ public:
RootValue vCallFlake = nullptr;
RootValue vImportedDrvToDerivation = nullptr;
+ /* Debugger */
+ void (* debugRepl)(ref<EvalState> es, const ValMap & extraEnv);
+ bool debugStop;
+ bool debugQuit;
+ int trylevel;
+ std::list<DebugTrace> debugTraces;
+ std::map<const Expr*, const std::shared_ptr<const StaticEnv>> exprEnvs;
+ const std::shared_ptr<const StaticEnv> getStaticEnv(const Expr & expr) const
+ {
+ auto i = exprEnvs.find(&expr);
+ if (i != exprEnvs.end())
+ return i->second;
+ else
+ return std::shared_ptr<const StaticEnv>();
+ }
+
+ void runDebugRepl(const Error * error, const Env & env, const Expr & expr);
+
+ template<class E>
+ [[gnu::noinline, gnu::noreturn]]
+ void debugThrow(E && error, const Env & env, const Expr & expr)
+ {
+ if (debugRepl)
+ runDebugRepl(&error, env, expr);
+
+ throw std::move(error);
+ }
+
+ template<class E>
+ [[gnu::noinline, gnu::noreturn]]
+ void debugThrowLastTrace(E && e)
+ {
+ // Call this in the situation where Expr and Env are inaccessible.
+ // The debugger will start in the last context that's in the
+ // DebugTrace stack.
+ if (debugRepl && !debugTraces.empty()) {
+ const DebugTrace & last = debugTraces.front();
+ runDebugRepl(&e, last.env, last.expr);
+ }
+
+ throw std::move(e);
+ }
+
+
private:
SrcToStore srcToStore;
/* A cache from path names to parse trees. */
#if HAVE_BOEHMGC
- typedef std::map<Path, Expr *, std::less<Path>, traceable_allocator<std::pair<const Path, Expr *> > > FileParseCache;
+ typedef std::map<Path, Expr *, std::less<Path>, traceable_allocator<std::pair<const Path, Expr *>>> FileParseCache;
#else
typedef std::map<Path, Expr *> FileParseCache;
#endif
@@ -121,7 +183,7 @@ private:
/* A cache from path names to values. */
#if HAVE_BOEHMGC
- typedef std::map<Path, Value, std::less<Path>, traceable_allocator<std::pair<const Path, Value> > > FileEvalCache;
+ typedef std::map<Path, Value, std::less<Path>, traceable_allocator<std::pair<const Path, Value>>> FileEvalCache;
#else
typedef std::map<Path, Value> FileEvalCache;
#endif
@@ -184,10 +246,10 @@ public:
/* Parse a Nix expression from the specified file. */
Expr * parseExprFromFile(const Path & path);
- Expr * parseExprFromFile(const Path & path, StaticEnv & staticEnv);
+ Expr * parseExprFromFile(const Path & path, std::shared_ptr<StaticEnv> & staticEnv);
/* Parse a Nix expression from the specified string. */
- Expr * parseExprFromString(std::string s, const Path & basePath, StaticEnv & staticEnv);
+ Expr * parseExprFromString(std::string s, const Path & basePath, std::shared_ptr<StaticEnv> & staticEnv);
Expr * parseExprFromString(std::string s, const Path & basePath);
Expr * parseStdin();
@@ -197,7 +259,7 @@ public:
trivial (i.e. doesn't require arbitrary computation). */
void evalFile(const Path & path, Value & v, bool mustBeTrivial = false);
- /* Like `cacheFile`, but with an already parsed expression. */
+ /* Like `evalFile`, but with an already parsed expression. */
void cacheFile(
const Path & path,
const Path & resolvedPath,
@@ -253,32 +315,123 @@ public:
std::string_view forceString(Value & v, PathSet & context, const PosIdx pos, std::string_view errorCtx);
std::string_view forceStringNoCtx(Value & v, const PosIdx pos, std::string_view errorCtx);
- [[gnu::noinline, gnu::noreturn]] void throwEvalError(const PosIdx pos, const char * s) const;
- [[gnu::noinline, gnu::noreturn]] void throwEvalError(const PosIdx pos, const char * s, const Value & v) const;
- [[gnu::noinline, gnu::noreturn]] void throwEvalError(const char * s, const std::string_view s2) const;
- [[gnu::noinline, gnu::noreturn]] void throwEvalError(const PosIdx pos, const Suggestions & suggestions, const char * s, const std::string_view s2) const;
- [[gnu::noinline, gnu::noreturn]] void throwEvalError(const PosIdx pos, const char * s, const std::string_view s2) const;
- [[gnu::noinline, gnu::noreturn]] void throwEvalError(const char * s, const std::string_view s2, const std::string_view s3) const;
- [[gnu::noinline, gnu::noreturn]] void throwEvalError(const PosIdx pos, const char * s, const std::string & s2, const std::string & s3) const;
- [[gnu::noinline, gnu::noreturn]] void throwEvalError(const PosIdx p1, const char * s, const Symbol sym, const PosIdx p2) const;
- [[gnu::noinline, gnu::noreturn]] void throwEvalError(const char * s, const Value & v) const;
- [[gnu::noinline, gnu::noreturn]] void throwTypeError(const PosIdx pos, const char * s) const;
- [[gnu::noinline, gnu::noreturn]] void throwTypeError(const PosIdx pos, const char * s, const Value & v) const;
- [[gnu::noinline, gnu::noreturn]] void throwTypeError(const PosIdx pos, const char * s, const ExprLambda & fun, const Symbol s2) const;
- [[gnu::noinline, gnu::noreturn]] void throwTypeError(const PosIdx pos, const Suggestions & suggestions, const char * s, const ExprLambda & fun, const Symbol s2) const;
- [[gnu::noinline, gnu::noreturn]] void throwTypeError(const char * s, const Value & v) const;
- [[gnu::noinline, gnu::noreturn]] void throwTypeErrorWithTrace(const PosIdx, const char*, std::string_view, const nix::Symbol&, const PosIdx, std::string_view) const;
- [[gnu::noinline, gnu::noreturn]] void throwTypeErrorWithTrace(const PosIdx, const nix::Suggestions&, const char*, std::string_view, const nix::Symbol&, const PosIdx, std::string_view) const;
- [[gnu::noinline, gnu::noreturn]] void throwTypeErrorWithTrace(const char*, std::string_view, const Pos &, std::string_view) const;
- [[gnu::noinline, gnu::noreturn]] void throwTypeErrorWithTrace(const char*, std::string_view, const PosIdx, std::string_view) const;
- [[gnu::noinline, gnu::noreturn]] void throwEvalErrorWithTrace(const char*, std::string_view, const PosIdx, std::string_view) const;
- [[gnu::noinline, gnu::noreturn]] void throwEvalErrorWithTrace(const char*, std::string_view, std::string_view, nix::PosIdx, std::string_view) const;
- [[gnu::noinline, gnu::noreturn]] void throwAssertionError(const PosIdx pos, const char * s, const std::string & s1) const;
- [[gnu::noinline, gnu::noreturn]] void throwUndefinedVarError(const PosIdx pos, const char * s, const std::string & s1) const;
- [[gnu::noinline, gnu::noreturn]] void throwMissingArgumentError(const PosIdx pos, const char * s, const std::string & s1) const;
-
- [[gnu::noinline]] void addErrorTrace(Error & e, const char * s, const std::string & s2) const;
- [[gnu::noinline]] void addErrorTrace(Error & e, const PosIdx pos, const char * s, const std::string & s2) const;
+ // coerce-strings
+ [[gnu::noinline, gnu::noreturn]]
+ void throwEvalError(const PosIdx pos, const char * s) const;
+ [[gnu::noinline, gnu::noreturn]]
+ void throwEvalError(const PosIdx pos, const char * s, const Value & v) const;
+ [[gnu::noinline, gnu::noreturn]]
+ void throwEvalError(const char * s, const std::string_view s2) const;
+ [[gnu::noinline, gnu::noreturn]]
+ void throwEvalError(const PosIdx pos, const Suggestions & suggestions, const char * s, const std::string_view s2) const;
+ [[gnu::noinline, gnu::noreturn]]
+ void throwEvalError(const PosIdx pos, const char * s, const std::string_view s2) const;
+ [[gnu::noinline, gnu::noreturn]]
+ void throwEvalError(const char * s, const std::string_view s2, const std::string_view s3) const;
+ [[gnu::noinline, gnu::noreturn]]
+ void throwEvalError(const PosIdx pos, const char * s, const std::string & s2, const std::string & s3) const;
+ [[gnu::noinline, gnu::noreturn]]
+ void throwEvalError(const PosIdx p1, const char * s, const Symbol sym, const PosIdx p2) const;
+ [[gnu::noinline, gnu::noreturn]]
+ void throwEvalError(const char * s, const Value & v) const;
+ [[gnu::noinline, gnu::noreturn]]
+ void throwTypeError(const PosIdx pos, const char * s) const;
+ [[gnu::noinline, gnu::noreturn]]
+ void throwTypeError(const PosIdx pos, const char * s, const Value & v) const;
+ [[gnu::noinline, gnu::noreturn]]
+ void throwTypeError(const PosIdx pos, const char * s, const ExprLambda & fun, const Symbol s2) const;
+ [[gnu::noinline, gnu::noreturn]]
+ void throwTypeError(const PosIdx pos, const Suggestions & suggestions, const char * s, const ExprLambda & fun, const Symbol s2) const;
+ [[gnu::noinline, gnu::noreturn]]
+ void throwTypeError(const char * s, const Value & v) const;
+ [[gnu::noinline, gnu::noreturn]]
+ void throwTypeErrorWithTrace(const PosIdx, const char*, std::string_view, const nix::Symbol&, const PosIdx, std::string_view) const;
+ [[gnu::noinline, gnu::noreturn]]
+ void throwTypeErrorWithTrace(const PosIdx, const nix::Suggestions&, const char*, std::string_view, const nix::Symbol&, const PosIdx, std::string_view) const;
+ [[gnu::noinline, gnu::noreturn]]
+ void throwTypeErrorWithTrace(const char*, std::string_view, const Pos &, std::string_view) const;
+ [[gnu::noinline, gnu::noreturn]]
+ void throwTypeErrorWithTrace(const char*, std::string_view, const PosIdx, std::string_view) const;
+ [[gnu::noinline, gnu::noreturn]]
+ void throwEvalErrorWithTrace(const char*, std::string_view, const PosIdx, std::string_view) const;
+ [[gnu::noinline, gnu::noreturn]]
+ void throwEvalErrorWithTrace(const char*, std::string_view, std::string_view, nix::PosIdx, std::string_view) const;
+ [[gnu::noinline, gnu::noreturn]]
+ void throwAssertionError(const PosIdx pos, const char * s, const std::string & s1) const;
+ [[gnu::noinline, gnu::noreturn]]
+ void throwUndefinedVarError(const PosIdx pos, const char * s, const std::string & s1) const;
+ [[gnu::noinline, gnu::noreturn]]
+ void throwMissingArgumentError(const PosIdx pos, const char * s, const std::string & s1) const;
+
+ // origin/master
+ [[gnu::noinline, gnu::noreturn]]
+ void throwEvalError(const PosIdx pos, const char * s);
+ [[gnu::noinline, gnu::noreturn]]
+ void throwEvalError(const PosIdx pos, const char * s,
+ Env & env, Expr & expr);
+ [[gnu::noinline, gnu::noreturn]]
+ void throwEvalError(const char * s, const std::string & s2);
+ [[gnu::noinline, gnu::noreturn]]
+ void throwEvalError(const PosIdx pos, const char * s, const std::string & s2);
+ [[gnu::noinline, gnu::noreturn]]
+ void throwEvalError(const char * s, const std::string & s2,
+ Env & env, Expr & expr);
+ [[gnu::noinline, gnu::noreturn]]
+ void throwEvalError(const PosIdx pos, const char * s, const std::string & s2,
+ Env & env, Expr & expr);
+ [[gnu::noinline, gnu::noreturn]]
+ void throwEvalError(const char * s, const std::string & s2, const std::string & s3,
+ Env & env, Expr & expr);
+ [[gnu::noinline, gnu::noreturn]]
+ void throwEvalError(const PosIdx pos, const char * s, const std::string & s2, const std::string & s3,
+ Env & env, Expr & expr);
+ [[gnu::noinline, gnu::noreturn]]
+ void throwEvalError(const PosIdx pos, const char * s, const std::string & s2, const std::string & s3);
+ [[gnu::noinline, gnu::noreturn]]
+ void throwEvalError(const char * s, const std::string & s2, const std::string & s3);
+ [[gnu::noinline, gnu::noreturn]]
+ void throwEvalError(const PosIdx pos, const Suggestions & suggestions, const char * s, const std::string & s2,
+ Env & env, Expr & expr);
+ [[gnu::noinline, gnu::noreturn]]
+ void throwEvalError(const PosIdx p1, const char * s, const Symbol sym, const PosIdx p2,
+ Env & env, Expr & expr);
+
+ [[gnu::noinline, gnu::noreturn]]
+ void throwTypeError(const PosIdx pos, const char * s, const Value & v);
+ [[gnu::noinline, gnu::noreturn]]
+ void throwTypeError(const PosIdx pos, const char * s, const Value & v,
+ Env & env, Expr & expr);
+ [[gnu::noinline, gnu::noreturn]]
+ void throwTypeError(const PosIdx pos, const char * s);
+ [[gnu::noinline, gnu::noreturn]]
+ void throwTypeError(const PosIdx pos, const char * s,
+ Env & env, Expr & expr);
+ [[gnu::noinline, gnu::noreturn]]
+ void throwTypeError(const PosIdx pos, const char * s, const ExprLambda & fun, const Symbol s2,
+ Env & env, Expr & expr);
+ [[gnu::noinline, gnu::noreturn]]
+ void throwTypeError(const PosIdx pos, const Suggestions & suggestions, const char * s, const ExprLambda & fun, const Symbol s2,
+ Env & env, Expr & expr);
+ [[gnu::noinline, gnu::noreturn]]
+ void throwTypeError(const char * s, const Value & v,
+ Env & env, Expr & expr);
+
+ [[gnu::noinline, gnu::noreturn]]
+ void throwAssertionError(const PosIdx pos, const char * s, const std::string & s1,
+ Env & env, Expr & expr);
+
+ [[gnu::noinline, gnu::noreturn]]
+ void throwUndefinedVarError(const PosIdx pos, const char * s, const std::string & s1,
+ Env & env, Expr & expr);
+
+ [[gnu::noinline, gnu::noreturn]]
+ void throwMissingArgumentError(const PosIdx pos, const char * s, const std::string & s1,
+ Env & env, Expr & expr);
+
+ [[gnu::noinline]]
+ void addErrorTrace(Error & e, const char * s, const std::string & s2) const;
+ [[gnu::noinline]]
+ void addErrorTrace(Error & e, const PosIdx pos, const char * s, const std::string & s2) const;
public:
/* Return true iff the value `v' denotes a derivation (i.e. a
@@ -314,7 +467,7 @@ public:
Env & baseEnv;
/* The same, but used during parsing to resolve variables. */
- StaticEnv staticBaseEnv; // !!! should be private
+ std::shared_ptr<StaticEnv> staticBaseEnv; // !!! should be private
private:
@@ -355,7 +508,7 @@ private:
friend struct ExprLet;
Expr * parse(char * text, size_t length, FileOrigin origin, const PathView path,
- const PathView basePath, StaticEnv & staticEnv);
+ const PathView basePath, std::shared_ptr<StaticEnv> & staticEnv);
public:
@@ -450,6 +603,16 @@ private:
friend struct Value;
};
+struct DebugTraceStacker {
+ DebugTraceStacker(EvalState & evalState, DebugTrace t);
+ ~DebugTraceStacker()
+ {
+ // assert(evalState.debugTraces.front() == trace);
+ evalState.debugTraces.pop_front();
+ }
+ EvalState & evalState;
+ DebugTrace trace;
+};
/* Return a string representing the type of the value `v'. */
std::string_view showType(ValueType type);
@@ -534,6 +697,15 @@ struct EvalSettings : Config
Setting<bool> useEvalCache{this, true, "eval-cache",
"Whether to use the flake evaluation cache."};
+
+ Setting<bool> ignoreExceptionsDuringTry{this, false, "ignore-try",
+ R"(
+ If set to true, ignore exceptions inside 'tryEval' calls when evaluating nix expressions in
+ debug mode (using the --debugger flag). By default the debugger will pause on all exceptions.
+ )"};
+
+ Setting<bool> traceVerbose{this, false, "trace-verbose",
+ "Whether `builtins.traceVerbose` should trace its first argument when evaluated."};
};
extern EvalSettings evalSettings;
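
The debugThrow and debugThrowLastTrace templates declared above follow a single pattern: optionally hand the error to a debugger hook before it propagates. A self-contained sketch of that shape, with a std::function standing in for the debugRepl callback (a simplification under assumed names, not EvalState's real interface):

    #include <functional>
    #include <stdexcept>
    #include <utility>

    struct Debugger {
        // hook invoked before the exception propagates; empty means "no debugger"
        std::function<void(const std::exception &)> onError;

        template<class E>
        [[noreturn]] void debugThrow(E && error) {
            if (onError)
                onError(error);      // e.g. drop into a REPL, inspect state
            throw std::move(error);
        }
    };

    int main() {
        Debugger d;
        d.onError = [](const std::exception &) { /* inspect state, then return */ };
        try {
            d.debugThrow(std::runtime_error("undefined variable 'x'"));
        } catch (const std::exception &) { }
    }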
diff --git a/src/libexpr/flake/config.cc b/src/libexpr/flake/config.cc
index a811e59a1..3e9d264b4 100644
--- a/src/libexpr/flake/config.cc
+++ b/src/libexpr/flake/config.cc
@@ -31,7 +31,7 @@ static void writeTrustedList(const TrustedList & trustedList)
void ConfigFile::apply()
{
- std::set<std::string> whitelist{"bash-prompt", "bash-prompt-suffix", "flake-registry"};
+ std::set<std::string> whitelist{"bash-prompt", "bash-prompt-prefix", "bash-prompt-suffix", "flake-registry"};
for (auto & [name, value] : settings) {
@@ -50,13 +50,11 @@ void ConfigFile::apply()
else
assert(false);
- if (!whitelist.count(baseName)) {
- auto trustedList = readTrustedList();
-
+ if (!whitelist.count(baseName) && !nix::fetchSettings.acceptFlakeConfig) {
bool trusted = false;
- if (nix::fetchSettings.acceptFlakeConfig){
- trusted = true;
- } else if (auto saved = get(get(trustedList, name).value_or(std::map<std::string, bool>()), valueS)) {
+ auto trustedList = readTrustedList();
+ auto tlname = get(trustedList, name);
+ if (auto saved = tlname ? get(*tlname, valueS) : nullptr) {
trusted = *saved;
warn("Using saved setting for '%s = %s' from ~/.local/share/nix/trusted-settings.json.", name,valueS);
} else {
@@ -69,7 +67,6 @@ void ConfigFile::apply()
writeTrustedList(trustedList);
}
}
-
if (!trusted) {
warn("ignoring untrusted flake configuration setting '%s'", name);
continue;
diff --git a/src/libexpr/flake/flake.cc b/src/libexpr/flake/flake.cc
index 29fd51dd1..06562f614 100644
--- a/src/libexpr/flake/flake.cc
+++ b/src/libexpr/flake/flake.cc
@@ -341,7 +341,6 @@ LockedFlake lockFlake(
debug("old lock file: %s", oldLockFile);
- // FIXME: check whether all overrides are used.
std::map<InputPath, FlakeInput> overrides;
std::set<InputPath> overridesUsed, updatesUsed;
@@ -384,6 +383,18 @@ LockedFlake lockFlake(
}
}
+ /* Check whether this input has overrides for a
+ non-existent input. */
+ for (auto [inputPath, inputOverride] : overrides) {
+ auto inputPath2(inputPath);
+ auto follow = inputPath2.back();
+ inputPath2.pop_back();
+ if (inputPath2 == inputPathPrefix && !flakeInputs.count(follow))
+ warn(
+ "input '%s' has an override for a non-existent input '%s'",
+ printInputPath(inputPathPrefix), follow);
+ }
+
/* Go over the flake inputs, resolve/fetch them if
necessary (i.e. if they're new or the flakeref changed
from what's in the lock file). */
@@ -513,6 +524,15 @@ LockedFlake lockFlake(
if (!lockFlags.allowMutable && !input.ref->input.isLocked())
throw Error("cannot update flake input '%s' in pure mode", inputPathS);
+ /* Note: in case of an --override-input, we use
+ the *original* ref (input2.ref) for the
+ "original" field, rather than the
+ override. This ensures that the override isn't
+ nuked the next time we update the lock
+ file. That is, overrides are sticky unless you
+ use --no-write-lock-file. */
+ auto ref = input2.ref ? *input2.ref : *input.ref;
+
if (input.isFlake) {
Path localPath = parentPath;
FlakeRef localRef = *input.ref;
@@ -524,15 +544,7 @@ LockedFlake lockFlake(
auto inputFlake = getFlake(state, localRef, useRegistries, flakeCache, inputPath);
- /* Note: in case of an --override-input, we use
- the *original* ref (input2.ref) for the
- "original" field, rather than the
- override. This ensures that the override isn't
- nuked the next time we update the lock
- file. That is, overrides are sticky unless you
- use --no-write-lock-file. */
- auto childNode = std::make_shared<LockedNode>(
- inputFlake.lockedRef, input2.ref ? *input2.ref : *input.ref);
+ auto childNode = std::make_shared<LockedNode>(inputFlake.lockedRef, ref);
node->inputs.insert_or_assign(id, childNode);
@@ -560,7 +572,7 @@ LockedFlake lockFlake(
auto [sourceInfo, resolvedRef, lockedRef] = fetchOrSubstituteTree(
state, *input.ref, useRegistries, flakeCache);
node->inputs.insert_or_assign(id,
- std::make_shared<LockedNode>(lockedRef, *input.ref, false));
+ std::make_shared<LockedNode>(lockedRef, ref, false));
}
}
@@ -723,6 +735,7 @@ static void prim_getFlake(EvalState & state, const PosIdx pos, Value * * args, V
lockFlake(state, flakeRef,
LockFlags {
.updateLockFile = false,
+ .writeLockFile = false,
.useRegistries = !evalSettings.pureEval && fetchSettings.useRegistries,
.allowMutable = !evalSettings.pureEval,
}),
diff --git a/src/libexpr/flake/flakeref.cc b/src/libexpr/flake/flakeref.cc
index c1eae413f..eede493f8 100644
--- a/src/libexpr/flake/flakeref.cc
+++ b/src/libexpr/flake/flakeref.cc
@@ -176,7 +176,7 @@ std::pair<FlakeRef, std::string> parseFlakeRefWithFragment(
parsedURL.query.insert_or_assign("shallow", "1");
return std::make_pair(
- FlakeRef(Input::fromURL(parsedURL), get(parsedURL.query, "dir").value_or("")),
+ FlakeRef(Input::fromURL(parsedURL), getOr(parsedURL.query, "dir", "")),
fragment);
}
@@ -189,7 +189,7 @@ std::pair<FlakeRef, std::string> parseFlakeRefWithFragment(
if (!hasPrefix(path, "/"))
throw BadURL("flake reference '%s' is not an absolute path", url);
auto query = decodeQuery(match[2]);
- path = canonPath(path + "/" + get(query, "dir").value_or(""));
+ path = canonPath(path + "/" + getOr(query, "dir", ""));
}
fetchers::Attrs attrs;
@@ -208,7 +208,7 @@ std::pair<FlakeRef, std::string> parseFlakeRefWithFragment(
input.parent = baseDir;
return std::make_pair(
- FlakeRef(std::move(input), get(parsedURL.query, "dir").value_or("")),
+ FlakeRef(std::move(input), getOr(parsedURL.query, "dir", "")),
fragment);
}
}
@@ -238,4 +238,15 @@ std::pair<fetchers::Tree, FlakeRef> FlakeRef::fetchTree(ref<Store> store) const
return {std::move(tree), FlakeRef(std::move(lockedInput), subdir)};
}
+std::tuple<FlakeRef, std::string, OutputsSpec> parseFlakeRefWithFragmentAndOutputsSpec(
+ const std::string & url,
+ const std::optional<Path> & baseDir,
+ bool allowMissing,
+ bool isFlake)
+{
+ auto [prefix, outputsSpec] = parseOutputsSpec(url);
+ auto [flakeRef, fragment] = parseFlakeRefWithFragment(prefix, baseDir, allowMissing, isFlake);
+ return {std::move(flakeRef), fragment, outputsSpec};
+}
+
}
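
The parseFlakeRefWithFragment changes above replace get(map, key).value_or(default) with a getOr(map, key, default) helper. A plausible shape for such a helper over std::map (the real one presumably lives in Nix's utility headers; this version is only an assumption for illustration):

    #include <cassert>
    #include <map>
    #include <string>

    template<class K, class V>
    V getOr(const std::map<K, V> & m, const K & key, const V & dflt)
    {
        auto i = m.find(key);
        return i == m.end() ? dflt : i->second;
    }

    int main() {
        std::map<std::string, std::string> query{{"dir", "subdir"}};
        assert(getOr(query, std::string("dir"), std::string{}) == "subdir");
        assert(getOr(query, std::string("ref"), std::string{}).empty());
    }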
diff --git a/src/libexpr/flake/flakeref.hh b/src/libexpr/flake/flakeref.hh
index 1fddfd9a0..fe4f67193 100644
--- a/src/libexpr/flake/flakeref.hh
+++ b/src/libexpr/flake/flakeref.hh
@@ -3,6 +3,7 @@
#include "types.hh"
#include "hash.hh"
#include "fetchers.hh"
+#include "path-with-outputs.hh"
#include <variant>
@@ -27,7 +28,7 @@ typedef std::string FlakeId;
* object that fetcher generates (usually via
* FlakeRef::fromAttrs(attrs) or parseFlakeRef(url) calls).
*
- * The actual fetch not have been performed yet (i.e. a FlakeRef may
+ * The actual fetch may not have been performed yet (i.e. a FlakeRef may
* be lazy), but the fetcher can be invoked at any time via the
* FlakeRef to ensure the store is populated with this input.
*/
@@ -79,4 +80,11 @@ std::pair<FlakeRef, std::string> parseFlakeRefWithFragment(
std::optional<std::pair<FlakeRef, std::string>> maybeParseFlakeRefWithFragment(
const std::string & url, const std::optional<Path> & baseDir = {});
+std::tuple<FlakeRef, std::string, OutputsSpec> parseFlakeRefWithFragmentAndOutputsSpec(
+ const std::string & url,
+ const std::optional<Path> & baseDir = {},
+ bool allowMissing = false,
+ bool isFlake = true);
+
+
}
diff --git a/src/libexpr/get-drvs.cc b/src/libexpr/get-drvs.cc
index f1a59c379..8a1f9311d 100644
--- a/src/libexpr/get-drvs.cc
+++ b/src/libexpr/get-drvs.cc
@@ -34,7 +34,7 @@ DrvInfo::DrvInfo(EvalState & state, ref<Store> store, const std::string & drvPat
outputName =
selectedOutputs.empty()
- ? get(drv.env, "outputName").value_or("out")
+ ? getOr(drv.env, "outputName", "out")
: *selectedOutputs.begin();
auto i = drv.outputs.find(outputName);
@@ -132,23 +132,36 @@ DrvInfo::Outputs DrvInfo::queryOutputs(bool withPaths, bool onlyOutputsToInstall
} else
outputs.emplace("out", withPaths ? std::optional{queryOutPath()} : std::nullopt);
}
+
if (!onlyOutputsToInstall || !attrs)
return outputs;
- /* Check for `meta.outputsToInstall` and return `outputs` reduced to that. */
- const Value * outTI = queryMeta("outputsToInstall");
- if (!outTI) return outputs;
- const auto errMsg = Error("this derivation has bad 'meta.outputsToInstall'");
- /* ^ this shows during `nix-env -i` right under the bad derivation */
- if (!outTI->isList()) throw errMsg;
- Outputs result;
- for (auto elem : outTI->listItems()) {
- if (elem->type() != nString) throw errMsg;
- auto out = outputs.find(elem->string.s);
- if (out == outputs.end()) throw errMsg;
+ Bindings::iterator i;
+ if (attrs && (i = attrs->find(state->sOutputSpecified)) != attrs->end() && state->forceBool(*i->value, i->pos)) {
+ Outputs result;
+ auto out = outputs.find(queryOutputName());
+ if (out == outputs.end())
+ throw Error("derivation does not have output '%s'", queryOutputName());
result.insert(*out);
+ return result;
+ }
+
+ else {
+ /* Check for `meta.outputsToInstall` and return `outputs` reduced to that. */
+ const Value * outTI = queryMeta("outputsToInstall");
+ if (!outTI) return outputs;
+ const auto errMsg = Error("this derivation has bad 'meta.outputsToInstall'");
+ /* ^ this shows during `nix-env -i` right under the bad derivation */
+ if (!outTI->isList()) throw errMsg;
+ Outputs result;
+ for (auto elem : outTI->listItems()) {
+ if (elem->type() != nString) throw errMsg;
+ auto out = outputs.find(elem->string.s);
+ if (out == outputs.end()) throw errMsg;
+ result.insert(*out);
+ }
+ return result;
}
- return result;
}
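
The queryOutputs change above prefers an explicit outputSpecified request and only then falls back to meta.outputsToInstall. A simplified, standalone sketch of that selection logic over plain std types (names and error messages here are illustrative, not DrvInfo's API):

    #include <cassert>
    #include <map>
    #include <optional>
    #include <stdexcept>
    #include <string>
    #include <vector>

    using Outputs = std::map<std::string, std::string>;   // output name -> path

    Outputs selectOutputs(const Outputs & outputs,
                          bool outputSpecified,
                          const std::string & requested,
                          const std::optional<std::vector<std::string>> & outputsToInstall)
    {
        if (outputSpecified) {
            // an explicitly requested output wins over meta.outputsToInstall
            auto i = outputs.find(requested);
            if (i == outputs.end())
                throw std::runtime_error("derivation does not have output '" + requested + "'");
            return {{i->first, i->second}};
        }
        if (!outputsToInstall)
            return outputs;
        Outputs result;
        for (auto & name : *outputsToInstall) {
            auto i = outputs.find(name);
            if (i == outputs.end())
                throw std::runtime_error("bad 'meta.outputsToInstall'");
            result.insert(*i);
        }
        return result;
    }

    int main() {
        Outputs outs{{"out", "/nix/store/...-hello"}, {"dev", "/nix/store/...-hello-dev"}};
        assert(selectOutputs(outs, true, "dev", std::nullopt).count("dev") == 1);
        assert(selectOutputs(outs, false, "out", std::vector<std::string>{"out"}).size() == 1);
    }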
diff --git a/src/libexpr/get-drvs.hh b/src/libexpr/get-drvs.hh
index 7cc1abef2..bbd2d3c47 100644
--- a/src/libexpr/get-drvs.hh
+++ b/src/libexpr/get-drvs.hh
@@ -73,7 +73,7 @@ public:
#if HAVE_BOEHMGC
-typedef std::list<DrvInfo, traceable_allocator<DrvInfo> > DrvInfos;
+typedef std::list<DrvInfo, traceable_allocator<DrvInfo>> DrvInfos;
#else
typedef std::list<DrvInfo> DrvInfos;
#endif
diff --git a/src/libexpr/lexer.l b/src/libexpr/lexer.l
index 4c28b976e..462b3b602 100644
--- a/src/libexpr/lexer.l
+++ b/src/libexpr/lexer.l
@@ -198,7 +198,7 @@ or { return OR_KW; }
(...|\$[^\{\"\\]|\\.|\$\\.)+ would have triggered.
This is technically invalid, but we leave the problem to the
parser who fails with exact location. */
- return STR;
+ return EOF;
}
\'\'(\ *\n)? { PUSH_STATE(IND_STRING); return IND_STRING_OPEN; }
diff --git a/src/libexpr/nixexpr.cc b/src/libexpr/nixexpr.cc
index c529fdc89..7c623a07d 100644
--- a/src/libexpr/nixexpr.cc
+++ b/src/libexpr/nixexpr.cc
@@ -6,10 +6,8 @@
#include <cstdlib>
-
namespace nix {
-
/* Displaying abstract syntax trees. */
static void showString(std::ostream & str, std::string_view s)
@@ -294,35 +292,46 @@ std::string showAttrPath(const SymbolTable & symbols, const AttrPath & attrPath)
/* Computing levels/displacements for variables. */
-void Expr::bindVars(const EvalState & es, const StaticEnv & env)
+void Expr::bindVars(EvalState & es, const std::shared_ptr<const StaticEnv> & env)
{
abort();
}
-void ExprInt::bindVars(const EvalState & es, const StaticEnv & env)
+void ExprInt::bindVars(EvalState & es, const std::shared_ptr<const StaticEnv> & env)
{
+ if (es.debugRepl)
+ es.exprEnvs.insert(std::make_pair(this, env));
}
-void ExprFloat::bindVars(const EvalState & es, const StaticEnv & env)
+void ExprFloat::bindVars(EvalState & es, const std::shared_ptr<const StaticEnv> & env)
{
+ if (es.debugRepl)
+ es.exprEnvs.insert(std::make_pair(this, env));
}
-void ExprString::bindVars(const EvalState & es, const StaticEnv & env)
+void ExprString::bindVars(EvalState & es, const std::shared_ptr<const StaticEnv> & env)
{
+ if (es.debugRepl)
+ es.exprEnvs.insert(std::make_pair(this, env));
}
-void ExprPath::bindVars(const EvalState & es, const StaticEnv & env)
+void ExprPath::bindVars(EvalState & es, const std::shared_ptr<const StaticEnv> & env)
{
+ if (es.debugRepl)
+ es.exprEnvs.insert(std::make_pair(this, env));
}
-void ExprVar::bindVars(const EvalState & es, const StaticEnv & env)
+void ExprVar::bindVars(EvalState & es, const std::shared_ptr<const StaticEnv> & env)
{
+ if (es.debugRepl)
+ es.exprEnvs.insert(std::make_pair(this, env));
+
/* Check whether the variable appears in the environment. If so,
set its level and displacement. */
const StaticEnv * curEnv;
Level level;
int withLevel = -1;
- for (curEnv = &env, level = 0; curEnv; curEnv = curEnv->up, level++) {
+ for (curEnv = env.get(), level = 0; curEnv; curEnv = curEnv->up, level++) {
if (curEnv->isWith) {
if (withLevel == -1) withLevel = level;
} else {
@@ -348,8 +357,11 @@ void ExprVar::bindVars(const EvalState & es, const StaticEnv & env)
this->level = withLevel;
}
-void ExprSelect::bindVars(const EvalState & es, const StaticEnv & env)
+void ExprSelect::bindVars(EvalState & es, const std::shared_ptr<const StaticEnv> & env)
{
+ if (es.debugRepl)
+ es.exprEnvs.insert(std::make_pair(this, env));
+
e->bindVars(es, env);
if (def) def->bindVars(es, env);
for (auto & i : attrPath)
@@ -357,64 +369,78 @@ void ExprSelect::bindVars(const EvalState & es, const StaticEnv & env)
i.expr->bindVars(es, env);
}
-void ExprOpHasAttr::bindVars(const EvalState & es, const StaticEnv & env)
+void ExprOpHasAttr::bindVars(EvalState & es, const std::shared_ptr<const StaticEnv> & env)
{
+ if (es.debugRepl)
+ es.exprEnvs.insert(std::make_pair(this, env));
+
e->bindVars(es, env);
for (auto & i : attrPath)
if (!i.symbol)
i.expr->bindVars(es, env);
}
-void ExprAttrs::bindVars(const EvalState & es, const StaticEnv & env)
+void ExprAttrs::bindVars(EvalState & es, const std::shared_ptr<const StaticEnv> & env)
{
- const StaticEnv * dynamicEnv = &env;
- StaticEnv newEnv(false, &env, recursive ? attrs.size() : 0);
+ if (es.debugRepl)
+ es.exprEnvs.insert(std::make_pair(this, env));
if (recursive) {
- dynamicEnv = &newEnv;
+ auto newEnv = std::make_shared<StaticEnv>(false, env.get(), recursive ? attrs.size() : 0);
Displacement displ = 0;
for (auto & i : attrs)
- newEnv.vars.emplace_back(i.first, i.second.displ = displ++);
+ newEnv->vars.emplace_back(i.first, i.second.displ = displ++);
// No need to sort newEnv since attrs is in sorted order.
for (auto & i : attrs)
i.second.e->bindVars(es, i.second.inherited ? env : newEnv);
- }
- else
+ for (auto & i : dynamicAttrs) {
+ i.nameExpr->bindVars(es, newEnv);
+ i.valueExpr->bindVars(es, newEnv);
+ }
+ }
+ else {
for (auto & i : attrs)
i.second.e->bindVars(es, env);
- for (auto & i : dynamicAttrs) {
- i.nameExpr->bindVars(es, *dynamicEnv);
- i.valueExpr->bindVars(es, *dynamicEnv);
+ for (auto & i : dynamicAttrs) {
+ i.nameExpr->bindVars(es, env);
+ i.valueExpr->bindVars(es, env);
+ }
}
}
-void ExprList::bindVars(const EvalState & es, const StaticEnv & env)
+void ExprList::bindVars(EvalState & es, const std::shared_ptr<const StaticEnv> & env)
{
+ if (es.debugRepl)
+ es.exprEnvs.insert(std::make_pair(this, env));
+
for (auto & i : elems)
i->bindVars(es, env);
}
-void ExprLambda::bindVars(const EvalState & es, const StaticEnv & env)
+void ExprLambda::bindVars(EvalState & es, const std::shared_ptr<const StaticEnv> & env)
{
- StaticEnv newEnv(
- false, &env,
+ if (es.debugRepl)
+ es.exprEnvs.insert(std::make_pair(this, env));
+
+ auto newEnv = std::make_shared<StaticEnv>(
+ false, env.get(),
(hasFormals() ? formals->formals.size() : 0) +
(!arg ? 0 : 1));
Displacement displ = 0;
- if (arg) newEnv.vars.emplace_back(arg, displ++);
+ if (arg) newEnv->vars.emplace_back(arg, displ++);
if (hasFormals()) {
for (auto & i : formals->formals)
- newEnv.vars.emplace_back(i.name, displ++);
+ newEnv->vars.emplace_back(i.name, displ++);
- newEnv.sort();
+ newEnv->sort();
for (auto & i : formals->formals)
if (i.def) i.def->bindVars(es, newEnv);
@@ -423,20 +449,26 @@ void ExprLambda::bindVars(const EvalState & es, const StaticEnv & env)
body->bindVars(es, newEnv);
}
-void ExprCall::bindVars(const EvalState & es, const StaticEnv & env)
+void ExprCall::bindVars(EvalState & es, const std::shared_ptr<const StaticEnv> & env)
{
+ if (es.debugRepl)
+ es.exprEnvs.insert(std::make_pair(this, env));
+
fun->bindVars(es, env);
for (auto e : args)
e->bindVars(es, env);
}
-void ExprLet::bindVars(const EvalState & es, const StaticEnv & env)
+void ExprLet::bindVars(EvalState & es, const std::shared_ptr<const StaticEnv> & env)
{
- StaticEnv newEnv(false, &env, attrs->attrs.size());
+ if (es.debugRepl)
+ es.exprEnvs.insert(std::make_pair(this, env));
+
+ auto newEnv = std::make_shared<StaticEnv>(false, env.get(), attrs->attrs.size());
Displacement displ = 0;
for (auto & i : attrs->attrs)
- newEnv.vars.emplace_back(i.first, i.second.displ = displ++);
+ newEnv->vars.emplace_back(i.first, i.second.displ = displ++);
// No need to sort newEnv since attrs->attrs is in sorted order.
@@ -446,51 +478,71 @@ void ExprLet::bindVars(const EvalState & es, const StaticEnv & env)
body->bindVars(es, newEnv);
}
-void ExprWith::bindVars(const EvalState & es, const StaticEnv & env)
+void ExprWith::bindVars(EvalState & es, const std::shared_ptr<const StaticEnv> & env)
{
+ if (es.debugRepl)
+ es.exprEnvs.insert(std::make_pair(this, env));
+
/* Does this `with' have an enclosing `with'? If so, record its
level so that `lookupVar' can look up variables in the previous
`with' if this one doesn't contain the desired attribute. */
const StaticEnv * curEnv;
Level level;
prevWith = 0;
- for (curEnv = &env, level = 1; curEnv; curEnv = curEnv->up, level++)
+ for (curEnv = env.get(), level = 1; curEnv; curEnv = curEnv->up, level++)
if (curEnv->isWith) {
prevWith = level;
break;
}
attrs->bindVars(es, env);
- StaticEnv newEnv(true, &env);
+ auto newEnv = std::make_shared<StaticEnv>(true, env.get());
body->bindVars(es, newEnv);
}
-void ExprIf::bindVars(const EvalState & es, const StaticEnv & env)
+void ExprIf::bindVars(EvalState & es, const std::shared_ptr<const StaticEnv> & env)
{
+ if (es.debugRepl)
+ es.exprEnvs.insert(std::make_pair(this, env));
+
cond->bindVars(es, env);
then->bindVars(es, env);
else_->bindVars(es, env);
}
-void ExprAssert::bindVars(const EvalState & es, const StaticEnv & env)
+void ExprAssert::bindVars(EvalState & es, const std::shared_ptr<const StaticEnv> & env)
{
+ if (es.debugRepl)
+ es.exprEnvs.insert(std::make_pair(this, env));
+
cond->bindVars(es, env);
body->bindVars(es, env);
}
-void ExprOpNot::bindVars(const EvalState & es, const StaticEnv & env)
+void ExprOpNot::bindVars(EvalState & es, const std::shared_ptr<const StaticEnv> & env)
{
+ if (es.debugRepl)
+ es.exprEnvs.insert(std::make_pair(this, env));
+
e->bindVars(es, env);
}
-void ExprConcatStrings::bindVars(const EvalState & es, const StaticEnv & env)
+void ExprConcatStrings::bindVars(EvalState & es, const std::shared_ptr<const StaticEnv> & env)
{
+ if (es.debugRepl)
+ es.exprEnvs.insert(std::make_pair(this, env));
+
for (auto & i : *this->es)
i.second->bindVars(es, env);
}
-void ExprPos::bindVars(const EvalState & es, const StaticEnv & env)
+void ExprPos::bindVars(EvalState & es, const std::shared_ptr<const StaticEnv> & env)
{
+ if (es.debugRepl)
+ es.exprEnvs.insert(std::make_pair(this, env));
}
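
All the bindVars overloads above now take std::shared_ptr<const StaticEnv> and heap-allocate child scopes instead of building them on the stack. A small standalone sketch of why: shared ownership lets the expr-to-environment map added for the debugger (exprEnvs) outlive the bindVars call (the StaticEnv here is a cut-down stand-in, not the real class):

    #include <memory>
    #include <string>
    #include <vector>

    struct StaticEnv {
        bool isWith;
        const StaticEnv * up;                  // parent scope, non-owning
        std::vector<std::string> vars;
        StaticEnv(bool isWith, const StaticEnv * up) : isWith(isWith), up(up) { }
    };

    int main() {
        auto base = std::make_shared<StaticEnv>(false, nullptr);
        base->vars.push_back("builtins");
        // Scopes are heap-allocated shared_ptrs rather than stack objects, so
        // a map from Expr* to its environment can keep them alive later.
        auto lambdaEnv = std::make_shared<StaticEnv>(false, base.get());
        lambdaEnv->vars.push_back("x");
        return lambdaEnv->up == base.get() ? 0 : 1;
    }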
diff --git a/src/libexpr/nixexpr.hh b/src/libexpr/nixexpr.hh
index 68b83556a..ea0ce1a7f 100644
--- a/src/libexpr/nixexpr.hh
+++ b/src/libexpr/nixexpr.hh
@@ -21,7 +21,6 @@ MakeError(UndefinedVarError, Error);
MakeError(MissingArgumentError, EvalError);
MakeError(RestrictedPathError, Error);
-
/* Position objects. */
struct Pos
@@ -142,24 +141,25 @@ struct Expr
{
virtual ~Expr() { };
virtual void show(const SymbolTable & symbols, std::ostream & str) const;
- virtual void bindVars(const EvalState & es, const StaticEnv & env);
+ virtual void bindVars(EvalState & es, const std::shared_ptr<const StaticEnv> & env);
virtual void eval(EvalState & state, Env & env, Value & v);
virtual Value * maybeThunk(EvalState & state, Env & env);
virtual void setName(Symbol name);
+ virtual PosIdx getPos() const { return noPos; }
};
#define COMMON_METHODS \
- void show(const SymbolTable & symbols, std::ostream & str) const; \
- void eval(EvalState & state, Env & env, Value & v); \
- void bindVars(const EvalState & es, const StaticEnv & env);
+ void show(const SymbolTable & symbols, std::ostream & str) const override; \
+ void eval(EvalState & state, Env & env, Value & v) override; \
+ void bindVars(EvalState & es, const std::shared_ptr<const StaticEnv> & env) override;
struct ExprInt : Expr
{
NixInt n;
Value v;
ExprInt(NixInt n) : n(n) { v.mkInt(n); };
+ Value * maybeThunk(EvalState & state, Env & env) override;
COMMON_METHODS
- Value * maybeThunk(EvalState & state, Env & env);
};
struct ExprFloat : Expr
@@ -167,8 +167,8 @@ struct ExprFloat : Expr
NixFloat nf;
Value v;
ExprFloat(NixFloat nf) : nf(nf) { v.mkFloat(nf); };
+ Value * maybeThunk(EvalState & state, Env & env) override;
COMMON_METHODS
- Value * maybeThunk(EvalState & state, Env & env);
};
struct ExprString : Expr
@@ -176,8 +176,8 @@ struct ExprString : Expr
std::string s;
Value v;
ExprString(std::string s) : s(std::move(s)) { v.mkString(this->s.data()); };
+ Value * maybeThunk(EvalState & state, Env & env) override;
COMMON_METHODS
- Value * maybeThunk(EvalState & state, Env & env);
};
struct ExprPath : Expr
@@ -185,8 +185,8 @@ struct ExprPath : Expr
std::string s;
Value v;
ExprPath(std::string s) : s(std::move(s)) { v.mkPath(this->s.c_str()); };
+ Value * maybeThunk(EvalState & state, Env & env) override;
COMMON_METHODS
- Value * maybeThunk(EvalState & state, Env & env);
};
typedef uint32_t Level;
@@ -212,8 +212,9 @@ struct ExprVar : Expr
ExprVar(Symbol name) : name(name) { };
ExprVar(const PosIdx & pos, Symbol name) : pos(pos), name(name) { };
+ Value * maybeThunk(EvalState & state, Env & env) override;
+ PosIdx getPos() const override { return pos; }
COMMON_METHODS
- Value * maybeThunk(EvalState & state, Env & env);
};
struct ExprSelect : Expr
@@ -223,6 +224,7 @@ struct ExprSelect : Expr
AttrPath attrPath;
ExprSelect(const PosIdx & pos, Expr * e, const AttrPath & attrPath, Expr * def) : pos(pos), e(e), def(def), attrPath(attrPath) { };
ExprSelect(const PosIdx & pos, Expr * e, Symbol name) : pos(pos), e(e), def(0) { attrPath.push_back(AttrName(name)); };
+ PosIdx getPos() const override { return pos; }
COMMON_METHODS
};
@@ -231,6 +233,7 @@ struct ExprOpHasAttr : Expr
Expr * e;
AttrPath attrPath;
ExprOpHasAttr(Expr * e, const AttrPath & attrPath) : e(e), attrPath(attrPath) { };
+ PosIdx getPos() const override { return e->getPos(); }
COMMON_METHODS
};
@@ -259,6 +262,7 @@ struct ExprAttrs : Expr
DynamicAttrDefs dynamicAttrs;
ExprAttrs(const PosIdx &pos) : recursive(false), pos(pos) { };
ExprAttrs() : recursive(false) { };
+ PosIdx getPos() const override { return pos; }
COMMON_METHODS
};
@@ -267,6 +271,11 @@ struct ExprList : Expr
std::vector<Expr *> elems;
ExprList() { };
COMMON_METHODS
+
+ PosIdx getPos() const override
+ {
+ return elems.empty() ? noPos : elems.front()->getPos();
+ }
};
struct Formal
@@ -316,9 +325,10 @@ struct ExprLambda : Expr
: pos(pos), formals(formals), body(body)
{
}
- void setName(Symbol name);
+ void setName(Symbol name) override;
std::string showNamePos(const EvalState & state) const;
inline bool hasFormals() const { return formals != nullptr; }
+ PosIdx getPos() const override { return pos; }
COMMON_METHODS
};
@@ -330,6 +340,7 @@ struct ExprCall : Expr
ExprCall(const PosIdx & pos, Expr * fun, std::vector<Expr *> && args)
: fun(fun), args(args), pos(pos)
{ }
+ PosIdx getPos() const override { return pos; }
COMMON_METHODS
};
@@ -347,6 +358,7 @@ struct ExprWith : Expr
Expr * attrs, * body;
size_t prevWith;
ExprWith(const PosIdx & pos, Expr * attrs, Expr * body) : pos(pos), attrs(attrs), body(body) { };
+ PosIdx getPos() const override { return pos; }
COMMON_METHODS
};
@@ -355,6 +367,7 @@ struct ExprIf : Expr
PosIdx pos;
Expr * cond, * then, * else_;
ExprIf(const PosIdx & pos, Expr * cond, Expr * then, Expr * else_) : pos(pos), cond(cond), then(then), else_(else_) { };
+ PosIdx getPos() const override { return pos; }
COMMON_METHODS
};
@@ -363,6 +376,7 @@ struct ExprAssert : Expr
PosIdx pos;
Expr * cond, * body;
ExprAssert(const PosIdx & pos, Expr * cond, Expr * body) : pos(pos), cond(cond), body(body) { };
+ PosIdx getPos() const override { return pos; }
COMMON_METHODS
};
@@ -380,15 +394,16 @@ struct ExprOpNot : Expr
Expr * e1, * e2; \
name(Expr * e1, Expr * e2) : e1(e1), e2(e2) { }; \
name(const PosIdx & pos, Expr * e1, Expr * e2) : pos(pos), e1(e1), e2(e2) { }; \
- void show(const SymbolTable & symbols, std::ostream & str) const \
+ void show(const SymbolTable & symbols, std::ostream & str) const override \
{ \
str << "("; e1->show(symbols, str); str << " " s " "; e2->show(symbols, str); str << ")"; \
} \
- void bindVars(const EvalState & es, const StaticEnv & env) \
+ void bindVars(EvalState & es, const std::shared_ptr<const StaticEnv> & env) override \
{ \
e1->bindVars(es, env); e2->bindVars(es, env); \
} \
- void eval(EvalState & state, Env & env, Value & v); \
+ void eval(EvalState & state, Env & env, Value & v) override; \
+ PosIdx getPos() const override { return pos; } \
};
MakeBinOp(ExprOpEq, "==")
@@ -403,9 +418,10 @@ struct ExprConcatStrings : Expr
{
PosIdx pos;
bool forceString;
- std::vector<std::pair<PosIdx, Expr *> > * es;
- ExprConcatStrings(const PosIdx & pos, bool forceString, std::vector<std::pair<PosIdx, Expr *> > * es)
+ std::vector<std::pair<PosIdx, Expr *>> * es;
+ ExprConcatStrings(const PosIdx & pos, bool forceString, std::vector<std::pair<PosIdx, Expr *>> * es)
: pos(pos), forceString(forceString), es(es) { };
+ PosIdx getPos() const override { return pos; }
COMMON_METHODS
};
@@ -413,6 +429,7 @@ struct ExprPos : Expr
{
PosIdx pos;
ExprPos(const PosIdx & pos) : pos(pos) { };
+ PosIdx getPos() const override { return pos; }
COMMON_METHODS
};
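
The header changes above give Expr a virtual getPos() that defaults to noPos and is overridden by the subclasses that carry a position. A minimal standalone illustration of that default-plus-override pattern (PosIdx is reduced to a plain integer here):

    #include <cassert>

    using PosIdx = unsigned;
    constexpr PosIdx noPos = 0;

    struct Expr {
        virtual ~Expr() { }
        virtual PosIdx getPos() const { return noPos; }    // default: "no position"
    };

    struct ExprVar : Expr {
        PosIdx pos;
        explicit ExprVar(PosIdx pos) : pos(pos) { }
        PosIdx getPos() const override { return pos; }
    };

    int main() {
        ExprVar v(42);
        const Expr & e = v;
        assert(e.getPos() == 42);      // callers can ask any Expr for a position
    }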
diff --git a/src/libexpr/parser.y b/src/libexpr/parser.y
index 30e5ce380..7768ed8df 100644
--- a/src/libexpr/parser.y
+++ b/src/libexpr/parser.y
@@ -193,7 +193,7 @@ static Formals * toFormals(ParseData & data, ParserFormals * formals,
static Expr * stripIndentation(const PosIdx pos, SymbolTable & symbols,
- std::vector<std::pair<PosIdx, std::variant<Expr *, StringToken> > > & es)
+ std::vector<std::pair<PosIdx, std::variant<Expr *, StringToken>>> & es)
{
if (es.empty()) return new ExprString("");
@@ -233,7 +233,7 @@ static Expr * stripIndentation(const PosIdx pos, SymbolTable & symbols,
}
/* Strip spaces from each line. */
- auto * es2 = new std::vector<std::pair<PosIdx, Expr *> >;
+ auto * es2 = new std::vector<std::pair<PosIdx, Expr *>>;
atStartOfLine = true;
size_t curDropped = 0;
size_t n = es.size();
@@ -320,8 +320,8 @@ void yyerror(YYLTYPE * loc, yyscan_t scanner, ParseData * data, const char * err
StringToken uri;
StringToken str;
std::vector<nix::AttrName> * attrNames;
- std::vector<std::pair<nix::PosIdx, nix::Expr *> > * string_parts;
- std::vector<std::pair<nix::PosIdx, std::variant<nix::Expr *, StringToken> > > * ind_string_parts;
+ std::vector<std::pair<nix::PosIdx, nix::Expr *>> * string_parts;
+ std::vector<std::pair<nix::PosIdx, std::variant<nix::Expr *, StringToken>>> * ind_string_parts;
}
%type <e> start expr expr_function expr_if expr_op
@@ -503,9 +503,9 @@ string_parts_interpolated
: string_parts_interpolated STR
{ $$ = $1; $1->emplace_back(makeCurPos(@2, data), new ExprString(std::string($2))); }
| string_parts_interpolated DOLLAR_CURLY expr '}' { $$ = $1; $1->emplace_back(makeCurPos(@2, data), $3); }
- | DOLLAR_CURLY expr '}' { $$ = new std::vector<std::pair<PosIdx, Expr *> >; $$->emplace_back(makeCurPos(@1, data), $2); }
+ | DOLLAR_CURLY expr '}' { $$ = new std::vector<std::pair<PosIdx, Expr *>>; $$->emplace_back(makeCurPos(@1, data), $2); }
| STR DOLLAR_CURLY expr '}' {
- $$ = new std::vector<std::pair<PosIdx, Expr *> >;
+ $$ = new std::vector<std::pair<PosIdx, Expr *>>;
$$->emplace_back(makeCurPos(@1, data), new ExprString(std::string($1)));
$$->emplace_back(makeCurPos(@2, data), $3);
}
@@ -520,6 +520,12 @@ path_start
$$ = new ExprPath(path);
}
| HPATH {
+ if (evalSettings.pureEval) {
+ throw Error(
+ "the path '%s' can not be resolved in pure mode",
+ std::string_view($1.p, $1.l)
+ );
+ }
Path path(getHome() + std::string($1.p + 1, $1.l - 1));
$$ = new ExprPath(path);
}
@@ -528,7 +534,7 @@ path_start
ind_string_parts
: ind_string_parts IND_STR { $$ = $1; $1->emplace_back(makeCurPos(@2, data), $2); }
| ind_string_parts DOLLAR_CURLY expr '}' { $$ = $1; $1->emplace_back(makeCurPos(@2, data), $3); }
- | { $$ = new std::vector<std::pair<PosIdx, std::variant<Expr *, StringToken> > >; }
+ | { $$ = new std::vector<std::pair<PosIdx, std::variant<Expr *, StringToken>>>; }
;
binds
@@ -643,7 +649,7 @@ namespace nix {
Expr * EvalState::parse(char * text, size_t length, FileOrigin origin,
- const PathView path, const PathView basePath, StaticEnv & staticEnv)
+ const PathView path, const PathView basePath, std::shared_ptr<StaticEnv> & staticEnv)
{
yyscan_t scanner;
std::string file;
@@ -706,7 +712,7 @@ Expr * EvalState::parseExprFromFile(const Path & path)
}
-Expr * EvalState::parseExprFromFile(const Path & path, StaticEnv & staticEnv)
+Expr * EvalState::parseExprFromFile(const Path & path, std::shared_ptr<StaticEnv> & staticEnv)
{
auto buffer = readFile(path);
// readFile should have left some extra space for terminators
@@ -715,7 +721,7 @@ Expr * EvalState::parseExprFromFile(const Path & path, StaticEnv & staticEnv)
}
-Expr * EvalState::parseExprFromString(std::string s, const Path & basePath, StaticEnv & staticEnv)
+Expr * EvalState::parseExprFromString(std::string s, const Path & basePath, std::shared_ptr<StaticEnv> & staticEnv)
{
s.append("\0\0", 2);
return parse(s.data(), s.size(), foString, "", basePath, staticEnv);
@@ -782,13 +788,13 @@ Path EvalState::findFile(SearchPath & searchPath, const std::string_view path, c
if (hasPrefix(path, "nix/"))
return concatStrings(corepkgsPrefix, path.substr(4));
- throw ThrownError({
+ debugThrowLastTrace(ThrownError({
.msg = hintfmt(evalSettings.pureEval
? "cannot look up '<%s>' in pure evaluation mode (use '--impure' to override)"
: "file '%s' was not found in the Nix search path (add it using $NIX_PATH or -I)",
path),
.errPos = positions[pos]
- });
+ }));
}
diff --git a/src/libexpr/primops.cc b/src/libexpr/primops.cc
index b11e44ede..6b7c20fd5 100644
--- a/src/libexpr/primops.cc
+++ b/src/libexpr/primops.cc
@@ -46,7 +46,7 @@ StringMap EvalState::realiseContext(const PathSet & context)
auto [ctx, outputName] = decodeContext(*store, i);
auto ctxS = store->printStorePath(ctx);
if (!store->isValidPath(ctx))
- throw InvalidPathError(store->printStorePath(ctx));
+ debugThrowLastTrace(InvalidPathError(store->printStorePath(ctx)));
if (!outputName.empty() && ctx.isDerivation()) {
drvs.push_back({ctx, {outputName}});
} else {
@@ -57,9 +57,9 @@ StringMap EvalState::realiseContext(const PathSet & context)
if (drvs.empty()) return {};
if (!evalSettings.enableImportFromDerivation)
- throw Error(
+ debugThrowLastTrace(Error(
"cannot build '%1%' during evaluation because the option 'allow-import-from-derivation' is disabled",
- store->printStorePath(drvs.begin()->drvPath));
+ store->printStorePath(drvs.begin()->drvPath)));
/* Build/substitute the context. */
std::vector<DerivedPath> buildReqs;
@@ -68,14 +68,15 @@ StringMap EvalState::realiseContext(const PathSet & context)
/* Get all the output paths corresponding to the placeholders we had */
for (auto & [drvPath, outputs] : drvs) {
- auto outputPaths = store->queryDerivationOutputMap(drvPath);
+ const auto outputPaths = store->queryDerivationOutputMap(drvPath);
for (auto & outputName : outputs) {
- if (outputPaths.count(outputName) == 0)
- throw Error("derivation '%s' does not have an output named '%s'",
- store->printStorePath(drvPath), outputName);
+ auto outputPath = get(outputPaths, outputName);
+ if (!outputPath)
+ debugThrowLastTrace(Error("derivation '%s' does not have an output named '%s'",
+ store->printStorePath(drvPath), outputName));
res.insert_or_assign(
downstreamPlaceholder(*store, drvPath, outputName),
- store->printStorePath(outputPaths.at(outputName))
+ store->printStorePath(*outputPath)
);
}
}
@@ -207,11 +208,11 @@ static void import(EvalState & state, const PosIdx pos, Value & vPath, Value * v
Env * env = &state.allocEnv(vScope->attrs->size());
env->up = &state.baseEnv;
- StaticEnv staticEnv(false, &state.staticBaseEnv, vScope->attrs->size());
+ auto staticEnv = std::make_shared<StaticEnv>(false, state.staticBaseEnv.get(), vScope->attrs->size());
unsigned int displ = 0;
for (auto & attr : *vScope->attrs) {
- staticEnv.vars.emplace_back(attr.name, displ);
+ staticEnv->vars.emplace_back(attr.name, displ);
env->values[displ++] = attr.value;
}
@@ -310,17 +311,16 @@ void prim_importNative(EvalState & state, const PosIdx pos, Value * * args, Valu
void *handle = dlopen(path.c_str(), RTLD_LAZY | RTLD_LOCAL);
if (!handle)
- throw EvalError("could not open '%1%': %2%", path, dlerror());
+ state.debugThrowLastTrace(EvalError("could not open '%1%': %2%", path, dlerror()));
dlerror();
ValueInitializer func = (ValueInitializer) dlsym(handle, sym.c_str());
if(!func) {
char *message = dlerror();
if (message)
- throw EvalError("could not load symbol '%1%' from '%2%': %3%", sym, path, message);
+ state.debugThrowLastTrace(EvalError("could not load symbol '%1%' from '%2%': %3%", sym, path, message));
else
- throw EvalError("symbol '%1%' from '%2%' resolved to NULL when a function pointer was expected",
- sym, path);
+ state.debugThrowLastTrace(EvalError("symbol '%1%' from '%2%' resolved to NULL when a function pointer was expected", sym, path));
}
(func)(state, v);
@@ -335,12 +335,11 @@ void prim_exec(EvalState & state, const PosIdx pos, Value * * args, Value & v)
state.forceList(*args[0], pos, "while evaluating the first argument passed to builtins.exec");
auto elems = args[0]->listElems();
auto count = args[0]->listSize();
- if (count == 0) {
- throw EvalError({
+ if (count == 0)
+ state.debugThrowLastTrace(EvalError({
.msg = hintfmt("at least one argument to 'exec' required"),
.errPos = state.positions[pos]
- });
- }
+ }));
PathSet context;
auto program = state.coerceToString(pos, *elems[0], context, false, false,
"while evaluating the first element of the argument passed to builtins.exec").toOwned();
@@ -352,11 +351,11 @@ void prim_exec(EvalState & state, const PosIdx pos, Value * * args, Value & v)
try {
auto _ = state.realiseContext(context); // FIXME: Handle CA derivations
} catch (InvalidPathError & e) {
- throw EvalError({
+ state.debugThrowLastTrace(EvalError({
.msg = hintfmt("cannot execute '%1%', since path '%2%' is not valid",
program, e.path),
.errPos = state.positions[pos]
- });
+ }));
}
auto output = runProgram(program, true, commandArgs);
@@ -561,10 +560,10 @@ struct CompareValues
if (v1->type() == nInt && v2->type() == nFloat)
return v1->integer < v2->fpoint;
if (v1->type() != v2->type())
- throw EvalError({
- .msg = hintfmt("%scannot compare %s with %s", errorCtx, showType(*v1), showType(*v2)),
+ state.debugThrowLastTrace(EvalError({
+                .msg = hintfmt("%scannot compare %s with %s", errorCtx, showType(*v1), showType(*v2)),
.errPos = std::nullopt,
- });
+ }));
switch (v1->type()) {
case nInt:
return v1->integer < v2->integer;
@@ -586,10 +585,10 @@ struct CompareValues
}
}
default:
- throw EvalError({
+ state.debugThrowLastTrace(EvalError({
.msg = hintfmt("%scannot compare %s with %s", errorCtx, showType(*v1), showType(*v2)),
.errPos = std::nullopt,
- });
+ }));
}
} catch (Error & e) {
e.addTrace(std::nullopt, errorCtx);
@@ -600,7 +599,7 @@ struct CompareValues
#if HAVE_BOEHMGC
-typedef std::list<Value *, gc_allocator<Value *> > ValueList;
+typedef std::list<Value *, gc_allocator<Value *>> ValueList;
#else
typedef std::list<Value *> ValueList;
#endif
@@ -618,6 +617,12 @@ static Bindings::iterator getAttr(
.msg = hintfmt("attribute '%s' missing %s", state.symbols[attrSym], errorCtx),
.errPos = state.positions[attrSet->pos],
});
+ // TODO XXX
+ // Adding another trace for the function name to make it clear
+ // which call received wrong arguments.
+ //e.addTrace(state.positions[pos], hintfmt("while invoking '%s'", funcName));
+ //state.debugThrowLastTrace(e);
+ }
}
return value;
}
@@ -714,6 +719,41 @@ static RegisterPrimOp primop_genericClosure(RegisterPrimOp::Info {
.fun = prim_genericClosure,
});
+
+static RegisterPrimOp primop_break({
+ .name = "break",
+ .args = {"v"},
+ .doc = R"(
+ In debug mode (enabled using `--debugger`), pause Nix expression evaluation and enter the REPL.
+ Otherwise, return the argument `v`.
+ )",
+ .fun = [](EvalState & state, const PosIdx pos, Value * * args, Value & v)
+ {
+ if (state.debugRepl && !state.debugTraces.empty()) {
+ auto error = Error(ErrorInfo {
+ .level = lvlInfo,
+ .msg = hintfmt("breakpoint reached"),
+ .errPos = state.positions[pos],
+ });
+
+ auto & dt = state.debugTraces.front();
+ state.runDebugRepl(&error, dt.env, dt.expr);
+
+ if (state.debugQuit) {
+ // If the user elects to quit the repl, throw an exception.
+ throw Error(ErrorInfo{
+ .level = lvlInfo,
+ .msg = hintfmt("quit the debugger"),
+ .errPos = state.positions[noPos],
+ });
+ }
+ }
+
+ // Return the value we were passed.
+ v = *args[0];
+ }
+});
+
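A minimal usage sketch for the new primop, assuming it is exposed as `builtins.break` (the surrounding expression is illustrative): with the debugger enabled via `--debugger`, evaluation pauses at the call and opens the REPL; otherwise the argument is returned unchanged.

```nix
# Without --debugger this evaluates to 30; with --debugger the evaluator
# stops at `builtins.break` and drops into the debug REPL first.
let
  x = builtins.break (1 + 2);
in
  x * 10
```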
static RegisterPrimOp primop_abort({
.name = "abort",
.args = {"s"},
@@ -725,7 +765,7 @@ static RegisterPrimOp primop_abort({
PathSet context;
auto s = state.coerceToString(pos, *args[0], context,
"while evaluating the error message passed to builtins.abort").toOwned();
- throw Abort("evaluation aborted with the following error message: '%1%'", s);
+ state.debugThrowLastTrace(Abort("evaluation aborted with the following error message: '%1%'", s));
}
});
@@ -744,7 +784,7 @@ static RegisterPrimOp primop_throw({
PathSet context;
auto s = state.coerceToString(pos, *args[0], context,
"while evaluating the error message passed to builtin.throw").toOwned();
- throw ThrownError(s);
+ state.debugThrowLastTrace(ThrownError(s));
}
});
@@ -811,6 +851,18 @@ static RegisterPrimOp primop_floor({
static void prim_tryEval(EvalState & state, const PosIdx pos, Value * * args, Value & v)
{
auto attrs = state.buildBindings(2);
+
+ /* increment state.trylevel, and decrement it when this function returns. */
+ MaintainCount trylevel(state.trylevel);
+
+ void (* savedDebugRepl)(ref<EvalState> es, const ValMap & extraEnv) = nullptr;
+ if (state.debugRepl && evalSettings.ignoreExceptionsDuringTry)
+ {
+        /* to prevent starting the repl from exceptions within a tryEval, null it. */
+ savedDebugRepl = state.debugRepl;
+ state.debugRepl = nullptr;
+ }
+
try {
state.forceValue(*args[0], pos);
attrs.insert(state.sValue, args[0]);
@@ -819,6 +871,11 @@ static void prim_tryEval(EvalState & state, const PosIdx pos, Value * * args, Va
attrs.alloc(state.sValue).mkBool(false);
attrs.alloc("success").mkBool(false);
}
+
+ // restore the debugRepl pointer if we saved it earlier.
+ if (savedDebugRepl)
+ state.debugRepl = savedDebugRepl;
+
v.mkAttrs(attrs);
}
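A hedged sketch of the behaviour this hunk implements; the option name corresponding to `evalSettings.ignoreExceptionsDuringTry` (e.g. `ignore-try`) is an assumption, and the expression is illustrative. When that setting is enabled, a failure inside `tryEval` no longer starts the debug REPL and the usual failure attrset is returned.

```nix
# With the debugger active and exceptions during tryEval ignored, this
# evaluates to { success = false; value = false; } instead of pausing
# in the REPL at the throw.
builtins.tryEval (throw "expected failure")
```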
@@ -930,6 +987,15 @@ static RegisterPrimOp primop_trace({
});
+/* Takes two arguments and evaluates to the second one. Used as the
+ * builtins.traceVerbose implementation when --trace-verbose is not enabled
+ */
+static void prim_second(EvalState & state, const PosIdx pos, Value * * args, Value & v)
+{
+ state.forceValue(*args[1], pos);
+ v = *args[1];
+}
+
/*************************************************************
* Derivations
*************************************************************/
@@ -995,37 +1061,37 @@ static void prim_derivationStrict(EvalState & state, const PosIdx pos, Value * *
if (s == "recursive") ingestionMethod = FileIngestionMethod::Recursive;
else if (s == "flat") ingestionMethod = FileIngestionMethod::Flat;
else
- throw EvalError({
+ state.debugThrowLastTrace(EvalError({
.msg = hintfmt("invalid value '%s' for 'outputHashMode' attribute", s),
.errPos = state.positions[posDrvName]
- });
+ }));
};
auto handleOutputs = [&](const Strings & ss) {
outputs.clear();
for (auto & j : ss) {
if (outputs.find(j) != outputs.end())
- throw EvalError({
+ state.debugThrowLastTrace(EvalError({
.msg = hintfmt("duplicate derivation output '%1%'", j),
.errPos = state.positions[posDrvName]
- });
+ }));
/* !!! Check whether j is a valid attribute
name. */
/* Derivations cannot be named ‘drv’, because
then we'd have an attribute ‘drvPath’ in
the resulting set. */
if (j == "drv")
- throw EvalError({
+ state.debugThrowLastTrace(EvalError({
.msg = hintfmt("invalid derivation output name 'drv'" ),
.errPos = state.positions[posDrvName]
- });
+ }));
outputs.insert(j);
}
if (outputs.empty())
- throw EvalError({
+ state.debugThrowLastTrace(EvalError({
.msg = hintfmt("derivation cannot have an empty set of outputs"),
.errPos = state.positions[posDrvName]
- });
+ }));
};
try {
@@ -1154,23 +1220,23 @@ static void prim_derivationStrict(EvalState & state, const PosIdx pos, Value * *
/* Do we have all required attributes? */
if (drv.builder == "")
- throw EvalError({
+ state.debugThrowLastTrace(EvalError({
.msg = hintfmt("required attribute 'builder' missing"),
.errPos = state.positions[posDrvName]
- });
+ }));
if (drv.platform == "")
- throw EvalError({
+ state.debugThrowLastTrace(EvalError({
.msg = hintfmt("required attribute 'system' missing"),
.errPos = state.positions[posDrvName]
- });
+ }));
/* Check whether the derivation name is valid. */
if (isDerivation(drvName))
- throw EvalError({
+ state.debugThrowLastTrace(EvalError({
.msg = hintfmt("derivation names are not allowed to end in '%s'", drvExtension),
.errPos = state.positions[posDrvName]
- });
+ }));
if (outputHash) {
/* Handle fixed-output derivations.
@@ -1178,10 +1244,10 @@ static void prim_derivationStrict(EvalState & state, const PosIdx pos, Value * *
Ignore `__contentAddressed` because fixed output derivations are
already content addressed. */
if (outputs.size() != 1 || *(outputs.begin()) != "out")
- throw Error({
+ state.debugThrowLastTrace(Error({
.msg = hintfmt("multiple outputs are not supported in fixed-output derivations"),
.errPos = state.positions[posDrvName]
- });
+ }));
auto h = newHashAllowEmpty(*outputHash, parseHashTypeOpt(outputHashAlgo));
@@ -1241,8 +1307,13 @@ static void prim_derivationStrict(EvalState & state, const PosIdx pos, Value * *
switch (hashModulo.kind) {
case DrvHash::Kind::Regular:
for (auto & i : outputs) {
- auto h = hashModulo.hashes.at(i);
- auto outPath = state.store->makeOutputPath(i, h, drvName);
+ auto h = get(hashModulo.hashes, i);
+ if (!h)
+ throw AssertionError({
+ .msg = hintfmt("derivation produced no hash for output '%s'", i),
+ .errPos = state.positions[posDrvName],
+ });
+ auto outPath = state.store->makeOutputPath(i, *h, drvName);
drv.env[i] = state.store->printStorePath(outPath);
drv.outputs.insert_or_assign(
i,
@@ -1344,10 +1415,10 @@ static RegisterPrimOp primop_toPath({
static void prim_storePath(EvalState & state, const PosIdx pos, Value * * args, Value & v)
{
if (evalSettings.pureEval)
- throw EvalError({
+ state.debugThrowLastTrace(EvalError({
.msg = hintfmt("'%s' is not allowed in pure evaluation mode", "builtins.storePath"),
.errPos = state.positions[pos]
- });
+ }));
PathSet context;
Path path = state.checkSourcePath(state.coerceToPath(pos, *args[0], context, "while evaluating the first argument passed to builtins.storePath"));
@@ -1356,10 +1427,10 @@ static void prim_storePath(EvalState & state, const PosIdx pos, Value * * args,
e.g. nix-push does the right thing. */
if (!state.store->isStorePath(path)) path = canonPath(path, true);
if (!state.store->isInStore(path))
- throw EvalError({
+ state.debugThrowLastTrace(EvalError({
.msg = hintfmt("path '%1%' is not in the Nix store", path),
.errPos = state.positions[pos]
- });
+ }));
auto path2 = state.store->toStorePath(path).first;
if (!settings.readOnlyMode)
state.store->ensurePath(path2);
@@ -1462,7 +1533,7 @@ static void prim_readFile(EvalState & state, const PosIdx pos, Value * * args, V
auto path = realisePath(state, pos, *args[0]);
auto s = readFile(path);
if (s.find((char) 0) != std::string::npos)
- throw Error("the contents of the file '%1%' cannot be represented as a Nix string", path);
+ state.debugThrowLastTrace(Error("the contents of the file '%1%' cannot be represented as a Nix string", path));
StorePathSet refs;
if (state.store->isInStore(path)) {
try {
@@ -1509,13 +1580,12 @@ static void prim_findFile(EvalState & state, const PosIdx pos, Value * * args, V
auto rewrites = state.realiseContext(context);
path = rewriteStrings(path, rewrites);
} catch (InvalidPathError & e) {
- throw EvalError({
+ state.debugThrowLastTrace(EvalError({
.msg = hintfmt("cannot find '%1%', since path '%2%' is not valid", path, e.path),
.errPos = state.positions[pos]
- });
+ }));
}
-
searchPath.emplace_back(prefix, path);
}
@@ -1536,10 +1606,10 @@ static void prim_hashFile(EvalState & state, const PosIdx pos, Value * * args, V
auto type = state.forceStringNoCtx(*args[0], pos, "while evaluating the first argument passed to builtins.hashFile");
std::optional<HashType> ht = parseHashType(type);
if (!ht)
- throw Error({
+ state.debugThrowLastTrace(Error({
.msg = hintfmt("unknown hash type '%1%'", type),
.errPos = state.positions[pos]
- });
+ }));
auto path = realisePath(state, pos, *args[1]);
@@ -1776,13 +1846,13 @@ static void prim_toFile(EvalState & state, const PosIdx pos, Value * * args, Val
for (auto path : context) {
if (path.at(0) != '/')
- throw EvalError( {
+ state.debugThrowLastTrace(EvalError({
.msg = hintfmt(
"in 'toFile': the file named '%1%' must not contain a reference "
"to a derivation but contains (%2%)",
name, path),
.errPos = state.positions[pos]
- });
+ }));
refs.insert(state.store->parseStorePath(path));
}
@@ -1940,7 +2010,7 @@ static void addPath(
? state.store->computeStorePathForPath(name, path, method, htSHA256, filter).first
: state.store->addToStore(name, path, method, htSHA256, filter, state.repair, refs);
if (expectedHash && expectedStorePath != dstPath)
- throw Error("store path mismatch in (possibly filtered) path added from '%s'", path);
+ state.debugThrowLastTrace(Error("store path mismatch in (possibly filtered) path added from '%s'", path));
state.allowAndSetStorePathString(dstPath, v);
} else
state.allowAndSetStorePathString(*expectedStorePath, v);
@@ -2037,16 +2107,16 @@ static void prim_path(EvalState & state, const PosIdx pos, Value * * args, Value
else if (n == "sha256")
expectedHash = newHashAllowEmpty(state.forceStringNoCtx(*attr.value, attr.pos, "while evaluating the `sha256` attribute passed to builtins.path"), htSHA256);
else
- throw EvalError({
+ state.debugThrowLastTrace(EvalError({
.msg = hintfmt("unsupported argument '%1%' to 'addPath'", state.symbols[attr.name]),
.errPos = state.positions[attr.pos]
- });
+ }));
}
if (path.empty())
- throw EvalError({
+ state.debugThrowLastTrace(EvalError({
.msg = hintfmt("missing required 'path' attribute in the first argument to builtins.path"),
.errPos = state.positions[pos]
- });
+ }));
if (name.empty())
name = baseNameOf(path);
@@ -2405,10 +2475,10 @@ static void prim_functionArgs(EvalState & state, const PosIdx pos, Value * * arg
return;
}
if (!args[0]->isLambda())
- throw TypeError({
+ state.debugThrowLastTrace(TypeError({
.msg = hintfmt("'functionArgs' requires a function"),
.errPos = state.positions[pos]
- });
+ }));
if (!args[0]->lambda.fun->hasFormals()) {
v.mkAttrs(&state.emptyBindings);
@@ -2496,7 +2566,7 @@ static void prim_zipAttrsWith(EvalState & state, const PosIdx pos, Value * * arg
attrsSeen[attr.name].first++;
} catch (TypeError & e) {
e.addTrace(state.positions[pos], hintfmt("while invoking '%s'", "zipAttrsWith"));
- throw;
+ state.debugThrowLastTrace(e);
}
}
@@ -2583,10 +2653,10 @@ static void elemAt(EvalState & state, const PosIdx pos, Value & list, int n, Val
{
state.forceList(list, pos, "while evaluating the first argument passed to builtins.elemAt");
if (n < 0 || (unsigned int) n >= list.listSize())
- throw Error({
+ state.debugThrowLastTrace(Error({
.msg = hintfmt("list index %1% is out of bounds", n),
.errPos = state.positions[pos]
- });
+ }));
state.forceValue(*list.listElems()[n], pos);
v = *list.listElems()[n];
}
@@ -2631,10 +2701,10 @@ static void prim_tail(EvalState & state, const PosIdx pos, Value * * args, Value
{
state.forceList(*args[0], pos, "while evaluating the first argument passed to builtins.tail");
if (args[0]->listSize() == 0)
- throw Error({
+ state.debugThrowLastTrace(Error({
.msg = hintfmt("'tail' called on an empty list"),
.errPos = state.positions[pos]
- });
+ }));
state.mkList(v, args[0]->listSize() - 1);
for (unsigned int n = 0; n < v.listSize(); ++n)
@@ -2881,10 +2951,10 @@ static void prim_genList(EvalState & state, const PosIdx pos, Value * * args, Va
auto len = state.forceInt(*args[1], pos, "while evaluating the second argument passed to builtins.genList");
if (len < 0)
- throw EvalError({
+ state.debugThrowLastTrace(EvalError({
.msg = hintfmt("cannot create list of size %1%", len),
.errPos = state.positions[pos]
- });
+ }));
state.mkList(v, len);
@@ -3099,7 +3169,7 @@ static void prim_concatMap(EvalState & state, const PosIdx pos, Value * * args,
            state.forceList(lists[n], lists[n].determinePos(args[0]->determinePos(pos)), "while evaluating the return value of the function passed to builtins.concatMap");
} catch (TypeError &e) {
e.addTrace(state.positions[pos], hintfmt("while invoking '%s'", "concatMap"));
- throw;
+ state.debugThrowLastTrace(e);
}
len += lists[n].listSize();
}
@@ -3200,10 +3270,10 @@ static void prim_div(EvalState & state, const PosIdx pos, Value * * args, Value
NixFloat f2 = state.forceFloat(*args[1], pos, "while evaluating the second operand of the division");
if (f2 == 0)
- throw EvalError({
+ state.debugThrowLastTrace(EvalError({
.msg = hintfmt("division by zero"),
.errPos = state.positions[pos]
- });
+ }));
if (args[0]->type() == nFloat || args[1]->type() == nFloat) {
v.mkFloat(state.forceFloat(*args[0], pos, "while evaluating the first operand of the division") / f2);
@@ -3212,10 +3282,10 @@ static void prim_div(EvalState & state, const PosIdx pos, Value * * args, Value
NixInt i2 = state.forceInt(*args[1], pos, "while evaluating the second operand of the division");
/* Avoid division overflow as it might raise SIGFPE. */
if (i1 == std::numeric_limits<NixInt>::min() && i2 == -1)
- throw EvalError({
+ state.debugThrowLastTrace(EvalError({
.msg = hintfmt("overflow in integer division"),
.errPos = state.positions[pos]
- });
+ }));
v.mkInt(i1 / i2);
}
@@ -3347,10 +3417,10 @@ static void prim_substring(EvalState & state, const PosIdx pos, Value * * args,
auto s = state.coerceToString(pos, *args[2], context, "while evaluating the third argument (the string) passed to builtins.substring");
if (start < 0)
- throw EvalError({
+ state.debugThrowLastTrace(EvalError({
.msg = hintfmt("negative start position in 'substring'"),
.errPos = state.positions[pos]
- });
+ }));
v.mkString((unsigned int) start >= s->size() ? "" : s->substr(start, len), context);
}
@@ -3398,10 +3468,10 @@ static void prim_hashString(EvalState & state, const PosIdx pos, Value * * args,
auto type = state.forceStringNoCtx(*args[0], pos, "while evaluating the first argument passed to builtins.hashString");
std::optional<HashType> ht = parseHashType(type);
if (!ht)
- throw Error({
+ state.debugThrowLastTrace(Error({
.msg = hintfmt("unknown hash type '%1%'", type),
.errPos = state.positions[pos]
- });
+ }));
PathSet context; // discarded
auto s = state.forceString(*args[1], context, pos, "while evaluating the second argument passed to builtins.hashString");
@@ -3468,19 +3538,18 @@ void prim_match(EvalState & state, const PosIdx pos, Value * * args, Value & v)
(v.listElems()[i] = state.allocValue())->mkString(match[i + 1].str());
}
- } catch (std::regex_error &e) {
+ } catch (std::regex_error & e) {
if (e.code() == std::regex_constants::error_space) {
// limit is _GLIBCXX_REGEX_STATE_LIMIT for libstdc++
- throw EvalError({
+ state.debugThrowLastTrace(EvalError({
.msg = hintfmt("memory limit exceeded by regular expression '%s'", re),
.errPos = state.positions[pos]
- });
- } else {
- throw EvalError({
+ }));
+ } else
+ state.debugThrowLastTrace(EvalError({
.msg = hintfmt("invalid regular expression '%s'", re),
.errPos = state.positions[pos]
- });
- }
+ }));
}
}
@@ -3515,7 +3584,7 @@ static RegisterPrimOp primop_match({
builtins.match "[[:space:]]+([[:upper:]]+)[[:space:]]+" " FOO "
```
- Evaluates to `[ "foo" ]`.
+ Evaluates to `[ "FOO" ]`.
)s",
.fun = prim_match,
});
@@ -3573,19 +3642,18 @@ void prim_split(EvalState & state, const PosIdx pos, Value * * args, Value & v)
assert(idx == 2 * len + 1);
- } catch (std::regex_error &e) {
+ } catch (std::regex_error & e) {
if (e.code() == std::regex_constants::error_space) {
// limit is _GLIBCXX_REGEX_STATE_LIMIT for libstdc++
- throw EvalError({
+ state.debugThrowLastTrace(EvalError({
.msg = hintfmt("memory limit exceeded by regular expression '%s'", re),
.errPos = state.positions[pos]
- });
- } else {
- throw EvalError({
+ }));
+ } else
+ state.debugThrowLastTrace(EvalError({
.msg = hintfmt("invalid regular expression '%s'", re),
.errPos = state.positions[pos]
- });
- }
+ }));
}
}
@@ -3618,7 +3686,7 @@ static RegisterPrimOp primop_split({
Evaluates to `[ "" [ "a" null ] "b" [ null "c" ] "" ]`.
```nix
- builtins.split "([[:upper:]]+)" " FOO "
+ builtins.split "([[:upper:]]+)" " FOO "
```
Evaluates to `[ " " [ "FOO" ] " " ]`.
@@ -3661,10 +3729,10 @@ static void prim_replaceStrings(EvalState & state, const PosIdx pos, Value * * a
state.forceList(*args[0], pos, "while evaluating the first argument passed to builtins.replaceStrings");
state.forceList(*args[1], pos, "while evaluating the second argument passed to builtins.replaceStrings");
if (args[0]->listSize() != args[1]->listSize())
- throw EvalError({
+ state.debugThrowLastTrace(EvalError({
.msg = hintfmt("'from' and 'to' arguments to 'replaceStrings' have different lengths"),
.errPos = state.positions[pos]
- });
+ }));
std::vector<std::string> from;
from.reserve(args[0]->listSize());
@@ -3882,6 +3950,18 @@ void EvalState::createBaseEnv()
addPrimOp("__exec", 1, prim_exec);
}
+ addPrimOp({
+ .fun = evalSettings.traceVerbose ? prim_trace : prim_second,
+ .arity = 2,
+ .name = "__traceVerbose",
+ .args = { "e1", "e2" },
+ .doc = R"(
+ Evaluate *e1* and print its abstract syntax representation on standard
+ error if `--trace-verbose` is enabled. Then return *e2*. This function
+ is useful for debugging.
+ )",
+ });
+
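A short usage sketch, assuming the primop registered as `__traceVerbose` is reachable as `builtins.traceVerbose` (the expression is illustrative): with `--trace-verbose` it behaves like `builtins.trace`; without it, the `prim_second` fallback simply returns the second argument.

```nix
# Prints "evaluating x = 3" to standard error only when --trace-verbose is
# enabled; in either case the result is 4.
let
  x = 3;
in
  builtins.traceVerbose "evaluating x = ${toString x}" (x + 1)
```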
/* Add a value containing the current Nix expression search path. */
mkList(v, searchPath.size());
int n = 0;
@@ -3917,7 +3997,7 @@ void EvalState::createBaseEnv()
because attribute lookups expect it to be sorted. */
baseEnv.values[0]->attrs->sort();
- staticBaseEnv.sort();
+ staticBaseEnv->sort();
/* Note: we have to initialize the 'derivation' constant *after*
building baseEnv/staticBaseEnv because it uses 'builtins'. */
diff --git a/src/libexpr/primops/fetchTree.cc b/src/libexpr/primops/fetchTree.cc
index 11f2893c5..ddbaae449 100644
--- a/src/libexpr/primops/fetchTree.cc
+++ b/src/libexpr/primops/fetchTree.cc
@@ -108,16 +108,16 @@ static void fetchTree(
if (auto aType = args[0]->attrs->get(state.sType)) {
if (type)
- throw Error({
+ state.debugThrowLastTrace(EvalError({
.msg = hintfmt("unexpected attribute 'type'"),
.errPos = state.positions[pos]
- });
+ }));
type = state.forceStringNoCtx(*aType->value, aType->pos, "while evaluating the `type` attribute passed to builtins.fetchTree");
} else if (!type)
- throw Error({
+ state.debugThrowLastTrace(EvalError({
.msg = hintfmt("attribute 'type' is missing in call to 'fetchTree'"),
.errPos = state.positions[pos]
- });
+ }));
attrs.emplace("type", type.value());
@@ -138,16 +138,16 @@ static void fetchTree(
else if (attr.value->type() == nInt)
attrs.emplace(state.symbols[attr.name], uint64_t(attr.value->integer));
else
- throw TypeError("fetchTree argument '%s' is %s while a string, Boolean or integer is expected",
- state.symbols[attr.name], showType(*attr.value));
+ state.debugThrowLastTrace(TypeError("fetchTree argument '%s' is %s while a string, Boolean or integer is expected",
+ state.symbols[attr.name], showType(*attr.value)));
}
if (!params.allowNameArgument)
if (auto nameIter = attrs.find("name"); nameIter != attrs.end())
- throw Error({
- .msg = hintfmt("attribute 'name' isn't supported in call to 'fetchTree'"),
+ state.debugThrowLastTrace(EvalError({
+            .msg = hintfmt("attribute 'name' isn't supported in call to 'fetchTree'"),
.errPos = state.positions[pos]
- });
+ }));
input = fetchers::Input::fromAttrs(std::move(attrs));
} else {
@@ -167,7 +167,7 @@ static void fetchTree(
input = lookupInRegistries(state.store, input).first;
if (evalSettings.pureEval && !input.isLocked())
- throw Error("in pure evaluation mode, 'fetchTree' requires a locked input, at %s", state.positions[pos]);
+ state.debugThrowLastTrace(EvalError("in pure evaluation mode, 'fetchTree' requires a locked input, at %s", state.positions[pos]));
auto [tree, input2] = input.fetch(state.store);
@@ -204,17 +204,17 @@ static void fetch(EvalState & state, const PosIdx pos, Value * * args, Value & v
else if (n == "name")
name = state.forceStringNoCtx(*attr.value, attr.pos, "while evaluating the name of the content we should fetch");
else
- throw EvalError({
+ state.debugThrowLastTrace(EvalError({
.msg = hintfmt("unsupported argument '%s' to '%s'", n, who),
.errPos = state.positions[attr.pos]
- });
- }
+ }));
+ }
if (!url)
- throw EvalError({
+ state.debugThrowLastTrace(EvalError({
.msg = hintfmt("'url' argument required"),
.errPos = state.positions[pos]
- });
+ }));
} else
url = state.forceStringNoCtx(*args[0], pos, "while evaluating the url we should fetch");
@@ -226,7 +226,7 @@ static void fetch(EvalState & state, const PosIdx pos, Value * * args, Value & v
name = baseNameOf(*url);
if (evalSettings.pureEval && !expectedHash)
- throw Error("in pure evaluation mode, '%s' requires a 'sha256' argument", who);
+ state.debugThrowLastTrace(EvalError("in pure evaluation mode, '%s' requires a 'sha256' argument", who));
// early exit if pinned and already in the store
if (expectedHash && expectedHash->type == htSHA256) {
@@ -253,8 +253,8 @@ static void fetch(EvalState & state, const PosIdx pos, Value * * args, Value & v
? state.store->queryPathInfo(storePath)->narHash
: hashFile(htSHA256, state.store->toRealPath(storePath));
if (hash != *expectedHash)
- throw Error((unsigned int) 102, "hash mismatch in file downloaded from '%s':\n specified: %s\n got: %s",
- *url, expectedHash->to_string(Base32, true), hash.to_string(Base32, true));
+ state.debugThrowLastTrace(EvalError((unsigned int) 102, "hash mismatch in file downloaded from '%s':\n specified: %s\n got: %s",
+ *url, expectedHash->to_string(Base32, true), hash.to_string(Base32, true)));
}
state.allowAndSetStorePathString(storePath, v);
@@ -362,6 +362,10 @@ static RegisterPrimOp primop_fetchGit({
A Boolean parameter that specifies whether submodules should be
checked out. Defaults to `false`.
+ - shallow\
+ A Boolean parameter that specifies whether fetching a shallow clone
+ is allowed. Defaults to `false`.
+
- allRefs\
Whether to fetch all refs of the repository. With this argument being
true, it's possible to load a `rev` from *any* `ref` (by default only
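A hedged example of the newly documented `shallow` parameter (the URL and ref are placeholders, not taken from the patch):

```nix
# Allows the source to come from a shallow clone; with the default
# (shallow = false) such repositories are rejected.
builtins.fetchGit {
  url = "https://example.org/some/repo.git";
  ref = "main";
  shallow = true;
}
```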
diff --git a/src/libexpr/tests/json.cc b/src/libexpr/tests/json.cc
new file mode 100644
index 000000000..f1ea1b197
--- /dev/null
+++ b/src/libexpr/tests/json.cc
@@ -0,0 +1,68 @@
+#include "libexprtests.hh"
+#include "value-to-json.hh"
+
+namespace nix {
+// Testing the conversion to JSON
+
+ class JSONValueTest : public LibExprTest {
+ protected:
+ std::string getJSONValue(Value& value) {
+ std::stringstream ss;
+ PathSet ps;
+ printValueAsJSON(state, true, value, noPos, ss, ps);
+ return ss.str();
+ }
+ };
+
+ TEST_F(JSONValueTest, null) {
+ Value v;
+ v.mkNull();
+ ASSERT_EQ(getJSONValue(v), "null");
+ }
+
+ TEST_F(JSONValueTest, BoolFalse) {
+ Value v;
+ v.mkBool(false);
+ ASSERT_EQ(getJSONValue(v),"false");
+ }
+
+ TEST_F(JSONValueTest, BoolTrue) {
+ Value v;
+ v.mkBool(true);
+ ASSERT_EQ(getJSONValue(v), "true");
+ }
+
+ TEST_F(JSONValueTest, IntPositive) {
+ Value v;
+ v.mkInt(100);
+ ASSERT_EQ(getJSONValue(v), "100");
+ }
+
+ TEST_F(JSONValueTest, IntNegative) {
+ Value v;
+ v.mkInt(-100);
+ ASSERT_EQ(getJSONValue(v), "-100");
+ }
+
+ TEST_F(JSONValueTest, String) {
+ Value v;
+ v.mkString("test");
+ ASSERT_EQ(getJSONValue(v), "\"test\"");
+ }
+
+ TEST_F(JSONValueTest, StringQuotes) {
+ Value v;
+
+ v.mkString("test\"");
+ ASSERT_EQ(getJSONValue(v), "\"test\\\"\"");
+ }
+
+ // The dummy store doesn't support writing files. Fails with this exception message:
+ // C++ exception with description "error: operation 'addToStoreFromDump' is
+ // not supported by store 'dummy'" thrown in the test body.
+ TEST_F(JSONValueTest, DISABLED_Path) {
+ Value v;
+ v.mkPath("test");
+ ASSERT_EQ(getJSONValue(v), "\"/nix/store/g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-x\"");
+ }
+} /* namespace nix */
diff --git a/src/libexpr/tests/libexprtests.hh b/src/libexpr/tests/libexprtests.hh
new file mode 100644
index 000000000..4f6915882
--- /dev/null
+++ b/src/libexpr/tests/libexprtests.hh
@@ -0,0 +1,136 @@
+#include <gtest/gtest.h>
+#include <gmock/gmock.h>
+
+#include "value.hh"
+#include "nixexpr.hh"
+#include "eval.hh"
+#include "eval-inline.hh"
+#include "store-api.hh"
+
+
+namespace nix {
+ class LibExprTest : public ::testing::Test {
+ public:
+ static void SetUpTestSuite() {
+ initGC();
+ }
+
+ protected:
+ LibExprTest()
+ : store(openStore("dummy://"))
+ , state({}, store)
+ {
+ }
+ Value eval(std::string input, bool forceValue = true) {
+ Value v;
+ Expr * e = state.parseExprFromString(input, "");
+ assert(e);
+ state.eval(e, v);
+ if (forceValue)
+ state.forceValue(v, noPos);
+ return v;
+ }
+
+ Symbol createSymbol(const char * value) {
+ return state.symbols.create(value);
+ }
+
+ ref<Store> store;
+ EvalState state;
+ };
+
+ MATCHER(IsListType, "") {
+ return arg != nList;
+ }
+
+ MATCHER(IsList, "") {
+ return arg.type() == nList;
+ }
+
+ MATCHER(IsString, "") {
+ return arg.type() == nString;
+ }
+
+ MATCHER(IsNull, "") {
+ return arg.type() == nNull;
+ }
+
+ MATCHER(IsThunk, "") {
+ return arg.type() == nThunk;
+ }
+
+ MATCHER(IsAttrs, "") {
+ return arg.type() == nAttrs;
+ }
+
+ MATCHER_P(IsStringEq, s, fmt("The string is equal to \"%1%\"", s)) {
+ if (arg.type() != nString) {
+ return false;
+ }
+ return std::string_view(arg.string.s) == s;
+ }
+
+    MATCHER_P(IsIntEq, v, fmt("The integer is equal to \"%1%\"", v)) {
+ if (arg.type() != nInt) {
+ return false;
+ }
+ return arg.integer == v;
+ }
+
+ MATCHER_P(IsFloatEq, v, fmt("The float is equal to \"%1%\"", v)) {
+ if (arg.type() != nFloat) {
+ return false;
+ }
+ return arg.fpoint == v;
+ }
+
+ MATCHER(IsTrue, "") {
+ if (arg.type() != nBool) {
+ return false;
+ }
+ return arg.boolean == true;
+ }
+
+ MATCHER(IsFalse, "") {
+ if (arg.type() != nBool) {
+ return false;
+ }
+ return arg.boolean == false;
+ }
+
+ MATCHER_P(IsPathEq, p, fmt("Is a path equal to \"%1%\"", p)) {
+ if (arg.type() != nPath) {
+            *result_listener << "Expected a path but got " << arg.type();
+ return false;
+ } else if (std::string_view(arg.string.s) != p) {
+ *result_listener << "Expected a path that equals \"" << p << "\" but got: " << arg.string.s;
+ return false;
+ }
+ return true;
+ }
+
+
+ MATCHER_P(IsListOfSize, n, fmt("Is a list of size [%1%]", n)) {
+ if (arg.type() != nList) {
+            *result_listener << "Expected a list but got " << arg.type();
+ return false;
+ } else if (arg.listSize() != (size_t)n) {
+            *result_listener << "Expected a list of size " << n << " but got " << arg.listSize();
+ return false;
+ }
+ return true;
+ }
+
+ MATCHER_P(IsAttrsOfSize, n, fmt("Is a set of size [%1%]", n)) {
+ if (arg.type() != nAttrs) {
+            *result_listener << "Expected a set but got " << arg.type();
+ return false;
+ } else if (arg.attrs->size() != (size_t)n) {
+ *result_listener << "Expected a set with " << n << " attributes but got " << arg.attrs->size();
+ return false;
+ }
+ return true;
+ }
+
+
+} /* namespace nix */
diff --git a/src/libexpr/tests/local.mk b/src/libexpr/tests/local.mk
new file mode 100644
index 000000000..b95980cab
--- /dev/null
+++ b/src/libexpr/tests/local.mk
@@ -0,0 +1,15 @@
+check: libexpr-tests_RUN
+
+programs += libexpr-tests
+
+libexpr-tests_DIR := $(d)
+
+libexpr-tests_INSTALL_DIR :=
+
+libexpr-tests_SOURCES := $(wildcard $(d)/*.cc)
+
+libexpr-tests_CXXFLAGS += -I src/libexpr -I src/libutil -I src/libstore -I src/libexpr/tests
+
+libexpr-tests_LIBS = libexpr libutil libstore libfetchers
+
+libexpr-tests_LDFLAGS := $(GTEST_LIBS) -lgmock
diff --git a/src/libexpr/tests/primops.cc b/src/libexpr/tests/primops.cc
new file mode 100644
index 000000000..16cf66d2c
--- /dev/null
+++ b/src/libexpr/tests/primops.cc
@@ -0,0 +1,839 @@
+#include <gmock/gmock.h>
+#include <gtest/gtest.h>
+
+#include "libexprtests.hh"
+
+namespace nix {
+ class CaptureLogger : public Logger
+ {
+ std::ostringstream oss;
+
+ public:
+ CaptureLogger() {}
+
+ std::string get() const {
+ return oss.str();
+ }
+
+ void log(Verbosity lvl, const FormatOrString & fs) override {
+ oss << fs.s << std::endl;
+ }
+
+ void logEI(const ErrorInfo & ei) override {
+ showErrorInfo(oss, ei, loggerSettings.showTrace.get());
+ }
+ };
+
+ class CaptureLogging {
+ Logger * oldLogger;
+ std::unique_ptr<CaptureLogger> tempLogger;
+ public:
+ CaptureLogging() : tempLogger(std::make_unique<CaptureLogger>()) {
+ oldLogger = logger;
+ logger = tempLogger.get();
+ }
+
+ ~CaptureLogging() {
+ logger = oldLogger;
+ }
+
+ std::string get() const {
+ return tempLogger->get();
+ }
+ };
+
+
+ // Testing eval of PrimOp's
+ class PrimOpTest : public LibExprTest {};
+
+
+ TEST_F(PrimOpTest, throw) {
+ ASSERT_THROW(eval("throw \"foo\""), ThrownError);
+ }
+
+ TEST_F(PrimOpTest, abort) {
+ ASSERT_THROW(eval("abort \"abort\""), Abort);
+ }
+
+ TEST_F(PrimOpTest, ceil) {
+ auto v = eval("builtins.ceil 1.9");
+ ASSERT_THAT(v, IsIntEq(2));
+ }
+
+ TEST_F(PrimOpTest, floor) {
+ auto v = eval("builtins.floor 1.9");
+ ASSERT_THAT(v, IsIntEq(1));
+ }
+
+ TEST_F(PrimOpTest, tryEvalFailure) {
+ auto v = eval("builtins.tryEval (throw \"\")");
+ ASSERT_THAT(v, IsAttrsOfSize(2));
+ auto s = createSymbol("success");
+ auto p = v.attrs->get(s);
+ ASSERT_NE(p, nullptr);
+ ASSERT_THAT(*p->value, IsFalse());
+ }
+
+ TEST_F(PrimOpTest, tryEvalSuccess) {
+ auto v = eval("builtins.tryEval 123");
+ ASSERT_THAT(v, IsAttrs());
+ auto s = createSymbol("success");
+ auto p = v.attrs->get(s);
+ ASSERT_NE(p, nullptr);
+ ASSERT_THAT(*p->value, IsTrue());
+ s = createSymbol("value");
+ p = v.attrs->get(s);
+ ASSERT_NE(p, nullptr);
+ ASSERT_THAT(*p->value, IsIntEq(123));
+ }
+
+ TEST_F(PrimOpTest, getEnv) {
+ setenv("_NIX_UNIT_TEST_ENV_VALUE", "test value", 1);
+ auto v = eval("builtins.getEnv \"_NIX_UNIT_TEST_ENV_VALUE\"");
+ ASSERT_THAT(v, IsStringEq("test value"));
+ }
+
+ TEST_F(PrimOpTest, seq) {
+ ASSERT_THROW(eval("let x = throw \"test\"; in builtins.seq x { }"), ThrownError);
+ }
+
+ TEST_F(PrimOpTest, seqNotDeep) {
+ auto v = eval("let x = { z = throw \"test\"; }; in builtins.seq x { }");
+ ASSERT_THAT(v, IsAttrs());
+ }
+
+ TEST_F(PrimOpTest, deepSeq) {
+ ASSERT_THROW(eval("let x = { z = throw \"test\"; }; in builtins.deepSeq x { }"), ThrownError);
+ }
+
+ TEST_F(PrimOpTest, trace) {
+ CaptureLogging l;
+ auto v = eval("builtins.trace \"test string 123\" 123");
+ ASSERT_THAT(v, IsIntEq(123));
+ auto text = l.get();
+ ASSERT_NE(text.find("test string 123"), std::string::npos);
+ }
+
+ TEST_F(PrimOpTest, placeholder) {
+ auto v = eval("builtins.placeholder \"out\"");
+ ASSERT_THAT(v, IsStringEq("/1rz4g4znpzjwh1xymhjpm42vipw92pr73vdgl6xs1hycac8kf2n9"));
+ }
+
+ TEST_F(PrimOpTest, baseNameOf) {
+ auto v = eval("builtins.baseNameOf /some/path");
+ ASSERT_THAT(v, IsStringEq("path"));
+ }
+
+ TEST_F(PrimOpTest, dirOf) {
+ auto v = eval("builtins.dirOf /some/path");
+ ASSERT_THAT(v, IsPathEq("/some"));
+ }
+
+ TEST_F(PrimOpTest, attrValues) {
+ auto v = eval("builtins.attrValues { x = \"foo\"; a = 1; }");
+ ASSERT_THAT(v, IsListOfSize(2));
+ ASSERT_THAT(*v.listElems()[0], IsIntEq(1));
+ ASSERT_THAT(*v.listElems()[1], IsStringEq("foo"));
+ }
+
+ TEST_F(PrimOpTest, getAttr) {
+ auto v = eval("builtins.getAttr \"x\" { x = \"foo\"; }");
+ ASSERT_THAT(v, IsStringEq("foo"));
+ }
+
+ TEST_F(PrimOpTest, getAttrNotFound) {
+ // FIXME: TypeError is really bad here, also the error wording is worse
+ // than on Nix <=2.3
+ ASSERT_THROW(eval("builtins.getAttr \"y\" { }"), TypeError);
+ }
+
+ TEST_F(PrimOpTest, unsafeGetAttrPos) {
+        // The `y` attribute is defined at line 1, column 33 of the expression below.
+ const char* expr = "builtins.unsafeGetAttrPos \"y\" { y = \"x\"; }";
+ auto v = eval(expr);
+ ASSERT_THAT(v, IsAttrsOfSize(3));
+
+ auto file = v.attrs->find(createSymbol("file"));
+ ASSERT_NE(file, nullptr);
+ // FIXME: The file when running these tests is the input string?!?
+ ASSERT_THAT(*file->value, IsStringEq(expr));
+
+ auto line = v.attrs->find(createSymbol("line"));
+ ASSERT_NE(line, nullptr);
+ ASSERT_THAT(*line->value, IsIntEq(1));
+
+ auto column = v.attrs->find(createSymbol("column"));
+ ASSERT_NE(column, nullptr);
+ ASSERT_THAT(*column->value, IsIntEq(33));
+ }
+
+ TEST_F(PrimOpTest, hasAttr) {
+ auto v = eval("builtins.hasAttr \"x\" { x = 1; }");
+ ASSERT_THAT(v, IsTrue());
+ }
+
+ TEST_F(PrimOpTest, hasAttrNotFound) {
+ auto v = eval("builtins.hasAttr \"x\" { }");
+ ASSERT_THAT(v, IsFalse());
+ }
+
+ TEST_F(PrimOpTest, isAttrs) {
+ auto v = eval("builtins.isAttrs {}");
+ ASSERT_THAT(v, IsTrue());
+ }
+
+ TEST_F(PrimOpTest, isAttrsFalse) {
+ auto v = eval("builtins.isAttrs null");
+ ASSERT_THAT(v, IsFalse());
+ }
+
+ TEST_F(PrimOpTest, removeAttrs) {
+ auto v = eval("builtins.removeAttrs { x = 1; } [\"x\"]");
+ ASSERT_THAT(v, IsAttrsOfSize(0));
+ }
+
+ TEST_F(PrimOpTest, removeAttrsRetains) {
+ auto v = eval("builtins.removeAttrs { x = 1; y = 2; } [\"x\"]");
+ ASSERT_THAT(v, IsAttrsOfSize(1));
+ ASSERT_NE(v.attrs->find(createSymbol("y")), nullptr);
+ }
+
+ TEST_F(PrimOpTest, listToAttrsEmptyList) {
+ auto v = eval("builtins.listToAttrs []");
+ ASSERT_THAT(v, IsAttrsOfSize(0));
+ ASSERT_EQ(v.type(), nAttrs);
+ ASSERT_EQ(v.attrs->size(), 0);
+ }
+
+ TEST_F(PrimOpTest, listToAttrsNotFieldName) {
+ ASSERT_THROW(eval("builtins.listToAttrs [{}]"), Error);
+ }
+
+ TEST_F(PrimOpTest, listToAttrs) {
+ auto v = eval("builtins.listToAttrs [ { name = \"key\"; value = 123; } ]");
+ ASSERT_THAT(v, IsAttrsOfSize(1));
+ auto key = v.attrs->find(createSymbol("key"));
+ ASSERT_NE(key, nullptr);
+ ASSERT_THAT(*key->value, IsIntEq(123));
+ }
+
+ TEST_F(PrimOpTest, intersectAttrs) {
+ auto v = eval("builtins.intersectAttrs { a = 1; b = 2; } { b = 3; c = 4; }");
+ ASSERT_THAT(v, IsAttrsOfSize(1));
+ auto b = v.attrs->find(createSymbol("b"));
+ ASSERT_NE(b, nullptr);
+ ASSERT_THAT(*b->value, IsIntEq(3));
+ }
+
+ TEST_F(PrimOpTest, catAttrs) {
+ auto v = eval("builtins.catAttrs \"a\" [{a = 1;} {b = 0;} {a = 2;}]");
+ ASSERT_THAT(v, IsListOfSize(2));
+ ASSERT_THAT(*v.listElems()[0], IsIntEq(1));
+ ASSERT_THAT(*v.listElems()[1], IsIntEq(2));
+ }
+
+ TEST_F(PrimOpTest, functionArgs) {
+ auto v = eval("builtins.functionArgs ({ x, y ? 123}: 1)");
+ ASSERT_THAT(v, IsAttrsOfSize(2));
+
+ auto x = v.attrs->find(createSymbol("x"));
+ ASSERT_NE(x, nullptr);
+ ASSERT_THAT(*x->value, IsFalse());
+
+ auto y = v.attrs->find(createSymbol("y"));
+ ASSERT_NE(y, nullptr);
+ ASSERT_THAT(*y->value, IsTrue());
+ }
+
+ TEST_F(PrimOpTest, mapAttrs) {
+ auto v = eval("builtins.mapAttrs (name: value: value * 10) { a = 1; b = 2; }");
+ ASSERT_THAT(v, IsAttrsOfSize(2));
+
+ auto a = v.attrs->find(createSymbol("a"));
+ ASSERT_NE(a, nullptr);
+ ASSERT_THAT(*a->value, IsThunk());
+ state.forceValue(*a->value, noPos);
+ ASSERT_THAT(*a->value, IsIntEq(10));
+
+ auto b = v.attrs->find(createSymbol("b"));
+ ASSERT_NE(b, nullptr);
+ ASSERT_THAT(*b->value, IsThunk());
+ state.forceValue(*b->value, noPos);
+ ASSERT_THAT(*b->value, IsIntEq(20));
+ }
+
+ TEST_F(PrimOpTest, isList) {
+ auto v = eval("builtins.isList []");
+ ASSERT_THAT(v, IsTrue());
+ }
+
+ TEST_F(PrimOpTest, isListFalse) {
+ auto v = eval("builtins.isList null");
+ ASSERT_THAT(v, IsFalse());
+ }
+
+ TEST_F(PrimOpTest, elemtAt) {
+ auto v = eval("builtins.elemAt [0 1 2 3] 3");
+ ASSERT_THAT(v, IsIntEq(3));
+ }
+
+ TEST_F(PrimOpTest, elemtAtOutOfBounds) {
+ ASSERT_THROW(eval("builtins.elemAt [0 1 2 3] 5"), Error);
+ }
+
+ TEST_F(PrimOpTest, head) {
+ auto v = eval("builtins.head [ 3 2 1 0 ]");
+ ASSERT_THAT(v, IsIntEq(3));
+ }
+
+ TEST_F(PrimOpTest, headEmpty) {
+ ASSERT_THROW(eval("builtins.head [ ]"), Error);
+ }
+
+ TEST_F(PrimOpTest, headWrongType) {
+ ASSERT_THROW(eval("builtins.head { }"), Error);
+ }
+
+ TEST_F(PrimOpTest, tail) {
+ auto v = eval("builtins.tail [ 3 2 1 0 ]");
+ ASSERT_THAT(v, IsListOfSize(3));
+ for (const auto [n, elem] : enumerate(v.listItems()))
+ ASSERT_THAT(*elem, IsIntEq(2 - static_cast<int>(n)));
+ }
+
+ TEST_F(PrimOpTest, tailEmpty) {
+ ASSERT_THROW(eval("builtins.tail []"), Error);
+ }
+
+ TEST_F(PrimOpTest, map) {
+ auto v = eval("map (x: \"foo\" + x) [ \"bar\" \"bla\" \"abc\" ]");
+ ASSERT_THAT(v, IsListOfSize(3));
+ auto elem = v.listElems()[0];
+ ASSERT_THAT(*elem, IsThunk());
+ state.forceValue(*elem, noPos);
+ ASSERT_THAT(*elem, IsStringEq("foobar"));
+
+ elem = v.listElems()[1];
+ ASSERT_THAT(*elem, IsThunk());
+ state.forceValue(*elem, noPos);
+ ASSERT_THAT(*elem, IsStringEq("foobla"));
+
+ elem = v.listElems()[2];
+ ASSERT_THAT(*elem, IsThunk());
+ state.forceValue(*elem, noPos);
+ ASSERT_THAT(*elem, IsStringEq("fooabc"));
+ }
+
+ TEST_F(PrimOpTest, filter) {
+ auto v = eval("builtins.filter (x: x == 2) [ 3 2 3 2 3 2 ]");
+ ASSERT_THAT(v, IsListOfSize(3));
+ for (const auto elem : v.listItems())
+ ASSERT_THAT(*elem, IsIntEq(2));
+ }
+
+ TEST_F(PrimOpTest, elemTrue) {
+ auto v = eval("builtins.elem 3 [ 1 2 3 4 5 ]");
+ ASSERT_THAT(v, IsTrue());
+ }
+
+ TEST_F(PrimOpTest, elemFalse) {
+ auto v = eval("builtins.elem 6 [ 1 2 3 4 5 ]");
+ ASSERT_THAT(v, IsFalse());
+ }
+
+ TEST_F(PrimOpTest, concatLists) {
+ auto v = eval("builtins.concatLists [[1 2] [3 4]]");
+ ASSERT_THAT(v, IsListOfSize(4));
+ for (const auto [i, elem] : enumerate(v.listItems()))
+ ASSERT_THAT(*elem, IsIntEq(static_cast<int>(i)+1));
+ }
+
+ TEST_F(PrimOpTest, length) {
+ auto v = eval("builtins.length [ 1 2 3 ]");
+ ASSERT_THAT(v, IsIntEq(3));
+ }
+
+ TEST_F(PrimOpTest, foldStrict) {
+ auto v = eval("builtins.foldl' (a: b: a + b) 0 [1 2 3]");
+ ASSERT_THAT(v, IsIntEq(6));
+ }
+
+ TEST_F(PrimOpTest, anyTrue) {
+ auto v = eval("builtins.any (x: x == 2) [ 1 2 3 ]");
+ ASSERT_THAT(v, IsTrue());
+ }
+
+ TEST_F(PrimOpTest, anyFalse) {
+ auto v = eval("builtins.any (x: x == 5) [ 1 2 3 ]");
+ ASSERT_THAT(v, IsFalse());
+ }
+
+ TEST_F(PrimOpTest, allTrue) {
+ auto v = eval("builtins.all (x: x > 0) [ 1 2 3 ]");
+ ASSERT_THAT(v, IsTrue());
+ }
+
+ TEST_F(PrimOpTest, allFalse) {
+ auto v = eval("builtins.all (x: x <= 0) [ 1 2 3 ]");
+ ASSERT_THAT(v, IsFalse());
+ }
+
+ TEST_F(PrimOpTest, genList) {
+ auto v = eval("builtins.genList (x: x + 1) 3");
+ ASSERT_EQ(v.type(), nList);
+ ASSERT_EQ(v.listSize(), 3);
+ for (const auto [i, elem] : enumerate(v.listItems())) {
+ ASSERT_THAT(*elem, IsThunk());
+ state.forceValue(*elem, noPos);
+ ASSERT_THAT(*elem, IsIntEq(static_cast<int>(i)+1));
+ }
+ }
+
+ TEST_F(PrimOpTest, sortLessThan) {
+ auto v = eval("builtins.sort builtins.lessThan [ 483 249 526 147 42 77 ]");
+ ASSERT_EQ(v.type(), nList);
+ ASSERT_EQ(v.listSize(), 6);
+
+ const std::vector<int> numbers = { 42, 77, 147, 249, 483, 526 };
+ for (const auto [n, elem] : enumerate(v.listItems()))
+ ASSERT_THAT(*elem, IsIntEq(numbers[n]));
+ }
+
+ TEST_F(PrimOpTest, partition) {
+ auto v = eval("builtins.partition (x: x > 10) [1 23 9 3 42]");
+ ASSERT_THAT(v, IsAttrsOfSize(2));
+
+ auto right = v.attrs->get(createSymbol("right"));
+ ASSERT_NE(right, nullptr);
+ ASSERT_THAT(*right->value, IsListOfSize(2));
+ ASSERT_THAT(*right->value->listElems()[0], IsIntEq(23));
+ ASSERT_THAT(*right->value->listElems()[1], IsIntEq(42));
+
+ auto wrong = v.attrs->get(createSymbol("wrong"));
+ ASSERT_NE(wrong, nullptr);
+ ASSERT_EQ(wrong->value->type(), nList);
+ ASSERT_EQ(wrong->value->listSize(), 3);
+ ASSERT_THAT(*wrong->value, IsListOfSize(3));
+ ASSERT_THAT(*wrong->value->listElems()[0], IsIntEq(1));
+ ASSERT_THAT(*wrong->value->listElems()[1], IsIntEq(9));
+ ASSERT_THAT(*wrong->value->listElems()[2], IsIntEq(3));
+ }
+
+ TEST_F(PrimOpTest, concatMap) {
+ auto v = eval("builtins.concatMap (x: x ++ [0]) [ [1 2] [3 4] ]");
+ ASSERT_EQ(v.type(), nList);
+ ASSERT_EQ(v.listSize(), 6);
+
+ const std::vector<int> numbers = { 1, 2, 0, 3, 4, 0 };
+ for (const auto [n, elem] : enumerate(v.listItems()))
+ ASSERT_THAT(*elem, IsIntEq(numbers[n]));
+ }
+
+ TEST_F(PrimOpTest, addInt) {
+ auto v = eval("builtins.add 3 5");
+ ASSERT_THAT(v, IsIntEq(8));
+ }
+
+ TEST_F(PrimOpTest, addFloat) {
+ auto v = eval("builtins.add 3.0 5.0");
+ ASSERT_THAT(v, IsFloatEq(8.0));
+ }
+
+ TEST_F(PrimOpTest, addFloatToInt) {
+ auto v = eval("builtins.add 3.0 5");
+ ASSERT_THAT(v, IsFloatEq(8.0));
+
+ v = eval("builtins.add 3 5.0");
+ ASSERT_THAT(v, IsFloatEq(8.0));
+ }
+
+ TEST_F(PrimOpTest, subInt) {
+ auto v = eval("builtins.sub 5 2");
+ ASSERT_THAT(v, IsIntEq(3));
+ }
+
+ TEST_F(PrimOpTest, subFloat) {
+ auto v = eval("builtins.sub 5.0 2.0");
+ ASSERT_THAT(v, IsFloatEq(3.0));
+ }
+
+ TEST_F(PrimOpTest, subFloatFromInt) {
+ auto v = eval("builtins.sub 5.0 2");
+ ASSERT_THAT(v, IsFloatEq(3.0));
+
+ v = eval("builtins.sub 4 2.0");
+ ASSERT_THAT(v, IsFloatEq(2.0));
+ }
+
+ TEST_F(PrimOpTest, mulInt) {
+ auto v = eval("builtins.mul 3 5");
+ ASSERT_THAT(v, IsIntEq(15));
+ }
+
+ TEST_F(PrimOpTest, mulFloat) {
+ auto v = eval("builtins.mul 3.0 5.0");
+ ASSERT_THAT(v, IsFloatEq(15.0));
+ }
+
+ TEST_F(PrimOpTest, mulFloatMixed) {
+ auto v = eval("builtins.mul 3 5.0");
+ ASSERT_THAT(v, IsFloatEq(15.0));
+
+ v = eval("builtins.mul 2.0 5");
+ ASSERT_THAT(v, IsFloatEq(10.0));
+ }
+
+ TEST_F(PrimOpTest, divInt) {
+ auto v = eval("builtins.div 5 (-1)");
+ ASSERT_THAT(v, IsIntEq(-5));
+ }
+
+ TEST_F(PrimOpTest, divIntZero) {
+ ASSERT_THROW(eval("builtins.div 5 0"), EvalError);
+ }
+
+ TEST_F(PrimOpTest, divFloat) {
+ auto v = eval("builtins.div 5.0 (-1)");
+ ASSERT_THAT(v, IsFloatEq(-5.0));
+ }
+
+ TEST_F(PrimOpTest, divFloatZero) {
+ ASSERT_THROW(eval("builtins.div 5.0 0.0"), EvalError);
+ }
+
+ TEST_F(PrimOpTest, bitOr) {
+ auto v = eval("builtins.bitOr 1 2");
+ ASSERT_THAT(v, IsIntEq(3));
+ }
+
+ TEST_F(PrimOpTest, bitXor) {
+ auto v = eval("builtins.bitXor 3 2");
+ ASSERT_THAT(v, IsIntEq(1));
+ }
+
+ TEST_F(PrimOpTest, lessThanFalse) {
+ auto v = eval("builtins.lessThan 3 1");
+ ASSERT_THAT(v, IsFalse());
+ }
+
+ TEST_F(PrimOpTest, lessThanTrue) {
+ auto v = eval("builtins.lessThan 1 3");
+ ASSERT_THAT(v, IsTrue());
+ }
+
+ TEST_F(PrimOpTest, toStringAttrsThrows) {
+ ASSERT_THROW(eval("builtins.toString {}"), EvalError);
+ }
+
+ TEST_F(PrimOpTest, toStringLambdaThrows) {
+ ASSERT_THROW(eval("builtins.toString (x: x)"), EvalError);
+ }
+
+ class ToStringPrimOpTest :
+ public PrimOpTest,
+ public testing::WithParamInterface<std::tuple<std::string, std::string_view>>
+ {};
+
+ TEST_P(ToStringPrimOpTest, toString) {
+ const auto [input, output] = GetParam();
+ auto v = eval(input);
+ ASSERT_THAT(v, IsStringEq(output));
+ }
+
+#define CASE(input, output) (std::make_tuple(std::string_view("builtins.toString " input), std::string_view(output)))
+ INSTANTIATE_TEST_SUITE_P(
+ toString,
+ ToStringPrimOpTest,
+ testing::Values(
+ CASE(R"("foo")", "foo"),
+ CASE(R"(1)", "1"),
+ CASE(R"([1 2 3])", "1 2 3"),
+ CASE(R"(.123)", "0.123000"),
+ CASE(R"(true)", "1"),
+ CASE(R"(false)", ""),
+ CASE(R"(null)", ""),
+ CASE(R"({ v = "bar"; __toString = self: self.v; })", "bar"),
+ CASE(R"({ v = "bar"; __toString = self: self.v; outPath = "foo"; })", "bar"),
+ CASE(R"({ outPath = "foo"; })", "foo"),
+ CASE(R"(./test)", "/test")
+ )
+ );
+#undef CASE
+
+ TEST_F(PrimOpTest, substring){
+ auto v = eval("builtins.substring 0 3 \"nixos\"");
+ ASSERT_THAT(v, IsStringEq("nix"));
+ }
+
+ TEST_F(PrimOpTest, substringSmallerString){
+ auto v = eval("builtins.substring 0 3 \"n\"");
+ ASSERT_THAT(v, IsStringEq("n"));
+ }
+
+ TEST_F(PrimOpTest, substringEmptyString){
+ auto v = eval("builtins.substring 1 3 \"\"");
+ ASSERT_THAT(v, IsStringEq(""));
+ }
+
+ TEST_F(PrimOpTest, stringLength) {
+ auto v = eval("builtins.stringLength \"123\"");
+ ASSERT_THAT(v, IsIntEq(3));
+ }
+ TEST_F(PrimOpTest, hashStringMd5) {
+ auto v = eval("builtins.hashString \"md5\" \"asdf\"");
+ ASSERT_THAT(v, IsStringEq("912ec803b2ce49e4a541068d495ab570"));
+ }
+
+ TEST_F(PrimOpTest, hashStringSha1) {
+ auto v = eval("builtins.hashString \"sha1\" \"asdf\"");
+ ASSERT_THAT(v, IsStringEq("3da541559918a808c2402bba5012f6c60b27661c"));
+ }
+
+ TEST_F(PrimOpTest, hashStringSha256) {
+ auto v = eval("builtins.hashString \"sha256\" \"asdf\"");
+ ASSERT_THAT(v, IsStringEq("f0e4c2f76c58916ec258f246851bea091d14d4247a2fc3e18694461b1816e13b"));
+ }
+
+ TEST_F(PrimOpTest, hashStringSha512) {
+ auto v = eval("builtins.hashString \"sha512\" \"asdf\"");
+ ASSERT_THAT(v, IsStringEq("401b09eab3c013d4ca54922bb802bec8fd5318192b0a75f201d8b3727429080fb337591abd3e44453b954555b7a0812e1081c39b740293f765eae731f5a65ed1"));
+ }
+
+ TEST_F(PrimOpTest, hashStringInvalidHashType) {
+ ASSERT_THROW(eval("builtins.hashString \"foobar\" \"asdf\""), Error);
+ }
+
+ TEST_F(PrimOpTest, nixPath) {
+ auto v = eval("builtins.nixPath");
+ ASSERT_EQ(v.type(), nList);
+ // We can't test much more as currently the EvalSettings are a global
+ // that we can't easily swap / replace
+ }
+
+ TEST_F(PrimOpTest, langVersion) {
+ auto v = eval("builtins.langVersion");
+ ASSERT_EQ(v.type(), nInt);
+ }
+
+ TEST_F(PrimOpTest, storeDir) {
+ auto v = eval("builtins.storeDir");
+ ASSERT_THAT(v, IsStringEq("/nix/store"));
+ }
+
+ TEST_F(PrimOpTest, nixVersion) {
+ auto v = eval("builtins.nixVersion");
+ ASSERT_THAT(v, IsStringEq(nixVersion));
+ }
+
+ TEST_F(PrimOpTest, currentSystem) {
+ auto v = eval("builtins.currentSystem");
+ ASSERT_THAT(v, IsStringEq(settings.thisSystem.get()));
+ }
+
+ TEST_F(PrimOpTest, derivation) {
+ auto v = eval("derivation");
+ ASSERT_EQ(v.type(), nFunction);
+ ASSERT_TRUE(v.isLambda());
+ ASSERT_NE(v.lambda.fun, nullptr);
+ ASSERT_TRUE(v.lambda.fun->hasFormals());
+ }
+
+ TEST_F(PrimOpTest, currentTime) {
+ auto v = eval("builtins.currentTime");
+ ASSERT_EQ(v.type(), nInt);
+ ASSERT_TRUE(v.integer > 0);
+ }
+
+ TEST_F(PrimOpTest, splitVersion) {
+ auto v = eval("builtins.splitVersion \"1.2.3git\"");
+ ASSERT_THAT(v, IsListOfSize(4));
+
+ const std::vector<std::string_view> strings = { "1", "2", "3", "git" };
+ for (const auto [n, p] : enumerate(v.listItems()))
+ ASSERT_THAT(*p, IsStringEq(strings[n]));
+ }
+
+ class CompareVersionsPrimOpTest :
+ public PrimOpTest,
+ public testing::WithParamInterface<std::tuple<std::string, const int>>
+ {};
+
+ TEST_P(CompareVersionsPrimOpTest, compareVersions) {
+ auto [expression, expectation] = GetParam();
+ auto v = eval(expression);
+ ASSERT_THAT(v, IsIntEq(expectation));
+ }
+
+#define CASE(a, b, expected) (std::make_tuple("builtins.compareVersions \"" #a "\" \"" #b "\"", expected))
+ INSTANTIATE_TEST_SUITE_P(
+ compareVersions,
+ CompareVersionsPrimOpTest,
+ testing::Values(
+        // The first two are weird cases. Intuition suggests they should
+        // compare equal, but they don't.
+ CASE(1.0, 1.0.0, -1),
+ CASE(1.0.0, 1.0, 1),
+ // the following are from the nix-env manual:
+ CASE(1.0, 2.3, -1),
+ CASE(2.1, 2.3, -1),
+ CASE(2.3, 2.3, 0),
+ CASE(2.5, 2.3, 1),
+ CASE(3.1, 2.3, 1),
+ CASE(2.3.1, 2.3, 1),
+ CASE(2.3.1, 2.3a, 1),
+ CASE(2.3pre1, 2.3, -1),
+ CASE(2.3pre3, 2.3pre12, -1),
+ CASE(2.3a, 2.3c, -1),
+ CASE(2.3pre1, 2.3c, -1),
+ CASE(2.3pre1, 2.3q, -1)
+ )
+ );
+#undef CASE
+
+
+ class ParseDrvNamePrimOpTest :
+ public PrimOpTest,
+ public testing::WithParamInterface<std::tuple<std::string, std::string_view, std::string_view>>
+ {};
+
+ TEST_P(ParseDrvNamePrimOpTest, parseDrvName) {
+ auto [input, expectedName, expectedVersion] = GetParam();
+ const auto expr = fmt("builtins.parseDrvName \"%1%\"", input);
+ auto v = eval(expr);
+ ASSERT_THAT(v, IsAttrsOfSize(2));
+
+ auto name = v.attrs->find(createSymbol("name"));
+ ASSERT_TRUE(name);
+ ASSERT_THAT(*name->value, IsStringEq(expectedName));
+
+ auto version = v.attrs->find(createSymbol("version"));
+ ASSERT_TRUE(version);
+ ASSERT_THAT(*version->value, IsStringEq(expectedVersion));
+ }
+
+ INSTANTIATE_TEST_SUITE_P(
+ parseDrvName,
+ ParseDrvNamePrimOpTest,
+ testing::Values(
+ std::make_tuple("nix-0.12pre12876", "nix", "0.12pre12876"),
+ std::make_tuple("a-b-c-1234pre5+git", "a-b-c", "1234pre5+git")
+ )
+ );
+
+ TEST_F(PrimOpTest, replaceStrings) {
+ // FIXME: add a test that verifies the string context is as expected
+ auto v = eval("builtins.replaceStrings [\"oo\" \"a\"] [\"a\" \"i\"] \"foobar\"");
+ ASSERT_EQ(v.type(), nString);
+ ASSERT_EQ(v.string.s, std::string_view("fabir"));
+ }
+
+ TEST_F(PrimOpTest, concatStringsSep) {
+ // FIXME: add a test that verifies the string context is as expected
+ auto v = eval("builtins.concatStringsSep \"%\" [\"foo\" \"bar\" \"baz\"]");
+ ASSERT_EQ(v.type(), nString);
+ ASSERT_EQ(std::string_view(v.string.s), "foo%bar%baz");
+ }
+
+ TEST_F(PrimOpTest, split1) {
+ // v = [ "" [ "a" ] "c" ]
+ auto v = eval("builtins.split \"(a)b\" \"abc\"");
+ ASSERT_THAT(v, IsListOfSize(3));
+
+ ASSERT_THAT(*v.listElems()[0], IsStringEq(""));
+
+ ASSERT_THAT(*v.listElems()[1], IsListOfSize(1));
+ ASSERT_THAT(*v.listElems()[1]->listElems()[0], IsStringEq("a"));
+
+ ASSERT_THAT(*v.listElems()[2], IsStringEq("c"));
+ }
+
+ TEST_F(PrimOpTest, split2) {
+ // v is expected to be a list [ "" [ "a" ] "b" [ "c"] "" ]
+ auto v = eval("builtins.split \"([ac])\" \"abc\"");
+ ASSERT_THAT(v, IsListOfSize(5));
+
+ ASSERT_THAT(*v.listElems()[0], IsStringEq(""));
+
+ ASSERT_THAT(*v.listElems()[1], IsListOfSize(1));
+ ASSERT_THAT(*v.listElems()[1]->listElems()[0], IsStringEq("a"));
+
+ ASSERT_THAT(*v.listElems()[2], IsStringEq("b"));
+
+ ASSERT_THAT(*v.listElems()[3], IsListOfSize(1));
+ ASSERT_THAT(*v.listElems()[3]->listElems()[0], IsStringEq("c"));
+
+ ASSERT_THAT(*v.listElems()[4], IsStringEq(""));
+ }
+
+ TEST_F(PrimOpTest, split3) {
+ auto v = eval("builtins.split \"(a)|(c)\" \"abc\"");
+ ASSERT_THAT(v, IsListOfSize(5));
+
+ // First list element
+ ASSERT_THAT(*v.listElems()[0], IsStringEq(""));
+
+ // 2nd list element is a list [ "" null ]
+ ASSERT_THAT(*v.listElems()[1], IsListOfSize(2));
+ ASSERT_THAT(*v.listElems()[1]->listElems()[0], IsStringEq("a"));
+ ASSERT_THAT(*v.listElems()[1]->listElems()[1], IsNull());
+
+ // 3rd element
+ ASSERT_THAT(*v.listElems()[2], IsStringEq("b"));
+
+ // 4th element is a list: [ null "c" ]
+ ASSERT_THAT(*v.listElems()[3], IsListOfSize(2));
+ ASSERT_THAT(*v.listElems()[3]->listElems()[0], IsNull());
+ ASSERT_THAT(*v.listElems()[3]->listElems()[1], IsStringEq("c"));
+
+ // 5th element is the empty string
+ ASSERT_THAT(*v.listElems()[4], IsStringEq(""));
+ }
+
+ TEST_F(PrimOpTest, split4) {
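+ // v = [ " " [ "FOO" ] " " ]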
+ auto v = eval("builtins.split \"([[:upper:]]+)\" \" FOO \"");
+ ASSERT_THAT(v, IsListOfSize(3));
+ auto first = v.listElems()[0];
+ auto second = v.listElems()[1];
+ auto third = v.listElems()[2];
+
+ ASSERT_THAT(*first, IsStringEq(" "));
+
+ ASSERT_THAT(*second, IsListOfSize(1));
+ ASSERT_THAT(*second->listElems()[0], IsStringEq("FOO"));
+
+ ASSERT_THAT(*third, IsStringEq(" "));
+ }
+
+ TEST_F(PrimOpTest, match1) {
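+ // builtins.match matches the whole string, so a partial match yields null.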
+ auto v = eval("builtins.match \"ab\" \"abc\"");
+ ASSERT_THAT(v, IsNull());
+ }
+
+ TEST_F(PrimOpTest, match2) {
+ auto v = eval("builtins.match \"abc\" \"abc\"");
+ ASSERT_THAT(v, IsListOfSize(0));
+ }
+
+ TEST_F(PrimOpTest, match3) {
+ auto v = eval("builtins.match \"a(b)(c)\" \"abc\"");
+ ASSERT_THAT(v, IsListOfSize(2));
+ ASSERT_THAT(*v.listElems()[0], IsStringEq("b"));
+ ASSERT_THAT(*v.listElems()[1], IsStringEq("c"));
+ }
+
+ TEST_F(PrimOpTest, match4) {
+ auto v = eval("builtins.match \"[[:space:]]+([[:upper:]]+)[[:space:]]+\" \" FOO \"");
+ ASSERT_THAT(v, IsListOfSize(1));
+ ASSERT_THAT(*v.listElems()[0], IsStringEq("FOO"));
+ }
+
+ TEST_F(PrimOpTest, attrNames) {
+ auto v = eval("builtins.attrNames { x = 1; y = 2; z = 3; a = 2; }");
+ ASSERT_THAT(v, IsListOfSize(4));
+
+ // ensure that the list is sorted
+ const std::vector<std::string_view> expected { "a", "x", "y", "z" };
+ for (const auto [n, elem] : enumerate(v.listItems()))
+ ASSERT_THAT(*elem, IsStringEq(expected[n]));
+ }
+} /* namespace nix */
diff --git a/src/libexpr/tests/trivial.cc b/src/libexpr/tests/trivial.cc
new file mode 100644
index 000000000..8ce276e52
--- /dev/null
+++ b/src/libexpr/tests/trivial.cc
@@ -0,0 +1,196 @@
+#include "libexprtests.hh"
+
+namespace nix {
+ // Testing of trivial expressions
+ class TrivialExpressionTest : public LibExprTest {};
+
+ TEST_F(TrivialExpressionTest, true) {
+ auto v = eval("true");
+ ASSERT_THAT(v, IsTrue());
+ }
+
+ TEST_F(TrivialExpressionTest, false) {
+ auto v = eval("false");
+ ASSERT_THAT(v, IsFalse());
+ }
+
+ TEST_F(TrivialExpressionTest, null) {
+ auto v = eval("null");
+ ASSERT_THAT(v, IsNull());
+ }
+
+ TEST_F(TrivialExpressionTest, 1) {
+ auto v = eval("1");
+ ASSERT_THAT(v, IsIntEq(1));
+ }
+
+ TEST_F(TrivialExpressionTest, 1plus1) {
+ auto v = eval("1+1");
+ ASSERT_THAT(v, IsIntEq(2));
+ }
+
+ TEST_F(TrivialExpressionTest, minus1) {
+ auto v = eval("-1");
+ ASSERT_THAT(v, IsIntEq(-1));
+ }
+
+ TEST_F(TrivialExpressionTest, 1minus1) {
+ auto v = eval("1-1");
+ ASSERT_THAT(v, IsIntEq(0));
+ }
+
+ TEST_F(TrivialExpressionTest, lambdaAdd) {
+ auto v = eval("let add = a: b: a + b; in add 1 2");
+ ASSERT_THAT(v, IsIntEq(3));
+ }
+
+ TEST_F(TrivialExpressionTest, list) {
+ auto v = eval("[]");
+ ASSERT_THAT(v, IsListOfSize(0));
+ }
+
+ TEST_F(TrivialExpressionTest, attrs) {
+ auto v = eval("{}");
+ ASSERT_THAT(v, IsAttrsOfSize(0));
+ }
+
+ TEST_F(TrivialExpressionTest, float) {
+ auto v = eval("1.234");
+ ASSERT_THAT(v, IsFloatEq(1.234));
+ }
+
+ TEST_F(TrivialExpressionTest, updateAttrs) {
+ auto v = eval("{ a = 1; } // { b = 2; a = 3; }");
+ ASSERT_THAT(v, IsAttrsOfSize(2));
+ auto a = v.attrs->find(createSymbol("a"));
+ ASSERT_NE(a, nullptr);
+ ASSERT_THAT(*a->value, IsIntEq(3));
+
+ auto b = v.attrs->find(createSymbol("b"));
+ ASSERT_NE(b, nullptr);
+ ASSERT_THAT(*b->value, IsIntEq(2));
+ }
+
+ TEST_F(TrivialExpressionTest, hasAttrOpFalse) {
+ auto v = eval("{} ? a");
+ ASSERT_THAT(v, IsFalse());
+ }
+
+ TEST_F(TrivialExpressionTest, hasAttrOpTrue) {
+ auto v = eval("{ a = 123; } ? a");
+ ASSERT_THAT(v, IsTrue());
+ }
+
+ TEST_F(TrivialExpressionTest, withFound) {
+ auto v = eval("with { a = 23; }; a");
+ ASSERT_THAT(v, IsIntEq(23));
+ }
+
+ TEST_F(TrivialExpressionTest, withNotFound) {
+ ASSERT_THROW(eval("with {}; a"), Error);
+ }
+
+ TEST_F(TrivialExpressionTest, withOverride) {
+ auto v = eval("with { a = 23; }; with { a = 42; }; a");
+ ASSERT_THAT(v, IsIntEq(42));
+ }
+
+ TEST_F(TrivialExpressionTest, letOverWith) {
+ auto v = eval("let a = 23; in with { a = 1; }; a");
+ ASSERT_THAT(v, IsIntEq(23));
+ }
+
+ TEST_F(TrivialExpressionTest, multipleLet) {
+ auto v = eval("let a = 23; in let a = 42; in a");
+ ASSERT_THAT(v, IsIntEq(42));
+ }
+
+ TEST_F(TrivialExpressionTest, defaultFunctionArgs) {
+ auto v = eval("({ a ? 123 }: a) {}");
+ ASSERT_THAT(v, IsIntEq(123));
+ }
+
+ TEST_F(TrivialExpressionTest, defaultFunctionArgsOverride) {
+ auto v = eval("({ a ? 123 }: a) { a = 5; }");
+ ASSERT_THAT(v, IsIntEq(5));
+ }
+
+ TEST_F(TrivialExpressionTest, defaultFunctionArgsCaptureBack) {
+ auto v = eval("({ a ? 123 }@args: args) {}");
+ ASSERT_THAT(v, IsAttrsOfSize(0));
+ }
+
+ TEST_F(TrivialExpressionTest, defaultFunctionArgsCaptureFront) {
+ auto v = eval("(args@{ a ? 123 }: args) {}");
+ ASSERT_THAT(v, IsAttrsOfSize(0));
+ }
+
+ TEST_F(TrivialExpressionTest, assertThrows) {
+ ASSERT_THROW(eval("let x = arg: assert arg == 1; 123; in x 2"), Error);
+ }
+
+ TEST_F(TrivialExpressionTest, assertPassed) {
+ auto v = eval("let x = arg: assert arg == 1; 123; in x 1");
+ ASSERT_THAT(v, IsIntEq(123));
+ }
+
+ class AttrSetMergeTrvialExpressionTest :
+ public TrivialExpressionTest,
+ public testing::WithParamInterface<const char*>
+ {};
+
+ TEST_P(AttrSetMergeTrvialExpressionTest, attrsetMergeLazy) {
+ // Usually Nix rejects duplicate keys in an attrset, but it does allow
+ // them if the duplicated attribute is itself an attribute set with
+ // disjoint sets of keys. Both expressions below are equivalent to
+ // `{ a.b = 1; a.c = 2; }`.
+ // The attribute set `a` will be a thunk at first, as the attributes
+ // have to be merged (or otherwise computed), and that is done lazily.
+
+ auto expr = GetParam();
+ auto v = eval(expr);
+ ASSERT_THAT(v, IsAttrsOfSize(1));
+
+ auto a = v.attrs->find(createSymbol("a"));
+ ASSERT_NE(a, nullptr);
+
+ ASSERT_THAT(*a->value, IsThunk());
+ state.forceValue(*a->value, noPos);
+
+ ASSERT_THAT(*a->value, IsAttrsOfSize(2));
+
+ auto b = a->value->attrs->find(createSymbol("b"));
+ ASSERT_NE(b, nullptr);
+ ASSERT_THAT(*b->value, IsIntEq(1));
+
+ auto c = a->value->attrs->find(createSymbol("c"));
+ ASSERT_NE(c, nullptr);
+ ASSERT_THAT(*c->value, IsIntEq(2));
+ }
+
+ INSTANTIATE_TEST_SUITE_P(
+ attrsetMergeLazy,
+ AttrSetMergeTrvialExpressionTest,
+ testing::Values(
+ "{ a.b = 1; a.c = 2; }",
+ "{ a = { b = 1; }; a = { c = 2; }; }"
+ )
+ );
+
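+ // An attrset with a `__functor` attribute can be applied like a function;
+ // the attrset itself is passed as the first argument.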
+ TEST_F(TrivialExpressionTest, functor) {
+ auto v = eval("{ __functor = self: arg: self.v + arg; v = 10; } 5");
+ ASSERT_THAT(v, IsIntEq(15));
+ }
+
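+ // `or` may be used as an attribute name, even though it is special-cased
+ // in attribute selection (`a.b or c`).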
+ TEST_F(TrivialExpressionTest, bindOr) {
+ auto v = eval("{ or = 1; }");
+ ASSERT_THAT(v, IsAttrsOfSize(1));
+ auto b = v.attrs->find(createSymbol("or"));
+ ASSERT_NE(b, nullptr);
+ ASSERT_THAT(*b->value, IsIntEq(1));
+ }
+
+ TEST_F(TrivialExpressionTest, orCantBeUsed) {
+ ASSERT_THROW(eval("let or = 1; in or"), Error);
+ }
+} /* namespace nix */
diff --git a/src/libexpr/value-to-json.cc b/src/libexpr/value-to-json.cc
index 68235ad11..4d63d8b49 100644
--- a/src/libexpr/value-to-json.cc
+++ b/src/libexpr/value-to-json.cc
@@ -10,7 +10,7 @@
namespace nix {
void printValueAsJSON(EvalState & state, bool strict,
- Value & v, const PosIdx pos, JSONPlaceholder & out, PathSet & context)
+ Value & v, const PosIdx pos, JSONPlaceholder & out, PathSet & context, bool copyToStore)
{
checkInterrupt();
@@ -32,7 +32,10 @@ void printValueAsJSON(EvalState & state, bool strict,
break;
case nPath:
- out.write(state.copyPathToStore(context, v.path));
+ if (copyToStore)
+ out.write(state.copyPathToStore(context, v.path));
+ else
+ out.write(v.path);
break;
case nNull:
@@ -54,10 +57,10 @@ void printValueAsJSON(EvalState & state, bool strict,
for (auto & j : names) {
Attr & a(*v.attrs->find(state.symbols.create(j)));
auto placeholder(obj.placeholder(j));
- printValueAsJSON(state, strict, *a.value, a.pos, placeholder, context);
+ printValueAsJSON(state, strict, *a.value, a.pos, placeholder, context, copyToStore);
}
} else
- printValueAsJSON(state, strict, *i->value, i->pos, out, context);
+ printValueAsJSON(state, strict, *i->value, i->pos, out, context, copyToStore);
break;
}
@@ -65,13 +68,13 @@ void printValueAsJSON(EvalState & state, bool strict,
auto list(out.list());
for (auto elem : v.listItems()) {
auto placeholder(list.placeholder());
- printValueAsJSON(state, strict, *elem, pos, placeholder, context);
+ printValueAsJSON(state, strict, *elem, pos, placeholder, context, copyToStore);
}
break;
}
case nExternal:
- v.external->printValueAsJSON(state, strict, out, context);
+ v.external->printValueAsJSON(state, strict, out, context, copyToStore);
break;
case nFloat:
@@ -85,21 +88,22 @@ void printValueAsJSON(EvalState & state, bool strict,
.errPos = state.positions[v.determinePos(pos)]
});
e.addTrace(state.positions[pos], hintfmt("message for the trace"));
+ state.debugThrowLastTrace(e);
throw e;
}
}
void printValueAsJSON(EvalState & state, bool strict,
- Value & v, const PosIdx pos, std::ostream & str, PathSet & context)
+ Value & v, const PosIdx pos, std::ostream & str, PathSet & context, bool copyToStore)
{
JSONPlaceholder out(str);
- printValueAsJSON(state, strict, v, pos, out, context);
+ printValueAsJSON(state, strict, v, pos, out, context, copyToStore);
}
void ExternalValueBase::printValueAsJSON(EvalState & state, bool strict,
- JSONPlaceholder & out, PathSet & context) const
+ JSONPlaceholder & out, PathSet & context, bool copyToStore) const
{
- throw TypeError("cannot convert %1% to JSON", showType());
+ state.debugThrowLastTrace(TypeError("cannot convert %1% to JSON", showType()));
}
diff --git a/src/libexpr/value-to-json.hh b/src/libexpr/value-to-json.hh
index c020a817a..7ddc8a5b1 100644
--- a/src/libexpr/value-to-json.hh
+++ b/src/libexpr/value-to-json.hh
@@ -11,9 +11,9 @@ namespace nix {
class JSONPlaceholder;
void printValueAsJSON(EvalState & state, bool strict,
- Value & v, const PosIdx pos, JSONPlaceholder & out, PathSet & context);
+ Value & v, const PosIdx pos, JSONPlaceholder & out, PathSet & context, bool copyToStore = true);
void printValueAsJSON(EvalState & state, bool strict,
- Value & v, const PosIdx pos, std::ostream & str, PathSet & context);
+ Value & v, const PosIdx pos, std::ostream & str, PathSet & context, bool copyToStore = true);
}
diff --git a/src/libexpr/value.hh b/src/libexpr/value.hh
index 3fa43bc5c..a428bc565 100644
--- a/src/libexpr/value.hh
+++ b/src/libexpr/value.hh
@@ -99,7 +99,7 @@ class ExternalValueBase
/* Print the value as JSON. Defaults to unconvertable, i.e. throws an error */
virtual void printValueAsJSON(EvalState & state, bool strict,
- JSONPlaceholder & out, PathSet & context) const;
+ JSONPlaceholder & out, PathSet & context, bool copyToStore = true) const;
/* Print the value as XML. Defaults to unevaluated */
virtual void printValueAsXML(EvalState & state, bool strict, bool location,
@@ -404,9 +404,9 @@ public:
#if HAVE_BOEHMGC
-typedef std::vector<Value *, traceable_allocator<Value *> > ValueVector;
-typedef std::map<Symbol, Value *, std::less<Symbol>, traceable_allocator<std::pair<const Symbol, Value *> > > ValueMap;
-typedef std::map<Symbol, ValueVector, std::less<Symbol>, traceable_allocator<std::pair<const Symbol, ValueVector> > > ValueVectorMap;
+typedef std::vector<Value *, traceable_allocator<Value *>> ValueVector;
+typedef std::map<Symbol, Value *, std::less<Symbol>, traceable_allocator<std::pair<const Symbol, Value *>>> ValueMap;
+typedef std::map<Symbol, ValueVector, std::less<Symbol>, traceable_allocator<std::pair<const Symbol, ValueVector>>> ValueVectorMap;
#else
typedef std::vector<Value *> ValueVector;
typedef std::map<Symbol, Value *> ValueMap;
diff --git a/src/libfetchers/fetch-settings.hh b/src/libfetchers/fetch-settings.hh
index 04c9feda0..6452143a1 100644
--- a/src/libfetchers/fetch-settings.hh
+++ b/src/libfetchers/fetch-settings.hh
@@ -70,7 +70,7 @@ struct FetchSettings : public Config
Setting<bool> warnDirty{this, true, "warn-dirty",
"Whether to warn about dirty Git/Mercurial trees."};
- Setting<std::string> flakeRegistry{this, "https://github.com/NixOS/flake-registry/raw/master/flake-registry.json", "flake-registry",
+ Setting<std::string> flakeRegistry{this, "https://channels.nixos.org/flake-registry.json", "flake-registry",
"Path or URI of the global flake registry."};
Setting<bool> useRegistries{this, true, "use-registries",
diff --git a/src/libfetchers/git.cc b/src/libfetchers/git.cc
index af40990e5..c1a21e764 100644
--- a/src/libfetchers/git.cc
+++ b/src/libfetchers/git.cc
@@ -5,15 +5,20 @@
#include "store-api.hh"
#include "url-parts.hh"
#include "pathlocks.hh"
+#include "util.hh"
+#include "git.hh"
#include "fetch-settings.hh"
+#include <regex>
+#include <string.h>
#include <sys/time.h>
#include <sys/wait.h>
using namespace std::string_literals;
namespace nix::fetchers {
+namespace {
// Explicit initial branch of our bare repo to suppress warnings from new version of git.
// The value itself does not matter, since we always fetch a specific revision or branch.
@@ -21,25 +26,228 @@ namespace nix::fetchers {
// old version of git, which will ignore unrecognized `-c` options.
const std::string gitInitialBranch = "__nix_dummy_branch";
-static std::string getGitDir()
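+// Whether a cached file is still fresh, i.e. its mtime is within the tarball TTL.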
+bool isCacheFileWithinTtl(const time_t now, const struct stat & st)
{
- auto gitDir = getEnv("GIT_DIR");
- if (!gitDir) {
- return ".git";
+ return st.st_mtime + settings.tarballTtl > now;
+}
+
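+// Best-effort update of a cache file's access and modification times to
+// 'touch_time'; returns false on failure.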
+bool touchCacheFile(const Path& path, const time_t& touch_time)
+{
+ struct timeval times[2];
+ times[0].tv_sec = touch_time;
+ times[0].tv_usec = 0;
+ times[1].tv_sec = touch_time;
+ times[1].tv_usec = 0;
+
+ return lutimes(path.c_str(), times) == 0;
+}
+
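+// Location of the local cache entry for 'key' (typically a repository URL),
+// derived from its SHA-256 hash.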
+Path getCachePath(std::string key)
+{
+ return getCacheDir() + "/nix/gitv3/" +
+ hashString(htSHA256, key).to_string(Base32, false);
+}
+
+// Returns the name of the HEAD branch as reported by `git ls-remote --symref`.
+// For example, if ls-remote prints the output below, "main" is returned based
+// on the ref line.
+//
+// ref: refs/heads/main HEAD
+// ...
+std::optional<std::string> readHead(const Path & path)
+{
+ auto [exit_code, output] = runProgram(RunOptions {
+ .program = "git",
+ .args = {"ls-remote", "--symref", path},
+ });
+ if (exit_code != 0) {
+ return std::nullopt;
}
- return *gitDir;
+
+ std::string_view line = output;
+ line = line.substr(0, line.find("\n"));
+ if (const auto parseResult = git::parseLsRemoteLine(line)) {
+ switch (parseResult->kind) {
+ case git::LsRemoteRefLine::Kind::Symbolic:
+ debug("resolved HEAD ref '%s' for repo '%s'", parseResult->target, path);
+ break;
+ case git::LsRemoteRefLine::Kind::Object:
+ debug("resolved HEAD rev '%s' for repo '%s'", parseResult->target, path);
+ break;
+ }
+ return parseResult->target;
+ }
+ return std::nullopt;
}
-static std::string readHead(const Path & path)
+// Persist the HEAD ref from the remote repo in the local cached repo.
+bool storeCachedHead(const std::string& actualUrl, const std::string& headRef)
{
- return chomp(runProgram("git", true, { "-C", path, "--git-dir", ".git", "rev-parse", "--abbrev-ref", "HEAD" }));
+ Path cacheDir = getCachePath(actualUrl);
+ auto gitDir = ".";
+ try {
+ runProgram("git", true, { "-C", cacheDir, "--git-dir", gitDir, "symbolic-ref", "--", "HEAD", headRef });
+ } catch (ExecError &e) {
+ if (!WIFEXITED(e.status)) throw;
+ return false;
+ }
+ /* No need to touch refs/HEAD, because `git symbolic-ref` updates the mtime. */
+ return true;
}
-static bool isNotDotGitDirectory(const Path & path)
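+// Resolve the HEAD branch for 'actualUrl', preferring a fresh cached value,
+// then the remote, then an expired cached value as a last resort.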
+std::optional<std::string> readHeadCached(const std::string& actualUrl)
+{
+ // Create a cache path to store the branch of the HEAD ref. Append something
+ // in front of the URL to prevent collision with the repository itself.
+ Path cacheDir = getCachePath(actualUrl);
+ Path headRefFile = cacheDir + "/HEAD";
+
+ time_t now = time(0);
+ struct stat st;
+ std::optional<std::string> cachedRef;
+ if (stat(headRefFile.c_str(), &st) == 0) {
+ cachedRef = readHead(cacheDir);
+ if (cachedRef != std::nullopt &&
+ *cachedRef != gitInitialBranch &&
+ isCacheFileWithinTtl(now, st)) {
+ debug("using cached HEAD ref '%s' for repo '%s'", *cachedRef, actualUrl);
+ return cachedRef;
+ }
+ }
+
+ auto ref = readHead(actualUrl);
+ if (ref) {
+ return ref;
+ }
+
+ if (cachedRef) {
+ // If the cached git ref is expired in fetch() below, and the 'git fetch'
+ // fails, it falls back to continuing with the most recent version.
+ // This function must behave the same way, so we return the expired
+ // cached ref here.
+ warn("could not get HEAD ref for repository '%s'; using expired cached ref '%s'", actualUrl, *cachedRef);
+ return *cachedRef;
+ }
+
+ return std::nullopt;
+}
+
+bool isNotDotGitDirectory(const Path & path)
{
return baseNameOf(path) != ".git";
}
+struct WorkdirInfo
+{
+ bool clean = false;
+ bool hasHead = false;
+};
+
+// Returns whether a git workdir is clean and has commits.
+WorkdirInfo getWorkdirInfo(const Input & input, const Path & workdir)
+{
+ const bool submodules = maybeGetBoolAttr(input.attrs, "submodules").value_or(false);
+ std::string gitDir(".git");
+
+ auto env = getEnv();
+ // Set LC_ALL to C, because we rely on the error messages from git rev-parse to determine what went wrong;
+ // that way unknown errors lead to a failure instead of continuing down the wrong code path.
+ env["LC_ALL"] = "C";
+
+ /* Check whether HEAD points to something that looks like a commit,
+ since that is the reference we want to use later on. */
+ auto result = runProgram(RunOptions {
+ .program = "git",
+ .args = { "-C", workdir, "--git-dir", gitDir, "rev-parse", "--verify", "--no-revs", "HEAD^{commit}" },
+ .environment = env,
+ .mergeStderrToStdout = true
+ });
+ auto exitCode = WEXITSTATUS(result.first);
+ auto errorMessage = result.second;
+
+ if (errorMessage.find("fatal: not a git repository") != std::string::npos) {
+ throw Error("'%s' is not a Git repository", workdir);
+ } else if (errorMessage.find("fatal: Needed a single revision") != std::string::npos) {
+ // This indicates that the repo does not have any commits;
+ // we want to proceed and will consider it dirty later.
+ } else if (exitCode != 0) {
+ // any other errors should lead to a failure
+ throw Error("getting the HEAD of the Git tree '%s' failed with exit code %d:\n%s", workdir, exitCode, errorMessage);
+ }
+
+ bool clean = false;
+ bool hasHead = exitCode == 0;
+
+ try {
+ if (hasHead) {
+ // Using git diff is preferable over lower-level operations here,
+ // because it's conceptually simpler and we only need the exit code anyway.
+ auto gitDiffOpts = Strings({ "-C", workdir, "--git-dir", gitDir, "diff", "HEAD", "--quiet"});
+ if (!submodules) {
+ // Changes in submodules should only make the tree dirty
+ // when those submodules will be copied as well.
+ gitDiffOpts.emplace_back("--ignore-submodules");
+ }
+ gitDiffOpts.emplace_back("--");
+ runProgram("git", true, gitDiffOpts);
+
+ clean = true;
+ }
+ } catch (ExecError & e) {
+ if (!WIFEXITED(e.status) || WEXITSTATUS(e.status) != 1) throw;
+ }
+
+ return WorkdirInfo { .clean = clean, .hasHead = hasHead };
+}
+
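+// Copy the tracked files of a (possibly dirty) workdir to the store, subject to
+// the allow-dirty / warn-dirty settings, and record its lastModified time.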
+std::pair<StorePath, Input> fetchFromWorkdir(ref<Store> store, Input & input, const Path & workdir, const WorkdirInfo & workdirInfo)
+{
+ const bool submodules = maybeGetBoolAttr(input.attrs, "submodules").value_or(false);
+ auto gitDir = ".git";
+
+ if (!fetchSettings.allowDirty)
+ throw Error("Git tree '%s' is dirty", workdir);
+
+ if (fetchSettings.warnDirty)
+ warn("Git tree '%s' is dirty", workdir);
+
+ auto gitOpts = Strings({ "-C", workdir, "--git-dir", gitDir, "ls-files", "-z" });
+ if (submodules)
+ gitOpts.emplace_back("--recurse-submodules");
+
+ auto files = tokenizeString<std::set<std::string>>(
+ runProgram("git", true, gitOpts), "\0"s);
+
+ Path actualPath(absPath(workdir));
+
+ PathFilter filter = [&](const Path & p) -> bool {
+ assert(hasPrefix(p, actualPath));
+ std::string file(p, actualPath.size() + 1);
+
+ auto st = lstat(p);
+
+ if (S_ISDIR(st.st_mode)) {
+ auto prefix = file + "/";
+ auto i = files.lower_bound(prefix);
+ return i != files.end() && hasPrefix(*i, prefix);
+ }
+
+ return files.count(file);
+ };
+
+ auto storePath = store->addToStore(input.getName(), actualPath, FileIngestionMethod::Recursive, htSHA256, filter);
+
+ // FIXME: maybe we should use the timestamp of the last
+ // modified dirty file?
+ input.attrs.insert_or_assign(
+ "lastModified",
+ workdirInfo.hasHead ? std::stoull(runProgram("git", true, { "-C", actualPath, "--git-dir", gitDir, "log", "-1", "--format=%ct", "--no-show-signature", "HEAD" })) : 0);
+
+ return {std::move(storePath), input};
+}
+} // end namespace
+
struct GitInputScheme : InputScheme
{
std::optional<Input> inputFromURL(const ParsedURL & url) override
@@ -159,10 +367,10 @@ struct GitInputScheme : InputScheme
{
auto sourcePath = getSourcePath(input);
assert(sourcePath);
- auto gitDir = getGitDir();
+ auto gitDir = ".git";
runProgram("git", true,
- { "-C", *sourcePath, "--git-dir", gitDir, "add", "--force", "--intent-to-add", "--", std::string(file) });
+ { "-C", *sourcePath, "--git-dir", gitDir, "add", "--intent-to-add", "--", std::string(file) });
if (commitMsg)
runProgram("git", true,
@@ -185,7 +393,7 @@ struct GitInputScheme : InputScheme
std::pair<StorePath, Input> fetch(ref<Store> store, const Input & _input) override
{
Input input(_input);
- auto gitDir = getGitDir();
+ auto gitDir = ".git";
std::string name = input.getName();
@@ -234,123 +442,50 @@ struct GitInputScheme : InputScheme
auto [isLocal, actualUrl_] = getActualUrl(input);
auto actualUrl = actualUrl_; // work around clang bug
- // If this is a local directory and no ref or revision is
- // given, then allow the use of an unclean working tree.
+ /* If this is a local directory and no ref or revision is given,
+ allow fetching directly from a dirty workdir. */
if (!input.getRef() && !input.getRev() && isLocal) {
- bool clean = false;
-
- auto env = getEnv();
- // Set LC_ALL to C: because we rely on the error messages from git rev-parse to determine what went wrong
- // that way unknown errors can lead to a failure instead of continuing through the wrong code path
- env["LC_ALL"] = "C";
-
- /* Check whether HEAD points to something that looks like a commit,
- since that is the refrence we want to use later on. */
- auto result = runProgram(RunOptions {
- .program = "git",
- .args = { "-C", actualUrl, "--git-dir", gitDir, "rev-parse", "--verify", "--no-revs", "HEAD^{commit}" },
- .environment = env,
- .mergeStderrToStdout = true
- });
- auto exitCode = WEXITSTATUS(result.first);
- auto errorMessage = result.second;
-
- if (errorMessage.find("fatal: not a git repository") != std::string::npos) {
- throw Error("'%s' is not a Git repository", actualUrl);
- } else if (errorMessage.find("fatal: Needed a single revision") != std::string::npos) {
- // indicates that the repo does not have any commits
- // we want to proceed and will consider it dirty later
- } else if (exitCode != 0) {
- // any other errors should lead to a failure
- throw Error("getting the HEAD of the Git tree '%s' failed with exit code %d:\n%s", actualUrl, exitCode, errorMessage);
- }
-
- bool hasHead = exitCode == 0;
- try {
- if (hasHead) {
- // Using git diff is preferrable over lower-level operations here,
- // because its conceptually simpler and we only need the exit code anyways.
- auto gitDiffOpts = Strings({ "-C", actualUrl, "--git-dir", gitDir, "diff", "HEAD", "--quiet"});
- if (!submodules) {
- // Changes in submodules should only make the tree dirty
- // when those submodules will be copied as well.
- gitDiffOpts.emplace_back("--ignore-submodules");
- }
- gitDiffOpts.emplace_back("--");
- runProgram("git", true, gitDiffOpts);
-
- clean = true;
- }
- } catch (ExecError & e) {
- if (!WIFEXITED(e.status) || WEXITSTATUS(e.status) != 1) throw;
- }
-
- if (!clean) {
-
- /* This is an unclean working tree. So copy all tracked files. */
-
- if (!fetchSettings.allowDirty)
- throw Error("Git tree '%s' is dirty", actualUrl);
-
- if (fetchSettings.warnDirty)
- warn("Git tree '%s' is dirty", actualUrl);
-
- auto gitOpts = Strings({ "-C", actualUrl, "--git-dir", gitDir, "ls-files", "-z" });
- if (submodules)
- gitOpts.emplace_back("--recurse-submodules");
-
- auto files = tokenizeString<std::set<std::string>>(
- runProgram("git", true, gitOpts), "\0"s);
-
- Path actualPath(absPath(actualUrl));
-
- PathFilter filter = [&](const Path & p) -> bool {
- assert(hasPrefix(p, actualPath));
- std::string file(p, actualPath.size() + 1);
-
- auto st = lstat(p);
-
- if (S_ISDIR(st.st_mode)) {
- auto prefix = file + "/";
- auto i = files.lower_bound(prefix);
- return i != files.end() && hasPrefix(*i, prefix);
- }
-
- return files.count(file);
- };
-
- auto storePath = store->addToStore(input.getName(), actualPath, FileIngestionMethod::Recursive, htSHA256, filter);
-
- // FIXME: maybe we should use the timestamp of the last
- // modified dirty file?
- input.attrs.insert_or_assign(
- "lastModified",
- hasHead ? std::stoull(runProgram("git", true, { "-C", actualPath, "--git-dir", gitDir, "log", "-1", "--format=%ct", "--no-show-signature", "HEAD" })) : 0);
-
- return {std::move(storePath), input};
+ auto workdirInfo = getWorkdirInfo(input, actualUrl);
+ if (!workdirInfo.clean) {
+ return fetchFromWorkdir(store, input, actualUrl, workdirInfo);
}
}
- if (!input.getRef()) input.attrs.insert_or_assign("ref", isLocal ? readHead(actualUrl) : "master");
-
Attrs unlockedAttrs({
{"type", cacheType},
{"name", name},
{"url", actualUrl},
- {"ref", *input.getRef()},
});
Path repoDir;
if (isLocal) {
+ if (!input.getRef()) {
+ auto head = readHead(actualUrl);
+ if (!head) {
+ warn("could not read HEAD ref from repo at '%s', using 'master'", actualUrl);
+ head = "master";
+ }
+ input.attrs.insert_or_assign("ref", *head);
+ unlockedAttrs.insert_or_assign("ref", *head);
+ }
if (!input.getRev())
input.attrs.insert_or_assign("rev",
Hash::parseAny(chomp(runProgram("git", true, { "-C", actualUrl, "--git-dir", gitDir, "rev-parse", *input.getRef() })), htSHA1).gitRev());
repoDir = actualUrl;
-
} else {
+ const bool useHeadRef = !input.getRef();
+ if (useHeadRef) {
+ auto head = readHeadCached(actualUrl);
+ if (!head) {
+ warn("could not read HEAD ref from repo at '%s', using 'master'", actualUrl);
+ head = "master";
+ }
+ input.attrs.insert_or_assign("ref", *head);
+ unlockedAttrs.insert_or_assign("ref", *head);
+ }
if (auto res = getCache()->lookup(store, unlockedAttrs)) {
auto rev2 = Hash::parseAny(getStrAttr(res->first, "rev"), htSHA1);
@@ -360,7 +495,7 @@ struct GitInputScheme : InputScheme
}
}
- Path cacheDir = getCacheDir() + "/nix/gitv3/" + hashString(htSHA256, actualUrl).to_string(Base32, false);
+ Path cacheDir = getCachePath(actualUrl);
repoDir = cacheDir;
gitDir = ".";
@@ -383,7 +518,7 @@ struct GitInputScheme : InputScheme
repo. */
if (input.getRev()) {
try {
- runProgram("git", true, { "-C", repoDir, "cat-file", "-e", input.getRev()->gitRev() });
+ runProgram("git", true, { "-C", repoDir, "--git-dir", gitDir, "cat-file", "-e", input.getRev()->gitRev() });
doFetch = false;
} catch (ExecError & e) {
if (WIFEXITED(e.status)) {
@@ -400,7 +535,7 @@ struct GitInputScheme : InputScheme
git fetch to update the local ref to the remote ref. */
struct stat st;
doFetch = stat(localRefFile.c_str(), &st) != 0 ||
- (uint64_t) st.st_mtime + settings.tarballTtl <= (uint64_t) now;
+ !isCacheFileWithinTtl(now, st);
}
}
@@ -418,19 +553,16 @@ struct GitInputScheme : InputScheme
: ref == "HEAD"
? *ref
: "refs/heads/" + *ref;
- runProgram("git", true, { "-C", repoDir, "fetch", "--quiet", "--force", "--", actualUrl, fmt("%s:%s", fetchRef, fetchRef) });
+ runProgram("git", true, { "-C", repoDir, "--git-dir", gitDir, "fetch", "--quiet", "--force", "--", actualUrl, fmt("%s:%s", fetchRef, fetchRef) });
} catch (Error & e) {
if (!pathExists(localRefFile)) throw;
warn("could not update local clone of Git repository '%s'; continuing with the most recent version", actualUrl);
}
- struct timeval times[2];
- times[0].tv_sec = now;
- times[0].tv_usec = 0;
- times[1].tv_sec = now;
- times[1].tv_usec = 0;
-
- utimes(localRefFile.c_str(), times);
+ if (!touchCacheFile(localRefFile, now))
+ warn("could not update mtime for file '%s': %s", localRefFile, strerror(errno));
+ if (useHeadRef && !storeCachedHead(actualUrl, *input.getRef()))
+ warn("could not update cached head '%s' for '%s'", *input.getRef(), actualUrl);
}
if (!input.getRev())
@@ -442,7 +574,7 @@ struct GitInputScheme : InputScheme
bool isShallow = chomp(runProgram("git", true, { "-C", repoDir, "--git-dir", gitDir, "rev-parse", "--is-shallow-repository" })) == "true";
if (isShallow && !shallow)
- throw Error("'%s' is a shallow Git repository, but a non-shallow repository is needed", actualUrl);
+ throw Error("'%s' is a shallow Git repository, but shallow repositories are only allowed when `shallow = true;` is specified.", actualUrl);
// FIXME: check whether rev is an ancestor of ref.
@@ -459,7 +591,7 @@ struct GitInputScheme : InputScheme
auto result = runProgram(RunOptions {
.program = "git",
- .args = { "-C", repoDir, "cat-file", "commit", input.getRev()->gitRev() },
+ .args = { "-C", repoDir, "--git-dir", gitDir, "cat-file", "commit", input.getRev()->gitRev() },
.mergeStderrToStdout = true
});
if (WEXITSTATUS(result.first) == 128
@@ -498,7 +630,7 @@ struct GitInputScheme : InputScheme
auto source = sinkToSource([&](Sink & sink) {
runProgram2({
.program = "git",
- .args = { "-C", repoDir, "archive", input.getRev()->gitRev() },
+ .args = { "-C", repoDir, "--git-dir", gitDir, "archive", input.getRev()->gitRev() },
.standardOut = &sink
});
});
@@ -508,7 +640,7 @@ struct GitInputScheme : InputScheme
auto storePath = store->addToStore(name, tmpDir, FileIngestionMethod::Recursive, htSHA256, filter);
- auto lastModified = std::stoull(runProgram("git", true, { "-C", repoDir, "log", "-1", "--format=%ct", "--no-show-signature", input.getRev()->gitRev() }));
+ auto lastModified = std::stoull(runProgram("git", true, { "-C", repoDir, "--git-dir", gitDir, "log", "-1", "--format=%ct", "--no-show-signature", input.getRev()->gitRev() }));
Attrs infoAttrs({
{"rev", input.getRev()->gitRev()},
@@ -517,7 +649,7 @@ struct GitInputScheme : InputScheme
if (!shallow)
infoAttrs.insert_or_assign("revCount",
- std::stoull(runProgram("git", true, { "-C", repoDir, "rev-list", "--count", input.getRev()->gitRev() })));
+ std::stoull(runProgram("git", true, { "-C", repoDir, "--git-dir", gitDir, "rev-list", "--count", input.getRev()->gitRev() })));
if (!_input.getRev())
getCache()->add(
diff --git a/src/libfetchers/github.cc b/src/libfetchers/github.cc
index 58b6e7c04..a491d82a6 100644
--- a/src/libfetchers/github.cc
+++ b/src/libfetchers/github.cc
@@ -4,7 +4,7 @@
#include "store-api.hh"
#include "types.hh"
#include "url-parts.hh"
-
+#include "git.hh"
#include "fetchers.hh"
#include "fetch-settings.hh"
@@ -243,7 +243,10 @@ struct GitHubInputScheme : GitArchiveInputScheme
Hash getRevFromRef(nix::ref<Store> store, const Input & input) const override
{
auto host = maybeGetStrAttr(input.attrs, "host").value_or("github.com");
- auto url = fmt("https://api.%s/repos/%s/%s/commits/%s", // FIXME: check
+ auto url = fmt(
+ host == "github.com"
+ ? "https://api.%s/repos/%s/%s/commits/%s"
+ : "https://%s/api/v3/repos/%s/%s/commits/%s",
host, getStrAttr(input.attrs, "owner"), getStrAttr(input.attrs, "repo"), *input.getRef());
Headers headers = makeHeadersWithAuthTokens(host);
@@ -262,7 +265,10 @@ struct GitHubInputScheme : GitArchiveInputScheme
// FIXME: use regular /archive URLs instead? api.github.com
// might have stricter rate limits.
auto host = maybeGetStrAttr(input.attrs, "host").value_or("github.com");
- auto url = fmt("https://api.%s/repos/%s/%s/tarball/%s", // FIXME: check if this is correct for self hosted instances
+ auto url = fmt(
+ host == "github.com"
+ ? "https://api.%s/repos/%s/%s/tarball/%s"
+ : "https://%s/api/v3/repos/%s/%s/tarball/%s",
host, getStrAttr(input.attrs, "owner"), getStrAttr(input.attrs, "repo"),
input.getRev()->to_string(Base16, false));
@@ -375,7 +381,7 @@ struct SourceHutInputScheme : GitArchiveInputScheme
Headers headers = makeHeadersWithAuthTokens(host);
- std::string ref_uri;
+ std::string refUri;
if (ref == "HEAD") {
auto file = store->toRealPath(
downloadFile(store, fmt("%s/HEAD", base_url), "source", false, headers).storePath);
@@ -383,35 +389,32 @@ struct SourceHutInputScheme : GitArchiveInputScheme
std::string line;
getline(is, line);
- auto ref_index = line.find("ref: ");
- if (ref_index == std::string::npos) {
+ auto remoteLine = git::parseLsRemoteLine(line);
+ if (!remoteLine) {
throw BadURL("in '%d', couldn't resolve HEAD ref '%d'", input.to_string(), ref);
}
-
- ref_uri = line.substr(ref_index+5, line.length()-1);
- } else
- ref_uri = fmt("refs/(heads|tags)/%s", ref);
+ refUri = remoteLine->target;
+ } else {
+ refUri = fmt("refs/(heads|tags)/%s", ref);
+ }
+ std::regex refRegex(refUri);
auto file = store->toRealPath(
downloadFile(store, fmt("%s/info/refs", base_url), "source", false, headers).storePath);
std::ifstream is(file);
std::string line;
- std::string id;
- while(getline(is, line)) {
- // Append $ to avoid partial name matches
- std::regex pattern(fmt("%s$", ref_uri));
-
- if (std::regex_search(line, pattern)) {
- id = line.substr(0, line.find('\t'));
- break;
- }
+ std::optional<std::string> id;
+ while(!id && getline(is, line)) {
+ auto parsedLine = git::parseLsRemoteLine(line);
+ if (parsedLine && parsedLine->reference && std::regex_match(*parsedLine->reference, refRegex))
+ id = parsedLine->target;
}
- if(id.empty())
+ if(!id)
throw BadURL("in '%d', couldn't find ref '%d'", input.to_string(), ref);
- auto rev = Hash::parseAny(id, htSHA1);
+ auto rev = Hash::parseAny(*id, htSHA1);
debug("HEAD revision for '%s' is %s", fmt("%s/%s", base_url, ref), rev.gitRev());
return rev;
}
diff --git a/src/libfetchers/tarball.cc b/src/libfetchers/tarball.cc
index dde0ad761..6c551bd93 100644
--- a/src/libfetchers/tarball.cc
+++ b/src/libfetchers/tarball.cc
@@ -6,6 +6,7 @@
#include "archive.hh"
#include "tarfile.hh"
#include "types.hh"
+#include "split.hh"
namespace nix::fetchers {
@@ -168,24 +169,34 @@ std::pair<Tree, time_t> downloadTarball(
};
}
-struct TarballInputScheme : InputScheme
+// An input scheme corresponding to a curl-downloadable resource.
+struct CurlInputScheme : InputScheme
{
- std::optional<Input> inputFromURL(const ParsedURL & url) override
+ virtual const std::string inputType() const = 0;
+ const std::set<std::string> transportUrlSchemes = {"file", "http", "https"};
+
+ const bool hasTarballExtension(std::string_view path) const
{
- if (url.scheme != "file" && url.scheme != "http" && url.scheme != "https") return {};
+ return hasSuffix(path, ".zip") || hasSuffix(path, ".tar")
+ || hasSuffix(path, ".tgz") || hasSuffix(path, ".tar.gz")
+ || hasSuffix(path, ".tar.xz") || hasSuffix(path, ".tar.bz2")
+ || hasSuffix(path, ".tar.zst");
+ }
- if (!hasSuffix(url.path, ".zip")
- && !hasSuffix(url.path, ".tar")
- && !hasSuffix(url.path, ".tgz")
- && !hasSuffix(url.path, ".tar.gz")
- && !hasSuffix(url.path, ".tar.xz")
- && !hasSuffix(url.path, ".tar.bz2")
- && !hasSuffix(url.path, ".tar.zst"))
- return {};
+ virtual bool isValidURL(const ParsedURL & url) const = 0;
+
+ std::optional<Input> inputFromURL(const ParsedURL & url) override
+ {
+ if (!isValidURL(url))
+ return std::nullopt;
Input input;
- input.attrs.insert_or_assign("type", "tarball");
- input.attrs.insert_or_assign("url", url.to_string());
+
+ auto urlWithoutApplicationScheme = url;
+ urlWithoutApplicationScheme.scheme = parseUrlScheme(url.scheme).transport;
+
+ input.attrs.insert_or_assign("type", inputType());
+ input.attrs.insert_or_assign("url", urlWithoutApplicationScheme.to_string());
auto narHash = url.query.find("narHash");
if (narHash != url.query.end())
input.attrs.insert_or_assign("narHash", narHash->second);
@@ -194,14 +205,17 @@ struct TarballInputScheme : InputScheme
std::optional<Input> inputFromAttrs(const Attrs & attrs) override
{
- if (maybeGetStrAttr(attrs, "type") != "tarball") return {};
+ auto type = maybeGetStrAttr(attrs, "type");
+ if (type != inputType()) return {};
+ std::set<std::string> allowedNames = {"type", "url", "narHash", "name", "unpack"};
for (auto & [name, value] : attrs)
- if (name != "type" && name != "url" && /* name != "hash" && */ name != "narHash" && name != "name")
- throw Error("unsupported tarball input attribute '%s'", name);
+ if (!allowedNames.count(name))
+ throw Error("unsupported %s input attribute '%s'", *type, name);
Input input;
input.attrs = attrs;
+
//input.locked = (bool) maybeGetStrAttr(input.attrs, "hash");
return input;
}
@@ -209,14 +223,9 @@ struct TarballInputScheme : InputScheme
ParsedURL toURL(const Input & input) override
{
auto url = parseURL(getStrAttr(input.attrs, "url"));
- // NAR hashes are preferred over file hashes since tar/zip files
- // don't have a canonical representation.
+ // NAR hashes are preferred over file hashes since tar/zip files don't have a canonical representation.
if (auto narHash = input.getNarHash())
url.query.insert_or_assign("narHash", narHash->to_string(SRI, true));
- /*
- else if (auto hash = maybeGetStrAttr(input.attrs, "hash"))
- url.query.insert_or_assign("hash", Hash(*hash).to_string(SRI, true));
- */
return url;
}
@@ -225,6 +234,42 @@ struct TarballInputScheme : InputScheme
return true;
}
+};
+
+struct FileInputScheme : CurlInputScheme
+{
+ const std::string inputType() const override { return "file"; }
+
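+ // Accept plain file/http/https URLs without a tarball-like extension, or
+ // URLs with an explicit 'file+' application scheme.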
+ bool isValidURL(const ParsedURL & url) const override
+ {
+ auto parsedUrlScheme = parseUrlScheme(url.scheme);
+ return transportUrlSchemes.count(std::string(parsedUrlScheme.transport))
+ && (parsedUrlScheme.application
+ ? parsedUrlScheme.application.value() == inputType()
+ : !hasTarballExtension(url.path));
+ }
+
+ std::pair<StorePath, Input> fetch(ref<Store> store, const Input & input) override
+ {
+ auto file = downloadFile(store, getStrAttr(input.attrs, "url"), input.getName(), false);
+ return {std::move(file.storePath), input};
+ }
+};
+
+struct TarballInputScheme : CurlInputScheme
+{
+ const std::string inputType() const override { return "tarball"; }
+
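+ // Accept URLs with a tarball-like extension, or an explicit 'tarball+'
+ // application scheme.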
+ bool isValidURL(const ParsedURL & url) const override
+ {
+ auto parsedUrlScheme = parseUrlScheme(url.scheme);
+
+ return transportUrlSchemes.count(std::string(parsedUrlScheme.transport))
+ && (parsedUrlScheme.application
+ ? parsedUrlScheme.application.value() == inputType()
+ : hasTarballExtension(url.path));
+ }
+
std::pair<StorePath, Input> fetch(ref<Store> store, const Input & input) override
{
auto tree = downloadTarball(store, getStrAttr(input.attrs, "url"), input.getName(), false).first;
@@ -233,5 +278,6 @@ struct TarballInputScheme : InputScheme
};
static auto rTarballInputScheme = OnStartup([] { registerInputScheme(std::make_unique<TarballInputScheme>()); });
+static auto rFileInputScheme = OnStartup([] { registerInputScheme(std::make_unique<FileInputScheme>()); });
}
diff --git a/src/libmain/loggers.cc b/src/libmain/loggers.cc
index cdf23859b..cda5cb939 100644
--- a/src/libmain/loggers.cc
+++ b/src/libmain/loggers.cc
@@ -30,8 +30,11 @@ Logger * makeDefaultLogger() {
return makeJSONLogger(*makeSimpleLogger(true));
case LogFormat::bar:
return makeProgressBar();
- case LogFormat::barWithLogs:
- return makeProgressBar(true);
+ case LogFormat::barWithLogs: {
+ auto logger = makeProgressBar();
+ logger->setPrintBuildLogs(true);
+ return logger;
+ }
default:
abort();
}
diff --git a/src/libmain/progress-bar.cc b/src/libmain/progress-bar.cc
index f4306ab91..0bbeaff8d 100644
--- a/src/libmain/progress-bar.cc
+++ b/src/libmain/progress-bar.cc
@@ -8,6 +8,7 @@
#include <map>
#include <thread>
#include <iostream>
+#include <chrono>
namespace nix {
@@ -48,6 +49,7 @@ private:
bool visible = true;
ActivityId parent;
std::optional<std::string> name;
+ std::chrono::time_point<std::chrono::steady_clock> startTime;
};
struct ActivitiesByType
@@ -79,22 +81,22 @@ private:
std::condition_variable quitCV, updateCV;
- bool printBuildLogs;
+ bool printBuildLogs = false;
bool isTTY;
public:
- ProgressBar(bool printBuildLogs, bool isTTY)
- : printBuildLogs(printBuildLogs)
- , isTTY(isTTY)
+ ProgressBar(bool isTTY)
+ : isTTY(isTTY)
{
state_.lock()->active = isTTY;
updateThread = std::thread([&]() {
auto state(state_.lock());
+ auto nextWakeup = std::chrono::milliseconds::max();
while (state->active) {
if (!state->haveUpdate)
- state.wait(updateCV);
- draw(*state);
+ state.wait_for(updateCV, nextWakeup);
+ nextWakeup = draw(*state);
state.wait_for(quitCV, std::chrono::milliseconds(50));
}
});
@@ -118,7 +120,8 @@ public:
updateThread.join();
}
- bool isVerbose() override {
+ bool isVerbose() override
+ {
return printBuildLogs;
}
@@ -159,11 +162,13 @@ public:
if (lvl <= verbosity && !s.empty() && type != actBuildWaiting)
log(*state, lvl, s + "...");
- state->activities.emplace_back(ActInfo());
+ state->activities.emplace_back(ActInfo {
+ .s = s,
+ .type = type,
+ .parent = parent,
+ .startTime = std::chrono::steady_clock::now()
+ });
auto i = std::prev(state->activities.end());
- i->s = s;
- i->type = type;
- i->parent = parent;
state->its.emplace(act, i);
state->activitiesByType[type].its.emplace(act, i);
@@ -327,10 +332,12 @@ public:
updateCV.notify_one();
}
- void draw(State & state)
+ std::chrono::milliseconds draw(State & state)
{
+ auto nextWakeup = std::chrono::milliseconds::max();
+
state.haveUpdate = false;
- if (!state.active) return;
+ if (!state.active) return nextWakeup;
std::string line;
@@ -341,12 +348,25 @@ public:
line += "]";
}
+ auto now = std::chrono::steady_clock::now();
+
if (!state.activities.empty()) {
if (!status.empty()) line += " ";
auto i = state.activities.rbegin();
- while (i != state.activities.rend() && (!i->visible || (i->s.empty() && i->lastLine.empty())))
+ while (i != state.activities.rend()) {
+ if (i->visible && (!i->s.empty() || !i->lastLine.empty())) {
+ /* Don't show activities until some time has
+ passed, to avoid displaying very short
+ activities. */
+ auto delay = std::chrono::milliseconds(10);
+ if (i->startTime + delay < now)
+ break;
+ else
+ nextWakeup = std::min(nextWakeup, std::chrono::duration_cast<std::chrono::milliseconds>(delay - (now - i->startTime)));
+ }
++i;
+ }
if (i != state.activities.rend()) {
line += i->s;
@@ -366,6 +386,8 @@ public:
if (width <= 0) width = std::numeric_limits<decltype(width)>::max();
writeToStderr("\r" + filterANSIEscapes(line, false, width) + ANSI_NORMAL + "\e[K");
+
+ return nextWakeup;
}
std::string getStatus(State & state)
@@ -480,19 +502,21 @@ public:
draw(*state);
return s[0];
}
+
+ virtual void setPrintBuildLogs(bool printBuildLogs)
+ {
+ this->printBuildLogs = printBuildLogs;
+ }
};
-Logger * makeProgressBar(bool printBuildLogs)
+Logger * makeProgressBar()
{
- return new ProgressBar(
- printBuildLogs,
- shouldANSI()
- );
+ return new ProgressBar(shouldANSI());
}
-void startProgressBar(bool printBuildLogs)
+void startProgressBar()
{
- logger = makeProgressBar(printBuildLogs);
+ logger = makeProgressBar();
}
void stopProgressBar()
diff --git a/src/libmain/progress-bar.hh b/src/libmain/progress-bar.hh
index 7f0dafecf..3a76f8448 100644
--- a/src/libmain/progress-bar.hh
+++ b/src/libmain/progress-bar.hh
@@ -4,9 +4,9 @@
namespace nix {
-Logger * makeProgressBar(bool printBuildLogs = false);
+Logger * makeProgressBar();
-void startProgressBar(bool printBuildLogs = false);
+void startProgressBar();
void stopProgressBar();
diff --git a/src/libmain/shared.cc b/src/libmain/shared.cc
index 31454e49d..52b75f757 100644
--- a/src/libmain/shared.cc
+++ b/src/libmain/shared.cc
@@ -181,8 +181,9 @@ void initNix()
/* Reset SIGCHLD to its default. */
struct sigaction act;
sigemptyset(&act.sa_mask);
- act.sa_handler = SIG_DFL;
act.sa_flags = 0;
+
+ act.sa_handler = SIG_DFL;
if (sigaction(SIGCHLD, &act, 0))
throw SysError("resetting SIGCHLD");
@@ -194,9 +195,20 @@ void initNix()
/* HACK: on darwin, we can’t use sigprocmask with SIGWINCH.
* Instead, add a dummy sigaction handler, and signalHandlerThread
* can handle the rest. */
- struct sigaction sa;
- sa.sa_handler = sigHandler;
- if (sigaction(SIGWINCH, &sa, 0)) throw SysError("handling SIGWINCH");
+ act.sa_handler = sigHandler;
+ if (sigaction(SIGWINCH, &act, 0)) throw SysError("handling SIGWINCH");
+
+ /* Disable SA_RESTART for interrupts, so that system calls on this thread
+ * error with EINTR like they do on Linux.
+ * Most signals on BSD systems have SA_RESTART enabled by default, but Nix
+ * expects EINTR from syscalls in order to exit properly. */
+ act.sa_handler = SIG_DFL;
+ if (sigaction(SIGINT, &act, 0)) throw SysError("handling SIGINT");
+ if (sigaction(SIGTERM, &act, 0)) throw SysError("handling SIGTERM");
+ if (sigaction(SIGHUP, &act, 0)) throw SysError("handling SIGHUP");
+ if (sigaction(SIGPIPE, &act, 0)) throw SysError("handling SIGPIPE");
+ if (sigaction(SIGQUIT, &act, 0)) throw SysError("handling SIGQUIT");
+ if (sigaction(SIGTRAP, &act, 0)) throw SysError("handling SIGTRAP");
#endif
/* Register a SIGSEGV handler to detect stack overflows. */
diff --git a/src/libstore/build/derivation-goal.cc b/src/libstore/build/derivation-goal.cc
index 53f212c1d..41d2e2a1c 100644
--- a/src/libstore/build/derivation-goal.cc
+++ b/src/libstore/build/derivation-goal.cc
@@ -344,7 +344,7 @@ void DerivationGoal::gaveUpOnSubstitution()
for (auto & i : dynamic_cast<Derivation *>(drv.get())->inputDrvs) {
/* Ensure that pure, non-fixed-output derivations don't
depend on impure derivations. */
- if (drv->type().isPure() && !drv->type().isFixed()) {
+ if (settings.isExperimentalFeatureEnabled(Xp::ImpureDerivations) && drv->type().isPure() && !drv->type().isFixed()) {
auto inputDrv = worker.evalStore.readDerivation(i.first);
if (!inputDrv.type().isPure())
throw Error("pure derivation '%s' depends on impure derivation '%s'",
@@ -705,8 +705,7 @@ static void movePath(const Path & src, const Path & dst)
if (changePerm)
chmod_(src, st.st_mode | S_IWUSR);
- if (rename(src.c_str(), dst.c_str()))
- throw SysError("renaming '%1%' to '%2%'", src, dst);
+ renameFile(src, dst);
if (changePerm)
chmod_(dst, st.st_mode);
@@ -786,8 +785,7 @@ void runPostBuildHook(
Store & store,
Logger & logger,
const StorePath & drvPath,
- StorePathSet outputPaths
-)
+ const StorePathSet & outputPaths)
{
auto hook = settings.postBuildHook;
if (hook == "")
@@ -906,7 +904,7 @@ void DerivationGoal::buildDone()
auto builtOutputs = registerOutputs();
StorePathSet outputPaths;
- for (auto & [_, output] : buildResult.builtOutputs)
+ for (auto & [_, output] : builtOutputs)
outputPaths.insert(output.outPath);
runPostBuildHook(
worker.store,
@@ -915,12 +913,6 @@ void DerivationGoal::buildDone()
outputPaths
);
- if (buildMode == bmCheck) {
- cleanupPostOutputsRegisteredModeCheck();
- done(BuildResult::Built, std::move(builtOutputs));
- return;
- }
-
cleanupPostOutputsRegisteredModeNonCheck();
/* Repeat the build if necessary. */
@@ -985,21 +977,28 @@ void DerivationGoal::resolvedFinished()
realWantedOutputs = resolvedDrv.outputNames();
for (auto & wantedOutput : realWantedOutputs) {
- assert(initialOutputs.count(wantedOutput) != 0);
- assert(resolvedHashes.count(wantedOutput) != 0);
- auto realisation = resolvedResult.builtOutputs.at(
- DrvOutput { resolvedHashes.at(wantedOutput), wantedOutput });
+ auto initialOutput = get(initialOutputs, wantedOutput);
+ auto resolvedHash = get(resolvedHashes, wantedOutput);
+ if ((!initialOutput) || (!resolvedHash))
+ throw Error(
+ "derivation '%s' doesn't have expected output '%s' (derivation-goal.cc/resolvedFinished,resolve)",
+ worker.store.printStorePath(drvPath), wantedOutput);
+ auto realisation = get(resolvedResult.builtOutputs, DrvOutput { *resolvedHash, wantedOutput });
+ if (!realisation)
+ throw Error(
+ "derivation '%s' doesn't have expected output '%s' (derivation-goal.cc/resolvedFinished,realisation)",
+ worker.store.printStorePath(resolvedDrvGoal->drvPath), wantedOutput);
if (drv->type().isPure()) {
- auto newRealisation = realisation;
- newRealisation.id = DrvOutput { initialOutputs.at(wantedOutput).outputHash, wantedOutput };
+ auto newRealisation = *realisation;
+ newRealisation.id = DrvOutput { initialOutput->outputHash, wantedOutput };
newRealisation.signatures.clear();
if (!drv->type().isFixed())
- newRealisation.dependentRealisations = drvOutputReferences(worker.store, *drv, realisation.outPath);
+ newRealisation.dependentRealisations = drvOutputReferences(worker.store, *drv, realisation->outPath);
signRealisation(newRealisation);
worker.store.registerDrvOutput(newRealisation);
}
- outputPaths.insert(realisation.outPath);
- builtOutputs.emplace(realisation.id, realisation);
+ outputPaths.insert(realisation->outPath);
+ builtOutputs.emplace(realisation->id, *realisation);
}
runPostBuildHook(
@@ -1295,7 +1294,11 @@ std::pair<bool, DrvOutputs> DerivationGoal::checkPathValidity()
DrvOutputs validOutputs;
for (auto & i : queryPartialDerivationOutputMap()) {
- InitialOutput & info = initialOutputs.at(i.first);
+ auto initialOutput = get(initialOutputs, i.first);
+ if (!initialOutput)
+ // this is an invalid output; it gets caught by the (!wantedOutputsLeft.empty()) check
+ continue;
+ auto & info = *initialOutput;
info.wanted = wantOutput(i.first, wantedOutputs);
if (info.wanted)
wantedOutputsLeft.erase(i.first);
@@ -1310,7 +1313,7 @@ std::pair<bool, DrvOutputs> DerivationGoal::checkPathValidity()
: PathStatus::Corrupt,
};
}
- auto drvOutput = DrvOutput{initialOutputs.at(i.first).outputHash, i.first};
+ auto drvOutput = DrvOutput{info.outputHash, i.first};
if (settings.isExperimentalFeatureEnabled(Xp::CaDerivations)) {
if (auto real = worker.store.queryRealisation(drvOutput)) {
info.known = {
diff --git a/src/libstore/build/hook-instance.cc b/src/libstore/build/hook-instance.cc
index 0f6f580be..1f19ddccc 100644
--- a/src/libstore/build/hook-instance.cc
+++ b/src/libstore/build/hook-instance.cc
@@ -7,6 +7,22 @@ HookInstance::HookInstance()
{
debug("starting build hook '%s'", settings.buildHook);
+ auto buildHookArgs = tokenizeString<std::list<std::string>>(settings.buildHook.get());
+
+ if (buildHookArgs.empty())
+ throw Error("'build-hook' setting is empty");
+
+ auto buildHook = buildHookArgs.front();
+ buildHookArgs.pop_front();
+
+ Strings args;
+
+ for (auto & arg : buildHookArgs)
+ args.push_back(arg);
+
+ args.push_back(std::string(baseNameOf(settings.buildHook.get())));
+ args.push_back(std::to_string(verbosity));
+
/* Create a pipe to get the output of the child. */
fromHook.create();
@@ -36,14 +52,9 @@ HookInstance::HookInstance()
if (dup2(builderOut.readSide.get(), 5) == -1)
throw SysError("dupping builder's stdout/stderr");
- Strings args = {
- std::string(baseNameOf(settings.buildHook.get())),
- std::to_string(verbosity),
- };
-
- execv(settings.buildHook.get().c_str(), stringsToCharPtrs(args).data());
+ execv(buildHook.c_str(), stringsToCharPtrs(args).data());
- throw SysError("executing '%s'", settings.buildHook);
+ throw SysError("executing '%s'", buildHook);
});
pid.setSeparatePG(true);
diff --git a/src/libstore/build/local-derivation-goal.cc b/src/libstore/build/local-derivation-goal.cc
index 4c91fa4fb..18b682e13 100644
--- a/src/libstore/build/local-derivation-goal.cc
+++ b/src/libstore/build/local-derivation-goal.cc
@@ -14,6 +14,7 @@
#include "worker-protocol.hh"
#include "topo-sort.hh"
#include "callback.hh"
+#include "json-utils.hh"
#include <regex>
#include <queue>
@@ -56,8 +57,6 @@
#include <pwd.h>
#include <grp.h>
-#include <nlohmann/json.hpp>
-
namespace nix {
void handleDiffHook(
@@ -224,8 +223,7 @@ static void movePath(const Path & src, const Path & dst)
if (changePerm)
chmod_(src, st.st_mode | S_IWUSR);
- if (rename(src.c_str(), dst.c_str()))
- throw SysError("renaming '%1%' to '%2%'", src, dst);
+ renameFile(src, dst);
if (changePerm)
chmod_(dst, st.st_mode);
@@ -312,7 +310,7 @@ bool LocalDerivationGoal::cleanupDecideWhetherDiskFull()
if (buildMode != bmCheck && status.known->isValid()) continue;
auto p = worker.store.printStorePath(status.known->path);
if (pathExists(chrootRootDir + p))
- rename((chrootRootDir + p).c_str(), p.c_str());
+ renameFile((chrootRootDir + p), p);
}
return diskFull;
@@ -482,7 +480,7 @@ void LocalDerivationGoal::startBuilder()
temporary build directory. The text files have the format used
by `nix-store --register-validity'. However, the deriver
fields are left empty. */
- auto s = get(drv->env, "exportReferencesGraph").value_or("");
+ auto s = getOr(drv->env, "exportReferencesGraph", "");
Strings ss = tokenizeString<Strings>(s);
if (ss.size() % 2 != 0)
throw BuildError("odd number of tokens in 'exportReferencesGraph': '%1%'", s);
@@ -846,18 +844,43 @@ void LocalDerivationGoal::startBuilder()
/* Some distros patch Linux to not allow unprivileged
* user namespaces. If we get EPERM or EINVAL, try
* without CLONE_NEWUSER and see if that works.
+ * Details: https://salsa.debian.org/kernel-team/linux/-/commit/d98e00eda6bea437e39b9e80444eee84a32438a6
*/
usingUserNamespace = false;
flags &= ~CLONE_NEWUSER;
child = clone(childEntry, stack + stackSize, flags, this);
}
- /* Otherwise exit with EPERM so we can handle this in the
- parent. This is only done when sandbox-fallback is set
- to true (the default). */
- if (child == -1 && (errno == EPERM || errno == EINVAL) && settings.sandboxFallback)
- _exit(1);
- if (child == -1) throw SysError("cloning builder process");
-
+ if (child == -1) {
+ switch(errno) {
+ case EPERM:
+ case EINVAL: {
+ int errno_ = errno;
+ if (!userNamespacesEnabled && errno==EPERM)
+ notice("user namespaces appear to be disabled; they are required for sandboxing; check /proc/sys/user/max_user_namespaces");
+ if (userNamespacesEnabled) {
+ Path procSysKernelUnprivilegedUsernsClone = "/proc/sys/kernel/unprivileged_userns_clone";
+ if (pathExists(procSysKernelUnprivilegedUsernsClone)
+ && trim(readFile(procSysKernelUnprivilegedUsernsClone)) == "0") {
+ notice("user namespaces appear to be disabled; they are required for sandboxing; check /proc/sys/kernel/unprivileged_userns_clone");
+ }
+ }
+ Path procSelfNsUser = "/proc/self/ns/user";
+ if (!pathExists(procSelfNsUser))
+ notice("/proc/self/ns/user does not exist; your kernel was likely built without CONFIG_USER_NS=y, which is required for sandboxing");
+ /* Otherwise exit with EPERM so we can handle this in the
+ parent. This is only done when sandbox-fallback is set
+ to true (the default). */
+ if (settings.sandboxFallback)
+ _exit(1);
+ /* Mention sandbox-fallback in the error message so the user
+ knows that having it disabled contributed to the
+ unrecoverability of this failure */
+ throw SysError(errno_, "creating sandboxed builder process using clone(), without sandbox-fallback");
+ }
+ default:
+ throw SysError("creating sandboxed builder process using clone()");
+ }
+ }
writeFull(builderOut.writeSide.get(),
fmt("%d %d\n", usingUserNamespace, child));
_exit(0);
@@ -989,7 +1012,7 @@ void LocalDerivationGoal::initTmpDir() {
there is no size constraint). */
if (!parsedDrv->getStructuredAttrs()) {
- StringSet passAsFile = tokenizeString<StringSet>(get(drv->env, "passAsFile").value_or(""));
+ StringSet passAsFile = tokenizeString<StringSet>(getOr(drv->env, "passAsFile", ""));
for (auto & i : drv->env) {
if (passAsFile.find(i.first) == passAsFile.end()) {
env[i.first] = i.second;
@@ -1718,7 +1741,19 @@ void LocalDerivationGoal::runChild()
for (auto & i : dirsInChroot) {
if (i.second.source == "/proc") continue; // backwards compatibility
- doBind(i.second.source, chrootRootDir + i.first, i.second.optional);
+
+ #if HAVE_EMBEDDED_SANDBOX_SHELL
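+ /* If a sandbox shell was embedded into the binary at build time, write it
+    into the chroot instead of bind-mounting it from the host. */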
+ if (i.second.source == "__embedded_sandbox_shell__") {
+ static unsigned char sh[] = {
+ #include "embedded-sandbox-shell.gen.hh"
+ };
+ auto dst = chrootRootDir + i.first;
+ createDirs(dirOf(dst));
+ writeFile(dst, std::string_view((const char *) sh, sizeof(sh)));
+ chmod_(dst, 0555);
+ } else
+ #endif
+ doBind(i.second.source, chrootRootDir + i.first, i.second.optional);
}
/* Bind a new instance of procfs on /proc. */
@@ -2128,12 +2163,22 @@ DrvOutputs LocalDerivationGoal::registerOutputs()
std::map<std::string, std::variant<AlreadyRegistered, PerhapsNeedToRegister>> outputReferencesIfUnregistered;
std::map<std::string, struct stat> outputStats;
for (auto & [outputName, _] : drv->outputs) {
- auto actualPath = toRealPathChroot(worker.store.printStorePath(scratchOutputs.at(outputName)));
+ auto scratchOutput = get(scratchOutputs, outputName);
+ if (!scratchOutput)
+ throw BuildError(
+ "builder for '%s' has no scratch output for '%s'",
+ worker.store.printStorePath(drvPath), outputName);
+ auto actualPath = toRealPathChroot(worker.store.printStorePath(*scratchOutput));
outputsToSort.insert(outputName);
/* Updated wanted info to remove the outputs we definitely don't need to register */
- auto & initialInfo = initialOutputs.at(outputName);
+ auto initialOutput = get(initialOutputs, outputName);
+ if (!initialOutput)
+ throw BuildError(
+ "builder for '%s' has no initial output for '%s'",
+ worker.store.printStorePath(drvPath), outputName);
+ auto & initialInfo = *initialOutput;
/* Don't register if already valid, and not checking */
initialInfo.wanted = buildMode == bmCheck
@@ -2185,6 +2230,11 @@ DrvOutputs LocalDerivationGoal::registerOutputs()
auto sortedOutputNames = topoSort(outputsToSort,
{[&](const std::string & name) {
+ auto orifu = get(outputReferencesIfUnregistered, name);
+ if (!orifu)
+ throw BuildError(
+ "no output reference for '%s' in build of '%s'",
+ name, worker.store.printStorePath(drvPath));
return std::visit(overloaded {
/* Since we'll use the already installed versions of these, we
can treat them as leaves and ignore any references they
@@ -2199,7 +2249,7 @@ DrvOutputs LocalDerivationGoal::registerOutputs()
referencedOutputs.insert(o);
return referencedOutputs;
},
- }, outputReferencesIfUnregistered.at(name));
+ }, *orifu);
}},
{[&](const std::string & path, const std::string & parent) {
// TODO with more -vvvv also show the temporary paths for manual inspection.
@@ -2213,9 +2263,10 @@ DrvOutputs LocalDerivationGoal::registerOutputs()
OutputPathMap finalOutputs;
for (auto & outputName : sortedOutputNames) {
- auto output = drv->outputs.at(outputName);
- auto & scratchPath = scratchOutputs.at(outputName);
- auto actualPath = toRealPathChroot(worker.store.printStorePath(scratchPath));
+ auto output = get(drv->outputs, outputName);
+ auto scratchPath = get(scratchOutputs, outputName);
+ assert(output && scratchPath);
+ auto actualPath = toRealPathChroot(worker.store.printStorePath(*scratchPath));
auto finish = [&](StorePath finalStorePath) {
/* Store the final path */
@@ -2223,10 +2274,13 @@ DrvOutputs LocalDerivationGoal::registerOutputs()
            /* The rewrite rule will be used in downstream outputs that refer to
               us. This is why the topological sort is essential to do before
               this for loop. */
- if (scratchPath != finalStorePath)
- outputRewrites[std::string { scratchPath.hashPart() }] = std::string { finalStorePath.hashPart() };
+ if (*scratchPath != finalStorePath)
+ outputRewrites[std::string { scratchPath->hashPart() }] = std::string { finalStorePath.hashPart() };
};
+ auto orifu = get(outputReferencesIfUnregistered, outputName);
+ assert(orifu);
+
std::optional<StorePathSet> referencesOpt = std::visit(overloaded {
[&](const AlreadyRegistered & skippedFinalPath) -> std::optional<StorePathSet> {
finish(skippedFinalPath.path);
@@ -2235,7 +2289,7 @@ DrvOutputs LocalDerivationGoal::registerOutputs()
[&](const PerhapsNeedToRegister & r) -> std::optional<StorePathSet> {
return r.refs;
},
- }, outputReferencesIfUnregistered.at(outputName));
+ }, *orifu);
if (!referencesOpt)
continue;
@@ -2268,25 +2322,29 @@ DrvOutputs LocalDerivationGoal::registerOutputs()
for (auto & r : references) {
auto name = r.name();
auto origHash = std::string { r.hashPart() };
- if (r == scratchPath)
+ if (r == *scratchPath) {
res.first = true;
- else if (outputRewrites.count(origHash) == 0)
- res.second.insert(r);
- else {
- std::string newRef = outputRewrites.at(origHash);
+ } else if (auto outputRewrite = get(outputRewrites, origHash)) {
+ std::string newRef = *outputRewrite;
newRef += '-';
newRef += name;
res.second.insert(StorePath { newRef });
+ } else {
+ res.second.insert(r);
}
}
return res;
};
auto newInfoFromCA = [&](const DerivationOutput::CAFloating outputHash) -> ValidPathInfo {
- auto & st = outputStats.at(outputName);
+ auto st = get(outputStats, outputName);
+ if (!st)
+ throw BuildError(
+                    "output path '%1%' without valid stats info",
+ actualPath);
if (outputHash.method == FileIngestionMethod::Flat) {
/* The output path should be a regular file without execute permission. */
- if (!S_ISREG(st.st_mode) || (st.st_mode & S_IXUSR) != 0)
+ if (!S_ISREG(st->st_mode) || (st->st_mode & S_IXUSR) != 0)
throw BuildError(
"output path '%1%' should be a non-executable regular file "
"since recursive hashing is not enabled (outputHashMode=flat)",
@@ -2294,7 +2352,7 @@ DrvOutputs LocalDerivationGoal::registerOutputs()
}
rewriteOutput();
/* FIXME optimize and deduplicate with addToStore */
- std::string oldHashPart { scratchPath.hashPart() };
+ std::string oldHashPart { scratchPath->hashPart() };
HashModuloSink caSink { outputHash.hashType, oldHashPart };
switch (outputHash.method) {
case FileIngestionMethod::Recursive:
@@ -2313,13 +2371,11 @@ DrvOutputs LocalDerivationGoal::registerOutputs()
outputPathName(drv->name, outputName),
refs.second,
refs.first);
- if (scratchPath != finalPath) {
+ if (*scratchPath != finalPath) {
// Also rewrite the output path
auto source = sinkToSource([&](Sink & nextSink) {
- StringSink sink;
- dumpPath(actualPath, sink);
RewritingSink rsink2(oldHashPart, std::string(finalPath.hashPart()), nextSink);
- rsink2(sink.s);
+ dumpPath(actualPath, rsink2);
rsink2.flush();
});
Path tmpPath = actualPath + ".tmp";
@@ -2354,9 +2410,9 @@ DrvOutputs LocalDerivationGoal::registerOutputs()
auto requiredFinalPath = output.path;
/* Preemptively add rewrite rule for final hash, as that is
what the NAR hash will use rather than normalized-self references */
- if (scratchPath != requiredFinalPath)
+ if (*scratchPath != requiredFinalPath)
outputRewrites.insert_or_assign(
- std::string { scratchPath.hashPart() },
+ std::string { scratchPath->hashPart() },
std::string { requiredFinalPath.hashPart() });
rewriteOutput();
auto narHashAndSize = hashPath(htSHA256, actualPath);
@@ -2409,7 +2465,7 @@ DrvOutputs LocalDerivationGoal::registerOutputs()
});
},
- }, output.raw());
+ }, output->raw());
/* FIXME: set proper permissions in restorePath() so
we don't have to do another traversal. */
@@ -2425,7 +2481,7 @@ DrvOutputs LocalDerivationGoal::registerOutputs()
derivations. */
PathLocks dynamicOutputLock;
dynamicOutputLock.setDeletion(true);
- auto optFixedPath = output.path(worker.store, drv->name, outputName);
+ auto optFixedPath = output->path(worker.store, drv->name, outputName);
if (!optFixedPath ||
worker.store.printStorePath(*optFixedPath) != finalDestPath)
{
@@ -2491,11 +2547,10 @@ DrvOutputs LocalDerivationGoal::registerOutputs()
/* For debugging, print out the referenced and unreferenced paths. */
for (auto & i : inputPaths) {
- auto j = references.find(i);
- if (j == references.end())
- debug("unreferenced input: '%1%'", worker.store.printStorePath(i));
- else
+ if (references.count(i))
debug("referenced input: '%1%'", worker.store.printStorePath(i));
+ else
+ debug("unreferenced input: '%1%'", worker.store.printStorePath(i));
}
if (curRound == nrRounds) {
@@ -2567,8 +2622,7 @@ DrvOutputs LocalDerivationGoal::registerOutputs()
Path prev = path + checkSuffix;
deletePath(prev);
Path dst = path + checkSuffix;
- if (rename(path.c_str(), dst.c_str()))
- throw SysError("renaming '%s' to '%s'", path, dst);
+ renameFile(path, dst);
}
}
@@ -2612,9 +2666,11 @@ DrvOutputs LocalDerivationGoal::registerOutputs()
DrvOutputs builtOutputs;
for (auto & [outputName, newInfo] : infos) {
+ auto oldinfo = get(initialOutputs, outputName);
+ assert(oldinfo);
auto thisRealisation = Realisation {
.id = DrvOutput {
- initialOutputs.at(outputName).outputHash,
+ oldinfo->outputHash,
outputName
},
.outPath = newInfo.path
@@ -2710,9 +2766,10 @@ void LocalDerivationGoal::checkOutputs(const std::map<std::string, ValidPathInfo
for (auto & i : *value) {
if (worker.store.isStorePath(i))
spec.insert(worker.store.parseStorePath(i));
- else if (outputs.count(i))
- spec.insert(outputs.at(i).path);
- else throw BuildError("derivation contains an illegal reference specifier '%s'", i);
+ else if (auto output = get(outputs, i))
+ spec.insert(output->path);
+ else
+ throw BuildError("derivation contains an illegal reference specifier '%s'", i);
}
auto used = recursive
@@ -2751,24 +2808,18 @@ void LocalDerivationGoal::checkOutputs(const std::map<std::string, ValidPathInfo
};
if (auto structuredAttrs = parsedDrv->getStructuredAttrs()) {
- auto outputChecks = structuredAttrs->find("outputChecks");
- if (outputChecks != structuredAttrs->end()) {
- auto output = outputChecks->find(outputName);
-
- if (output != outputChecks->end()) {
+ if (auto outputChecks = get(*structuredAttrs, "outputChecks")) {
+ if (auto output = get(*outputChecks, outputName)) {
Checks checks;
- auto maxSize = output->find("maxSize");
- if (maxSize != output->end())
+ if (auto maxSize = get(*output, "maxSize"))
checks.maxSize = maxSize->get<uint64_t>();
- auto maxClosureSize = output->find("maxClosureSize");
- if (maxClosureSize != output->end())
+ if (auto maxClosureSize = get(*output, "maxClosureSize"))
checks.maxClosureSize = maxClosureSize->get<uint64_t>();
- auto get = [&](const std::string & name) -> std::optional<Strings> {
- auto i = output->find(name);
- if (i != output->end()) {
+ auto get_ = [&](const std::string & name) -> std::optional<Strings> {
+ if (auto i = get(*output, name)) {
Strings res;
for (auto j = i->begin(); j != i->end(); ++j) {
if (!j->is_string())
@@ -2781,10 +2832,10 @@ void LocalDerivationGoal::checkOutputs(const std::map<std::string, ValidPathInfo
return {};
};
- checks.allowedReferences = get("allowedReferences");
- checks.allowedRequisites = get("allowedRequisites");
- checks.disallowedReferences = get("disallowedReferences");
- checks.disallowedRequisites = get("disallowedRequisites");
+ checks.allowedReferences = get_("allowedReferences");
+ checks.allowedRequisites = get_("allowedRequisites");
+ checks.disallowedReferences = get_("disallowedReferences");
+ checks.disallowedRequisites = get_("disallowedRequisites");
applyChecks(checks);
}
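
A pattern that recurs throughout this patch: unchecked map::at() calls and iterator-based find() lookups are replaced with the get() / getOr() helpers, so that a missing key becomes an explicit nullptr / default-value case instead of an exception. A minimal sketch of the semantics these call sites assume (the real declarations live in src/libutil/util.hh and may differ in detail):

    // Sketch only: pointer-returning lookup; nullptr instead of the
    // std::out_of_range that map::at() would throw.
    template<typename M>
    const typename M::mapped_type * get(const M & map, const typename M::key_type & key)
    {
        auto i = map.find(key);
        return i == map.end() ? nullptr : &i->second;
    }

    // Sketch only: lookup that falls back to a caller-supplied default,
    // as in getOr(drv->env, "passAsFile", "").
    template<typename M>
    typename M::mapped_type getOr(const M & map,
        const typename M::key_type & key, const typename M::mapped_type & def)
    {
        auto i = map.find(key);
        return i == map.end() ? def : i->second;
    }

With this shape, `if (auto v = get(m, k))` both tests for presence and binds the value, which is the idiom used in the hunks above.
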
diff --git a/src/libstore/build/substitution-goal.cc b/src/libstore/build/substitution-goal.cc
index ca5218627..2af105b4d 100644
--- a/src/libstore/build/substitution-goal.cc
+++ b/src/libstore/build/substitution-goal.cc
@@ -154,7 +154,7 @@ void PathSubstitutionGoal::tryNext()
only after we've downloaded the path. */
if (!sub->isTrusted && worker.store.pathInfoIsUntrusted(*info))
{
- warn("the substitute for '%s' from '%s' is not signed by any of the keys in 'trusted-public-keys'",
+ warn("ignoring substitute for '%s' from '%s', as it's not signed by any of the keys in 'trusted-public-keys'",
worker.store.printStorePath(storePath), sub->getUri());
tryNext();
return;
diff --git a/src/libstore/build/worker.cc b/src/libstore/build/worker.cc
index f72c1cc9c..b192fbc77 100644
--- a/src/libstore/build/worker.cc
+++ b/src/libstore/build/worker.cc
@@ -350,7 +350,7 @@ void Worker::waitForInput()
become `available'. Note that `available' (i.e., non-blocking)
includes EOF. */
std::vector<struct pollfd> pollStatus;
- std::map <int, int> fdToPollStatus;
+ std::map<int, size_t> fdToPollStatus;
for (auto & i : children) {
for (auto & j : i.fds) {
pollStatus.push_back((struct pollfd) { .fd = j, .events = POLLIN });
@@ -380,7 +380,10 @@ void Worker::waitForInput()
std::set<int> fds2(j->fds);
std::vector<unsigned char> buffer(4096);
for (auto & k : fds2) {
- if (pollStatus.at(fdToPollStatus.at(k)).revents) {
+ const auto fdPollStatusId = get(fdToPollStatus, k);
+ assert(fdPollStatusId);
+ assert(*fdPollStatusId < pollStatus.size());
+ if (pollStatus.at(*fdPollStatusId).revents) {
ssize_t rd = ::read(k, buffer.data(), buffer.size());
// FIXME: is there a cleaner way to handle pt close
// than EIO? Is this even standard?
diff --git a/src/libstore/builtins/buildenv.cc b/src/libstore/builtins/buildenv.cc
index 6f6ad57cb..47458a388 100644
--- a/src/libstore/builtins/buildenv.cc
+++ b/src/libstore/builtins/buildenv.cc
@@ -93,8 +93,9 @@ static void createLinks(State & state, const Path & srcDir, const Path & dstDir,
auto prevPriority = state.priorities[dstFile];
if (prevPriority == priority)
throw Error(
- "packages '%1%' and '%2%' have the same priority %3%; "
+ "files '%1%' and '%2%' have the same priority %3%; "
"use 'nix-env --set-flag priority NUMBER INSTALLED_PKGNAME' "
+              "or type 'nix profile install --help' if using 'nix profile' to find out how "
"to change the priority of one of the conflicting packages"
" (0 being the highest priority)",
srcFile, readLink(dstFile), priority);
diff --git a/src/libstore/builtins/fetchurl.cc b/src/libstore/builtins/fetchurl.cc
index af3dfc409..7d7924d77 100644
--- a/src/libstore/builtins/fetchurl.cc
+++ b/src/libstore/builtins/fetchurl.cc
@@ -24,7 +24,7 @@ void builtinFetchurl(const BasicDerivation & drv, const std::string & netrcData)
Path storePath = getAttr("out");
auto mainUrl = getAttr("url");
- bool unpack = get(drv.env, "unpack").value_or("") == "1";
+ bool unpack = getOr(drv.env, "unpack", "") == "1";
/* Note: have to use a fresh fileTransfer here because we're in
a forked process. */
diff --git a/src/libstore/builtins/unpack-channel.cc b/src/libstore/builtins/unpack-channel.cc
index 426d58a53..ba04bb16c 100644
--- a/src/libstore/builtins/unpack-channel.cc
+++ b/src/libstore/builtins/unpack-channel.cc
@@ -22,8 +22,7 @@ void builtinUnpackChannel(const BasicDerivation & drv)
auto entries = readDirectory(out);
if (entries.size() != 1)
throw Error("channel tarball '%s' contains more than one file", src);
- if (rename((out + "/" + entries[0].name).c_str(), (out + "/" + channelName).c_str()) == -1)
- throw SysError("renaming channel directory");
+ renameFile((out + "/" + entries[0].name), (out + "/" + channelName));
}
}
diff --git a/src/libstore/derivations.cc b/src/libstore/derivations.cc
index 1c695de82..fe99c3c5e 100644
--- a/src/libstore/derivations.cc
+++ b/src/libstore/derivations.cc
@@ -661,8 +661,10 @@ DrvHash hashDerivationModulo(Store & store, const Derivation & drv, bool maskOut
if (res.kind == DrvHash::Kind::Deferred)
kind = DrvHash::Kind::Deferred;
for (auto & outputName : inputOutputs) {
- const auto h = res.hashes.at(outputName);
- inputs2[h.to_string(Base16, false)].insert(outputName);
+ const auto h = get(res.hashes, outputName);
+ if (!h)
+ throw Error("no hash for output '%s' of derivation '%s'", outputName, drv.name);
+ inputs2[h->to_string(Base16, false)].insert(outputName);
}
}
@@ -836,8 +838,11 @@ static void rewriteDerivation(Store & store, BasicDerivation & drv, const String
auto hashModulo = hashDerivationModulo(store, Derivation(drv), true);
for (auto & [outputName, output] : drv.outputs) {
if (std::holds_alternative<DerivationOutput::Deferred>(output.raw())) {
- auto & h = hashModulo.hashes.at(outputName);
- auto outPath = store.makeOutputPath(outputName, h, drv.name);
+ auto h = get(hashModulo.hashes, outputName);
+ if (!h)
+ throw Error("derivation '%s' output '%s' has no hash (derivations.cc/rewriteDerivation)",
+ drv.name, outputName);
+ auto outPath = store.makeOutputPath(outputName, *h, drv.name);
drv.env[outputName] = store.printStorePath(outPath);
output = DerivationOutput::InputAddressed {
.path = std::move(outPath),
diff --git a/src/libstore/derived-path.cc b/src/libstore/derived-path.cc
index 319b1c790..44587ae78 100644
--- a/src/libstore/derived-path.cc
+++ b/src/libstore/derived-path.cc
@@ -4,6 +4,8 @@
#include <nlohmann/json.hpp>
+#include <optional>
+
namespace nix {
nlohmann::json DerivedPath::Opaque::toJSON(ref<Store> store) const {
@@ -17,12 +19,12 @@ nlohmann::json DerivedPath::Built::toJSON(ref<Store> store) const {
res["drvPath"] = store->printStorePath(drvPath);
// Fallback for the input-addressed derivation case: We expect to always be
// able to print the output paths, so let’s do it
- auto knownOutputs = store->queryPartialDerivationOutputMap(drvPath);
+ const auto knownOutputs = store->queryPartialDerivationOutputMap(drvPath);
for (const auto& output : outputs) {
- if (knownOutputs.at(output))
- res["outputs"][output] = store->printStorePath(knownOutputs.at(output).value());
- else
- res["outputs"][output] = nullptr;
+ auto knownOutput = get(knownOutputs, output);
+ res["outputs"][output] = (knownOutput && *knownOutput)
+ ? store->printStorePath(**knownOutput)
+ : nullptr;
}
return res;
}
@@ -123,10 +125,15 @@ RealisedPath::Set BuiltPath::toRealisedPaths(Store & store) const
for (auto& [outputName, outputPath] : p.outputs) {
if (settings.isExperimentalFeatureEnabled(
Xp::CaDerivations)) {
+ auto drvOutput = get(drvHashes, outputName);
+ if (!drvOutput)
+ throw Error(
+ "the derivation '%s' has unrealised output '%s' (derived-path.cc/toRealisedPaths)",
+ store.printStorePath(p.drvPath), outputName);
auto thisRealisation = store.queryRealisation(
- DrvOutput{drvHashes.at(outputName), outputName});
- assert(thisRealisation); // We’ve built it, so we must h
- // ve the realisation
+ DrvOutput{*drvOutput, outputName});
+ assert(thisRealisation); // We’ve built it, so we must
+ // have the realisation
res.insert(*thisRealisation);
} else {
res.insert(outputPath);
diff --git a/src/libstore/filetransfer.cc b/src/libstore/filetransfer.cc
index 529a41891..252403cb5 100644
--- a/src/libstore/filetransfer.cc
+++ b/src/libstore/filetransfer.cc
@@ -308,6 +308,9 @@ struct curlFileTransfer : public FileTransfer
curl_easy_setopt(req, CURLOPT_HTTPHEADER, requestHeaders);
+ if (settings.downloadSpeed.get() > 0)
+ curl_easy_setopt(req, CURLOPT_MAX_RECV_SPEED_LARGE, (curl_off_t) (settings.downloadSpeed.get() * 1024));
+
if (request.head)
curl_easy_setopt(req, CURLOPT_NOBODY, 1);
@@ -692,10 +695,10 @@ struct curlFileTransfer : public FileTransfer
#if ENABLE_S3
auto [bucketName, key, params] = parseS3Uri(request.uri);
- std::string profile = get(params, "profile").value_or("");
- std::string region = get(params, "region").value_or(Aws::Region::US_EAST_1);
- std::string scheme = get(params, "scheme").value_or("");
- std::string endpoint = get(params, "endpoint").value_or("");
+ std::string profile = getOr(params, "profile", "");
+ std::string region = getOr(params, "region", Aws::Region::US_EAST_1);
+ std::string scheme = getOr(params, "scheme", "");
+ std::string endpoint = getOr(params, "endpoint", "");
S3Helper s3Helper(profile, region, scheme, endpoint);
diff --git a/src/libstore/gc.cc b/src/libstore/gc.cc
index f65fb1b2e..4c1a82279 100644
--- a/src/libstore/gc.cc
+++ b/src/libstore/gc.cc
@@ -39,9 +39,7 @@ static void makeSymlink(const Path & link, const Path & target)
createSymlink(target, tempLink);
/* Atomically replace the old one. */
- if (rename(tempLink.c_str(), link.c_str()) == -1)
- throw SysError("cannot rename '%1%' to '%2%'",
- tempLink , link);
+ renameFile(tempLink, link);
}
@@ -135,6 +133,7 @@ void LocalStore::addTempRoot(const StorePath & path)
state->fdRootsSocket.close();
goto restart;
}
+ throw;
}
}
@@ -153,6 +152,7 @@ void LocalStore::addTempRoot(const StorePath & path)
state->fdRootsSocket.close();
goto restart;
}
+ throw;
} catch (EndOfFile & e) {
debug("GC socket disconnected");
state->fdRootsSocket.close();
diff --git a/src/libstore/globals.cc b/src/libstore/globals.cc
index cc009a026..d724897bb 100644
--- a/src/libstore/globals.cc
+++ b/src/libstore/globals.cc
@@ -36,7 +36,6 @@ Settings::Settings()
, nixStateDir(canonPath(getEnv("NIX_STATE_DIR").value_or(NIX_STATE_DIR)))
, nixConfDir(canonPath(getEnv("NIX_CONF_DIR").value_or(NIX_CONF_DIR)))
, nixUserConfFiles(getUserConfigFiles())
- , nixLibexecDir(canonPath(getEnv("NIX_LIBEXEC_DIR").value_or(NIX_LIBEXEC_DIR)))
, nixBinDir(canonPath(getEnv("NIX_BIN_DIR").value_or(NIX_BIN_DIR)))
, nixManDir(canonPath(NIX_MAN_DIR))
, nixDaemonSocketFile(canonPath(getEnv("NIX_DAEMON_SOCKET_PATH").value_or(nixStateDir + DEFAULT_SOCKET_PATH)))
@@ -67,12 +66,13 @@ Settings::Settings()
sandboxPaths = tokenizeString<StringSet>("/bin/sh=" SANDBOX_SHELL);
#endif
-
-/* chroot-like behavior from Apple's sandbox */
+ /* chroot-like behavior from Apple's sandbox */
#if __APPLE__
sandboxPaths = tokenizeString<StringSet>("/System/Library/Frameworks /System/Library/PrivateFrameworks /bin/sh /bin/bash /private/tmp /private/var/tmp /usr/lib");
allowedImpureHostPrefixes = tokenizeString<StringSet>("/System/Library /usr/lib /dev /bin/sh");
#endif
+
+ buildHook = getSelfExe().value_or("nix") + " __build-remote";
}
void loadConfFile()
@@ -114,7 +114,13 @@ std::vector<Path> getUserConfigFiles()
unsigned int Settings::getDefaultCores()
{
- return std::max(1U, std::thread::hardware_concurrency());
+ const unsigned int concurrency = std::max(1U, std::thread::hardware_concurrency());
+ const unsigned int maxCPU = getMaxCPU();
+
+ if (maxCPU > 0)
+ return maxCPU;
+ else
+ return concurrency;
}
StringSet Settings::getDefaultSystemFeatures()
diff --git a/src/libstore/globals.hh b/src/libstore/globals.hh
index feb6899cd..e9d721e59 100644
--- a/src/libstore/globals.hh
+++ b/src/libstore/globals.hh
@@ -79,9 +79,6 @@ public:
/* A list of user configuration files to load. */
std::vector<Path> nixUserConfFiles;
- /* The directory where internal helper programs are stored. */
- Path nixLibexecDir;
-
/* The directory where the main programs are stored. */
Path nixBinDir;
@@ -195,7 +192,7 @@ public:
)",
{"build-timeout"}};
- PathSetting buildHook{this, true, nixLibexecDir + "/nix/build-remote", "build-hook",
+ PathSetting buildHook{this, true, "", "build-hook",
"The path of the helper program that executes builds to remote machines."};
Setting<std::string> builders{
@@ -749,6 +746,13 @@ public:
/nix/store/xfghy8ixrhz3kyy6p724iv3cxji088dx-bash-4.4-p23`.
)"};
+ Setting<unsigned int> downloadSpeed {
+ this, 0, "download-speed",
+ R"(
+ Specify the maximum transfer rate in kilobytes per second you want
+ Nix to use for downloads.
+ )"};
+
Setting<std::string> netrcFile{
this, fmt("%s/%s", nixConfDir, "netrc"), "netrc-file",
R"(
@@ -802,7 +806,7 @@ public:
)"};
Setting<StringSet> ignoredAcls{
- this, {"security.selinux", "system.nfs4_acl"}, "ignored-acls",
+ this, {"security.selinux", "system.nfs4_acl", "security.csm"}, "ignored-acls",
R"(
A list of ACLs that should be ignored, normally Nix attempts to
remove all ACLs from files and directories in the Nix store, but
diff --git a/src/libstore/http-binary-cache-store.cc b/src/libstore/http-binary-cache-store.cc
index 3cb5efdbf..73bcd6e81 100644
--- a/src/libstore/http-binary-cache-store.cc
+++ b/src/libstore/http-binary-cache-store.cc
@@ -161,7 +161,12 @@ protected:
void getFile(const std::string & path,
Callback<std::optional<std::string>> callback) noexcept override
{
- checkEnabled();
+ try {
+ checkEnabled();
+ } catch (...) {
+ callback.rethrow();
+ return;
+ }
auto request(makeRequest(path));
diff --git a/src/libstore/local-binary-cache-store.cc b/src/libstore/local-binary-cache-store.cc
index f754770f9..f20b1fa02 100644
--- a/src/libstore/local-binary-cache-store.cc
+++ b/src/libstore/local-binary-cache-store.cc
@@ -57,8 +57,7 @@ protected:
AutoDelete del(tmp, false);
StreamToSourceAdapter source(istream);
writeFile(tmp, source);
- if (rename(tmp.c_str(), path2.c_str()))
- throw SysError("renaming '%1%' to '%2%'", tmp, path2);
+ renameFile(tmp, path2);
del.cancel();
}
@@ -69,6 +68,7 @@ protected:
} catch (SysError & e) {
if (e.errNo == ENOENT)
throw NoSuchBinaryCacheFile("file '%s' does not exist in binary cache", path);
+ throw;
}
}
@@ -107,7 +107,7 @@ bool LocalBinaryCacheStore::fileExists(const std::string & path)
std::set<std::string> LocalBinaryCacheStore::uriSchemes()
{
- if (getEnv("_NIX_FORCE_HTTP_BINARY_CACHE_STORE") == "1")
+ if (getEnv("_NIX_FORCE_HTTP") == "1")
return {};
else
return {"file"};
diff --git a/src/libstore/local-store.cc b/src/libstore/local-store.cc
index 5cc5c91cc..a272e4301 100644
--- a/src/libstore/local-store.cc
+++ b/src/libstore/local-store.cc
@@ -718,7 +718,11 @@ void LocalStore::checkDerivationOutputs(const StorePath & drvPath, const Derivat
// somewhat expensive so we do lazily
hashesModulo = hashDerivationModulo(*this, drv, true);
}
- StorePath recomputed = makeOutputPath(i.first, hashesModulo->hashes.at(i.first), drvName);
+ auto currentOutputHash = get(hashesModulo->hashes, i.first);
+ if (!currentOutputHash)
+ throw Error("derivation '%s' has unexpected output '%s' (local-store / hashesModulo) named '%s'",
+ printStorePath(drvPath), printStorePath(doia.path), i.first);
+ StorePath recomputed = makeOutputPath(i.first, *currentOutputHash, drvName);
if (doia.path != recomputed)
throw Error("derivation '%s' has incorrect output '%s', should be '%s'",
printStorePath(drvPath), printStorePath(doia.path), printStorePath(recomputed));
@@ -1426,8 +1430,7 @@ StorePath LocalStore::addToStoreFromDump(Source & source0, std::string_view name
writeFile(realPath, dumpSource);
} else {
/* Move the temporary path we restored above. */
- if (rename(tempPath.c_str(), realPath.c_str()))
- throw Error("renaming '%s' to '%s'", tempPath, realPath);
+ moveFile(tempPath, realPath);
}
/* For computing the nar hash. In recursive SHA-256 mode, this
@@ -1938,8 +1941,7 @@ void LocalStore::addBuildLog(const StorePath & drvPath, std::string_view log)
writeFile(tmpFile, compress("bzip2", log));
- if (rename(tmpFile.c_str(), logPath.c_str()) != 0)
- throw SysError("renaming '%1%' to '%2%'", tmpFile, logPath);
+ renameFile(tmpFile, logPath);
}
std::optional<std::string> LocalStore::getVersion()
diff --git a/src/libstore/local.mk b/src/libstore/local.mk
index b992bcbc0..1d26ac918 100644
--- a/src/libstore/local.mk
+++ b/src/libstore/local.mk
@@ -39,14 +39,23 @@ libstore_CXXFLAGS += \
-DNIX_STATE_DIR=\"$(localstatedir)/nix\" \
-DNIX_LOG_DIR=\"$(localstatedir)/log/nix\" \
-DNIX_CONF_DIR=\"$(sysconfdir)/nix\" \
- -DNIX_LIBEXEC_DIR=\"$(libexecdir)\" \
-DNIX_BIN_DIR=\"$(bindir)\" \
-DNIX_MAN_DIR=\"$(mandir)\" \
-DLSOF=\"$(lsof)\"
+ifeq ($(embedded_sandbox_shell),yes)
+libstore_CXXFLAGS += -DSANDBOX_SHELL=\"__embedded_sandbox_shell__\"
+
+$(d)/build/local-derivation-goal.cc: $(d)/embedded-sandbox-shell.gen.hh
+
+$(d)/embedded-sandbox-shell.gen.hh: $(sandbox_shell)
+ $(trace-gen) hexdump -v -e '1/1 "0x%x," "\n"' < $< > $@.tmp
+ @mv $@.tmp $@
+else
ifneq ($(sandbox_shell),)
libstore_CXXFLAGS += -DSANDBOX_SHELL="\"$(sandbox_shell)\""
endif
+endif
$(d)/local-store.cc: $(d)/schema.sql.gen.hh $(d)/ca-specific-schema.sql.gen.hh
diff --git a/src/libstore/lock.cc b/src/libstore/lock.cc
index f1356fdca..fa718f55d 100644
--- a/src/libstore/lock.cc
+++ b/src/libstore/lock.cc
@@ -67,13 +67,26 @@ bool UserLock::findFreeUser() {
#if __linux__
/* Get the list of supplementary groups of this build user. This
is usually either empty or contains a group such as "kvm". */
- supplementaryGIDs.resize(10);
- int ngroups = supplementaryGIDs.size();
- int err = getgrouplist(pw->pw_name, pw->pw_gid,
- supplementaryGIDs.data(), &ngroups);
+ int ngroups = 32; // arbitrary initial guess
+ supplementaryGIDs.resize(ngroups);
+
+ int err = getgrouplist(pw->pw_name, pw->pw_gid, supplementaryGIDs.data(),
+ &ngroups);
+
+    // Our initial guess of 32 wasn't sufficient; getgrouplist() has stored
+    // the correct size in ngroups, so resize and try again.
+ if (err == -1) {
+ supplementaryGIDs.resize(ngroups);
+ err = getgrouplist(pw->pw_name, pw->pw_gid, supplementaryGIDs.data(),
+ &ngroups);
+ }
+
+ // If it failed once more, then something must be broken.
if (err == -1)
- throw Error("failed to get list of supplementary groups for '%1%'", pw->pw_name);
+ throw Error("failed to get list of supplementary groups for '%1%'",
+ pw->pw_name);
+ // Finally, trim back the GID list to its real size
supplementaryGIDs.resize(ngroups);
#endif
diff --git a/src/libstore/misc.cc b/src/libstore/misc.cc
index 2bbd7aa70..fb985c97b 100644
--- a/src/libstore/misc.cc
+++ b/src/libstore/misc.cc
@@ -278,11 +278,16 @@ std::map<DrvOutput, StorePath> drvOutputReferences(
std::set<Realisation> inputRealisations;
for (const auto & [inputDrv, outputNames] : drv.inputDrvs) {
- auto outputHashes =
+ const auto outputHashes =
staticOutputHashes(store, store.readDerivation(inputDrv));
for (const auto & outputName : outputNames) {
+ auto outputHash = get(outputHashes, outputName);
+ if (!outputHash)
+ throw Error(
+ "output '%s' of derivation '%s' isn't realised", outputName,
+ store.printStorePath(inputDrv));
auto thisRealisation = store.queryRealisation(
- DrvOutput{outputHashes.at(outputName), outputName});
+ DrvOutput{*outputHash, outputName});
if (!thisRealisation)
throw Error(
"output '%s' of derivation '%s' isn't built", outputName,
diff --git a/src/libstore/nar-info-disk-cache.cc b/src/libstore/nar-info-disk-cache.cc
index 9dd81ddfb..f4ea739b0 100644
--- a/src/libstore/nar-info-disk-cache.cc
+++ b/src/libstore/nar-info-disk-cache.cc
@@ -62,6 +62,9 @@ public:
/* How often to purge expired entries from the cache. */
const int purgeInterval = 24 * 3600;
+ /* How long to cache binary cache info (i.e. /nix-cache-info) */
+ const int cacheInfoTtl = 7 * 24 * 3600;
+
struct Cache
{
int id;
@@ -98,7 +101,7 @@ public:
"insert or replace into BinaryCaches(url, timestamp, storeDir, wantMassQuery, priority) values (?, ?, ?, ?, ?)");
state->queryCache.create(state->db,
- "select id, storeDir, wantMassQuery, priority from BinaryCaches where url = ?");
+ "select id, storeDir, wantMassQuery, priority from BinaryCaches where url = ? and timestamp > ?");
state->insertNAR.create(state->db,
"insert or replace into NARs(cache, hashPart, namePart, url, compression, fileHash, fileSize, narHash, "
@@ -183,7 +186,7 @@ public:
auto i = state->caches.find(uri);
if (i == state->caches.end()) {
- auto queryCache(state->queryCache.use()(uri));
+ auto queryCache(state->queryCache.use()(uri)(time(0) - cacheInfoTtl));
if (!queryCache.next())
return std::nullopt;
state->caches.emplace(uri,
diff --git a/src/libstore/nar-info.cc b/src/libstore/nar-info.cc
index 2d75e7a82..071d8355e 100644
--- a/src/libstore/nar-info.cc
+++ b/src/libstore/nar-info.cc
@@ -69,8 +69,6 @@ NarInfo::NarInfo(const Store & store, const std::string & s, const std::string &
if (value != "unknown-deriver")
deriver = StorePath(value);
}
- else if (name == "System")
- system = value;
else if (name == "Sig")
sigs.insert(value);
else if (name == "CA") {
@@ -106,9 +104,6 @@ std::string NarInfo::to_string(const Store & store) const
if (deriver)
res += "Deriver: " + std::string(deriver->to_string()) + "\n";
- if (!system.empty())
- res += "System: " + system + "\n";
-
for (auto sig : sigs)
res += "Sig: " + sig + "\n";
diff --git a/src/libstore/nar-info.hh b/src/libstore/nar-info.hh
index 39ced76e5..01683ec73 100644
--- a/src/libstore/nar-info.hh
+++ b/src/libstore/nar-info.hh
@@ -14,7 +14,6 @@ struct NarInfo : ValidPathInfo
std::string compression;
std::optional<Hash> fileHash;
uint64_t fileSize = 0;
- std::string system;
NarInfo() = delete;
NarInfo(StorePath && path, Hash narHash) : ValidPathInfo(std::move(path), narHash) { }
diff --git a/src/libstore/optimise-store.cc b/src/libstore/optimise-store.cc
index 8af9b1dde..4d2781180 100644
--- a/src/libstore/optimise-store.cc
+++ b/src/libstore/optimise-store.cc
@@ -229,7 +229,9 @@ void LocalStore::optimisePath_(Activity * act, OptimiseStats & stats,
}
/* Atomically replace the old file with the new hard link. */
- if (rename(tempLink.c_str(), path.c_str()) == -1) {
+ try {
+ renameFile(tempLink, path);
+ } catch (SysError & e) {
if (unlink(tempLink.c_str()) == -1)
printError("unable to unlink '%1%'", tempLink);
if (errno == EMLINK) {
@@ -240,7 +242,7 @@ void LocalStore::optimisePath_(Activity * act, OptimiseStats & stats,
debug("'%s' has reached maximum number of links", linkPath);
return;
}
- throw SysError("cannot rename '%1%' to '%2%'", tempLink, path);
+ throw;
}
stats.filesLinked++;
diff --git a/src/libstore/path-with-outputs.cc b/src/libstore/path-with-outputs.cc
index 078c117bd..d6d67ea05 100644
--- a/src/libstore/path-with-outputs.cc
+++ b/src/libstore/path-with-outputs.cc
@@ -1,5 +1,8 @@
#include "path-with-outputs.hh"
#include "store-api.hh"
+#include "nlohmann/json.hpp"
+
+#include <regex>
namespace nix {
@@ -68,4 +71,57 @@ StorePathWithOutputs followLinksToStorePathWithOutputs(const Store & store, std:
return StorePathWithOutputs { store.followLinksToStorePath(path), std::move(outputs) };
}
+std::pair<std::string, OutputsSpec> parseOutputsSpec(const std::string & s)
+{
+ static std::regex regex(R"((.*)\^((\*)|([a-z]+(,[a-z]+)*)))");
+
+ std::smatch match;
+ if (!std::regex_match(s, match, regex))
+ return {s, DefaultOutputs()};
+
+ if (match[3].matched)
+ return {match[1], AllOutputs()};
+
+ return {match[1], tokenizeString<OutputNames>(match[4].str(), ",")};
+}
+
+std::string printOutputsSpec(const OutputsSpec & outputsSpec)
+{
+ if (std::get_if<DefaultOutputs>(&outputsSpec))
+ return "";
+
+ if (std::get_if<AllOutputs>(&outputsSpec))
+ return "^*";
+
+ if (auto outputNames = std::get_if<OutputNames>(&outputsSpec))
+ return "^" + concatStringsSep(",", *outputNames);
+
+ assert(false);
+}
+
+void to_json(nlohmann::json & json, const OutputsSpec & outputsSpec)
+{
+ if (std::get_if<DefaultOutputs>(&outputsSpec))
+ json = nullptr;
+
+ else if (std::get_if<AllOutputs>(&outputsSpec))
+ json = std::vector<std::string>({"*"});
+
+ else if (auto outputNames = std::get_if<OutputNames>(&outputsSpec))
+ json = *outputNames;
+}
+
+void from_json(const nlohmann::json & json, OutputsSpec & outputsSpec)
+{
+ if (json.is_null())
+ outputsSpec = DefaultOutputs();
+ else {
+ auto names = json.get<OutputNames>();
+ if (names == OutputNames({"*"}))
+ outputsSpec = AllOutputs();
+ else
+ outputsSpec = names;
+ }
+}
+
}
diff --git a/src/libstore/path-with-outputs.hh b/src/libstore/path-with-outputs.hh
index 4c4023dcb..0cb5eb223 100644
--- a/src/libstore/path-with-outputs.hh
+++ b/src/libstore/path-with-outputs.hh
@@ -4,6 +4,7 @@
#include "path.hh"
#include "derived-path.hh"
+#include "nlohmann/json_fwd.hpp"
namespace nix {
@@ -32,4 +33,25 @@ StorePathWithOutputs parsePathWithOutputs(const Store & store, std::string_view
StorePathWithOutputs followLinksToStorePathWithOutputs(const Store & store, std::string_view pathWithOutputs);
+typedef std::set<std::string> OutputNames;
+
+struct AllOutputs {
+ bool operator < (const AllOutputs & _) const { return false; }
+};
+
+struct DefaultOutputs {
+ bool operator < (const DefaultOutputs & _) const { return false; }
+};
+
+typedef std::variant<DefaultOutputs, AllOutputs, OutputNames> OutputsSpec;
+
+/* Parse a string of the form 'prefix^output1,...outputN' or
+ 'prefix^*', returning the prefix and the outputs spec. */
+std::pair<std::string, OutputsSpec> parseOutputsSpec(const std::string & s);
+
+std::string printOutputsSpec(const OutputsSpec & outputsSpec);
+
+void to_json(nlohmann::json &, const OutputsSpec &);
+void from_json(const nlohmann::json &, OutputsSpec &);
+
}
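
As a quick illustration of the OutputsSpec API declared above (a hedged sketch, not part of the patch; it only uses parseOutputsSpec / printOutputsSpec as declared in this header):

    #include <iostream>
    #include "path-with-outputs.hh"

    int main()
    {
        // "bar^out,dev" splits into the prefix "bar" and an explicit output set.
        auto [prefix, spec] = nix::parseOutputsSpec("bar^out,dev");

        // printOutputsSpec renders the spec back as a "^..." suffix; OutputNames
        // is a std::set, so the names come back in sorted order ("dev,out").
        std::cout << prefix << nix::printOutputsSpec(spec) << std::endl;

        // A string without "^" yields DefaultOutputs, which prints as "".
        auto [p2, s2] = nix::parseOutputsSpec("bar");
        std::cout << p2 << nix::printOutputsSpec(s2) << std::endl;
        return 0;
    }
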
diff --git a/src/libstore/remote-store.cc b/src/libstore/remote-store.cc
index 347e32094..96a29155c 100644
--- a/src/libstore/remote-store.cc
+++ b/src/libstore/remote-store.cc
@@ -580,7 +580,6 @@ ref<const ValidPathInfo> RemoteStore::addCAToStore(
try {
conn->to.written = 0;
- conn->to.warn = true;
connections->incCapacity();
{
Finally cleanup([&]() { connections->decCapacity(); });
@@ -591,7 +590,6 @@ ref<const ValidPathInfo> RemoteStore::addCAToStore(
dumpString(contents, conn->to);
}
}
- conn->to.warn = false;
conn.processStderr();
} catch (SysError & e) {
/* Daemon closed while we were sending the path. Probably OOM
@@ -674,6 +672,23 @@ void RemoteStore::addToStore(const ValidPathInfo & info, Source & source,
void RemoteStore::addMultipleToStore(
+ PathsSource & pathsToCopy,
+ Activity & act,
+ RepairFlag repair,
+ CheckSigsFlag checkSigs)
+{
+ auto source = sinkToSource([&](Sink & sink) {
+ sink << pathsToCopy.size();
+ for (auto & [pathInfo, pathSource] : pathsToCopy) {
+ pathInfo.write(sink, *this, 16);
+ pathSource->drainInto(sink);
+ }
+ });
+
+ addMultipleToStore(*source, repair, checkSigs);
+}
+
+void RemoteStore::addMultipleToStore(
Source & source,
RepairFlag repair,
CheckSigsFlag checkSigs)
@@ -718,36 +733,34 @@ void RemoteStore::registerDrvOutput(const Realisation & info)
void RemoteStore::queryRealisationUncached(const DrvOutput & id,
Callback<std::shared_ptr<const Realisation>> callback) noexcept
{
- auto conn(getConnection());
+ try {
+ auto conn(getConnection());
- if (GET_PROTOCOL_MINOR(conn->daemonVersion) < 27) {
- warn("the daemon is too old to support content-addressed derivations, please upgrade it to 2.4");
- try {
- callback(nullptr);
- } catch (...) { return callback.rethrow(); }
- }
+ if (GET_PROTOCOL_MINOR(conn->daemonVersion) < 27) {
+ warn("the daemon is too old to support content-addressed derivations, please upgrade it to 2.4");
+ return callback(nullptr);
+ }
- conn->to << wopQueryRealisation;
- conn->to << id.to_string();
- conn.processStderr();
+ conn->to << wopQueryRealisation;
+ conn->to << id.to_string();
+ conn.processStderr();
- auto real = [&]() -> std::shared_ptr<const Realisation> {
- if (GET_PROTOCOL_MINOR(conn->daemonVersion) < 31) {
- auto outPaths = worker_proto::read(
- *this, conn->from, Phantom<std::set<StorePath>> {});
- if (outPaths.empty())
- return nullptr;
- return std::make_shared<const Realisation>(Realisation { .id = id, .outPath = *outPaths.begin() });
- } else {
- auto realisations = worker_proto::read(
- *this, conn->from, Phantom<std::set<Realisation>> {});
- if (realisations.empty())
- return nullptr;
- return std::make_shared<const Realisation>(*realisations.begin());
- }
- }();
+ auto real = [&]() -> std::shared_ptr<const Realisation> {
+ if (GET_PROTOCOL_MINOR(conn->daemonVersion) < 31) {
+ auto outPaths = worker_proto::read(
+ *this, conn->from, Phantom<std::set<StorePath>> {});
+ if (outPaths.empty())
+ return nullptr;
+ return std::make_shared<const Realisation>(Realisation { .id = id, .outPath = *outPaths.begin() });
+ } else {
+ auto realisations = worker_proto::read(
+ *this, conn->from, Phantom<std::set<Realisation>> {});
+ if (realisations.empty())
+ return nullptr;
+ return std::make_shared<const Realisation>(*realisations.begin());
+ }
+ }();
- try {
callback(std::shared_ptr<const Realisation>(real));
} catch (...) { return callback.rethrow(); }
}
@@ -853,15 +866,15 @@ std::vector<BuildResult> RemoteStore::buildPathsWithResults(
OutputPathMap outputs;
auto drv = evalStore->readDerivation(bfd.drvPath);
- auto outputHashes = staticOutputHashes(*evalStore, drv); // FIXME: expensive
- auto drvOutputs = drv.outputsAndOptPaths(*this);
+ const auto outputHashes = staticOutputHashes(*evalStore, drv); // FIXME: expensive
+ const auto drvOutputs = drv.outputsAndOptPaths(*this);
for (auto & output : bfd.outputs) {
- if (!outputHashes.count(output))
+ auto outputHash = get(outputHashes, output);
+ if (!outputHash)
throw Error(
"the derivation '%s' doesn't have an output named '%s'",
printStorePath(bfd.drvPath), output);
- auto outputId =
- DrvOutput{outputHashes.at(output), output};
+ auto outputId = DrvOutput{ *outputHash, output };
if (settings.isExperimentalFeatureEnabled(Xp::CaDerivations)) {
auto realisation =
queryRealisation(outputId);
@@ -874,13 +887,14 @@ std::vector<BuildResult> RemoteStore::buildPathsWithResults(
} else {
// If ca-derivations isn't enabled, assume that
// the output path is statically known.
- assert(drvOutputs.count(output));
- assert(drvOutputs.at(output).second);
+ const auto drvOutput = get(drvOutputs, output);
+ assert(drvOutput);
+ assert(drvOutput->second);
res.builtOutputs.emplace(
outputId,
Realisation {
.id = outputId,
- .outPath = *drvOutputs.at(output).second
+ .outPath = *drvOutput->second,
});
}
}
diff --git a/src/libstore/remote-store.hh b/src/libstore/remote-store.hh
index 8493be6fc..11d089cd2 100644
--- a/src/libstore/remote-store.hh
+++ b/src/libstore/remote-store.hh
@@ -88,6 +88,12 @@ public:
RepairFlag repair,
CheckSigsFlag checkSigs) override;
+ void addMultipleToStore(
+ PathsSource & pathsToCopy,
+ Activity & act,
+ RepairFlag repair,
+ CheckSigsFlag checkSigs) override;
+
StorePath addTextToStore(
std::string_view name,
std::string_view s,
diff --git a/src/libstore/s3.hh b/src/libstore/s3.hh
index 3f55c74db..cdb3e5908 100644
--- a/src/libstore/s3.hh
+++ b/src/libstore/s3.hh
@@ -5,6 +5,7 @@
#include "ref.hh"
#include <optional>
+#include <string>
namespace Aws { namespace Client { class ClientConfiguration; } }
namespace Aws { namespace S3 { class S3Client; } }
diff --git a/src/libstore/sandbox-defaults.sb b/src/libstore/sandbox-defaults.sb
index 56b35c3fe..d9d710559 100644
--- a/src/libstore/sandbox-defaults.sb
+++ b/src/libstore/sandbox-defaults.sb
@@ -98,7 +98,9 @@
(allow file*
(literal "/private/var/select/sh"))
-; Allow Rosetta 2 to run x86_64 binaries on aarch64-darwin.
+; Allow Rosetta 2 to run x86_64 binaries on aarch64-darwin (and vice versa).
(allow file-read*
(subpath "/Library/Apple/usr/libexec/oah")
- (subpath "/System/Library/Apple/usr/libexec/oah"))
+ (subpath "/System/Library/Apple/usr/libexec/oah")
+ (subpath "/System/Library/LaunchDaemons/com.apple.oahd.plist")
+ (subpath "/Library/Apple/System/Library/LaunchDaemons/com.apple.oahd.plist"))
diff --git a/src/libstore/sandbox-network.sb b/src/libstore/sandbox-network.sb
index 56beec761..19e9eea9a 100644
--- a/src/libstore/sandbox-network.sb
+++ b/src/libstore/sandbox-network.sb
@@ -14,3 +14,7 @@
; Allow DNS lookups.
(allow network-outbound (remote unix-socket (path-literal "/private/var/run/mDNSResponder")))
+
+; Allow access to trustd.
+(allow mach-lookup (global-name "com.apple.trustd"))
+(allow mach-lookup (global-name "com.apple.trustd.agent"))
diff --git a/src/libstore/schema.sql b/src/libstore/schema.sql
index 09c71a2b8..d65e5335e 100644
--- a/src/libstore/schema.sql
+++ b/src/libstore/schema.sql
@@ -1,7 +1,7 @@
create table if not exists ValidPaths (
id integer primary key autoincrement not null,
path text unique not null,
- hash text not null,
+ hash text not null, -- base16 representation
registrationTime integer not null,
deriver text,
narSize integer,
diff --git a/src/libstore/store-api.cc b/src/libstore/store-api.cc
index 59937be4d..86b12257a 100644
--- a/src/libstore/store-api.cc
+++ b/src/libstore/store-api.cc
@@ -258,6 +258,84 @@ StorePath Store::addToStore(
return addToStoreFromDump(*source, name, method, hashAlgo, repair, references);
}
+void Store::addMultipleToStore(
+ PathsSource & pathsToCopy,
+ Activity & act,
+ RepairFlag repair,
+ CheckSigsFlag checkSigs)
+{
+ std::atomic<size_t> nrDone{0};
+ std::atomic<size_t> nrFailed{0};
+ std::atomic<uint64_t> bytesExpected{0};
+ std::atomic<uint64_t> nrRunning{0};
+
+ using PathWithInfo = std::pair<ValidPathInfo, std::unique_ptr<Source>>;
+
+ std::map<StorePath, PathWithInfo *> infosMap;
+ StorePathSet storePathsToAdd;
+ for (auto & thingToAdd : pathsToCopy) {
+ infosMap.insert_or_assign(thingToAdd.first.path, &thingToAdd);
+ storePathsToAdd.insert(thingToAdd.first.path);
+ }
+
+ auto showProgress = [&]() {
+ act.progress(nrDone, pathsToCopy.size(), nrRunning, nrFailed);
+ };
+
+ ThreadPool pool;
+
+ processGraph<StorePath>(pool,
+ storePathsToAdd,
+
+ [&](const StorePath & path) {
+
+ auto & [info, _] = *infosMap.at(path);
+
+ if (isValidPath(info.path)) {
+ nrDone++;
+ showProgress();
+ return StorePathSet();
+ }
+
+ bytesExpected += info.narSize;
+ act.setExpected(actCopyPath, bytesExpected);
+
+ return info.references;
+ },
+
+ [&](const StorePath & path) {
+ checkInterrupt();
+
+ auto & [info_, source_] = *infosMap.at(path);
+ auto info = info_;
+ info.ultimate = false;
+
+ /* Make sure that the Source object is destroyed when
+ we're done. In particular, a SinkToSource object must
+ be destroyed to ensure that the destructors on its
+ stack frame are run; this includes
+ LegacySSHStore::narFromPath()'s connection lock. */
+ auto source = std::move(source_);
+
+ if (!isValidPath(info.path)) {
+ MaintainCount<decltype(nrRunning)> mc(nrRunning);
+ showProgress();
+ try {
+ addToStore(info, *source, repair, checkSigs);
+ } catch (Error & e) {
+ nrFailed++;
+ if (!settings.keepGoing)
+ throw e;
+ printMsg(lvlError, "could not copy %s: %s", printStorePath(path), e.what());
+ showProgress();
+ return;
+ }
+ }
+
+ nrDone++;
+ showProgress();
+ });
+}
void Store::addMultipleToStore(
Source & source,
@@ -992,113 +1070,61 @@ std::map<StorePath, StorePath> copyPaths(
for (auto & path : storePaths)
if (!valid.count(path)) missing.insert(path);
+ Activity act(*logger, lvlInfo, actCopyPaths, fmt("copying %d paths", missing.size()));
+
+ // In the general case, `addMultipleToStore` requires a sorted list of
+ // store paths to add, so sort them right now
+ auto sortedMissing = srcStore.topoSortPaths(missing);
+ std::reverse(sortedMissing.begin(), sortedMissing.end());
+
std::map<StorePath, StorePath> pathsMap;
for (auto & path : storePaths)
pathsMap.insert_or_assign(path, path);
- Activity act(*logger, lvlInfo, actCopyPaths, fmt("copying %d paths", missing.size()));
+ Store::PathsSource pathsToCopy;
+
+ auto computeStorePathForDst = [&](const ValidPathInfo & currentPathInfo) -> StorePath {
+ auto storePathForSrc = currentPathInfo.path;
+ auto storePathForDst = storePathForSrc;
+ if (currentPathInfo.ca && currentPathInfo.references.empty()) {
+ storePathForDst = dstStore.makeFixedOutputPathFromCA(storePathForSrc.name(), *currentPathInfo.ca);
+ if (dstStore.storeDir == srcStore.storeDir)
+ assert(storePathForDst == storePathForSrc);
+ if (storePathForDst != storePathForSrc)
+ debug("replaced path '%s' to '%s' for substituter '%s'",
+ srcStore.printStorePath(storePathForSrc),
+ dstStore.printStorePath(storePathForDst),
+ dstStore.getUri());
+ }
+ return storePathForDst;
+ };
- auto sorted = srcStore.topoSortPaths(missing);
- std::reverse(sorted.begin(), sorted.end());
+ for (auto & missingPath : sortedMissing) {
+ auto info = srcStore.queryPathInfo(missingPath);
- auto source = sinkToSource([&](Sink & sink) {
- sink << sorted.size();
- for (auto & storePath : sorted) {
+ auto storePathForDst = computeStorePathForDst(*info);
+ pathsMap.insert_or_assign(missingPath, storePathForDst);
+
+ ValidPathInfo infoForDst = *info;
+ infoForDst.path = storePathForDst;
+
+ auto source = sinkToSource([&](Sink & sink) {
+ // We can reasonably assume that the copy will happen whenever we
+ // read the path, so log something about that at that point
auto srcUri = srcStore.getUri();
auto dstUri = dstStore.getUri();
- auto storePathS = srcStore.printStorePath(storePath);
+ auto storePathS = srcStore.printStorePath(missingPath);
Activity act(*logger, lvlInfo, actCopyPath,
makeCopyPathMessage(srcUri, dstUri, storePathS),
{storePathS, srcUri, dstUri});
PushActivity pact(act.id);
- auto info = srcStore.queryPathInfo(storePath);
- info->write(sink, srcStore, 16);
- srcStore.narFromPath(storePath, sink);
- }
- });
-
- dstStore.addMultipleToStore(*source, repair, checkSigs);
-
- #if 0
- std::atomic<size_t> nrDone{0};
- std::atomic<size_t> nrFailed{0};
- std::atomic<uint64_t> bytesExpected{0};
- std::atomic<uint64_t> nrRunning{0};
-
- auto showProgress = [&]() {
- act.progress(nrDone, missing.size(), nrRunning, nrFailed);
- };
-
- ThreadPool pool;
-
- processGraph<StorePath>(pool,
- StorePathSet(missing.begin(), missing.end()),
-
- [&](const StorePath & storePath) {
- auto info = srcStore.queryPathInfo(storePath);
- auto storePathForDst = storePath;
- if (info->ca && info->references.empty()) {
- storePathForDst = dstStore.makeFixedOutputPathFromCA(storePath.name(), *info->ca);
- if (dstStore.storeDir == srcStore.storeDir)
- assert(storePathForDst == storePath);
- if (storePathForDst != storePath)
- debug("replaced path '%s' to '%s' for substituter '%s'",
- srcStore.printStorePath(storePath),
- dstStore.printStorePath(storePathForDst),
- dstStore.getUri());
- }
- pathsMap.insert_or_assign(storePath, storePathForDst);
-
- if (dstStore.isValidPath(storePath)) {
- nrDone++;
- showProgress();
- return StorePathSet();
- }
-
- bytesExpected += info->narSize;
- act.setExpected(actCopyPath, bytesExpected);
-
- return info->references;
- },
-
- [&](const StorePath & storePath) {
- checkInterrupt();
-
- auto info = srcStore.queryPathInfo(storePath);
-
- auto storePathForDst = storePath;
- if (info->ca && info->references.empty()) {
- storePathForDst = dstStore.makeFixedOutputPathFromCA(storePath.name(), *info->ca);
- if (dstStore.storeDir == srcStore.storeDir)
- assert(storePathForDst == storePath);
- if (storePathForDst != storePath)
- debug("replaced path '%s' to '%s' for substituter '%s'",
- srcStore.printStorePath(storePath),
- dstStore.printStorePath(storePathForDst),
- dstStore.getUri());
- }
- pathsMap.insert_or_assign(storePath, storePathForDst);
-
- if (!dstStore.isValidPath(storePathForDst)) {
- MaintainCount<decltype(nrRunning)> mc(nrRunning);
- showProgress();
- try {
- copyStorePath(srcStore, dstStore, storePath, repair, checkSigs);
- } catch (Error &e) {
- nrFailed++;
- if (!settings.keepGoing)
- throw e;
- printMsg(lvlError, "could not copy %s: %s", dstStore.printStorePath(storePath), e.what());
- showProgress();
- return;
- }
- }
-
- nrDone++;
- showProgress();
+ srcStore.narFromPath(missingPath, sink);
});
- #endif
+ pathsToCopy.push_back(std::pair{infoForDst, std::move(source)});
+ }
+
+ dstStore.addMultipleToStore(pathsToCopy, act, repair, checkSigs);
return pathsMap;
}
@@ -1302,7 +1328,8 @@ std::pair<std::string, Store::Params> splitUriAndParams(const std::string & uri_
return {uri, params};
}
-static bool isNonUriPath(const std::string & spec) {
+static bool isNonUriPath(const std::string & spec)
+{
return
// is not a URL
spec.find("://") == std::string::npos
@@ -1314,11 +1341,36 @@ static bool isNonUriPath(const std::string & spec) {
std::shared_ptr<Store> openFromNonUri(const std::string & uri, const Store::Params & params)
{
if (uri == "" || uri == "auto") {
- auto stateDir = get(params, "state").value_or(settings.nixStateDir);
+ auto stateDir = getOr(params, "state", settings.nixStateDir);
if (access(stateDir.c_str(), R_OK | W_OK) == 0)
return std::make_shared<LocalStore>(params);
else if (pathExists(settings.nixDaemonSocketFile))
return std::make_shared<UDSRemoteStore>(params);
+ #if __linux__
+ else if (!pathExists(stateDir)
+ && params.empty()
+ && getuid() != 0
+ && !getEnv("NIX_STORE_DIR").has_value()
+ && !getEnv("NIX_STATE_DIR").has_value())
+ {
+ /* If /nix doesn't exist, there is no daemon socket, and
+ we're not root, then automatically set up a chroot
+ store in ~/.local/share/nix/root. */
+ auto chrootStore = getDataDir() + "/nix/root";
+ if (!pathExists(chrootStore)) {
+ try {
+ createDirs(chrootStore);
+ } catch (Error & e) {
+ return std::make_shared<LocalStore>(params);
+ }
+ warn("'/nix' does not exist, so Nix will use '%s' as a chroot store", chrootStore);
+ } else
+ debug("'/nix' does not exist, so Nix will use '%s' as a chroot store", chrootStore);
+ Store::Params params2;
+ params2["root"] = chrootStore;
+ return std::make_shared<LocalStore>(params2);
+ }
+ #endif
else
return std::make_shared<LocalStore>(params);
} else if (uri == "daemon") {
diff --git a/src/libstore/store-api.hh b/src/libstore/store-api.hh
index 0c8a4db56..c8a667c6d 100644
--- a/src/libstore/store-api.hh
+++ b/src/libstore/store-api.hh
@@ -1,5 +1,6 @@
#pragma once
+#include "nar-info.hh"
#include "realisation.hh"
#include "path.hh"
#include "derived-path.hh"
@@ -359,12 +360,22 @@ public:
virtual void addToStore(const ValidPathInfo & info, Source & narSource,
RepairFlag repair = NoRepair, CheckSigsFlag checkSigs = CheckSigs) = 0;
+    // A list of path infos along with a source providing the content of the
+    // associated store path.
+ using PathsSource = std::vector<std::pair<ValidPathInfo, std::unique_ptr<Source>>>;
+
/* Import multiple paths into the store. */
virtual void addMultipleToStore(
Source & source,
RepairFlag repair = NoRepair,
CheckSigsFlag checkSigs = CheckSigs);
+ virtual void addMultipleToStore(
+ PathsSource & pathsToCopy,
+ Activity & act,
+ RepairFlag repair = NoRepair,
+ CheckSigsFlag checkSigs = CheckSigs);
+
/* Copy the contents of a path to the store and register the
validity the resulting path. The resulting path is returned.
The function object `filter' can be used to exclude files (see
diff --git a/src/libstore/tests/path-with-outputs.cc b/src/libstore/tests/path-with-outputs.cc
new file mode 100644
index 000000000..350ea7ffd
--- /dev/null
+++ b/src/libstore/tests/path-with-outputs.cc
@@ -0,0 +1,46 @@
+#include "path-with-outputs.hh"
+
+#include <gtest/gtest.h>
+
+namespace nix {
+
+TEST(parseOutputsSpec, basic)
+{
+ {
+ auto [prefix, outputsSpec] = parseOutputsSpec("foo");
+ ASSERT_EQ(prefix, "foo");
+ ASSERT_TRUE(std::get_if<DefaultOutputs>(&outputsSpec));
+ }
+
+ {
+ auto [prefix, outputsSpec] = parseOutputsSpec("foo^*");
+ ASSERT_EQ(prefix, "foo");
+ ASSERT_TRUE(std::get_if<AllOutputs>(&outputsSpec));
+ }
+
+ {
+ auto [prefix, outputsSpec] = parseOutputsSpec("foo^out");
+ ASSERT_EQ(prefix, "foo");
+ ASSERT_TRUE(std::get<OutputNames>(outputsSpec) == OutputNames({"out"}));
+ }
+
+ {
+ auto [prefix, outputsSpec] = parseOutputsSpec("foo^out,bin");
+ ASSERT_EQ(prefix, "foo");
+ ASSERT_TRUE(std::get<OutputNames>(outputsSpec) == OutputNames({"out", "bin"}));
+ }
+
+ {
+ auto [prefix, outputsSpec] = parseOutputsSpec("foo^bar^out,bin");
+ ASSERT_EQ(prefix, "foo^bar");
+ ASSERT_TRUE(std::get<OutputNames>(outputsSpec) == OutputNames({"out", "bin"}));
+ }
+
+ {
+ auto [prefix, outputsSpec] = parseOutputsSpec("foo^&*()");
+ ASSERT_EQ(prefix, "foo^&*()");
+ ASSERT_TRUE(std::get_if<DefaultOutputs>(&outputsSpec));
+ }
+}
+
+}
diff --git a/src/libutil/args.cc b/src/libutil/args.cc
index 4b8c55686..44b63f0f6 100644
--- a/src/libutil/args.cc
+++ b/src/libutil/args.cc
@@ -124,7 +124,7 @@ bool Args::processFlag(Strings::iterator & pos, Strings::iterator end)
bool anyCompleted = false;
for (size_t n = 0 ; n < flag.handler.arity; ++n) {
if (pos == end) {
- if (flag.handler.arity == ArityAny) break;
+ if (flag.handler.arity == ArityAny || anyCompleted) break;
throw UsageError("flag '%s' requires %d argument(s)", name, flag.handler.arity);
}
if (auto prefix = needsCompletion(*pos)) {
@@ -362,6 +362,14 @@ bool MultiCommand::processArgs(const Strings & args, bool finish)
return Args::processArgs(args, finish);
}
+void MultiCommand::completionHook()
+{
+ if (command)
+ return command->second->completionHook();
+ else
+ return Args::completionHook();
+}
+
nlohmann::json MultiCommand::toJSON()
{
auto cmds = nlohmann::json::object();
diff --git a/src/libutil/args.hh b/src/libutil/args.hh
index fdd036f9a..84866f12b 100644
--- a/src/libutil/args.hh
+++ b/src/libutil/args.hh
@@ -25,6 +25,8 @@ public:
/* Return a short one-line description of the command. */
virtual std::string description() { return ""; }
+ virtual bool forceImpureByDefault() { return false; }
+
/* Return documentation about this command, in Markdown format. */
virtual std::string doc() { return ""; }
@@ -146,6 +148,11 @@ protected:
argument (if any) have been processed. */
virtual void initialFlagsProcessed() {}
+ /* Called after the command line has been processed if we need to generate
+ completions. Useful for commands that need to know the whole command line
+ in order to know what completions to generate. */
+ virtual void completionHook() { }
+
public:
void addFlag(Flag && flag);
@@ -221,6 +228,8 @@ public:
bool processArgs(const Strings & args, bool finish) override;
+ void completionHook() override;
+
nlohmann::json toJSON() override;
};
diff --git a/src/libutil/error.hh b/src/libutil/error.hh
index 2172cb988..50335676e 100644
--- a/src/libutil/error.hh
+++ b/src/libutil/error.hh
@@ -98,6 +98,15 @@ struct ErrPos {
}
};
+std::optional<LinesOfCode> getCodeLines(const ErrPos & errPos);
+
+void printCodeLines(std::ostream & out,
+ const std::string & prefix,
+ const ErrPos & errPos,
+ const LinesOfCode & loc);
+
+void printAtPos(const ErrPos & pos, std::ostream & out);
+
struct Trace {
std::optional<ErrPos> pos;
hintformat hint;
@@ -201,13 +210,19 @@ public:
int errNo;
template<typename... Args>
- SysError(const Args & ... args)
+ SysError(int errNo_, const Args & ... args)
: Error("")
{
- errNo = errno;
+ errNo = errNo_;
auto hf = hintfmt(args...);
err.msg = hintfmt("%1%: %2%", normaltxt(hf.str()), strerror(errNo));
}
+
+ template<typename... Args>
+ SysError(const Args & ... args)
+ : SysError(errno, args ...)
+ {
+ }
};
}
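
The new SysError(errNo, ...) overload above lets a caller report an errno value captured earlier, rather than whatever errno happens to hold at throw time; this is what the clone() failure path in local-derivation-goal.cc uses. A hedged sketch of the intended difference (the function name is illustrative only):

    #include <cerrno>
    #include "error.hh"

    void cloneFailedExample()
    {
        int saved = errno;  // capture errno right after the failing syscall

        // ... diagnostics that may clobber errno (notice(), pathExists(), ...) ...

        // New overload: strerror(saved) ends up in the message regardless of
        // what the diagnostics did to errno in the meantime. The old overload,
        // SysError("..."), reads errno at construction time instead.
        throw nix::SysError(saved, "creating sandboxed builder process using clone()");
    }
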
diff --git a/src/libutil/experimental-features.cc b/src/libutil/experimental-features.cc
index df37edf57..fa79cca6b 100644
--- a/src/libutil/experimental-features.cc
+++ b/src/libutil/experimental-features.cc
@@ -13,6 +13,7 @@ std::map<ExperimentalFeature, std::string> stringifiedXpFeatures = {
{ Xp::RecursiveNix, "recursive-nix" },
{ Xp::NoUrlLiterals, "no-url-literals" },
{ Xp::FetchClosure, "fetch-closure" },
+ { Xp::ReplFlake, "repl-flake" },
};
const std::optional<ExperimentalFeature> parseExperimentalFeature(const std::string_view & name)
@@ -35,7 +36,9 @@ const std::optional<ExperimentalFeature> parseExperimentalFeature(const std::str
std::string_view showExperimentalFeature(const ExperimentalFeature feature)
{
- return stringifiedXpFeatures.at(feature);
+ const auto ret = get(stringifiedXpFeatures, feature);
+ assert(ret);
+ return *ret;
}
std::set<ExperimentalFeature> parseFeatures(const std::set<std::string> & rawFeatures)
@@ -58,11 +61,13 @@ std::ostream & operator <<(std::ostream & str, const ExperimentalFeature & featu
return str << showExperimentalFeature(feature);
}
-void to_json(nlohmann::json& j, const ExperimentalFeature& feature) {
+void to_json(nlohmann::json & j, const ExperimentalFeature & feature)
+{
j = showExperimentalFeature(feature);
}
-void from_json(const nlohmann::json& j, ExperimentalFeature& feature) {
+void from_json(const nlohmann::json & j, ExperimentalFeature & feature)
+{
const std::string input = j;
const auto parsed = parseExperimentalFeature(input);
diff --git a/src/libutil/experimental-features.hh b/src/libutil/experimental-features.hh
index a6d080094..d09ab025c 100644
--- a/src/libutil/experimental-features.hh
+++ b/src/libutil/experimental-features.hh
@@ -22,6 +22,7 @@ enum struct ExperimentalFeature
RecursiveNix,
NoUrlLiterals,
FetchClosure,
+ ReplFlake,
};
/**
@@ -55,7 +56,7 @@ public:
* Semi-magic conversion to and from json.
* See the nlohmann/json readme for more details.
*/
-void to_json(nlohmann::json&, const ExperimentalFeature&);
-void from_json(const nlohmann::json&, ExperimentalFeature&);
+void to_json(nlohmann::json &, const ExperimentalFeature &);
+void from_json(const nlohmann::json &, ExperimentalFeature &);
}
diff --git a/src/libutil/filesystem.cc b/src/libutil/filesystem.cc
new file mode 100644
index 000000000..403389e60
--- /dev/null
+++ b/src/libutil/filesystem.cc
@@ -0,0 +1,172 @@
+#include <sys/time.h>
+#include <filesystem>
+
+#include "finally.hh"
+#include "util.hh"
+#include "types.hh"
+
+namespace fs = std::filesystem;
+
+namespace nix {
+
+static Path tempName(Path tmpRoot, const Path & prefix, bool includePid,
+ int & counter)
+{
+ tmpRoot = canonPath(tmpRoot.empty() ? getEnv("TMPDIR").value_or("/tmp") : tmpRoot, true);
+ if (includePid)
+ return (format("%1%/%2%-%3%-%4%") % tmpRoot % prefix % getpid() % counter++).str();
+ else
+ return (format("%1%/%2%-%3%") % tmpRoot % prefix % counter++).str();
+}
+
+Path createTempDir(const Path & tmpRoot, const Path & prefix,
+ bool includePid, bool useGlobalCounter, mode_t mode)
+{
+ static int globalCounter = 0;
+ int localCounter = 0;
+ int & counter(useGlobalCounter ? globalCounter : localCounter);
+
+ while (1) {
+ checkInterrupt();
+ Path tmpDir = tempName(tmpRoot, prefix, includePid, counter);
+ if (mkdir(tmpDir.c_str(), mode) == 0) {
+#if __FreeBSD__
+ /* Explicitly set the group of the directory. This is to
+ work around problems caused by BSD's group
+ ownership semantics (directories inherit the group of
+ the parent). For instance, the group of /tmp on
+ FreeBSD is "wheel", so all directories created in /tmp
+ will be owned by "wheel"; but if the user is not in
+ "wheel", then "tar" will fail to unpack archives that
+ have the setgid bit set on directories. */
+ if (chown(tmpDir.c_str(), (uid_t) -1, getegid()) != 0)
+ throw SysError("setting group of directory '%1%'", tmpDir);
+#endif
+ return tmpDir;
+ }
+ if (errno != EEXIST)
+ throw SysError("creating directory '%1%'", tmpDir);
+ }
+}
+
+
+std::pair<AutoCloseFD, Path> createTempFile(const Path & prefix)
+{
+ Path tmpl(getEnv("TMPDIR").value_or("/tmp") + "/" + prefix + ".XXXXXX");
+ // Strictly speaking, this is UB, but who cares...
+ // FIXME: use O_TMPFILE.
+ AutoCloseFD fd(mkstemp((char *) tmpl.c_str()));
+ if (!fd)
+ throw SysError("creating temporary file '%s'", tmpl);
+ closeOnExec(fd.get());
+ return {std::move(fd), tmpl};
+}
+
+void createSymlink(const Path & target, const Path & link,
+ std::optional<time_t> mtime)
+{
+ if (symlink(target.c_str(), link.c_str()))
+ throw SysError("creating symlink from '%1%' to '%2%'", link, target);
+ if (mtime) {
+ struct timeval times[2];
+ times[0].tv_sec = *mtime;
+ times[0].tv_usec = 0;
+ times[1].tv_sec = *mtime;
+ times[1].tv_usec = 0;
+ if (lutimes(link.c_str(), times))
+ throw SysError("setting time of symlink '%s'", link);
+ }
+}
+
+void replaceSymlink(const Path & target, const Path & link,
+ std::optional<time_t> mtime)
+{
+ for (unsigned int n = 0; true; n++) {
+ Path tmp = canonPath(fmt("%s/.%d_%s", dirOf(link), n, baseNameOf(link)));
+
+ try {
+ createSymlink(target, tmp, mtime);
+ } catch (SysError & e) {
+ if (e.errNo == EEXIST) continue;
+ throw;
+ }
+
+ renameFile(tmp, link);
+
+ break;
+ }
+}
+
+void setWriteTime(const fs::path & p, const struct stat & st)
+{
+ struct timeval times[2];
+ times[0] = {
+ .tv_sec = st.st_atime,
+ .tv_usec = 0,
+ };
+ times[1] = {
+ .tv_sec = st.st_mtime,
+ .tv_usec = 0,
+ };
+ if (lutimes(p.c_str(), times) != 0)
+ throw SysError("changing modification time of '%s'", p);
+}
+
+void copy(const fs::directory_entry & from, const fs::path & to, bool andDelete)
+{
+ // TODO: Rewrite the `is_*` to use `symlink_status()`
+ auto statOfFrom = lstat(from.path().c_str());
+ auto fromStatus = from.symlink_status();
+
+ // Mark the directory as writable so that we can delete its children
+ if (andDelete && fs::is_directory(fromStatus)) {
+ fs::permissions(from.path(), fs::perms::owner_write, fs::perm_options::add | fs::perm_options::nofollow);
+ }
+
+
+ if (fs::is_symlink(fromStatus) || fs::is_regular_file(fromStatus)) {
+ fs::copy(from.path(), to, fs::copy_options::copy_symlinks | fs::copy_options::overwrite_existing);
+ } else if (fs::is_directory(fromStatus)) {
+ fs::create_directory(to);
+ for (auto & entry : fs::directory_iterator(from.path())) {
+ copy(entry, to / entry.path().filename(), andDelete);
+ }
+ } else {
+ throw Error("file '%s' has an unsupported type", from.path());
+ }
+
+ setWriteTime(to, statOfFrom);
+ if (andDelete) {
+ if (!fs::is_symlink(fromStatus))
+ fs::permissions(from.path(), fs::perms::owner_write, fs::perm_options::add | fs::perm_options::nofollow);
+ fs::remove(from.path());
+ }
+}
+
+void renameFile(const Path & oldName, const Path & newName)
+{
+ fs::rename(oldName, newName);
+}
+
+void moveFile(const Path & oldName, const Path & newName)
+{
+ try {
+ renameFile(oldName, newName);
+ } catch (fs::filesystem_error & e) {
+ auto oldPath = fs::path(oldName);
+ auto newPath = fs::path(newName);
+ // For the move to be as atomic as possible, copy to a temporary
+ // directory
+ fs::path temp = createTempDir(newPath.parent_path(), "rename-tmp");
+ Finally removeTemp = [&]() { fs::remove(temp); };
+ auto tempCopyTarget = temp / "copy-target";
+ if (e.code().value() == EXDEV) {
+ fs::remove(newPath);
+ warn("Can’t rename %s as %s, copying instead", oldName, newName);
+ copy(fs::directory_entry(oldPath), tempCopyTarget, true);
+ renameFile(tempCopyTarget, newPath);
+ }
+ }
+}
+
+}
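A usage sketch of the helpers gathered into this new file, assuming libutil's `util.hh` still declares them; the destination path is made up for illustration.

```cpp
#include "util.hh"

int main()
{
    using namespace nix;
    auto [fd, tmpPath] = createTempFile("demo");   // e.g. /tmp/demo.XXXXXX
    writeFull(fd.get(), "hello\n");
    // renameFile() is a plain rename(); moveFile() falls back to copying
    // into a temporary directory and removing the source when rename()
    // fails with EXDEV, i.e. when the two paths are on different filesystems.
    moveFile(tmpPath, "/var/tmp/demo-moved");      // hypothetical destination
    return 0;
}
```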
diff --git a/src/libutil/git.cc b/src/libutil/git.cc
new file mode 100644
index 000000000..f35c2fdb7
--- /dev/null
+++ b/src/libutil/git.cc
@@ -0,0 +1,25 @@
+#include "git.hh"
+
+#include <regex>
+
+namespace nix {
+namespace git {
+
+std::optional<LsRemoteRefLine> parseLsRemoteLine(std::string_view line)
+{
+ const static std::regex line_regex("^(ref: *)?([^\\s]+)(?:\\t+(.*))?$");
+ std::match_results<std::string_view::const_iterator> match;
+ if (!std::regex_match(line.cbegin(), line.cend(), match, line_regex))
+ return std::nullopt;
+
+ return LsRemoteRefLine {
+ .kind = match[1].length() == 0
+ ? LsRemoteRefLine::Kind::Object
+ : LsRemoteRefLine::Kind::Symbolic,
+ .target = match[2],
+ .reference = match[3].length() == 0 ? std::nullopt : std::optional<std::string>{ match[3] }
+ };
+}
+
+}
+}
diff --git a/src/libutil/git.hh b/src/libutil/git.hh
new file mode 100644
index 000000000..cb13ef0e5
--- /dev/null
+++ b/src/libutil/git.hh
@@ -0,0 +1,40 @@
+#pragma once
+
+#include <string>
+#include <string_view>
+#include <optional>
+
+namespace nix {
+
+namespace git {
+
+// A line from the output of `git ls-remote --symref`.
+//
+// These can be of two kinds:
+//
+// - Symbolic references of the form
+//
+// ref: {target} {reference}
+//
+// where {target} is itself a reference and {reference} is optional
+//
+// - Object references of the form
+//
+// {target} {reference}
+//
+// where {target} is a commit id and {reference} is mandatory
+struct LsRemoteRefLine {
+ enum struct Kind {
+ Symbolic,
+ Object
+ };
+ Kind kind;
+ std::string target;
+ std::optional<std::string> reference;
+};
+
+std::optional<LsRemoteRefLine> parseLsRemoteLine(std::string_view line);
+
+}
+
+}
diff --git a/src/libutil/hilite.cc b/src/libutil/hilite.cc
index a5991ca39..e5088230d 100644
--- a/src/libutil/hilite.cc
+++ b/src/libutil/hilite.cc
@@ -8,9 +8,9 @@ std::string hiliteMatches(
std::string_view prefix,
std::string_view postfix)
{
- // Avoid copy on zero matches
+ // Avoid extra work on zero matches
if (matches.size() == 0)
- return (std::string) s;
+ return std::string(s);
std::sort(matches.begin(), matches.end(), [](const auto & a, const auto & b) {
return a.position() < b.position();
diff --git a/src/libutil/json-utils.hh b/src/libutil/json-utils.hh
new file mode 100644
index 000000000..b8a031227
--- /dev/null
+++ b/src/libutil/json-utils.hh
@@ -0,0 +1,21 @@
+#pragma once
+
+#include <nlohmann/json.hpp>
+
+namespace nix {
+
+const nlohmann::json * get(const nlohmann::json & map, const std::string & key)
+{
+ auto i = map.find(key);
+ if (i == map.end()) return nullptr;
+ return &*i;
+}
+
+nlohmann::json * get(nlohmann::json & map, const std::string & key)
+{
+ auto i = map.find(key);
+ if (i == map.end()) return nullptr;
+ return &*i;
+}
+
+}
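A usage sketch of the `get()` helpers above on an `nlohmann::json` object; the JSON document is made up for illustration.

```cpp
#include <iostream>
#include <nlohmann/json.hpp>
#include "json-utils.hh"

int main()
{
    auto manifest = nlohmann::json::parse(R"({"version": 2})");
    if (auto * v = nix::get(manifest, "version"))
        std::cout << "version " << *v << "\n";   // prints: version 2
    else
        std::cout << "no version field\n";
    return 0;
}
```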
diff --git a/src/libutil/json.cc b/src/libutil/json.cc
index 3a981376f..2f9e97ff5 100644
--- a/src/libutil/json.cc
+++ b/src/libutil/json.cc
@@ -1,11 +1,13 @@
#include "json.hh"
#include <iomanip>
+#include <cstdint>
#include <cstring>
namespace nix {
-void toJSON(std::ostream & str, const char * start, const char * end)
+template<>
+void toJSON<std::string_view>(std::ostream & str, const std::string_view & s)
{
constexpr size_t BUF_SIZE = 4096;
char buf[BUF_SIZE + 7]; // BUF_SIZE + largest single sequence of puts
@@ -20,7 +22,7 @@ void toJSON(std::ostream & str, const char * start, const char * end)
};
put('"');
- for (auto i = start; i != end; i++) {
+ for (auto i = s.begin(); i != s.end(); i++) {
if (bufPos >= BUF_SIZE) flush();
if (*i == '\"' || *i == '\\') { put('\\'); put(*i); }
else if (*i == '\n') { put('\\'); put('n'); }
@@ -43,7 +45,7 @@ void toJSON(std::ostream & str, const char * start, const char * end)
void toJSON(std::ostream & str, const char * s)
{
- if (!s) str << "null"; else toJSON(str, s, s + strlen(s));
+ if (!s) str << "null"; else toJSON(str, std::string_view(s));
}
template<> void toJSON<int>(std::ostream & str, const int & n) { str << n; }
@@ -54,11 +56,7 @@ template<> void toJSON<long long>(std::ostream & str, const long long & n) { str
template<> void toJSON<unsigned long long>(std::ostream & str, const unsigned long long & n) { str << n; }
template<> void toJSON<float>(std::ostream & str, const float & n) { str << n; }
template<> void toJSON<double>(std::ostream & str, const double & n) { str << n; }
-
-template<> void toJSON<std::string>(std::ostream & str, const std::string & s)
-{
- toJSON(str, s.c_str(), s.c_str() + s.size());
-}
+template<> void toJSON<std::string>(std::ostream & str, const std::string & s) { toJSON(str, (std::string_view) s); }
template<> void toJSON<bool>(std::ostream & str, const bool & b)
{
@@ -153,7 +151,7 @@ JSONObject::~JSONObject()
}
}
-void JSONObject::attr(const std::string & s)
+void JSONObject::attr(std::string_view s)
{
comma();
toJSON(state->str, s);
@@ -161,19 +159,19 @@ void JSONObject::attr(const std::string & s)
if (state->indent) state->str << ' ';
}
-JSONList JSONObject::list(const std::string & name)
+JSONList JSONObject::list(std::string_view name)
{
attr(name);
return JSONList(state);
}
-JSONObject JSONObject::object(const std::string & name)
+JSONObject JSONObject::object(std::string_view name)
{
attr(name);
return JSONObject(state);
}
-JSONPlaceholder JSONObject::placeholder(const std::string & name)
+JSONPlaceholder JSONObject::placeholder(std::string_view name)
{
attr(name);
return JSONPlaceholder(state);
@@ -195,7 +193,11 @@ JSONObject JSONPlaceholder::object()
JSONPlaceholder::~JSONPlaceholder()
{
- assert(!first || std::uncaught_exceptions());
+ if (first) {
+ assert(std::uncaught_exceptions());
+ if (state->stack != 0)
+ write(nullptr);
+ }
}
}
diff --git a/src/libutil/json.hh b/src/libutil/json.hh
index 83213ca66..3790b1a2e 100644
--- a/src/libutil/json.hh
+++ b/src/libutil/json.hh
@@ -6,7 +6,6 @@
namespace nix {
-void toJSON(std::ostream & str, const char * start, const char * end);
void toJSON(std::ostream & str, const char * s);
template<typename T>
@@ -107,7 +106,7 @@ private:
open();
}
- void attr(const std::string & s);
+ void attr(std::string_view s);
public:
@@ -128,18 +127,18 @@ public:
~JSONObject();
template<typename T>
- JSONObject & attr(const std::string & name, const T & v)
+ JSONObject & attr(std::string_view name, const T & v)
{
attr(name);
toJSON(state->str, v);
return *this;
}
- JSONList list(const std::string & name);
+ JSONList list(std::string_view name);
- JSONObject object(const std::string & name);
+ JSONObject object(std::string_view name);
- JSONPlaceholder placeholder(const std::string & name);
+ JSONPlaceholder placeholder(std::string_view name);
};
class JSONPlaceholder : JSONWriter
diff --git a/src/libutil/logging.hh b/src/libutil/logging.hh
index 6f81b92de..d0817b4a9 100644
--- a/src/libutil/logging.hh
+++ b/src/libutil/logging.hh
@@ -111,6 +111,9 @@ public:
virtual std::optional<char> ask(std::string_view s)
{ return {}; }
+
+ virtual void setPrintBuildLogs(bool printBuildLogs)
+ { }
};
ActivityId getCurActivity();
diff --git a/src/libutil/ref.hh b/src/libutil/ref.hh
index f9578afc7..bf26321db 100644
--- a/src/libutil/ref.hh
+++ b/src/libutil/ref.hh
@@ -7,7 +7,7 @@
namespace nix {
/* A simple non-nullable reference-counted pointer. Actually a wrapper
- around std::shared_ptr that prevents non-null constructions. */
+ around std::shared_ptr that prevents null constructions. */
template<typename T>
class ref
{
diff --git a/src/libutil/serialise.cc b/src/libutil/serialise.cc
index 8ff904583..2c3597775 100644
--- a/src/libutil/serialise.cc
+++ b/src/libutil/serialise.cc
@@ -48,24 +48,9 @@ FdSink::~FdSink()
}
-size_t threshold = 256 * 1024 * 1024;
-
-static void warnLargeDump()
-{
- warn("dumping very large path (> 256 MiB); this may run out of memory");
-}
-
-
void FdSink::write(std::string_view data)
{
written += data.size();
- static bool warned = false;
- if (warn && !warned) {
- if (written > threshold) {
- warnLargeDump();
- warned = true;
- }
- }
try {
writeFull(fd, data);
} catch (SysError & e) {
@@ -448,11 +433,6 @@ Error readError(Source & source)
void StringSink::operator () (std::string_view data)
{
- static bool warned = false;
- if (!warned && s.size() > threshold) {
- warnLargeDump();
- warned = true;
- }
s.append(data);
}
diff --git a/src/libutil/serialise.hh b/src/libutil/serialise.hh
index 13da26c6a..84847835a 100644
--- a/src/libutil/serialise.hh
+++ b/src/libutil/serialise.hh
@@ -97,19 +97,17 @@ protected:
struct FdSink : BufferedSink
{
int fd;
- bool warn = false;
size_t written = 0;
FdSink() : fd(-1) { }
FdSink(int fd) : fd(fd) { }
FdSink(FdSink&&) = default;
- FdSink& operator=(FdSink && s)
+ FdSink & operator=(FdSink && s)
{
flush();
fd = s.fd;
s.fd = -1;
- warn = s.warn;
written = s.written;
return *this;
}
diff --git a/src/libutil/tests/git.cc b/src/libutil/tests/git.cc
new file mode 100644
index 000000000..5b5715fc2
--- /dev/null
+++ b/src/libutil/tests/git.cc
@@ -0,0 +1,33 @@
+#include "git.hh"
+#include <gtest/gtest.h>
+
+namespace nix {
+
+ TEST(GitLsRemote, parseSymrefLineWithReference) {
+ auto line = "ref: refs/head/main HEAD";
+ auto res = git::parseLsRemoteLine(line);
+ ASSERT_TRUE(res.has_value());
+ ASSERT_EQ(res->kind, git::LsRemoteRefLine::Kind::Symbolic);
+ ASSERT_EQ(res->target, "refs/head/main");
+ ASSERT_EQ(res->reference, "HEAD");
+ }
+
+ TEST(GitLsRemote, parseSymrefLineWithNoReference) {
+ auto line = "ref: refs/head/main";
+ auto res = git::parseLsRemoteLine(line);
+ ASSERT_TRUE(res.has_value());
+ ASSERT_EQ(res->kind, git::LsRemoteRefLine::Kind::Symbolic);
+ ASSERT_EQ(res->target, "refs/head/main");
+ ASSERT_EQ(res->reference, std::nullopt);
+ }
+
+ TEST(GitLsRemote, parseObjectRefLine) {
+ auto line = "abc123 refs/head/main";
+ auto res = git::parseLsRemoteLine(line);
+ ASSERT_TRUE(res.has_value());
+ ASSERT_EQ(res->kind, git::LsRemoteRefLine::Kind::Object);
+ ASSERT_EQ(res->target, "abc123");
+ ASSERT_EQ(res->reference, "refs/head/main");
+ }
+}
+
diff --git a/src/libutil/tests/json.cc b/src/libutil/tests/json.cc
index dea73f53a..156286999 100644
--- a/src/libutil/tests/json.cc
+++ b/src/libutil/tests/json.cc
@@ -102,8 +102,8 @@ namespace nix {
TEST(toJSON, substringEscape) {
std::stringstream out;
- const char *s = "foo\t";
- toJSON(out, s+3, s + strlen(s));
+ std::string_view s = "foo\t";
+ toJSON(out, s.substr(3));
ASSERT_EQ(out.str(), "\"\\t\"");
}
diff --git a/src/libutil/tests/tests.cc b/src/libutil/tests/tests.cc
index 92972ed14..6e325db98 100644
--- a/src/libutil/tests/tests.cc
+++ b/src/libutil/tests/tests.cc
@@ -548,7 +548,7 @@ namespace nix {
TEST(get, emptyContainer) {
StringMap s = { };
- auto expected = std::nullopt;
+ auto expected = nullptr;
ASSERT_EQ(get(s, "one"), expected);
}
@@ -559,7 +559,23 @@ namespace nix {
s["two"] = "er";
auto expected = "yi";
- ASSERT_EQ(get(s, "one"), expected);
+ ASSERT_EQ(*get(s, "one"), expected);
+ }
+
+ TEST(getOr, emptyContainer) {
+ StringMap s = { };
+ auto expected = "yi";
+
+ ASSERT_EQ(getOr(s, "one", "yi"), expected);
+ }
+
+ TEST(getOr, getFromContainer) {
+ StringMap s;
+ s["one"] = "yi";
+ s["two"] = "er";
+ auto expected = "yi";
+
+ ASSERT_EQ(getOr(s, "one", "nope"), expected);
}
/* ----------------------------------------------------------------------------
diff --git a/src/libutil/url.cc b/src/libutil/url.cc
index f6232d255..5b7abeb49 100644
--- a/src/libutil/url.cc
+++ b/src/libutil/url.cc
@@ -1,6 +1,7 @@
#include "url.hh"
#include "url-parts.hh"
#include "util.hh"
+#include "split.hh"
namespace nix {
@@ -136,4 +137,21 @@ bool ParsedURL::operator ==(const ParsedURL & other) const
&& fragment == other.fragment;
}
+/**
+ * Parse a URL scheme of the form '(applicationScheme\+)?transportScheme'
+ * into a tuple '(applicationScheme, transportScheme)'
+ *
+ * > parseUrlScheme("http") == ParsedUrlScheme{ {}, "http"}
+ * > parseUrlScheme("tarball+http") == ParsedUrlScheme{ {"tarball"}, "http"}
+ */
+ParsedUrlScheme parseUrlScheme(std::string_view scheme)
+{
+ auto application = splitPrefixTo(scheme, '+');
+ auto transport = scheme;
+ return ParsedUrlScheme {
+ .application = application,
+ .transport = transport,
+ };
+}
+
}
diff --git a/src/libutil/url.hh b/src/libutil/url.hh
index 6e77142e3..2a9fb34c1 100644
--- a/src/libutil/url.hh
+++ b/src/libutil/url.hh
@@ -27,4 +27,19 @@ std::map<std::string, std::string> decodeQuery(const std::string & query);
ParsedURL parseURL(const std::string & url);
+/*
+ * Although that’s not really standardized anywhere, a number of tools
+ * use a scheme of the form 'x+y' in URLs, where y is the “transport layer”
+ * scheme, and x is the “application layer” scheme.
+ *
+ * For example, git uses `git+https` to designate remotes using the Git
+ * protocol over HTTPS.
+ */
+struct ParsedUrlScheme {
+ std::optional<std::string_view> application;
+ std::string_view transport;
+};
+
+ParsedUrlScheme parseUrlScheme(std::string_view scheme);
+
}
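A sketch of the split performed by `parseUrlScheme()`, mirroring the examples in the comment in `url.cc`; this is illustrative test code, not part of the change.

```cpp
#include <cassert>
#include "url.hh"

int main()
{
    auto a = nix::parseUrlScheme("tarball+https");
    assert(a.application && *a.application == "tarball");
    assert(a.transport == "https");

    auto b = nix::parseUrlScheme("https");
    assert(!b.application);            // no application-layer scheme
    assert(b.transport == "https");
    return 0;
}
```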
diff --git a/src/libutil/util.cc b/src/libutil/util.cc
index b49c1e466..96ac11ea2 100644
--- a/src/libutil/util.cc
+++ b/src/libutil/util.cc
@@ -29,11 +29,15 @@
#ifdef __APPLE__
#include <sys/syscall.h>
+#include <mach-o/dyld.h>
#endif
#ifdef __linux__
#include <sys/prctl.h>
#include <sys/resource.h>
+
+#include <mntent.h>
+#include <cmath>
#endif
@@ -504,61 +508,6 @@ void deletePath(const Path & path, uint64_t & bytesFreed)
}
-static Path tempName(Path tmpRoot, const Path & prefix, bool includePid,
- int & counter)
-{
- tmpRoot = canonPath(tmpRoot.empty() ? getEnv("TMPDIR").value_or("/tmp") : tmpRoot, true);
- if (includePid)
- return (format("%1%/%2%-%3%-%4%") % tmpRoot % prefix % getpid() % counter++).str();
- else
- return (format("%1%/%2%-%3%") % tmpRoot % prefix % counter++).str();
-}
-
-
-Path createTempDir(const Path & tmpRoot, const Path & prefix,
- bool includePid, bool useGlobalCounter, mode_t mode)
-{
- static int globalCounter = 0;
- int localCounter = 0;
- int & counter(useGlobalCounter ? globalCounter : localCounter);
-
- while (1) {
- checkInterrupt();
- Path tmpDir = tempName(tmpRoot, prefix, includePid, counter);
- if (mkdir(tmpDir.c_str(), mode) == 0) {
-#if __FreeBSD__
- /* Explicitly set the group of the directory. This is to
- work around around problems caused by BSD's group
- ownership semantics (directories inherit the group of
- the parent). For instance, the group of /tmp on
- FreeBSD is "wheel", so all directories created in /tmp
- will be owned by "wheel"; but if the user is not in
- "wheel", then "tar" will fail to unpack archives that
- have the setgid bit set on directories. */
- if (chown(tmpDir.c_str(), (uid_t) -1, getegid()) != 0)
- throw SysError("setting group of directory '%1%'", tmpDir);
-#endif
- return tmpDir;
- }
- if (errno != EEXIST)
- throw SysError("creating directory '%1%'", tmpDir);
- }
-}
-
-
-std::pair<AutoCloseFD, Path> createTempFile(const Path & prefix)
-{
- Path tmpl(getEnv("TMPDIR").value_or("/tmp") + "/" + prefix + ".XXXXXX");
- // Strictly speaking, this is UB, but who cares...
- // FIXME: use O_TMPFILE.
- AutoCloseFD fd(mkstemp((char *) tmpl.c_str()));
- if (!fd)
- throw SysError("creating temporary file '%s'", tmpl);
- closeOnExec(fd.get());
- return {std::move(fd), tmpl};
-}
-
-
std::string getUserName()
{
auto pw = getpwuid(geteuid());
@@ -573,7 +522,21 @@ Path getHome()
{
static Path homeDir = []()
{
+ std::optional<std::string> unownedUserHomeDir = {};
auto homeDir = getEnv("HOME");
+ if (homeDir) {
+ // Only use $HOME if it doesn't exist or is owned by the current user.
+ struct stat st;
+ int result = stat(homeDir->c_str(), &st);
+ if (result != 0) {
+ if (errno != ENOENT) {
+ warn("couldn't stat $HOME ('%s') for reason other than not existing ('%d'), falling back to the one defined in the 'passwd' file", *homeDir, errno);
+ homeDir.reset();
+ }
+ } else if (st.st_uid != geteuid()) {
+ unownedUserHomeDir.swap(homeDir);
+ }
+ }
if (!homeDir) {
std::vector<char> buf(16384);
struct passwd pwbuf;
@@ -582,6 +545,9 @@ Path getHome()
|| !pw || !pw->pw_dir || !pw->pw_dir[0])
throw Error("cannot determine user's home directory");
homeDir = pw->pw_dir;
+ if (unownedUserHomeDir.has_value() && unownedUserHomeDir != homeDir) {
+ warn("$HOME ('%s') is not owned by you, falling back to the one defined in the 'passwd' file ('%s')", *unownedUserHomeDir, *homeDir);
+ }
}
return *homeDir;
}();
@@ -619,6 +585,27 @@ Path getDataDir()
}
+std::optional<Path> getSelfExe()
+{
+ static auto cached = []() -> std::optional<Path>
+ {
+ #if __linux__
+ return readLink("/proc/self/exe");
+ #elif __APPLE__
+ char buf[1024];
+ uint32_t size = sizeof(buf);
+ if (_NSGetExecutablePath(buf, &size) == 0)
+ return buf;
+ else
+ return std::nullopt;
+ #else
+ return std::nullopt;
+ #endif
+ }();
+ return cached;
+}
+
+
Paths createDirs(const Path & path)
{
Paths created;
@@ -642,44 +629,6 @@ Paths createDirs(const Path & path)
}
-void createSymlink(const Path & target, const Path & link,
- std::optional<time_t> mtime)
-{
- if (symlink(target.c_str(), link.c_str()))
- throw SysError("creating symlink from '%1%' to '%2%'", link, target);
- if (mtime) {
- struct timeval times[2];
- times[0].tv_sec = *mtime;
- times[0].tv_usec = 0;
- times[1].tv_sec = *mtime;
- times[1].tv_usec = 0;
- if (lutimes(link.c_str(), times))
- throw SysError("setting time of symlink '%s'", link);
- }
-}
-
-
-void replaceSymlink(const Path & target, const Path & link,
- std::optional<time_t> mtime)
-{
- for (unsigned int n = 0; true; n++) {
- Path tmp = canonPath(fmt("%s/.%d_%s", dirOf(link), n, baseNameOf(link)));
-
- try {
- createSymlink(target, tmp, mtime);
- } catch (SysError & e) {
- if (e.errNo == EEXIST) continue;
- throw;
- }
-
- if (rename(tmp.c_str(), link.c_str()) != 0)
- throw SysError("renaming '%1%' to '%2%'", tmp, link);
-
- break;
- }
-}
-
-
void readFull(int fd, char * buf, size_t count)
{
while (count) {
@@ -752,7 +701,55 @@ void drainFD(int fd, Sink & sink, bool block)
}
}
+//////////////////////////////////////////////////////////////////////
+unsigned int getMaxCPU()
+{
+ #if __linux__
+ try {
+ FILE *fp = fopen("/proc/mounts", "r");
+ if (!fp)
+ return 0;
+
+ Strings cgPathParts;
+
+ struct mntent *ent;
+ while ((ent = getmntent(fp))) {
+ std::string mountType, mountPath;
+
+ mountType = ent->mnt_type;
+ mountPath = ent->mnt_dir;
+
+ if (mountType == "cgroup2") {
+ cgPathParts.push_back(mountPath);
+ break;
+ }
+ }
+
+ fclose(fp);
+
+ if (cgPathParts.size() > 0 && pathExists("/proc/self/cgroup")) {
+ std::string currentCgroup = readFile("/proc/self/cgroup");
+ Strings cgValues = tokenizeString<Strings>(currentCgroup, ":");
+ cgPathParts.push_back(trim(cgValues.back(), "\n"));
+ cgPathParts.push_back("cpu.max");
+ std::string fullCgPath = canonPath(concatStringsSep("/", cgPathParts));
+
+ if (pathExists(fullCgPath)) {
+ std::string cpuMax = readFile(fullCgPath);
+ std::vector<std::string> cpuMaxParts = tokenizeString<std::vector<std::string>>(cpuMax, " ");
+ std::string quota = cpuMaxParts[0];
+ std::string period = trim(cpuMaxParts[1], "\n");
+
+ if (quota != "max")
+ return std::ceil(std::stoi(quota) / std::stof(period));
+ }
+ }
+ } catch (Error &) { ignoreException(); }
+ #endif
+
+ return 0;
+}
//////////////////////////////////////////////////////////////////////
@@ -1588,7 +1585,6 @@ std::string stripIndentation(std::string_view s)
//////////////////////////////////////////////////////////////////////
-
static Sync<std::pair<unsigned short, unsigned short>> windowSize{{0, 0}};
@@ -1819,7 +1815,7 @@ AutoCloseFD createUnixDomainSocket(const Path & path, mode_t mode)
if (chmod(path.c_str(), mode) == -1)
throw SysError("changing permissions on '%1%'", path);
- if (listen(fdSocket.get(), 5) == -1)
+ if (listen(fdSocket.get(), 100) == -1)
throw SysError("cannot listen on socket '%1%'", path);
return fdSocket;
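A worked example of the arithmetic in the `getMaxCPU()` helper added above; the `cpu.max` contents are made up for illustration.

```cpp
#include <cmath>
#include <iostream>

int main()
{
    // A cgroup v2 "cpu.max" file contains "<quota> <period>", e.g. "150000 100000".
    int quota = 150000;      // microseconds of CPU time allowed per period
    float period = 100000;   // period length in microseconds
    std::cout << std::ceil(quota / period) << "\n";   // prints 2
    // A quota of "max" means unlimited; getMaxCPU() then returns 0.
    return 0;
}
```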
diff --git a/src/libutil/util.hh b/src/libutil/util.hh
index a1d0e0e6b..cd83f250f 100644
--- a/src/libutil/util.hh
+++ b/src/libutil/util.hh
@@ -149,10 +149,14 @@ std::vector<Path> getConfigDirs();
/* Return $XDG_DATA_HOME or $HOME/.local/share. */
Path getDataDir();
+/* Return the path of the current executable. */
+std::optional<Path> getSelfExe();
+
/* Create a directory and all its parents, if necessary. Returns the
list of created directories, in order of creation. */
Paths createDirs(const Path & path);
-inline Paths createDirs(PathView path) {
+inline Paths createDirs(PathView path)
+{
return createDirs(Path(path));
}
@@ -164,6 +168,17 @@ void createSymlink(const Path & target, const Path & link,
void replaceSymlink(const Path & target, const Path & link,
std::optional<time_t> mtime = {});
+void renameFile(const Path & src, const Path & dst);
+
+/**
+ * Similar to 'renameFile', but falls back to a copy+remove if `src` and `dst`
+ * are on different filesystems.
+ *
+ * Beware that this might not be atomic because of the copy that happens
+ * behind the scenes.
+ */
+void moveFile(const Path & src, const Path & dst);
+
/* Wrappers around read()/write() that read/write exactly the
requested number of bytes. */
@@ -178,6 +193,9 @@ std::string drainFD(int fd, bool block = true, const size_t reserveSize=0);
void drainFD(int fd, Sink & sink, bool block = true);
+/* If cgroups are active, attempt to calculate the number of CPUs available.
+ If cgroups are unavailable or if cpu.max is set to "max", return 0. */
+unsigned int getMaxCPU();
/* Automatic cleanup of resources. */
@@ -543,13 +561,31 @@ std::string stripIndentation(std::string_view s);
/* Get a value for the specified key from an associative container. */
template <class T>
-std::optional<typename T::mapped_type> get(const T & map, const typename T::key_type & key)
+const typename T::mapped_type * get(const T & map, const typename T::key_type & key)
+{
+ auto i = map.find(key);
+ if (i == map.end()) return nullptr;
+ return &i->second;
+}
+
+template <class T>
+typename T::mapped_type * get(T & map, const typename T::key_type & key)
{
auto i = map.find(key);
- if (i == map.end()) return {};
- return std::optional<typename T::mapped_type>(i->second);
+ if (i == map.end()) return nullptr;
+ return &i->second;
}
+/* Get a value for the specified key from an associative container, or a default value if the key isn't present. */
+template <class T>
+const typename T::mapped_type & getOr(T & map,
+ const typename T::key_type & key,
+ const typename T::mapped_type & defaultValue)
+{
+ auto i = map.find(key);
+ if (i == map.end()) return defaultValue;
+ return i->second;
+}
/* Remove and return the first item from a container. */
template <class T>
@@ -682,4 +718,19 @@ template<class... Ts> overloaded(Ts...) -> overloaded<Ts...>;
std::string showBytes(uint64_t bytes);
+/* Provide an addition operator between strings and string_views, which is
+ inexplicably omitted from the standard library. */
+inline std::string operator + (const std::string & s1, std::string_view s2)
+{
+ auto s = s1;
+ s.append(s2);
+ return s;
+}
+
+inline std::string operator + (std::string && s, std::string_view s2)
+{
+ s.append(s2);
+ return std::move(s);
+}
+
}
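A short sketch of the new `get()`/`getOr()` semantics on an ordinary map (a pointer, or a default, instead of the old `std::optional`); illustrative only, since the unit tests earlier in this patch cover the same ground.

```cpp
#include <cassert>
#include <map>
#include <string>
#include "util.hh"

int main()
{
    std::map<std::string, std::string> m{{"one", "yi"}};
    assert(nix::get(m, "one") && *nix::get(m, "one") == "yi");
    assert(nix::get(m, "two") == nullptr);         // missing key -> nullptr
    assert(nix::getOr(m, "two", "er") == "er");    // missing key -> default
    return 0;
}
```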
diff --git a/src/nix-build/nix-build.cc b/src/nix-build/nix-build.cc
index faa8c078f..df292dce6 100755..100644
--- a/src/nix-build/nix-build.cc
+++ b/src/nix-build/nix-build.cc
@@ -257,11 +257,12 @@ static void main_nix_build(int argc, char * * argv)
auto autoArgs = myArgs.getAutoArgs(*state);
+ auto autoArgsWithInNixShell = autoArgs;
if (runEnv) {
- auto newArgs = state->buildBindings(autoArgs->size() + 1);
+ auto newArgs = state->buildBindings(autoArgsWithInNixShell->size() + 1);
newArgs.alloc("inNixShell").mkBool(true);
for (auto & i : *autoArgs) newArgs.insert(i);
- autoArgs = newArgs.finish();
+ autoArgsWithInNixShell = newArgs.finish();
}
if (packages) {
@@ -316,10 +317,39 @@ static void main_nix_build(int argc, char * * argv)
Value vRoot;
state->eval(e, vRoot);
+ std::function<bool(const Value & v)> takesNixShellAttr;
+ takesNixShellAttr = [&](const Value & v) {
+ if (!runEnv) {
+ return false;
+ }
+ bool add = false;
+ if (v.type() == nFunction && v.lambda.fun->hasFormals()) {
+ for (auto & i : v.lambda.fun->formals->formals) {
+ if (state->symbols[i.name] == "inNixShell") {
+ add = true;
+ break;
+ }
+ }
+ }
+ return add;
+ };
+
for (auto & i : attrPaths) {
- Value & v(*findAlongAttrPath(*state, i, *autoArgs, vRoot).first);
+ Value & v(*findAlongAttrPath(
+ *state,
+ i,
+ takesNixShellAttr(vRoot) ? *autoArgsWithInNixShell : *autoArgs,
+ vRoot
+ ).first);
state->forceValue(v, [&]() { return v.determinePos(noPos); });
- getDerivations(*state, v, "", *autoArgs, drvs, false);
+ getDerivations(
+ *state,
+ v,
+ "",
+ takesNixShellAttr(v) ? *autoArgsWithInNixShell : *autoArgs,
+ drvs,
+ false
+ );
}
}
@@ -371,7 +401,7 @@ static void main_nix_build(int argc, char * * argv)
auto bashDrv = drv->requireDrvPath();
pathsToBuild.push_back(DerivedPath::Built {
.drvPath = bashDrv,
- .outputs = {},
+ .outputs = {"out"},
});
pathsToCopy.insert(bashDrv);
shellDrv = bashDrv;
@@ -440,7 +470,7 @@ static void main_nix_build(int argc, char * * argv)
env["NIX_STORE"] = store->storeDir;
env["NIX_BUILD_CORES"] = std::to_string(settings.buildCores);
- auto passAsFile = tokenizeString<StringSet>(get(drv.env, "passAsFile").value_or(""));
+ auto passAsFile = tokenizeString<StringSet>(getOr(drv.env, "passAsFile", ""));
bool keepTmp = false;
int fileNr = 0;
diff --git a/src/nix-collect-garbage/nix-collect-garbage.cc b/src/nix-collect-garbage/nix-collect-garbage.cc
index af6f1c88c..e413faffe 100644
--- a/src/nix-collect-garbage/nix-collect-garbage.cc
+++ b/src/nix-collect-garbage/nix-collect-garbage.cc
@@ -37,6 +37,7 @@ void removeOldGenerations(std::string dir)
link = readLink(path);
} catch (SysError & e) {
if (e.errNo == ENOENT) continue;
+ throw;
}
if (link.find("link") != std::string::npos) {
printInfo(format("removing old generations of profile %1%") % path);
diff --git a/src/nix-env/nix-env.cc b/src/nix-env/nix-env.cc
index 96f3c3b26..fdd66220a 100644
--- a/src/nix-env/nix-env.cc
+++ b/src/nix-env/nix-env.cc
@@ -940,12 +940,12 @@ static void queryJSON(Globals & globals, std::vector<DrvInfo> & elems, bool prin
JSONObject metaObj = pkgObj.object("meta");
StringSet metaNames = i.queryMetaNames();
for (auto & j : metaNames) {
- auto placeholder = metaObj.placeholder(j);
Value * v = i.queryMeta(j);
if (!v) {
printError("derivation '%s' has invalid meta attribute '%s'", i.queryName(), j);
- placeholder.write(nullptr);
+ metaObj.attr(j, nullptr);
} else {
+ auto placeholder = metaObj.placeholder(j);
PathSet context;
printValueAsJSON(*globals.state, true, *v, noPos, placeholder, context);
}
@@ -1485,12 +1485,10 @@ static int main_nix_env(int argc, char * * argv)
if (globals.profile == "")
globals.profile = getDefaultProfile();
- op(globals, opFlags, opArgs);
+ op(globals, std::move(opFlags), std::move(opArgs));
globals.state->printStats();
- logger->stop();
-
return 0;
}
}
diff --git a/src/nix-instantiate/nix-instantiate.cc b/src/nix-instantiate/nix-instantiate.cc
index d3144e131..6b5ba595d 100644
--- a/src/nix-instantiate/nix-instantiate.cc
+++ b/src/nix-instantiate/nix-instantiate.cc
@@ -52,9 +52,10 @@ void processExpr(EvalState & state, const Strings & attrPaths,
state.autoCallFunction(autoArgs, v, vRes);
if (output == okXML)
printValueAsXML(state, strict, location, vRes, std::cout, context, noPos);
- else if (output == okJSON)
+ else if (output == okJSON) {
printValueAsJSON(state, strict, vRes, v.determinePos(noPos), std::cout, context);
- else {
+ std::cout << std::endl;
+ } else {
if (strict) state.forceValueDeep(vRes);
vRes.print(state.symbols, std::cout);
std::cout << std::endl;
diff --git a/src/nix-store/nix-store.cc b/src/nix-store/nix-store.cc
index 153b84137..23f2ad3cf 100644
--- a/src/nix-store/nix-store.cc
+++ b/src/nix-store/nix-store.cc
@@ -922,7 +922,7 @@ static void opServe(Strings opFlags, Strings opArgs)
if (GET_PROTOCOL_MINOR(clientVersion) >= 3)
out << status.timesBuilt << status.isNonDeterministic << status.startTime << status.stopTime;
- if (GET_PROTOCOL_MINOR(clientVersion >= 6)) {
+ if (GET_PROTOCOL_MINOR(clientVersion) >= 6) {
worker_proto::write(*store, out, status.builtOutputs);
}
@@ -1093,9 +1093,7 @@ static int main_nix_store(int argc, char * * argv)
if (op != opDump && op != opRestore) /* !!! hack */
store = openStore();
- op(opFlags, opArgs);
-
- logger->stop();
+ op(std::move(opFlags), std::move(opArgs));
return 0;
}
diff --git a/src/nix/app.cc b/src/nix/app.cc
index cce84d026..821964f86 100644
--- a/src/nix/app.cc
+++ b/src/nix/app.cc
@@ -89,7 +89,7 @@ UnresolvedApp Installable::toApp(EvalState & state)
auto outputName = cursor->getAttr(state.sOutputName)->getString();
auto name = cursor->getAttr(state.sName)->getString();
auto aPname = cursor->maybeGetAttr("pname");
- auto aMeta = cursor->maybeGetAttr("meta");
+ auto aMeta = cursor->maybeGetAttr(state.sMeta);
auto aMainProgram = aMeta ? aMeta->maybeGetAttr("mainProgram") : nullptr;
auto mainProgram =
aMainProgram
diff --git a/src/nix/bundle.cc b/src/nix/bundle.cc
index 80fcad07a..74a7973b0 100644
--- a/src/nix/bundle.cc
+++ b/src/nix/bundle.cc
@@ -75,10 +75,10 @@ struct CmdBundle : InstallableCommand
auto val = installable->toValue(*evalState).first;
- auto [bundlerFlakeRef, bundlerName] = parseFlakeRefWithFragment(bundler, absPath("."));
+ auto [bundlerFlakeRef, bundlerName, outputsSpec] = parseFlakeRefWithFragmentAndOutputsSpec(bundler, absPath("."));
const flake::LockFlags lockFlags{ .writeLockFile = false };
InstallableFlake bundler{this,
- evalState, std::move(bundlerFlakeRef), bundlerName,
+ evalState, std::move(bundlerFlakeRef), bundlerName, outputsSpec,
{"bundlers." + settings.thisSystem.get() + ".default",
"defaultBundler." + settings.thisSystem.get()
},
diff --git a/src/nix/bundle.md b/src/nix/bundle.md
index 2bb70711f..a18161a3c 100644
--- a/src/nix/bundle.md
+++ b/src/nix/bundle.md
@@ -44,7 +44,7 @@ flake output attributes:
* `bundlers.<system>.default`
-If an attribute *name* is given, `nix run` tries the following flake
+If an attribute *name* is given, `nix bundle` tries the following flake
output attributes:
* `bundlers.<system>.<name>`
diff --git a/src/nix/develop.cc b/src/nix/develop.cc
index 7fc74d34e..ba7ba7c25 100644
--- a/src/nix/develop.cc
+++ b/src/nix/develop.cc
@@ -18,6 +18,9 @@ struct DevelopSettings : Config
Setting<std::string> bashPrompt{this, "", "bash-prompt",
"The bash prompt (`PS1`) in `nix develop` shells."};
+ Setting<std::string> bashPromptPrefix{this, "", "bash-prompt-prefix",
+ "Prefix prepended to the `PS1` environment variable in `nix develop` shells."};
+
Setting<std::string> bashPromptSuffix{this, "", "bash-prompt-suffix",
"Suffix appended to the `PS1` environment variable in `nix develop` shells."};
};
@@ -273,15 +276,27 @@ struct Common : InstallableCommand, MixProfile
const BuildEnvironment & buildEnvironment,
const Path & outputsDir = absPath(".") + "/outputs")
{
+ // A list of colon-separated environment variables that should be
+ // prepended to, rather than overwritten, in order to keep the shell usable.
+ // Please keep this list minimal in order to avoid impurities.
+ static const char * const savedVars[] = {
+ "PATH", // for commands
+ "XDG_DATA_DIRS", // for loadable completion
+ };
+
std::ostringstream out;
out << "unset shellHook\n";
- out << "nix_saved_PATH=\"$PATH\"\n";
+ for (auto & var : savedVars) {
+ out << fmt("%s=${%s:-}\n", var, var);
+ out << fmt("nix_saved_%s=\"$%s\"\n", var, var);
+ }
buildEnvironment.toBash(out, ignoreVars);
- out << "PATH=\"$PATH:$nix_saved_PATH\"\n";
+ for (auto & var : savedVars)
+ out << fmt("%s=\"$%s:$nix_saved_%s\"\n", var, var, var);
out << "export NIX_BUILD_TOP=\"$(mktemp -d -t nix-shell.XXXXXX)\"\n";
for (auto & i : {"TMP", "TMPDIR", "TEMP", "TEMPDIR"})
@@ -482,6 +497,9 @@ struct CmdDevelop : Common, MixEnvironment
if (developSettings.bashPrompt != "")
script += fmt("[ -n \"$PS1\" ] && PS1=%s;\n",
shellEscape(developSettings.bashPrompt.get()));
+ if (developSettings.bashPromptPrefix != "")
+ script += fmt("[ -n \"$PS1\" ] && PS1=%s\"$PS1\";\n",
+ shellEscape(developSettings.bashPromptPrefix.get()));
if (developSettings.bashPromptSuffix != "")
script += fmt("[ -n \"$PS1\" ] && PS1+=%s;\n",
shellEscape(developSettings.bashPromptSuffix.get()));
@@ -507,13 +525,25 @@ struct CmdDevelop : Common, MixEnvironment
state,
installable->nixpkgsFlakeRef(),
"bashInteractive",
+ DefaultOutputs(),
Strings{},
Strings{"legacyPackages." + settings.thisSystem.get() + "."},
nixpkgsLockFlags);
- shell = store->printStorePath(
- Installable::toStorePath(getEvalStore(), store, Realise::Outputs, OperateOn::Output, bashInstallable))
- + "/bin/bash";
+ bool found = false;
+
+ for (auto & path : Installable::toStorePaths(getEvalStore(), store, Realise::Outputs, OperateOn::Output, {bashInstallable})) {
+ auto s = store->printStorePath(path) + "/bin/bash";
+ if (pathExists(s)) {
+ shell = s;
+ found = true;
+ break;
+ }
+ }
+
+ if (!found)
+ throw Error("package 'nixpkgs#bashInteractive' does not provide a 'bin/bash'");
+
} catch (Error &) {
ignoreException();
}
diff --git a/src/nix/develop.md b/src/nix/develop.md
index 8bcff66c9..e036ec6b9 100644
--- a/src/nix/develop.md
+++ b/src/nix/develop.md
@@ -80,8 +80,8 @@ initialised by `stdenv` and exits. This build environment can be
recorded into a profile using `--profile`.
The prompt used by the `bash` shell can be customised by setting the
-`bash-prompt` and `bash-prompt-suffix` settings in `nix.conf` or in
-the flake's `nixConfig` attribute.
+`bash-prompt`, `bash-prompt-prefix`, and `bash-prompt-suffix` settings in
+`nix.conf` or in the flake's `nixConfig` attribute.
# Flake output attributes
diff --git a/src/nix/eval.cc b/src/nix/eval.cc
index d07301619..5dedf02c2 100644
--- a/src/nix/eval.cc
+++ b/src/nix/eval.cc
@@ -116,7 +116,8 @@ struct CmdEval : MixJSON, InstallableCommand
else if (json) {
JSONPlaceholder jsonOut(std::cout);
- printValueAsJSON(*state, true, *v, pos, jsonOut, context);
+ printValueAsJSON(*state, true, *v, pos, jsonOut, context, false);
+ std::cout << std::endl;
}
else {
diff --git a/src/nix/flake-update.md b/src/nix/flake-update.md
index 03b50e38e..2ee8a707d 100644
--- a/src/nix/flake-update.md
+++ b/src/nix/flake-update.md
@@ -6,7 +6,7 @@ R""(
lock file:
```console
- # nix flake update
+ # nix flake update --commit-lock-file
* Updated 'nix': 'github:NixOS/nix/9fab14adbc3810d5cc1f88672fde1eee4358405c' -> 'github:NixOS/nix/8927cba62f5afb33b01016d5c4f7f8b7d0adde3c'
* Updated 'nixpkgs': 'github:NixOS/nixpkgs/3d2d8f281a27d466fa54b469b5993f7dde198375' -> 'github:NixOS/nixpkgs/a3a3dda3bacf61e8a39258a0ed9c924eeca8e293'
diff --git a/src/nix/flake.cc b/src/nix/flake.cc
index f88676dc4..3d90cfc05 100644
--- a/src/nix/flake.cc
+++ b/src/nix/flake.cc
@@ -50,9 +50,9 @@ public:
return flake::lockFlake(*getEvalState(), getFlakeRef(), lockFlags);
}
- std::optional<FlakeRef> getFlakeRefForCompletion() override
+ std::vector<std::string> getFlakesForCompletion() override
{
- return getFlakeRef();
+ return {flakeUrl};
}
};
@@ -212,7 +212,8 @@ struct CmdFlakeMetadata : FlakeCommand, MixJSON
ANSI_BOLD "Last modified:" ANSI_NORMAL " %s",
std::put_time(std::localtime(&*lastModified), "%F %T"));
- logger->cout(ANSI_BOLD "Inputs:" ANSI_NORMAL);
+ if (!lockedFlake.lockFile.root->inputs.empty())
+ logger->cout(ANSI_BOLD "Inputs:" ANSI_NORMAL);
std::unordered_set<std::shared_ptr<Node>> visited;
@@ -509,7 +510,7 @@ struct CmdFlakeCheck : FlakeCommand
std::string_view replacement =
name == "defaultPackage" ? "packages.<system>.default" :
- name == "defaultApps" ? "apps.<system>.default" :
+ name == "defaultApp" ? "apps.<system>.default" :
name == "defaultTemplate" ? "templates.default" :
name == "defaultBundler" ? "bundlers.<system>.default" :
name == "overlay" ? "overlays.default" :
@@ -724,7 +725,7 @@ struct CmdFlakeInitCommon : virtual Args, EvalCommand
auto [templateFlakeRef, templateName] = parseFlakeRefWithFragment(templateUrl, absPath("."));
auto installable = InstallableFlake(nullptr,
- evalState, std::move(templateFlakeRef), templateName,
+ evalState, std::move(templateFlakeRef), templateName, DefaultOutputs(),
defaultTemplateAttrPaths,
defaultTemplateAttrPathsPrefixes,
lockFlags);
@@ -740,7 +741,8 @@ struct CmdFlakeInitCommon : virtual Args, EvalCommand
"If you've set '%s' to a string, try using a path instead.",
templateDir, templateDirAttr->getAttrPathStr());
- std::vector<Path> files;
+ std::vector<Path> changedFiles;
+ std::vector<Path> conflictedFiles;
std::function<void(const Path & from, const Path & to)> copyDir;
copyDir = [&](const Path & from, const Path & to)
@@ -757,31 +759,41 @@ struct CmdFlakeInitCommon : virtual Args, EvalCommand
auto contents = readFile(from2);
if (pathExists(to2)) {
auto contents2 = readFile(to2);
- if (contents != contents2)
- throw Error("refusing to overwrite existing file '%s'", to2);
+ if (contents != contents2) {
+ printError("refusing to overwrite existing file '%s'\n please merge it manually with '%s'", to2, from2);
+ conflictedFiles.push_back(to2);
+ } else {
+ notice("skipping identical file: %s", from2);
+ }
+ continue;
} else
writeFile(to2, contents);
}
else if (S_ISLNK(st.st_mode)) {
auto target = readLink(from2);
if (pathExists(to2)) {
- if (readLink(to2) != target)
- throw Error("refusing to overwrite existing symlink '%s'", to2);
+ if (readLink(to2) != target) {
+ printError("refusing to overwrite existing file '%s'\n please merge it manually with '%s'", to2, from2);
+ conflictedFiles.push_back(to2);
+ } else {
+ notice("skipping identical file: %s", from2);
+ }
+ continue;
} else
createSymlink(target, to2);
}
else
throw Error("file '%s' has unsupported type", from2);
- files.push_back(to2);
+ changedFiles.push_back(to2);
notice("wrote: %s", to2);
}
};
copyDir(templateDir, flakeDir);
- if (pathExists(flakeDir + "/.git")) {
+ if (!changedFiles.empty() && pathExists(flakeDir + "/.git")) {
Strings args = { "-C", flakeDir, "add", "--intent-to-add", "--force", "--" };
- for (auto & s : files) args.push_back(s);
+ for (auto & s : changedFiles) args.push_back(s);
runProgram("git", true, args);
}
auto welcomeText = cursor->maybeGetAttr("welcomeText");
@@ -789,6 +801,9 @@ struct CmdFlakeInitCommon : virtual Args, EvalCommand
notice("\n");
notice(renderMarkdownToTerminal(welcomeText->getString()));
}
+
+ if (!conflictedFiles.empty())
+ throw Error("Encountered %d conflicts - see above", conflictedFiles.size());
}
};
@@ -1015,8 +1030,8 @@ struct CmdFlakeShow : FlakeCommand, MixJSON
auto name = visitor.getAttr(state->sName)->getString();
if (json) {
std::optional<std::string> description;
- if (auto aMeta = visitor.maybeGetAttr("meta")) {
- if (auto aDescription = aMeta->maybeGetAttr("description"))
+ if (auto aMeta = visitor.maybeGetAttr(state->sMeta)) {
+ if (auto aDescription = aMeta->maybeGetAttr(state->sDescription))
description = aDescription->getString();
}
j.emplace("type", "derivation");
@@ -1076,9 +1091,13 @@ struct CmdFlakeShow : FlakeCommand, MixJSON
else if (attrPath.size() > 0 && attrPathS[0] == "legacyPackages") {
if (attrPath.size() == 1)
recurse();
- else if (!showLegacy)
- logger->warn(fmt("%s: " ANSI_WARNING "omitted" ANSI_NORMAL " (use '--legacy' to show)", headerPrefix));
- else {
+ else if (!showLegacy) {
+ if (!json)
+ logger->cout(fmt("%s " ANSI_WARNING "omitted" ANSI_NORMAL " (use '--legacy' to show)", headerPrefix));
+ else {
+ logger->warn(fmt("%s omitted (use '--legacy' to show)", concatStringsSep(".", attrPathS)));
+ }
+ } else {
if (visitor.isDerivation())
showDerivation();
else if (attrPath.size() <= 2)
diff --git a/src/nix/flake.md b/src/nix/flake.md
index 7d179a6c4..a1ab43281 100644
--- a/src/nix/flake.md
+++ b/src/nix/flake.md
@@ -153,7 +153,7 @@ Currently the `type` attribute can be one of the following:
git(+http|+https|+ssh|+git|+file|):(//<server>)?<path>(\?<params>)?
```
- The `ref` attribute defaults to `master`.
+ The `ref` attribute defaults to resolving the `HEAD` reference.
The `rev` attribute must denote a commit that exists in the branch
or tag specified by the `ref` attribute, since Nix doesn't do a full
@@ -161,6 +161,11 @@ Currently the `type` attribute can be one of the following:
doesn't allow fetching a `rev` without a known `ref`). The default
is the commit currently pointed to by `ref`.
+ When `git+file` is used without specifying `ref` or `rev`, files are
+ fetched directly from the local `path` as long as they have been added
+ to the Git repository. If there are uncommitted changes, the reference
+ is treated as dirty and a warning is printed.
+
For example, the following are valid Git flake references:
* `git+https://example.org/my/repo`
@@ -176,9 +181,17 @@ Currently the `type` attribute can be one of the following:
* `tarball`: Tarballs. The location of the tarball is specified by the
attribute `url`.
- In URL form, the schema must be `http://`, `https://` or `file://`
- URLs and the extension must be `.zip`, `.tar`, `.tgz`, `.tar.gz`,
- `.tar.xz`, `.tar.bz2` or `.tar.zst`.
+ In URL form, the scheme must be `tarball+http://`, `tarball+https://` or `tarball+file://`.
+ If the extension corresponds to a known archive format (`.zip`, `.tar`,
+ `.tgz`, `.tar.gz`, `.tar.xz`, `.tar.bz2` or `.tar.zst`), then the `tarball+`
+ can be dropped.
+
+* `file`: Plain files or directory tarballs, either over http(s) or from the local
+ disk.
+
+ In URL form, the scheme must be `file+http://`, `file+https://` or `file+file://`.
+ If the extension doesn’t correspond to a known archive format (as defined by the
+ `tarball` fetcher), then the `file+` prefix can be dropped.
* `github`: A more efficient way to fetch repositories from
GitHub. The following attributes are required:
@@ -326,9 +339,10 @@ The following attributes are supported in `flake.nix`:
* `nixConfig`: a set of `nix.conf` options to be set when evaluating any
part of a flake. In the interests of security, only a small set of
- whitelisted options (currently `bash-prompt`, `bash-prompt-suffix`,
- and `flake-registry`) are allowed to be set without confirmation so long as
- `accept-flake-config` is not set in the global configuration.
+ whitelisted options (currently `bash-prompt`, `bash-prompt-prefix`,
+ `bash-prompt-suffix`, and `flake-registry`) are allowed to be set without
+ confirmation so long as `accept-flake-config` is not set in the global
+ configuration.
## Flake inputs
diff --git a/src/nix/fmt.cc b/src/nix/fmt.cc
index e5d44bd38..6f6a4a632 100644
--- a/src/nix/fmt.cc
+++ b/src/nix/fmt.cc
@@ -26,7 +26,8 @@ struct CmdFmt : SourceExprCommand {
Strings getDefaultFlakeAttrPathPrefixes() override { return Strings{}; }
- void run(ref<Store> store) {
+ void run(ref<Store> store) override
+ {
auto evalState = getEvalState();
auto evalStore = getEvalStore();
diff --git a/src/nix/get-env.sh b/src/nix/get-env.sh
index 42c806450..a7a8a01b9 100644
--- a/src/nix/get-env.sh
+++ b/src/nix/get-env.sh
@@ -43,6 +43,7 @@ __dumpEnv() {
local __var_name="${BASH_REMATCH[2]}"
if [[ $__var_name =~ ^BASH_ || \
+ $__var_name =~ ^COMP_ || \
$__var_name = _ || \
$__var_name = DIRSTACK || \
$__var_name = EUID || \
@@ -54,7 +55,9 @@ __dumpEnv() {
$__var_name = PWD || \
$__var_name = RANDOM || \
$__var_name = SHLVL || \
- $__var_name = SECONDS \
+ $__var_name = SECONDS || \
+ $__var_name = EPOCHREALTIME || \
+ $__var_name = EPOCHSECONDS \
]]; then continue; fi
if [[ -z $__first ]]; then printf ',\n'; else __first=; fi
diff --git a/src/nix/key-generate-secret.md b/src/nix/key-generate-secret.md
index 4938f637c..609b1abcc 100644
--- a/src/nix/key-generate-secret.md
+++ b/src/nix/key-generate-secret.md
@@ -30,7 +30,7 @@ convert-secret-to-public` to get the corresponding public key for
verifying signed store paths.
The mandatory argument `--key-name` specifies a key name (such as
-`cache.example.org-1). It is used to look up keys on the client when
+`cache.example.org-1`). It is used to look up keys on the client when
it verifies signatures. It can be anything, but it’s suggested to use
the host name of your cache (e.g. `cache.example.org`) with a suffix
denoting the number of the key (to be incremented every time you need
diff --git a/src/nix/main.cc b/src/nix/main.cc
index 9fab1d6e3..85a3835c2 100644
--- a/src/nix/main.cc
+++ b/src/nix/main.cc
@@ -82,7 +82,7 @@ struct NixArgs : virtual MultiCommand, virtual MixCommonArgs
.shortName = 'L',
.description = "Print full build logs on standard error.",
.category = loggingCategory,
- .handler = {[&]() {setLogFormat(LogFormat::barWithLogs); }},
+ .handler = {[&]() { logger->setPrintBuildLogs(true); }},
});
addFlag({
@@ -261,9 +261,16 @@ void mainWrapped(int argc, char * * argv)
}
#endif
+ Finally f([] { logger->stop(); });
+
programPath = argv[0];
auto programName = std::string(baseNameOf(programPath));
+ if (argc > 0 && std::string_view(argv[0]) == "__build-remote") {
+ programName = "build-remote";
+ argv++; argc--;
+ }
+
{
auto legacy = (*RegisterLegacyCommand::commands)[programName];
if (legacy) return legacy(argc, argv);
@@ -279,8 +286,6 @@ void mainWrapped(int argc, char * * argv)
verbosity = lvlInfo;
}
- Finally f([] { logger->stop(); });
-
NixArgs args;
if (argc == 2 && std::string(argv[1]) == "__dump-args") {
@@ -342,7 +347,10 @@ void mainWrapped(int argc, char * * argv)
if (!completions) throw;
}
- if (completions) return;
+ if (completions) {
+ args.completionHook();
+ return;
+ }
if (args.showVersion) {
printVersion(programName);
@@ -380,6 +388,9 @@ void mainWrapped(int argc, char * * argv)
settings.ttlPositiveNarInfoCache = 0;
}
+ if (args.command->second->forceImpureByDefault() && !evalSettings.pureEval.overridden) {
+ evalSettings.pureEval = false;
+ }
args.command->second->prepare();
args.command->second->run();
}
diff --git a/src/nix/nix.md b/src/nix/nix.md
index 0dacadee6..d48682a94 100644
--- a/src/nix/nix.md
+++ b/src/nix/nix.md
@@ -146,6 +146,51 @@ For most commands, if no installable is specified, the default is `.`,
i.e. Nix will operate on the default flake output attribute of the
flake in the current directory.
+## Derivation output selection
+
+Derivations can have multiple outputs, each corresponding to a
+different store path. For instance, a package can have a `bin` output
+that contains programs, and a `dev` output that provides development
+artifacts like C/C++ header files. The outputs on which `nix` commands
+operate are determined as follows:
+
+* You can explicitly specify the desired outputs using the syntax
+ *installable*`^`*output1*`,`*...*`,`*outputN*. For example, you can
+ obtain the `dev` and `static` outputs of the `glibc` package:
+
+ ```console
+ # nix build 'nixpkgs#glibc^dev,static'
+ # ls ./result-dev/include/ ./result-static/lib/
+ …
+ ```
+
+* You can also specify that *all* outputs should be used using the
+ syntax *installable*`^*`. For example, the following shows the size
+ of all outputs of the `glibc` package in the binary cache:
+
+ ```console
+ # nix path-info -S --eval-store auto --store https://cache.nixos.org 'nixpkgs#glibc^*'
+ /nix/store/g02b1lpbddhymmcjb923kf0l7s9nww58-glibc-2.33-123 33208200
+ /nix/store/851dp95qqiisjifi639r0zzg5l465ny4-glibc-2.33-123-bin 36142896
+ /nix/store/kdgs3q6r7xdff1p7a9hnjr43xw2404z7-glibc-2.33-123-debug 155787312
+ /nix/store/n4xa8h6pbmqmwnq0mmsz08l38abb06zc-glibc-2.33-123-static 42488328
+ /nix/store/q6580lr01jpcsqs4r5arlh4ki2c1m9rv-glibc-2.33-123-dev 44200560
+ ```
+
+* If you didn't specify the desired outputs, but the derivation has an
+ attribute `meta.outputsToInstall`, Nix will use those outputs. For
+ example, since the package `nixpkgs#libxml2` has this attribute:
+
+ ```console
+ # nix eval 'nixpkgs#libxml2.meta.outputsToInstall'
+ [ "bin" "man" ]
+ ```
+
+ a command like `nix shell nixpkgs#libxml2` will provide only those
+ two outputs by default.
+
+* Otherwise, Nix will use all outputs of the derivation.
+
# Nix stores
Most `nix` subcommands operate on a *Nix store*.
diff --git a/src/nix/profile-install.md b/src/nix/profile-install.md
index e3009491e..aed414963 100644
--- a/src/nix/profile-install.md
+++ b/src/nix/profile-install.md
@@ -20,6 +20,13 @@ R""(
# nix profile install nixpkgs/d73407e8e6002646acfdef0e39ace088bacc83da#hello
```
+* Install a specific output of a package:
+
+ ```console
+ # nix profile install nixpkgs#bash^man
+ ```
+
+
# Description
This command adds *installables* to a Nix profile.
diff --git a/src/nix/profile.cc b/src/nix/profile.cc
index b151e48d6..3814e7d5a 100644
--- a/src/nix/profile.cc
+++ b/src/nix/profile.cc
@@ -22,13 +22,13 @@ struct ProfileElementSource
// FIXME: record original attrpath.
FlakeRef resolvedRef;
std::string attrPath;
- // FIXME: output names
+ OutputsSpec outputs;
bool operator < (const ProfileElementSource & other) const
{
return
- std::pair(originalRef.to_string(), attrPath) <
- std::pair(other.originalRef.to_string(), other.attrPath);
+ std::tuple(originalRef.to_string(), attrPath, outputs) <
+ std::tuple(other.originalRef.to_string(), other.attrPath, other.outputs);
}
};
@@ -37,12 +37,12 @@ struct ProfileElement
StorePathSet storePaths;
std::optional<ProfileElementSource> source;
bool active = true;
- // FIXME: priority
+ int priority = 5;
std::string describe() const
{
if (source)
- return fmt("%s#%s", source->originalRef, source->attrPath);
+ return fmt("%s#%s%s", source->originalRef, source->attrPath, printOutputsSpec(source->outputs));
StringSet names;
for (auto & path : storePaths)
names.insert(DrvName(path.name()).name);
@@ -67,7 +67,6 @@ struct ProfileElement
ref<Store> store,
const BuiltPaths & builtPaths)
{
- // FIXME: respect meta.outputsToInstall
storePaths.clear();
for (auto & buildable : builtPaths) {
std::visit(overloaded {
@@ -99,7 +98,7 @@ struct ProfileManifest
auto version = json.value("version", 0);
std::string sUrl;
std::string sOriginalUrl;
- switch(version){
+ switch (version) {
case 1:
sUrl = "uri";
sOriginalUrl = "originalUri";
@@ -117,11 +116,15 @@ struct ProfileManifest
for (auto & p : e["storePaths"])
element.storePaths.insert(state.store->parseStorePath((std::string) p));
element.active = e["active"];
- if (e.value(sUrl,"") != "") {
- element.source = ProfileElementSource{
+ if (e.contains("priority")) {
+ element.priority = e["priority"];
+ }
+ if (e.value(sUrl, "") != "") {
+ element.source = ProfileElementSource {
parseFlakeRef(e[sOriginalUrl]),
parseFlakeRef(e[sUrl]),
- e["attrPath"]
+ e["attrPath"],
+ e["outputs"].get<OutputsSpec>()
};
}
elements.emplace_back(std::move(element));
@@ -153,10 +156,12 @@ struct ProfileManifest
nlohmann::json obj;
obj["storePaths"] = paths;
obj["active"] = element.active;
+ obj["priority"] = element.priority;
if (element.source) {
obj["originalUrl"] = element.source->originalRef.to_string();
obj["url"] = element.source->resolvedRef.to_string();
obj["attrPath"] = element.source->attrPath;
+ obj["outputs"] = element.source->outputs;
}
array.push_back(obj);
}
@@ -176,7 +181,7 @@ struct ProfileManifest
for (auto & element : elements) {
for (auto & path : element.storePaths) {
if (element.active)
- pkgs.emplace_back(store->printStorePath(path), true, 5);
+ pkgs.emplace_back(store->printStorePath(path), true, element.priority);
references.insert(path);
}
}
@@ -258,6 +263,17 @@ builtPathsPerInstallable(
struct CmdProfileInstall : InstallablesCommand, MixDefaultProfile
{
+ std::optional<int64_t> priority;
+
+ CmdProfileInstall() {
+ addFlag({
+ .longName = "priority",
+ .description = "The priority of the package to install.",
+ .labels = {"priority"},
+ .handler = {&priority},
+ });
+ };
+
std::string description() override
{
return "install a package into a profile";
@@ -281,16 +297,27 @@ struct CmdProfileInstall : InstallablesCommand, MixDefaultProfile
for (auto & installable : installables) {
ProfileElement element;
+
+
if (auto installable2 = std::dynamic_pointer_cast<InstallableFlake>(installable)) {
// FIXME: make build() return this?
auto [attrPath, resolvedRef, drv] = installable2->toDerivation();
- element.source = ProfileElementSource{
+ element.source = ProfileElementSource {
installable2->flakeRef,
resolvedRef,
attrPath,
+ installable2->outputsSpec
};
+
+ if(drv.priority) {
+ element.priority = *drv.priority;
+ }
}
+            if (priority) { // If --priority was specified, override the priority of the installable.
+                element.priority = *priority;
+            }
+
element.updateStorePaths(getEvalStore(), store, builtPaths[installable.get()]);
manifest.elements.push_back(std::move(element));
@@ -444,6 +471,7 @@ struct CmdProfileUpgrade : virtual SourceExprCommand, MixDefaultProfile, MixProf
getEvalState(),
FlakeRef(element.source->originalRef),
"",
+ element.source->outputs,
Strings{element.source->attrPath},
Strings{},
lockFlags);
@@ -455,10 +483,11 @@ struct CmdProfileUpgrade : virtual SourceExprCommand, MixDefaultProfile, MixProf
printInfo("upgrading '%s' from flake '%s' to '%s'",
element.source->attrPath, element.source->resolvedRef, resolvedRef);
- element.source = ProfileElementSource{
+ element.source = ProfileElementSource {
installable->flakeRef,
resolvedRef,
attrPath,
+ installable->outputsSpec
};
installables.push_back(installable);
@@ -514,8 +543,8 @@ struct CmdProfileList : virtual EvalCommand, virtual StoreCommand, MixDefaultPro
for (size_t i = 0; i < manifest.elements.size(); ++i) {
auto & element(manifest.elements[i]);
logger->cout("%d %s %s %s", i,
- element.source ? element.source->originalRef.to_string() + "#" + element.source->attrPath : "-",
- element.source ? element.source->resolvedRef.to_string() + "#" + element.source->attrPath : "-",
+ element.source ? element.source->originalRef.to_string() + "#" + element.source->attrPath + printOutputsSpec(element.source->outputs) : "-",
+ element.source ? element.source->resolvedRef.to_string() + "#" + element.source->attrPath + printOutputsSpec(element.source->outputs) : "-",
concatStringsSep(" ", store->printStorePathSet(element.storePaths)));
}
}
diff --git a/src/nix/profile.md b/src/nix/profile.md
index 8dade051d..be3c5ba1a 100644
--- a/src/nix/profile.md
+++ b/src/nix/profile.md
@@ -11,7 +11,7 @@ them to be rolled back easily.
The default profile used by `nix profile` is `$HOME/.nix-profile`,
which, if it does not exist, is created as a symlink to
-`/nix/var/nix/profiles/per-user/default` if Nix is invoked by the
+`/nix/var/nix/profiles/default` if Nix is invoked by the
`root` user, or `/nix/var/nix/profiles/per-user/`*username* otherwise.
You can specify another profile location using `--profile` *path*.
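For instance, a throwaway profile can be managed entirely outside the default location; a sketch using the existing `--profile` flag:

```console
# nix profile install nixpkgs#hello --profile /tmp/my-profile
# nix profile list --profile /tmp/my-profile
```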
diff --git a/src/nix/registry.md b/src/nix/registry.md
index d5c9ef442..bd3575d1b 100644
--- a/src/nix/registry.md
+++ b/src/nix/registry.md
@@ -29,7 +29,7 @@ highest precedence:
can be specified using the NixOS option `nix.registry`.
* The user registry `~/.config/nix/registry.json`. This registry can
- be modified by commands such as `nix flake pin`.
+ be modified by commands such as `nix registry pin`.
* Overrides specified on the command line using the option
`--override-flake`.
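A sketch of the corrected command in use, pinning an entry in the user registry and checking the result:

```console
# nix registry pin nixpkgs
# nix registry list | grep nixpkgs
```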
diff --git a/src/nix/repl.md b/src/nix/repl.md
index 9b6f2bee3..23ef0f4e6 100644
--- a/src/nix/repl.md
+++ b/src/nix/repl.md
@@ -24,10 +24,34 @@ R""(
* Interact with Nixpkgs in the REPL:
```console
- # nix repl '<nixpkgs>'
+ # nix repl --file example.nix
+ Loading Installable ''...
+ Added 3 variables.
- Loading '<nixpkgs>'...
- Added 12428 variables.
+ # nix repl --expr '{a={b=3;c=4;};}'
+ Loading Installable ''...
+ Added 1 variables.
+
+ # nix repl --expr '{a={b=3;c=4;};}' a
+ Loading Installable ''...
+ Added 1 variables.
+
+  # nix repl --extra-experimental-features 'flakes repl-flake' nixpkgs
+ Loading Installable 'flake:nixpkgs#'...
+ Added 5 variables.
+
+ nix-repl> legacyPackages.x86_64-linux.emacs.name
+ "emacs-27.1"
+
+ nix-repl> legacyPackages.x86_64-linux.emacs.name
+ "emacs-27.1"
+
+ nix-repl> :q
+
+ # nix repl --expr 'import <nixpkgs>{}'
+
+ Loading Installable ''...
+ Added 12439 variables.
nix-repl> emacs.name
"emacs-27.1"
diff --git a/src/nix/run.cc b/src/nix/run.cc
index 25a8fa8d3..45d2dfd0d 100644
--- a/src/nix/run.cc
+++ b/src/nix/run.cc
@@ -47,7 +47,7 @@ void runProgramInStore(ref<Store> store,
Strings helperArgs = { chrootHelperName, store->storeDir, store2->getRealStoreDir(), program };
for (auto & arg : args) helperArgs.push_back(arg);
- execv(readLink("/proc/self/exe").c_str(), stringsToCharPtrs(helperArgs).data());
+ execv(getSelfExe().value_or("nix").c_str(), stringsToCharPtrs(helperArgs).data());
throw SysError("could not execute chroot helper");
}
diff --git a/src/nix/search.cc b/src/nix/search.cc
index 76451f810..bdd45cbed 100644
--- a/src/nix/search.cc
+++ b/src/nix/search.cc
@@ -18,16 +18,26 @@ using namespace nix;
std::string wrap(std::string prefix, std::string s)
{
- return prefix + s + ANSI_NORMAL;
+ return concatStrings(prefix, s, ANSI_NORMAL);
}
struct CmdSearch : InstallableCommand, MixJSON
{
std::vector<std::string> res;
+ std::vector<std::string> excludeRes;
CmdSearch()
{
expectArgs("regex", &res);
+ addFlag(Flag {
+ .longName = "exclude",
+ .shortName = 'e',
+        .description = "Hide packages whose attribute path, name, or description contains *regex*.",
+ .labels = {"regex"},
+ .handler = {[this](std::string s) {
+ excludeRes.push_back(s);
+ }},
+ });
}
std::string description() override
@@ -62,11 +72,16 @@ struct CmdSearch : InstallableCommand, MixJSON
res.push_back("^");
std::vector<std::regex> regexes;
+ std::vector<std::regex> excludeRegexes;
regexes.reserve(res.size());
+ excludeRegexes.reserve(excludeRes.size());
for (auto & re : res)
regexes.push_back(std::regex(re, std::regex::extended | std::regex::icase));
+ for (auto & re : excludeRes)
+ excludeRegexes.emplace_back(re, std::regex::extended | std::regex::icase);
+
auto state = getEvalState();
auto jsonOut = json ? std::make_unique<JSONObject>(std::cout) : nullptr;
@@ -93,10 +108,10 @@ struct CmdSearch : InstallableCommand, MixJSON
};
if (cursor.isDerivation()) {
- DrvName name(cursor.getAttr("name")->getString());
+ DrvName name(cursor.getAttr(state->sName)->getString());
- auto aMeta = cursor.maybeGetAttr("meta");
- auto aDescription = aMeta ? aMeta->maybeGetAttr("description") : nullptr;
+ auto aMeta = cursor.maybeGetAttr(state->sMeta);
+ auto aDescription = aMeta ? aMeta->maybeGetAttr(state->sDescription) : nullptr;
auto description = aDescription ? aDescription->getString() : "";
std::replace(description.begin(), description.end(), '\n', ' ');
auto attrPath2 = concatStringsSep(".", attrPathS);
@@ -106,6 +121,14 @@ struct CmdSearch : InstallableCommand, MixJSON
std::vector<std::smatch> nameMatches;
bool found = false;
+ for (auto & regex : excludeRegexes) {
+ if (
+ std::regex_search(attrPath2, regex)
+ || std::regex_search(name.name, regex)
+ || std::regex_search(description, regex))
+ return;
+ }
+
for (auto & regex : regexes) {
found = false;
auto addAll = [&found](std::sregex_iterator it, std::vector<std::smatch> & vec) {
@@ -133,15 +156,15 @@ struct CmdSearch : InstallableCommand, MixJSON
jsonElem.attr("version", name.version);
jsonElem.attr("description", description);
} else {
- auto name2 = hiliteMatches(name.name, std::move(nameMatches), ANSI_GREEN, "\e[0;2m");
+ auto name2 = hiliteMatches(name.name, nameMatches, ANSI_GREEN, "\e[0;2m");
if (results > 1) logger->cout("");
logger->cout(
"* %s%s",
- wrap("\e[0;1m", hiliteMatches(attrPath2, std::move(attrPathMatches), ANSI_GREEN, "\e[0;1m")),
+ wrap("\e[0;1m", hiliteMatches(attrPath2, attrPathMatches, ANSI_GREEN, "\e[0;1m")),
name.version != "" ? " (" + name.version + ")" : "");
if (description != "")
logger->cout(
- " %s", hiliteMatches(description, std::move(descriptionMatches), ANSI_GREEN, ANSI_NORMAL));
+ " %s", hiliteMatches(description, descriptionMatches, ANSI_GREEN, ANSI_NORMAL));
}
}
}
diff --git a/src/nix/search.md b/src/nix/search.md
index d182788a6..5a5b5ae05 100644
--- a/src/nix/search.md
+++ b/src/nix/search.md
@@ -43,12 +43,23 @@ R""(
# nix search nixpkgs 'firefox|chromium'
```
-* Search for packages containing `git'`and either `frontend` or `gui`:
+* Search for packages containing `git` and either `frontend` or `gui`:
```console
# nix search nixpkgs git 'frontend|gui'
```
+* Search for packages containing `neovim` but hide ones containing either `gui` or `python`:
+
+ ```console
+ # nix search nixpkgs neovim -e 'python|gui'
+ ```
+ or
+
+ ```console
+ # nix search nixpkgs neovim -e 'python' -e 'gui'
+ ```
+
# Description
`nix search` searches *installable* (which must be evaluatable, e.g. a
diff --git a/src/nix/upgrade-nix.cc b/src/nix/upgrade-nix.cc
index 0c317de16..17796d6b8 100644
--- a/src/nix/upgrade-nix.cc
+++ b/src/nix/upgrade-nix.cc
@@ -34,7 +34,7 @@ struct CmdUpgradeNix : MixDryRun, StoreCommand
std::string description() override
{
- return "upgrade Nix to the latest stable version";
+ return "upgrade Nix to the stable version declared in Nixpkgs";
}
std::string doc() override
diff --git a/src/nix/upgrade-nix.md b/src/nix/upgrade-nix.md
index 4d27daad9..084c80ba2 100644
--- a/src/nix/upgrade-nix.md
+++ b/src/nix/upgrade-nix.md
@@ -2,7 +2,7 @@ R""(
# Examples
-* Upgrade Nix to the latest stable version:
+* Upgrade Nix to the stable version declared in Nixpkgs:
```console
# nix upgrade-nix
@@ -16,8 +16,11 @@ R""(
# Description
-This command upgrades Nix to the latest version. By default, it
-locates the directory containing the `nix` binary in the `$PATH`
+This command upgrades Nix to the stable version declared in Nixpkgs.
+This stable version is defined in [nix-fallback-paths.nix](https://github.com/NixOS/nixpkgs/raw/master/nixos/modules/installer/tools/nix-fallback-paths.nix)
+and updated manually. It may not always be the latest tagged release.
+
+By default, it locates the directory containing the `nix` binary in the `$PATH`
environment variable. If that directory is a Nix profile, it will
upgrade the `nix` package in that profile to the latest stable binary
release.
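A sketch of typical invocations, assuming the existing `--dry-run` and `--profile` options of `nix upgrade-nix`:

```console
# nix upgrade-nix --dry-run
# nix upgrade-nix --profile /nix/var/nix/profiles/default
```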
diff --git a/src/resolve-system-dependencies/resolve-system-dependencies.cc b/src/resolve-system-dependencies/resolve-system-dependencies.cc
index 4dd691981..c6023eb03 100644
--- a/src/resolve-system-dependencies/resolve-system-dependencies.cc
+++ b/src/resolve-system-dependencies/resolve-system-dependencies.cc
@@ -176,7 +176,7 @@ int main(int argc, char ** argv)
impurePaths.insert(argv[2]);
else {
auto drv = store->derivationFromPath(store->parseStorePath(argv[1]));
- impurePaths = tokenizeString<StringSet>(get(drv.env, "__impureHostDeps").value_or(""));
+ impurePaths = tokenizeString<StringSet>(getOr(drv.env, "__impureHostDeps", ""));
impurePaths.insert("/usr/lib/libSystem.dylib");
}
diff --git a/tests/build-remote.sh b/tests/build-remote.sh
index d1da134dc..e73c37ea4 100644
--- a/tests/build-remote.sh
+++ b/tests/build-remote.sh
@@ -34,7 +34,7 @@ outPath=$(readlink -f $TEST_ROOT/result)
grep 'FOO BAR BAZ' $TEST_ROOT/machine0/$outPath
-testPrintOutPath=$(nix build -L -v -f $file --print-out-paths --max-jobs 0 \
+testPrintOutPath=$(nix build -L -v -f $file --no-link --print-out-paths --max-jobs 0 \
--arg busybox $busybox \
--store $TEST_ROOT/machine0 \
--builders "$(join_by '; ' "${builders[@]}")"
@@ -72,6 +72,7 @@ fi
# Behavior of keep-failed
out="$(nix-build 2>&1 failing.nix \
+ --no-out-link \
--builders "$(join_by '; ' "${builders[@]}")" \
--keep-failed \
--store $TEST_ROOT/machine0 \
diff --git a/tests/build.sh b/tests/build.sh
index 13a0f42be..fc6825e25 100644
--- a/tests/build.sh
+++ b/tests/build.sh
@@ -2,15 +2,10 @@ source common.sh
clearStore
-# Make sure that 'nix build' only returns the outputs we asked for.
-nix build -f multiple-outputs.nix --json a --no-link | jq --exit-status '
- (.[0] |
- (.drvPath | match(".*multiple-outputs-a.drv")) and
- (.outputs | keys | length == 1) and
- (.outputs.first | match(".*multiple-outputs-a-first")))
-'
+set -o pipefail
-nix build -f multiple-outputs.nix --json a.all b.all --no-link | jq --exit-status '
+# Make sure that 'nix build' returns all outputs by default.
+nix build -f multiple-outputs.nix --json a b --no-link | jq --exit-status '
(.[0] |
(.drvPath | match(".*multiple-outputs-a.drv")) and
(.outputs | keys | length == 2) and
@@ -22,6 +17,52 @@ nix build -f multiple-outputs.nix --json a.all b.all --no-link | jq --exit-statu
(.outputs.out | match(".*multiple-outputs-b")))
'
+# Test output selection using the '^' syntax.
+nix build -f multiple-outputs.nix --json a^first --no-link | jq --exit-status '
+ (.[0] |
+ (.drvPath | match(".*multiple-outputs-a.drv")) and
+ (.outputs | keys == ["first"]))
+'
+
+nix build -f multiple-outputs.nix --json a^second,first --no-link | jq --exit-status '
+ (.[0] |
+ (.drvPath | match(".*multiple-outputs-a.drv")) and
+ (.outputs | keys == ["first", "second"]))
+'
+
+nix build -f multiple-outputs.nix --json 'a^*' --no-link | jq --exit-status '
+ (.[0] |
+ (.drvPath | match(".*multiple-outputs-a.drv")) and
+ (.outputs | keys == ["first", "second"]))
+'
+
+# Test that 'outputsToInstall' is respected by default.
+nix build -f multiple-outputs.nix --json e --no-link | jq --exit-status '
+ (.[0] |
+ (.drvPath | match(".*multiple-outputs-e.drv")) and
+ (.outputs | keys == ["a", "b"]))
+'
+
+# But not when it's overridden.
+nix build -f multiple-outputs.nix --json e^a --no-link | jq --exit-status '
+ (.[0] |
+ (.drvPath | match(".*multiple-outputs-e.drv")) and
+ (.outputs | keys == ["a"]))
+'
+
+nix build -f multiple-outputs.nix --json 'e^*' --no-link | jq --exit-status '
+ (.[0] |
+ (.drvPath | match(".*multiple-outputs-e.drv")) and
+ (.outputs | keys == ["a", "b", "c"]))
+'
+
+# Make sure that `--impure` works (regression test for https://github.com/NixOS/nix/issues/6488)
+nix build --impure -f multiple-outputs.nix --json e --no-link | jq --exit-status '
+ (.[0] |
+ (.drvPath | match(".*multiple-outputs-e.drv")) and
+ (.outputs | keys == ["a", "b"]))
+'
+
testNormalization () {
clearStore
outPath=$(nix-build ./simple.nix --no-out-link)
diff --git a/tests/ca-shell.nix b/tests/ca-shell.nix
index ad2ab6aff..36e1d1526 100644
--- a/tests/ca-shell.nix
+++ b/tests/ca-shell.nix
@@ -1 +1 @@
-{ ... }@args: import ./shell.nix (args // { contentAddressed = true; })
+{ inNixShell ? false, ... }@args: import ./shell.nix (args // { contentAddressed = true; })
diff --git a/tests/ca/content-addressed.nix b/tests/ca/content-addressed.nix
index 1be3eeb6e..81bc4bf5c 100644
--- a/tests/ca/content-addressed.nix
+++ b/tests/ca/content-addressed.nix
@@ -23,7 +23,7 @@ rec {
};
rootCA = mkCADerivation {
name = "rootCA";
- outputs = [ "out" "dev" "foo"];
+ outputs = [ "out" "dev" "foo" ];
buildCommand = ''
echo "building a CA derivation"
echo "The seed is ${toString seed}"
@@ -75,7 +75,7 @@ rec {
buildCommand = ''
mkdir -p $out/bin
echo ${rootCA} # Just to make it depend on it
- echo "" > $out/bin/${name}
+ echo "#! ${shell}" > $out/bin/${name}
chmod +x $out/bin/${name}
'';
};
diff --git a/tests/ca/substitute.sh b/tests/ca/substitute.sh
index 3d9001bb8..819f3fd85 100644
--- a/tests/ca/substitute.sh
+++ b/tests/ca/substitute.sh
@@ -25,7 +25,8 @@ buildDrvs --substitute --substituters $REMOTE_STORE --no-require-sigs -j0 transi
# Check that the thing we’ve just substituted has its realisation stored
nix realisation info --file ./content-addressed.nix transitivelyDependentCA
# Check that its dependencies have it too
-nix realisation info --file ./content-addressed.nix dependentCA rootCA
+nix realisation info --file ./content-addressed.nix dependentCA
+# nix realisation info --file ./content-addressed.nix rootCA --outputs out
# Same thing, but
# 1. With non-ca derivations
diff --git a/tests/check.sh b/tests/check.sh
index ab48ff865..495202781 100644
--- a/tests/check.sh
+++ b/tests/check.sh
@@ -40,6 +40,14 @@ nix-build check.nix -A deterministic --argstr checkBuildId $checkBuildId \
if grep -q 'may not be deterministic' $TEST_ROOT/log; then false; fi
checkBuildTempDirRemoved $TEST_ROOT/log
+nix build -f check.nix deterministic --rebuild --repeat 1 \
+ --argstr checkBuildId $checkBuildId --keep-failed --no-link \
+ 2> $TEST_ROOT/log
+if grep -q 'checking is not possible' $TEST_ROOT/log; then false; fi
+# Repeat is set to 1, i.e. Nix should build 'deterministic' twice.
+if [ "$(grep "checking outputs" $TEST_ROOT/log | wc -l)" -ne 2 ]; then false; fi
+checkBuildTempDirRemoved $TEST_ROOT/log
+
nix-build check.nix -A nondeterministic --argstr checkBuildId $checkBuildId \
--no-out-link 2> $TEST_ROOT/log
checkBuildTempDirRemoved $TEST_ROOT/log
@@ -50,6 +58,12 @@ grep 'may not be deterministic' $TEST_ROOT/log
[ "$status" = "104" ]
checkBuildTempDirRemoved $TEST_ROOT/log
+nix build -f check.nix nondeterministic --rebuild --repeat 1 \
+ --argstr checkBuildId $checkBuildId --keep-failed --no-link \
+ 2> $TEST_ROOT/log || status=$?
+grep 'may not be deterministic' $TEST_ROOT/log
+checkBuildTempDirRemoved $TEST_ROOT/log
+
nix-build check.nix -A nondeterministic --argstr checkBuildId $checkBuildId \
--no-out-link --check --keep-failed 2> $TEST_ROOT/log || status=$?
grep 'may not be deterministic' $TEST_ROOT/log
diff --git a/tests/common.sh.in b/tests/common.sh.in
index 8ce28d318..73c2d2309 100644
--- a/tests/common.sh.in
+++ b/tests/common.sh.in
@@ -50,6 +50,8 @@ export busybox="@sandbox_shell@"
export version=@PACKAGE_VERSION@
export system=@system@
+export BUILD_SHARED_LIBS=@BUILD_SHARED_LIBS@
+
export IMPURE_VAR1=foo
export IMPURE_VAR2=bar
@@ -117,11 +119,11 @@ killDaemon() {
}
restartDaemon() {
- [[ -z "${pidDaemon:-}" ]] && return 0
+ [[ -z "${pidDaemon:-}" ]] && return 0
- killDaemon
- unset NIX_REMOTE
- startDaemon
+ killDaemon
+ unset NIX_REMOTE
+ startDaemon
}
if [[ $(uname) == Linux ]] && [[ -L /proc/self/ns/user ]] && unshare --user true; then
@@ -157,11 +159,12 @@ expect() {
local expected res
expected="$1"
shift
- set +e
- "$@"
- res="$?"
- set -e
- [[ $res -eq $expected ]]
+ "$@" || res="$?"
+ if [[ $res -ne $expected ]]; then
+ echo "Expected '$expected' but got '$res' while running '$*'"
+ return 1
+ fi
+ return 0
}
needLocalStore() {
@@ -187,4 +190,15 @@ if [[ -n "${NIX_DAEMON_PACKAGE:-}" ]]; then
startDaemon
fi
+onError() {
+ set +x
+ echo "$0: test failed at:" >&2
+ for ((i = 1; i < ${#BASH_SOURCE[@]}; i++)); do
+ if [[ -z ${BASH_SOURCE[i]} ]]; then break; fi
+ echo " ${FUNCNAME[i]} in ${BASH_SOURCE[i]}:${BASH_LINENO[i-1]}" >&2
+ done
+}
+
+trap onError ERR
+
fi # COMMON_SH_SOURCED
diff --git a/tests/completions.sh b/tests/completions.sh
new file mode 100644
index 000000000..522aa1c86
--- /dev/null
+++ b/tests/completions.sh
@@ -0,0 +1,62 @@
+source common.sh
+
+cd "$TEST_ROOT"
+
+mkdir -p dep
+cat <<EOF > dep/flake.nix
+{
+ outputs = i: { };
+}
+EOF
+mkdir -p foo
+cat <<EOF > foo/flake.nix
+{
+ inputs.a.url = "path:$(realpath dep)";
+
+ outputs = i: {
+ sampleOutput = 1;
+ };
+}
+EOF
+mkdir -p bar
+cat <<EOF > bar/flake.nix
+{
+ inputs.b.url = "path:$(realpath dep)";
+
+ outputs = i: {
+ sampleOutput = 1;
+ };
+}
+EOF
+
+# Test the completion of a subcommand
+[[ "$(NIX_GET_COMPLETIONS=1 nix buil)" == $'normal\nbuild\t' ]]
+[[ "$(NIX_GET_COMPLETIONS=2 nix flake metad)" == $'normal\nmetadata\t' ]]
+
+# Filename completion
+[[ "$(NIX_GET_COMPLETIONS=2 nix build ./f)" == $'filenames\n./foo\t' ]]
+[[ "$(NIX_GET_COMPLETIONS=2 nix build ./nonexistent)" == $'filenames' ]]
+
+# Input override completion
+[[ "$(NIX_GET_COMPLETIONS=4 nix build ./foo --override-input '')" == $'normal\na\t' ]]
+[[ "$(NIX_GET_COMPLETIONS=5 nix flake show ./foo --override-input '')" == $'normal\na\t' ]]
+## With multiple input flakes
+[[ "$(NIX_GET_COMPLETIONS=5 nix build ./foo ./bar --override-input '')" == $'normal\na\t\nb\t' ]]
+## With tilde expansion
+[[ "$(HOME=$PWD NIX_GET_COMPLETIONS=4 nix build '~/foo' --override-input '')" == $'normal\na\t' ]]
+## Out of order
+[[ "$(NIX_GET_COMPLETIONS=3 nix build --update-input '' ./foo)" == $'normal\na\t' ]]
+[[ "$(NIX_GET_COMPLETIONS=4 nix build ./foo --update-input '' ./bar)" == $'normal\na\t\nb\t' ]]
+
+# CLI flag completion
+NIX_GET_COMPLETIONS=2 nix build --log-form | grep -- "--log-format"
+
+# Config option completion
+## With `--option`
+NIX_GET_COMPLETIONS=3 nix build --option allow-import-from | grep -- "allow-import-from-derivation"
+## As a CLI flag – not working at the moment
+# NIX_GET_COMPLETIONS=2 nix build --allow-import-from | grep -- "allow-import-from-derivation"
+
+# Attr path completions
+[[ "$(NIX_GET_COMPLETIONS=2 nix eval ./foo\#sam)" == $'attrs\n./foo#sampleOutput\t' ]]
+[[ "$(NIX_GET_COMPLETIONS=4 nix eval --file ./foo/flake.nix outp)" == $'attrs\noutputs\t' ]]
diff --git a/tests/fetchClosure.sh b/tests/fetchClosure.sh
index 96e4bb741..44050c878 100644
--- a/tests/fetchClosure.sh
+++ b/tests/fetchClosure.sh
@@ -7,7 +7,7 @@ clearStore
clearCacheCache
# Initialize binary cache.
-nonCaPath=$(nix build --json --file ./dependencies.nix | jq -r .[].outputs.out)
+nonCaPath=$(nix build --json --file ./dependencies.nix --no-link | jq -r .[].outputs.out)
caPath=$(nix store make-content-addressed --json $nonCaPath | jq -r '.rewrites | map(.) | .[]')
nix copy --to file://$cacheDir $nonCaPath
diff --git a/tests/fetchGit.sh b/tests/fetchGit.sh
index 9179e2071..166bccfc7 100644
--- a/tests/fetchGit.sh
+++ b/tests/fetchGit.sh
@@ -161,6 +161,14 @@ path4=$(nix eval --impure --raw --expr "(builtins.fetchGit $repo).outPath")
[[ $(cat $path4/hello) = dev ]]
[[ $path3 = $path4 ]]
+# Using a remote path with a branch other than 'master' should fetch the HEAD revision.
+# (--tarball-ttl 0 to prevent using the cached repo above)
+export _NIX_FORCE_HTTP=1
+path4=$(nix eval --tarball-ttl 0 --impure --raw --expr "(builtins.fetchGit $repo).outPath")
+[[ $(cat $path4/hello) = dev ]]
+[[ $path3 = $path4 ]]
+unset _NIX_FORCE_HTTP
+
# Confirm same as 'dev' branch
path5=$(nix eval --impure --raw --expr "(builtins.fetchGit { url = $repo; ref = \"dev\"; }).outPath")
[[ $path3 = $path5 ]]
diff --git a/tests/fetchTree-file.sh b/tests/fetchTree-file.sh
new file mode 100644
index 000000000..f0c530466
--- /dev/null
+++ b/tests/fetchTree-file.sh
@@ -0,0 +1,105 @@
+source common.sh
+
+clearStore
+
+cd "$TEST_ROOT"
+
+test_fetch_file () {
+ echo foo > test_input
+
+ input_hash="$(nix hash path test_input)"
+
+ nix eval --impure --file - <<EOF
+ let
+ tree = builtins.fetchTree { type = "file"; url = "file://$PWD/test_input"; };
+ in
+ assert (tree.narHash == "$input_hash");
+ tree
+EOF
+}
+
+# Make sure that `http(s)` and `file` flake inputs are properly extracted when
+# they should be, and treated as opaque files otherwise
+test_file_flake_input () {
+ rm -fr "$TEST_ROOT/testFlake";
+ mkdir "$TEST_ROOT/testFlake";
+ pushd testFlake
+
+ mkdir inputs
+ echo foo > inputs/test_input_file
+ tar cfa test_input.tar.gz inputs
+ cp test_input.tar.gz test_input_no_ext
+ input_tarball_hash="$(nix hash path test_input.tar.gz)"
+ input_directory_hash="$(nix hash path inputs)"
+
+ cat <<EOF > flake.nix
+ {
+ inputs.no_ext_default_no_unpack = {
+ url = "file://$PWD/test_input_no_ext";
+ flake = false;
+ };
+ inputs.no_ext_explicit_unpack = {
+ url = "tarball+file://$PWD/test_input_no_ext";
+ flake = false;
+ };
+ inputs.tarball_default_unpack = {
+ url = "file://$PWD/test_input.tar.gz";
+ flake = false;
+ };
+ inputs.tarball_explicit_no_unpack = {
+ url = "file+file://$PWD/test_input.tar.gz";
+ flake = false;
+ };
+ outputs = { ... }: {};
+ }
+EOF
+
+ nix flake update
+ nix eval --file - <<EOF
+ with (builtins.fromJSON (builtins.readFile ./flake.lock));
+
+    # URL inputs whose extension doesn’t match a known archive format should
+ # not be unpacked by default
+ assert (nodes.no_ext_default_no_unpack.locked.type == "file");
+ assert (nodes.no_ext_default_no_unpack.locked.unpack or false == false);
+ assert (nodes.no_ext_default_no_unpack.locked.narHash == "$input_tarball_hash");
+
+ # For backwards compatibility, flake inputs that correspond to the
+ # old 'tarball' fetcher should still have their type set to 'tarball'
+ assert (nodes.tarball_default_unpack.locked.type == "tarball");
+    # Unless explicitly specified, the 'unpack' parameter shouldn’t appear here
+ # because that would break older Nix versions
+ assert (!nodes.tarball_default_unpack.locked ? unpack);
+ assert (nodes.tarball_default_unpack.locked.narHash == "$input_directory_hash");
+
+    # Explicitly passing the unpack parameter should enforce the desired behavior
+ assert (nodes.no_ext_explicit_unpack.locked.narHash == nodes.tarball_default_unpack.locked.narHash);
+ assert (nodes.tarball_explicit_no_unpack.locked.narHash == nodes.no_ext_default_no_unpack.locked.narHash);
+ true
+EOF
+ popd
+
+ [[ -z "${NIX_DAEMON_PACKAGE}" ]] && return 0
+
+ # Ensure that a lockfile generated by the current Nix for tarball inputs
+ # can still be read by an older Nix
+
+ cat <<EOF > flake.nix
+ {
+ inputs.tarball = {
+ url = "file://$PWD/test_input.tar.gz";
+ flake = false;
+ };
+ outputs = { self, tarball }: {
+            foo = builtins.readFile "\${tarball}/test_input_file";
+        };
+    }
+EOF
+
+    nix flake update
+
+    clearStore
+    "$NIX_DAEMON_PACKAGE/bin/nix" eval .#foo
+}
+
+test_fetch_file
+test_file_flake_input
diff --git a/tests/flake-bundler.sh b/tests/flakes/bundle.sh
index 9496b8f92..67bbb05ac 100644
--- a/tests/flake-bundler.sh
+++ b/tests/flakes/bundle.sh
@@ -1,9 +1,6 @@
source common.sh
-clearStore
-rm -rf $TEST_HOME/.cache $TEST_HOME/.config $TEST_HOME/.local
-
-cp ./simple.nix ./simple.builder.sh ./config.nix $TEST_HOME
+cp ../simple.nix ../simple.builder.sh ../config.nix $TEST_HOME
cd $TEST_HOME
@@ -25,6 +22,7 @@ cat <<EOF > flake.nix
};
}
EOF
+
nix build .#
nix bundle --bundler .# .#
nix bundle --bundler .#bundlers.$system.default .#packages.$system.default
@@ -32,6 +30,3 @@ nix bundle --bundler .#bundlers.$system.simple .#packages.$system.default
nix bundle --bundler .#bundlers.$system.default .#apps.$system.default
nix bundle --bundler .#bundlers.$system.simple .#apps.$system.default
-
-clearStore
-
diff --git a/tests/flakes/check.sh b/tests/flakes/check.sh
new file mode 100644
index 000000000..f572aa75c
--- /dev/null
+++ b/tests/flakes/check.sh
@@ -0,0 +1,89 @@
+source common.sh
+
+flakeDir=$TEST_ROOT/flake3
+mkdir -p $flakeDir
+
+cat > $flakeDir/flake.nix <<EOF
+{
+ outputs = { self }: {
+ overlay = final: prev: {
+ };
+ };
+}
+EOF
+
+nix flake check $flakeDir
+
+cat > $flakeDir/flake.nix <<EOF
+{
+ outputs = { self }: {
+ overlay = finalll: prev: {
+ };
+ };
+}
+EOF
+
+(! nix flake check $flakeDir)
+
+cat > $flakeDir/flake.nix <<EOF
+{
+ outputs = { self }: {
+ nixosModules.foo = {
+ a.b.c = 123;
+ foo = true;
+ };
+ };
+}
+EOF
+
+nix flake check $flakeDir
+
+cat > $flakeDir/flake.nix <<EOF
+{
+ outputs = { self }: {
+ nixosModules.foo = {
+ a.b.c = 123;
+ foo = assert false; true;
+ };
+ };
+}
+EOF
+
+(! nix flake check $flakeDir)
+
+cat > $flakeDir/flake.nix <<EOF
+{
+ outputs = { self }: {
+ nixosModule = { config, pkgs, ... }: {
+ a.b.c = 123;
+ };
+ };
+}
+EOF
+
+nix flake check $flakeDir
+
+cat > $flakeDir/flake.nix <<EOF
+{
+ outputs = { self }: {
+ nixosModule = { config, pkgs }: {
+ a.b.c = 123;
+ };
+ };
+}
+EOF
+
+(! nix flake check $flakeDir)
+
+cat > $flakeDir/flake.nix <<EOF
+{
+ outputs = { self }: {
+ packages.system-1.default = "foo";
+ packages.system-2.default = "bar";
+ };
+}
+EOF
+
+checkRes=$(nix flake check --keep-going $flakeDir 2>&1 && fail "nix flake check should have failed" || true)
+echo "$checkRes" | grep -q "packages.system-1.default"
+echo "$checkRes" | grep -q "packages.system-2.default"
diff --git a/tests/flakes/circular.sh b/tests/flakes/circular.sh
new file mode 100644
index 000000000..09cd02edf
--- /dev/null
+++ b/tests/flakes/circular.sh
@@ -0,0 +1,49 @@
+# Test circular flake dependencies.
+source ./common.sh
+
+requireGit
+
+flakeA=$TEST_ROOT/flakeA
+flakeB=$TEST_ROOT/flakeB
+
+createGitRepo $flakeA
+createGitRepo $flakeB
+
+cat > $flakeA/flake.nix <<EOF
+{
+ inputs.b.url = git+file://$flakeB;
+ inputs.b.inputs.a.follows = "/";
+
+ outputs = { self, b }: {
+ foo = 123 + b.bar;
+ xyzzy = 1000;
+ };
+}
+EOF
+
+git -C $flakeA add flake.nix
+
+cat > $flakeB/flake.nix <<EOF
+{
+ inputs.a.url = git+file://$flakeA;
+
+ outputs = { self, a }: {
+ bar = 456 + a.xyzzy;
+ };
+}
+EOF
+
+git -C $flakeB add flake.nix
+git -C $flakeB commit -a -m 'Foo'
+
+[[ $(nix eval $flakeA#foo) = 1579 ]]
+[[ $(nix eval $flakeA#foo) = 1579 ]]
+
+sed -i $flakeB/flake.nix -e 's/456/789/'
+git -C $flakeB commit -a -m 'Foo'
+
+[[ $(nix eval --update-input b $flakeA#foo) = 1912 ]]
+
+# Test list-inputs with circular dependencies
+nix flake metadata $flakeA
+
diff --git a/tests/flakes/common.sh b/tests/flakes/common.sh
new file mode 100644
index 000000000..c333733c2
--- /dev/null
+++ b/tests/flakes/common.sh
@@ -0,0 +1,73 @@
+source ../common.sh
+
+registry=$TEST_ROOT/registry.json
+
+requireGit() {
+ if [[ -z $(type -p git) ]]; then
+ echo "Git not installed; skipping flake tests"
+ exit 99
+ fi
+}
+
+writeSimpleFlake() {
+ local flakeDir="$1"
+ cat > $flakeDir/flake.nix <<EOF
+{
+ description = "Bla bla";
+
+ outputs = inputs: rec {
+ packages.$system = rec {
+ foo = import ./simple.nix;
+ default = foo;
+ };
+
+ # To test "nix flake init".
+ legacyPackages.x86_64-linux.hello = import ./simple.nix;
+ };
+}
+EOF
+
+ cp ../simple.nix ../simple.builder.sh ../config.nix $flakeDir/
+}
+
+createSimpleGitFlake() {
+ local flakeDir="$1"
+ writeSimpleFlake $flakeDir
+ git -C $flakeDir add flake.nix simple.nix simple.builder.sh config.nix
+ git -C $flakeDir commit -m 'Initial'
+}
+
+writeDependentFlake() {
+ local flakeDir="$1"
+ cat > $flakeDir/flake.nix <<EOF
+{
+ outputs = { self, flake1 }: {
+ packages.$system.default = flake1.packages.$system.default;
+ expr = assert builtins.pathExists ./flake.lock; 123;
+ };
+}
+EOF
+}
+
+writeTrivialFlake() {
+ local flakeDir="$1"
+ cat > $flakeDir/flake.nix <<EOF
+{
+ outputs = { self }: {
+ expr = 123;
+ };
+}
+EOF
+}
+
+createGitRepo() {
+ local repo="$1"
+    local extraArgs="${2:-}"
+
+ rm -rf $repo $repo.tmp
+ mkdir -p $repo
+
+ git -C $repo init $extraArgs
+ git -C $repo config user.email "foobar@example.com"
+ git -C $repo config user.name "Foobar"
+}
diff --git a/tests/flake-local-settings.sh b/tests/flakes/config.sh
index e92c16f87..d1941a6be 100644
--- a/tests/flake-local-settings.sh
+++ b/tests/flakes/config.sh
@@ -1,9 +1,6 @@
source common.sh
-clearStore
-rm -rf $TEST_HOME/.cache $TEST_HOME/.config $TEST_HOME/.local
-
-cp ./simple.nix ./simple.builder.sh ./config.nix $TEST_HOME
+cp ../simple.nix ../simple.builder.sh ../config.nix $TEST_HOME
cd $TEST_HOME
diff --git a/tests/flakes.sh b/tests/flakes/flakes.sh
index 46e6a7982..267e2cd6f 100644
--- a/tests/flakes.sh
+++ b/tests/flakes/flakes.sh
@@ -1,60 +1,30 @@
-source common.sh
+source ./common.sh
-if [[ -z $(type -p git) ]]; then
- echo "Git not installed; skipping flake tests"
- exit 99
-fi
+requireGit
clearStore
rm -rf $TEST_HOME/.cache $TEST_HOME/.config
-registry=$TEST_ROOT/registry.json
-
flake1Dir=$TEST_ROOT/flake1
flake2Dir=$TEST_ROOT/flake2
flake3Dir=$TEST_ROOT/flake3
flake5Dir=$TEST_ROOT/flake5
-flake6Dir=$TEST_ROOT/flake6
flake7Dir=$TEST_ROOT/flake7
-templatesDir=$TEST_ROOT/templates
nonFlakeDir=$TEST_ROOT/nonFlake
badFlakeDir=$TEST_ROOT/badFlake
-flakeA=$TEST_ROOT/flakeA
-flakeB=$TEST_ROOT/flakeB
flakeGitBare=$TEST_ROOT/flakeGitBare
-flakeFollowsA=$TEST_ROOT/follows/flakeA
-flakeFollowsB=$TEST_ROOT/follows/flakeA/flakeB
-flakeFollowsC=$TEST_ROOT/follows/flakeA/flakeB/flakeC
-flakeFollowsD=$TEST_ROOT/follows/flakeA/flakeD
-flakeFollowsE=$TEST_ROOT/follows/flakeA/flakeE
-
-for repo in $flake1Dir $flake2Dir $flake3Dir $flake7Dir $templatesDir $nonFlakeDir $flakeA $flakeB $flakeFollowsA; do
- rm -rf $repo $repo.tmp
- mkdir -p $repo
- git -C $repo init
- git -C $repo config user.email "foobar@example.com"
- git -C $repo config user.name "Foobar"
-done
-
-cat > $flake1Dir/flake.nix <<EOF
-{
- description = "Bla bla";
- outputs = inputs: rec {
- packages.$system = rec {
- foo = import ./simple.nix;
- default = foo;
- };
+for repo in $flake1Dir $flake2Dir $flake3Dir $flake7Dir $nonFlakeDir; do
+ # Give one repo a non-main initial branch.
+ extraArgs=
+ if [[ $repo == $flake2Dir ]]; then
+ extraArgs="--initial-branch=main"
+ fi
- # To test "nix flake init".
- legacyPackages.x86_64-linux.hello = import ./simple.nix;
- };
-}
-EOF
+ createGitRepo "$repo" "$extraArgs"
+done
-cp ./simple.nix ./simple.builder.sh ./config.nix $flake1Dir/
-git -C $flake1Dir add flake.nix simple.nix simple.builder.sh config.nix
-git -C $flake1Dir commit -m 'Initial'
+createSimpleGitFlake $flake1Dir
cat > $flake2Dir/flake.nix <<EOF
{
@@ -98,12 +68,10 @@ nix registry add --registry $registry flake1 git+file://$flake1Dir
nix registry add --registry $registry flake2 git+file://$flake2Dir
nix registry add --registry $registry flake3 git+file://$flake3Dir
nix registry add --registry $registry flake4 flake3
-nix registry add --registry $registry flake5 hg+file://$flake5Dir
nix registry add --registry $registry nixpkgs flake1
-nix registry add --registry $registry templates git+file://$templatesDir
# Test 'nix flake list'.
-[[ $(nix registry list | wc -l) == 7 ]]
+[[ $(nix registry list | wc -l) == 5 ]]
# Test 'nix flake metadata'.
nix flake metadata flake1
@@ -156,6 +124,7 @@ nix build -o $TEST_ROOT/result --expr "(builtins.getFlake \"git+file://$flake1Di
# But should succeed in impure mode.
(! nix build -o $TEST_ROOT/result flake2#bar --impure)
nix build -o $TEST_ROOT/result flake2#bar --impure --no-write-lock-file
+nix eval --expr "builtins.getFlake \"$flake2Dir\"" --impure
# Building a local flake with an unlocked dependency should fail with --no-update-lock-file.
nix build -o $TEST_ROOT/result $flake2Dir#bar --no-update-lock-file 2>&1 | grep 'requires lock file changes'
@@ -165,11 +134,11 @@ nix build -o $TEST_ROOT/result $flake2Dir#bar --no-write-lock-file
nix build -o $TEST_ROOT/result $flake2Dir#bar --no-update-lock-file 2>&1 | grep 'requires lock file changes'
nix build -o $TEST_ROOT/result $flake2Dir#bar --commit-lock-file
[[ -e $flake2Dir/flake.lock ]]
-[[ -z $(git -C $flake2Dir diff master) ]]
+[[ -z $(git -C $flake2Dir diff main || echo failed) ]]
# Rerunning the build should not change the lockfile.
nix build -o $TEST_ROOT/result $flake2Dir#bar
-[[ -z $(git -C $flake2Dir diff master) ]]
+[[ -z $(git -C $flake2Dir diff main || echo failed) ]]
# Building with a lockfile should not require a fetch of the registry.
nix build -o $TEST_ROOT/result --flake-registry file:///no-registry.json $flake2Dir#bar --refresh
@@ -178,7 +147,7 @@ nix build -o $TEST_ROOT/result --no-use-registries $flake2Dir#bar --refresh
# Updating the flake should not change the lockfile.
nix flake lock $flake2Dir
-[[ -z $(git -C $flake2Dir diff master) ]]
+[[ -z $(git -C $flake2Dir diff main || echo failed) ]]
# Now we should be able to build the flake in pure mode.
nix build -o $TEST_ROOT/result flake2#bar
@@ -213,7 +182,7 @@ nix build -o $TEST_ROOT/result $flake3Dir#"sth sth"
nix build -o $TEST_ROOT/result $flake3Dir#"sth%20sth"
# Check whether it saved the lockfile
-(! [[ -z $(git -C $flake3Dir diff master) ]])
+[[ -n $(git -C $flake3Dir diff master) ]]
git -C $flake3Dir add flake.lock
@@ -283,7 +252,7 @@ cat > $flake3Dir/flake.nix <<EOF
}
EOF
-cp ./config.nix $flake3Dir
+cp ../config.nix $flake3Dir
git -C $flake3Dir add flake.nix config.nix
git -C $flake3Dir commit -m 'Add nonFlakeInputs'
@@ -313,10 +282,10 @@ nix build -o $TEST_ROOT/result flake4#xyzzy
# Test 'nix flake update' and --override-flake.
nix flake lock $flake3Dir
-[[ -z $(git -C $flake3Dir diff master) ]]
+[[ -z $(git -C $flake3Dir diff master || echo failed) ]]
nix flake update $flake3Dir --override-flake flake2 nixpkgs
-[[ ! -z $(git -C $flake3Dir diff master) ]]
+[[ ! -z $(git -C $flake3Dir diff master || echo failed) ]]
# Make branch "removeXyzzy" where flake3 doesn't have xyzzy anymore
git -C $flake3Dir checkout -b removeXyzzy
@@ -358,161 +327,19 @@ nix build -o $TEST_ROOT/result flake4/removeXyzzy#sth
# Testing the nix CLI
nix registry add flake1 flake3
-[[ $(nix registry list | wc -l) == 8 ]]
+[[ $(nix registry list | wc -l) == 6 ]]
nix registry pin flake1
-[[ $(nix registry list | wc -l) == 8 ]]
+[[ $(nix registry list | wc -l) == 6 ]]
nix registry pin flake1 flake3
-[[ $(nix registry list | wc -l) == 8 ]]
+[[ $(nix registry list | wc -l) == 6 ]]
nix registry remove flake1
-[[ $(nix registry list | wc -l) == 7 ]]
-
-# Test 'nix flake init'.
-cat > $templatesDir/flake.nix <<EOF
-{
- description = "Some templates";
-
- outputs = { self }: {
- templates = rec {
- trivial = {
- path = ./trivial;
- description = "A trivial flake";
- welcomeText = ''
- Welcome to my trivial flake
- '';
- };
- default = trivial;
- };
- };
-}
-EOF
-
-mkdir $templatesDir/trivial
-
-cat > $templatesDir/trivial/flake.nix <<EOF
-{
- description = "A flake for building Hello World";
-
- outputs = { self, nixpkgs }: {
- packages.x86_64-linux = rec {
- hello = nixpkgs.legacyPackages.x86_64-linux.hello;
- default = hello;
- };
- };
-}
-EOF
-
-git -C $templatesDir add flake.nix trivial/flake.nix
-git -C $templatesDir commit -m 'Initial'
-
-nix flake check templates
-nix flake show templates
-nix flake show templates --json | jq
-
-(cd $flake7Dir && nix flake init)
-(cd $flake7Dir && nix flake init) # check idempotence
-git -C $flake7Dir add flake.nix
-nix flake check $flake7Dir
-nix flake show $flake7Dir
-nix flake show $flake7Dir --json | jq
-git -C $flake7Dir commit -a -m 'Initial'
-
-# Test 'nix flake new'.
-rm -rf $flake6Dir
-nix flake new -t templates#trivial $flake6Dir
-nix flake new -t templates#trivial $flake6Dir # check idempotence
-nix flake check $flake6Dir
+[[ $(nix registry list | wc -l) == 5 ]]
# Test 'nix flake clone'.
rm -rf $TEST_ROOT/flake1-v2
nix flake clone flake1 --dest $TEST_ROOT/flake1-v2
[ -e $TEST_ROOT/flake1-v2/flake.nix ]
-# More 'nix flake check' tests.
-cat > $flake3Dir/flake.nix <<EOF
-{
- outputs = { flake1, self }: {
- overlay = final: prev: {
- };
- };
-}
-EOF
-
-nix flake check $flake3Dir
-
-cat > $flake3Dir/flake.nix <<EOF
-{
- outputs = { flake1, self }: {
- overlay = finalll: prev: {
- };
- };
-}
-EOF
-
-(! nix flake check $flake3Dir)
-
-cat > $flake3Dir/flake.nix <<EOF
-{
- outputs = { flake1, self }: {
- nixosModules.foo = {
- a.b.c = 123;
- foo = true;
- };
- };
-}
-EOF
-
-nix flake check $flake3Dir
-
-cat > $flake3Dir/flake.nix <<EOF
-{
- outputs = { flake1, self }: {
- nixosModules.foo = {
- a.b.c = 123;
- foo = assert false; true;
- };
- };
-}
-EOF
-
-(! nix flake check $flake3Dir)
-
-cat > $flake3Dir/flake.nix <<EOF
-{
- outputs = { flake1, self }: {
- nixosModule = { config, pkgs, ... }: {
- a.b.c = 123;
- };
- };
-}
-EOF
-
-nix flake check $flake3Dir
-
-cat > $flake3Dir/flake.nix <<EOF
-{
- outputs = { flake1, self }: {
- nixosModule = { config, pkgs }: {
- a.b.c = 123;
- };
- };
-}
-EOF
-
-(! nix flake check $flake3Dir)
-
-cat > $flake3Dir/flake.nix <<EOF
-{
- outputs = { flake1, self }: {
- packages.system-1.default = "foo";
- packages.system-2.default = "bar";
- };
-}
-EOF
-
-checkRes=$(nix flake check --keep-going $flake3Dir 2>&1 && fail "nix flake check should have failed" || true)
-echo "$checkRes" | grep -q "packages.system-1.default"
-echo "$checkRes" | grep -q "packages.system-2.default"
-
# Test 'follows' inputs.
cat > $flake3Dir/flake.nix <<EOF
{
@@ -555,6 +382,10 @@ nix flake lock $flake3Dir
[[ $(jq -c .nodes.root.inputs.bar $flake3Dir/flake.lock) = '["flake2"]' ]]
# Test overriding inputs of inputs.
+writeTrivialFlake $flake7Dir
+git -C $flake7Dir add flake.nix
+git -C $flake7Dir commit -m 'Initial'
+
cat > $flake3Dir/flake.nix <<EOF
{
inputs.flake2.inputs.flake1 = {
@@ -589,50 +420,9 @@ rm -rf $flakeGitBare
git clone --bare $flake1Dir $flakeGitBare
nix build -o $TEST_ROOT/result git+file://$flakeGitBare
-# Test Mercurial flakes.
-rm -rf $flake5Dir
-mkdir $flake5Dir
-
-cat > $flake5Dir/flake.nix <<EOF
-{
- outputs = { self, flake1 }: {
- packages.$system.default = flake1.packages.$system.default;
- expr = assert builtins.pathExists ./flake.lock; 123;
- };
-}
-EOF
-
-if [[ -n $(type -p hg) ]]; then
- hg init $flake5Dir
-
- hg add $flake5Dir/flake.nix
- hg commit --config ui.username=foobar@example.org $flake5Dir -m 'Initial commit'
-
- nix build -o $TEST_ROOT/result hg+file://$flake5Dir
- [[ -e $TEST_ROOT/result/hello ]]
-
- (! nix flake metadata --json hg+file://$flake5Dir | jq -e -r .revision)
-
- nix eval hg+file://$flake5Dir#expr
-
- nix eval hg+file://$flake5Dir#expr
-
- (! nix eval hg+file://$flake5Dir#expr --no-allow-dirty)
-
- (! nix flake metadata --json hg+file://$flake5Dir | jq -e -r .revision)
-
- hg commit --config ui.username=foobar@example.org $flake5Dir -m 'Add lock file'
-
- nix flake metadata --json hg+file://$flake5Dir --refresh | jq -e -r .revision
- nix flake metadata --json hg+file://$flake5Dir
- [[ $(nix flake metadata --json hg+file://$flake5Dir | jq -e -r .revCount) = 1 ]]
-
- nix build -o $TEST_ROOT/result hg+file://$flake5Dir --no-registries --no-allow-dirty
- nix build -o $TEST_ROOT/result hg+file://$flake5Dir --no-use-registries --no-allow-dirty
-fi
-
# Test path flakes.
-rm -rf $flake5Dir/.hg $flake5Dir/flake.lock
+mkdir -p $flake5Dir
+writeDependentFlake $flake5Dir
nix flake lock path://$flake5Dir
# Test tarball flakes.
@@ -670,165 +460,6 @@ nix flake lock $flake3Dir --update-input flake2/flake1
# Test 'nix flake metadata --json'.
nix flake metadata $flake3Dir --json | jq .
-# Test circular flake dependencies.
-cat > $flakeA/flake.nix <<EOF
-{
- inputs.b.url = git+file://$flakeB;
- inputs.b.inputs.a.follows = "/";
-
- outputs = { self, nixpkgs, b }: {
- foo = 123 + b.bar;
- xyzzy = 1000;
- };
-}
-EOF
-
-git -C $flakeA add flake.nix
-
-cat > $flakeB/flake.nix <<EOF
-{
- inputs.a.url = git+file://$flakeA;
-
- outputs = { self, nixpkgs, a }: {
- bar = 456 + a.xyzzy;
- };
-}
-EOF
-
-git -C $flakeB add flake.nix
-git -C $flakeB commit -a -m 'Foo'
-
-[[ $(nix eval $flakeA#foo) = 1579 ]]
-[[ $(nix eval $flakeA#foo) = 1579 ]]
-
-sed -i $flakeB/flake.nix -e 's/456/789/'
-git -C $flakeB commit -a -m 'Foo'
-
-[[ $(nix eval --update-input b $flakeA#foo) = 1912 ]]
-
-# Test list-inputs with circular dependencies
-nix flake metadata $flakeA
-
-# Test flake follow paths
-mkdir -p $flakeFollowsB
-mkdir -p $flakeFollowsC
-mkdir -p $flakeFollowsD
-mkdir -p $flakeFollowsE
-
-cat > $flakeFollowsA/flake.nix <<EOF
-{
- description = "Flake A";
- inputs = {
- B = {
- url = "path:./flakeB";
- inputs.foobar.follows = "foobar";
- };
-
- foobar.url = "path:$flakeFollowsA/flakeE";
- };
- outputs = { ... }: {};
-}
-EOF
-
-cat > $flakeFollowsB/flake.nix <<EOF
-{
- description = "Flake B";
- inputs = {
- foobar.url = "path:$flakeFollowsA/flakeE";
- goodoo.follows = "C/goodoo";
- C = {
- url = "path:./flakeC";
- inputs.foobar.follows = "foobar";
- };
- };
- outputs = { ... }: {};
-}
-EOF
-
-cat > $flakeFollowsC/flake.nix <<EOF
-{
- description = "Flake C";
- inputs = {
- foobar.url = "path:$flakeFollowsA/flakeE";
- goodoo.follows = "foobar";
- };
- outputs = { ... }: {};
-}
-EOF
-
-cat > $flakeFollowsD/flake.nix <<EOF
-{
- description = "Flake D";
- inputs = {};
- outputs = { ... }: {};
-}
-EOF
-
-cat > $flakeFollowsE/flake.nix <<EOF
-{
- description = "Flake E";
- inputs = {};
- outputs = { ... }: {};
-}
-EOF
-
-git -C $flakeFollowsA add flake.nix flakeB/flake.nix \
- flakeB/flakeC/flake.nix flakeD/flake.nix flakeE/flake.nix
-
-nix flake metadata $flakeFollowsA
-
-nix flake update $flakeFollowsA
-
-oldLock="$(cat "$flakeFollowsA/flake.lock")"
-
-# Ensure that locking twice doesn't change anything
-
-nix flake lock $flakeFollowsA
-
-newLock="$(cat "$flakeFollowsA/flake.lock")"
-
-diff <(echo "$newLock") <(echo "$oldLock")
-
-[[ $(jq -c .nodes.B.inputs.C $flakeFollowsA/flake.lock) = '"C"' ]]
-[[ $(jq -c .nodes.B.inputs.foobar $flakeFollowsA/flake.lock) = '["foobar"]' ]]
-[[ $(jq -c .nodes.C.inputs.foobar $flakeFollowsA/flake.lock) = '["B","foobar"]' ]]
-
-# Ensure removing follows from flake.nix removes them from the lockfile
-
-cat > $flakeFollowsA/flake.nix <<EOF
-{
- description = "Flake A";
- inputs = {
- B = {
- url = "path:./flakeB";
- inputs.nonFlake.follows = "D";
- };
- D.url = "path:./flakeD";
- };
- outputs = { ... }: {};
-}
-EOF
-
-nix flake lock $flakeFollowsA
-
-[[ $(jq -c .nodes.B.inputs.foobar $flakeFollowsA/flake.lock) = '"foobar"' ]]
-jq -r -c '.nodes | keys | .[]' $flakeFollowsA/flake.lock | grep "^foobar$"
-
-# Ensure a relative path is not allowed to go outside the store path
-cat > $flakeFollowsA/flake.nix <<EOF
-{
- description = "Flake A";
- inputs = {
- B.url = "path:../flakeB";
- };
- outputs = { ... }: {};
-}
-EOF
-
-git -C $flakeFollowsA add flake.nix
-
-nix flake lock $flakeFollowsA 2>&1 | grep 'points outside'
-
# Test flake in store does not evaluate
rm -rf $badFlakeDir
mkdir $badFlakeDir
diff --git a/tests/flakes/follow-paths.sh b/tests/flakes/follow-paths.sh
new file mode 100644
index 000000000..19cc1bafa
--- /dev/null
+++ b/tests/flakes/follow-paths.sh
@@ -0,0 +1,150 @@
+source ./common.sh
+
+requireGit
+
+flakeFollowsA=$TEST_ROOT/follows/flakeA
+flakeFollowsB=$TEST_ROOT/follows/flakeA/flakeB
+flakeFollowsC=$TEST_ROOT/follows/flakeA/flakeB/flakeC
+flakeFollowsD=$TEST_ROOT/follows/flakeA/flakeD
+flakeFollowsE=$TEST_ROOT/follows/flakeA/flakeE
+
+# Test following path flakerefs.
+createGitRepo $flakeFollowsA
+mkdir -p $flakeFollowsB
+mkdir -p $flakeFollowsC
+mkdir -p $flakeFollowsD
+mkdir -p $flakeFollowsE
+
+cat > $flakeFollowsA/flake.nix <<EOF
+{
+ description = "Flake A";
+ inputs = {
+ B = {
+ url = "path:./flakeB";
+ inputs.foobar.follows = "foobar";
+ };
+
+ foobar.url = "path:$flakeFollowsA/flakeE";
+ };
+ outputs = { ... }: {};
+}
+EOF
+
+cat > $flakeFollowsB/flake.nix <<EOF
+{
+ description = "Flake B";
+ inputs = {
+ foobar.url = "path:$flakeFollowsA/flakeE";
+ goodoo.follows = "C/goodoo";
+ C = {
+ url = "path:./flakeC";
+ inputs.foobar.follows = "foobar";
+ };
+ };
+ outputs = { ... }: {};
+}
+EOF
+
+cat > $flakeFollowsC/flake.nix <<EOF
+{
+ description = "Flake C";
+ inputs = {
+ foobar.url = "path:$flakeFollowsA/flakeE";
+ goodoo.follows = "foobar";
+ };
+ outputs = { ... }: {};
+}
+EOF
+
+cat > $flakeFollowsD/flake.nix <<EOF
+{
+ description = "Flake D";
+ inputs = {};
+ outputs = { ... }: {};
+}
+EOF
+
+cat > $flakeFollowsE/flake.nix <<EOF
+{
+ description = "Flake E";
+ inputs = {};
+ outputs = { ... }: {};
+}
+EOF
+
+git -C $flakeFollowsA add flake.nix flakeB/flake.nix \
+ flakeB/flakeC/flake.nix flakeD/flake.nix flakeE/flake.nix
+
+nix flake metadata $flakeFollowsA
+
+nix flake update $flakeFollowsA
+
+nix flake lock $flakeFollowsA
+
+oldLock="$(cat "$flakeFollowsA/flake.lock")"
+
+# Ensure that locking twice doesn't change anything
+
+nix flake lock $flakeFollowsA
+
+newLock="$(cat "$flakeFollowsA/flake.lock")"
+
+diff <(echo "$newLock") <(echo "$oldLock")
+
+[[ $(jq -c .nodes.B.inputs.C $flakeFollowsA/flake.lock) = '"C"' ]]
+[[ $(jq -c .nodes.B.inputs.foobar $flakeFollowsA/flake.lock) = '["foobar"]' ]]
+[[ $(jq -c .nodes.C.inputs.foobar $flakeFollowsA/flake.lock) = '["B","foobar"]' ]]
+
+# Ensure removing follows from flake.nix removes them from the lockfile
+
+cat > $flakeFollowsA/flake.nix <<EOF
+{
+ description = "Flake A";
+ inputs = {
+ B = {
+ url = "path:./flakeB";
+ };
+ D.url = "path:./flakeD";
+ };
+ outputs = { ... }: {};
+}
+EOF
+
+nix flake lock $flakeFollowsA
+
+[[ $(jq -c .nodes.B.inputs.foobar $flakeFollowsA/flake.lock) = '"foobar"' ]]
+jq -r -c '.nodes | keys | .[]' $flakeFollowsA/flake.lock | grep "^foobar$"
+
+# Ensure a relative path is not allowed to go outside the store path
+cat > $flakeFollowsA/flake.nix <<EOF
+{
+ description = "Flake A";
+ inputs = {
+ B.url = "path:../flakeB";
+ };
+ outputs = { ... }: {};
+}
+EOF
+
+git -C $flakeFollowsA add flake.nix
+
+nix flake lock $flakeFollowsA 2>&1 | grep 'points outside'
+
+# Non-existent follows should print a warning.
+cat >$flakeFollowsA/flake.nix <<EOF
+{
+ description = "Flake A";
+ inputs.B = {
+ url = "path:./flakeB";
+ inputs.invalid.follows = "D";
+ inputs.invalid2.url = "path:./flakeD";
+ };
+ inputs.D.url = "path:./flakeD";
+ outputs = { ... }: {};
+}
+EOF
+
+git -C $flakeFollowsA add flake.nix
+
+nix flake lock $flakeFollowsA 2>&1 | grep "warning: input 'B' has an override for a non-existent input 'invalid'"
+nix flake lock $flakeFollowsA 2>&1 | grep "warning: input 'B' has an override for a non-existent input 'invalid2'"
diff --git a/tests/flakes/init.sh b/tests/flakes/init.sh
new file mode 100644
index 000000000..36cb9956a
--- /dev/null
+++ b/tests/flakes/init.sh
@@ -0,0 +1,87 @@
+source ./common.sh
+
+requireGit
+
+templatesDir=$TEST_ROOT/templates
+flakeDir=$TEST_ROOT/flake
+nixpkgsDir=$TEST_ROOT/nixpkgs
+
+nix registry add --registry $registry templates git+file://$templatesDir
+nix registry add --registry $registry nixpkgs git+file://$nixpkgsDir
+
+createGitRepo $nixpkgsDir
+createSimpleGitFlake $nixpkgsDir
+
+# Test 'nix flake init'.
+createGitRepo $templatesDir
+
+cat > $templatesDir/flake.nix <<EOF
+{
+ description = "Some templates";
+
+ outputs = { self }: {
+ templates = rec {
+ trivial = {
+ path = ./trivial;
+ description = "A trivial flake";
+ welcomeText = ''
+ Welcome to my trivial flake
+ '';
+ };
+ default = trivial;
+ };
+ };
+}
+EOF
+
+mkdir $templatesDir/trivial
+
+cat > $templatesDir/trivial/flake.nix <<EOF
+{
+ description = "A flake for building Hello World";
+
+ outputs = { self, nixpkgs }: {
+ packages.x86_64-linux = rec {
+ hello = nixpkgs.legacyPackages.x86_64-linux.hello;
+ default = hello;
+ };
+ };
+}
+EOF
+echo a > $templatesDir/trivial/a
+echo b > $templatesDir/trivial/b
+
+git -C $templatesDir add flake.nix trivial/
+git -C $templatesDir commit -m 'Initial'
+
+nix flake check templates
+nix flake show templates
+nix flake show templates --json | jq
+
+createGitRepo $flakeDir
+(cd $flakeDir && nix flake init)
+(cd $flakeDir && nix flake init) # check idempotence
+git -C $flakeDir add flake.nix
+nix flake check $flakeDir
+nix flake show $flakeDir
+nix flake show $flakeDir --json | jq
+git -C $flakeDir commit -a -m 'Initial'
+
+# Test 'nix flake init' with benign conflicts
+createGitRepo "$flakeDir"
+echo a > $flakeDir/a
+(cd $flakeDir && nix flake init) # check idempotence
+
+# Test 'nix flake init' with conflicts
+createGitRepo "$flakeDir"
+echo b > $flakeDir/a
+pushd $flakeDir
+(! nix flake init) |& grep "refusing to overwrite existing file '$flakeDir/a'"
+popd
+git -C $flakeDir commit -a -m 'Changed'
+
+# Test 'nix flake new'.
+rm -rf $flakeDir
+nix flake new -t templates#trivial $flakeDir
+nix flake new -t templates#trivial $flakeDir # check idempotence
+nix flake check $flakeDir
diff --git a/tests/flakes/mercurial.sh b/tests/flakes/mercurial.sh
new file mode 100644
index 000000000..2614006c8
--- /dev/null
+++ b/tests/flakes/mercurial.sh
@@ -0,0 +1,46 @@
+source ./common.sh
+
+if [[ -z $(type -p hg) ]]; then
+ echo "Mercurial not installed; skipping"
+ exit 99
+fi
+
+flake1Dir=$TEST_ROOT/flake-hg1
+mkdir -p $flake1Dir
+writeSimpleFlake $flake1Dir
+hg init $flake1Dir
+
+nix registry add --registry $registry flake1 hg+file://$flake1Dir
+
+flake2Dir=$TEST_ROOT/flake-hg2
+mkdir -p $flake2Dir
+writeDependentFlake $flake2Dir
+hg init $flake2Dir
+
+hg add $flake1Dir/*
+hg commit --config ui.username=foobar@example.org $flake1Dir -m 'Initial commit'
+
+hg add $flake2Dir/flake.nix
+hg commit --config ui.username=foobar@example.org $flake2Dir -m 'Initial commit'
+
+nix build -o $TEST_ROOT/result hg+file://$flake2Dir
+[[ -e $TEST_ROOT/result/hello ]]
+
+(! nix flake metadata --json hg+file://$flake2Dir | jq -e -r .revision)
+
+nix eval hg+file://$flake2Dir#expr
+
+nix eval hg+file://$flake2Dir#expr
+
+(! nix eval hg+file://$flake2Dir#expr --no-allow-dirty)
+
+(! nix flake metadata --json hg+file://$flake2Dir | jq -e -r .revision)
+
+hg commit --config ui.username=foobar@example.org $flake2Dir -m 'Add lock file'
+
+nix flake metadata --json hg+file://$flake2Dir --refresh | jq -e -r .revision
+nix flake metadata --json hg+file://$flake2Dir
+[[ $(nix flake metadata --json hg+file://$flake2Dir | jq -e -r .revCount) = 1 ]]
+
+nix build -o $TEST_ROOT/result hg+file://$flake2Dir --no-registries --no-allow-dirty
+nix build -o $TEST_ROOT/result hg+file://$flake2Dir --no-use-registries --no-allow-dirty
diff --git a/tests/flakes-run.sh b/tests/flakes/run.sh
index 88fc3e628..9fa51d1c7 100644
--- a/tests/flakes-run.sh
+++ b/tests/flakes/run.sh
@@ -1,8 +1,8 @@
-source common.sh
+source ../common.sh
clearStore
rm -rf $TEST_HOME/.cache $TEST_HOME/.config $TEST_HOME/.local
-cp ./shell-hello.nix ./config.nix $TEST_HOME
+cp ../shell-hello.nix ../config.nix $TEST_HOME
cd $TEST_HOME
cat <<EOF > flake.nix
diff --git a/tests/flake-searching.sh b/tests/flakes/search-root.sh
index db241f6d2..d8586dc8a 100644
--- a/tests/flake-searching.sh
+++ b/tests/flakes/search-root.sh
@@ -1,15 +1,11 @@
source common.sh
-if [[ -z $(type -p git) ]]; then
- echo "Git not installed; skipping flake search tests"
- exit 99
-fi
-
clearStore
-cp ./simple.nix ./simple.builder.sh ./config.nix $TEST_HOME
+writeSimpleFlake $TEST_HOME
cd $TEST_HOME
mkdir -p foo/subdir
+
echo '{ outputs = _: {}; }' > foo/flake.nix
cat <<EOF > flake.nix
{
@@ -43,10 +39,12 @@ nix build --override-input foo . || fail "flake should search up directories whe
sed "s,$PWD/foo,$PWD/foo/subdir,g" -i flake.nix
! nix build || fail "flake should not search upwards when part of inputs"
-pushd subdir
-git init
-for i in "${success[@]}" "${failure[@]}"; do
- ! nix build $i || fail "flake should not search past a git repository"
-done
-rm -rf .git
-popd
+if [[ -n $(type -p git) ]]; then
+ pushd subdir
+ git init
+ for i in "${success[@]}" "${failure[@]}"; do
+ ! nix build $i || fail "flake should not search past a git repository"
+ done
+ rm -rf .git
+ popd
+fi
diff --git a/tests/fmt.sh b/tests/fmt.sh
index bc05118ff..254681ca2 100644
--- a/tests/fmt.sh
+++ b/tests/fmt.sh
@@ -18,7 +18,12 @@ cat << EOF > flake.nix
with import ./config.nix;
mkDerivation {
name = "formatter";
- buildCommand = "mkdir -p \$out/bin; cp \${./fmt.simple.sh} \$out/bin/formatter";
+ buildCommand = ''
+ mkdir -p \$out/bin
+ echo "#! ${shell}" > \$out/bin/formatter
+ cat \${./fmt.simple.sh} >> \$out/bin/formatter
+ chmod +x \$out/bin/formatter
+ '';
};
};
}
diff --git a/tests/github-flakes.nix b/tests/github-flakes.nix
index 7ac397d81..fc481c7e3 100644
--- a/tests/github-flakes.nix
+++ b/tests/github-flakes.nix
@@ -7,7 +7,7 @@ with import (nixpkgs + "/nixos/lib/testing-python.nix") {
let
- # Generate a fake root CA and a fake github.com certificate.
+ # Generate a fake root CA and a fake api.github.com / channels.nixos.org certificate.
cert = pkgs.runCommand "cert" { buildInputs = [ pkgs.openssl ]; }
''
mkdir -p $out
@@ -18,7 +18,7 @@ let
openssl req -newkey rsa:2048 -nodes -keyout $out/server.key \
-subj "/C=CN/ST=Denial/L=Springfield/O=Dis/CN=github.com" -out server.csr
- openssl x509 -req -extfile <(printf "subjectAltName=DNS:api.github.com,DNS:github.com,DNS:raw.githubusercontent.com") \
+ openssl x509 -req -extfile <(printf "subjectAltName=DNS:api.github.com,DNS:channels.nixos.org") \
-days 36500 -in server.csr -CA $out/ca.crt -CAkey ca.key -CAcreateserial -out $out/server.crt
'';
@@ -67,7 +67,7 @@ makeTest (
name = "github-flakes";
nodes =
- { # Impersonate github.com and api.github.com.
+ {
github =
{ config, pkgs, ... }:
{ networking.firewall.allowedTCPPorts = [ 80 443 ];
@@ -77,12 +77,12 @@ makeTest (
services.httpd.extraConfig = ''
ErrorLog syslog:local6
'';
- services.httpd.virtualHosts."github.com" =
+ services.httpd.virtualHosts."channels.nixos.org" =
{ forceSSL = true;
sslServerKey = "${cert}/server.key";
sslServerCert = "${cert}/server.crt";
servedDirs =
- [ { urlPath = "/NixOS/flake-registry/raw/master";
+ [ { urlPath = "/";
dir = registry;
}
];
@@ -103,13 +103,13 @@ makeTest (
{ config, lib, pkgs, nodes, ... }:
{ virtualisation.writableStore = true;
virtualisation.diskSize = 2048;
- virtualisation.pathsInNixDB = [ pkgs.hello pkgs.fuse ];
+ virtualisation.additionalPaths = [ pkgs.hello pkgs.fuse ];
virtualisation.memorySize = 4096;
nix.binaryCaches = lib.mkForce [ ];
nix.extraOptions = "experimental-features = nix-command flakes";
environment.systemPackages = [ pkgs.jq ];
networking.hosts.${(builtins.head nodes.github.config.networking.interfaces.eth1.ipv4.addresses).address} =
- [ "github.com" "api.github.com" "raw.githubusercontent.com" ];
+ [ "channels.nixos.org" "api.github.com" ];
security.pki.certificateFiles = [ "${cert}/ca.crt" ];
};
};
@@ -123,7 +123,7 @@ makeTest (
github.wait_for_unit("httpd.service")
- client.succeed("curl -v https://github.com/ >&2")
+ client.succeed("curl -v https://api.github.com/ >&2")
client.succeed("nix registry list | grep nixpkgs")
rev = client.succeed("nix flake info nixpkgs --json | jq -r .revision")
diff --git a/tests/lang.sh b/tests/lang.sh
index 61bb444ba..c0b0fc58c 100644
--- a/tests/lang.sh
+++ b/tests/lang.sh
@@ -4,6 +4,9 @@ export TEST_VAR=foo # for eval-okay-getenv.nix
export NIX_REMOTE=dummy://
nix-instantiate --eval -E 'builtins.trace "Hello" 123' 2>&1 | grep -q Hello
+nix-instantiate --eval -E 'builtins.addErrorContext "Hello" 123' 2>&1
+nix-instantiate --trace-verbose --eval -E 'builtins.traceVerbose "Hello" 123' 2>&1 | grep -q Hello
+(! nix-instantiate --eval -E 'builtins.traceVerbose "Hello" 123' 2>&1 | grep -q Hello)
(! nix-instantiate --show-trace --eval -E 'builtins.addErrorContext "Hello" 123' 2>&1 | grep -q Hello)
nix-instantiate --show-trace --eval -E 'builtins.addErrorContext "Hello" (throw "Foo")' 2>&1 | grep -q Hello
@@ -14,7 +17,7 @@ fail=0
for i in lang/parse-fail-*.nix; do
echo "parsing $i (should fail)";
i=$(basename $i .nix)
- if nix-instantiate --parse - < lang/$i.nix; then
+ if ! expect 1 nix-instantiate --parse - < lang/$i.nix; then
echo "FAIL: $i shouldn't parse"
fail=1
fi
@@ -23,7 +26,7 @@ done
for i in lang/parse-okay-*.nix; do
echo "parsing $i (should succeed)";
i=$(basename $i .nix)
- if ! nix-instantiate --parse - < lang/$i.nix > lang/$i.out; then
+ if ! expect 0 nix-instantiate --parse - < lang/$i.nix > lang/$i.out; then
echo "FAIL: $i should parse"
fail=1
fi
@@ -32,7 +35,7 @@ done
for i in lang/eval-fail-*.nix; do
echo "evaluating $i (should fail)";
i=$(basename $i .nix)
- if nix-instantiate --eval lang/$i.nix; then
+ if ! expect 1 nix-instantiate --eval lang/$i.nix; then
echo "FAIL: $i shouldn't evaluate"
fail=1
fi
@@ -47,7 +50,7 @@ for i in lang/eval-okay-*.nix; do
if test -e lang/$i.flags; then
flags=$(cat lang/$i.flags)
fi
- if ! NIX_PATH=lang/dir3:lang/dir4 nix-instantiate $flags --eval --strict lang/$i.nix > lang/$i.out; then
+ if ! expect 0 env NIX_PATH=lang/dir3:lang/dir4 nix-instantiate $flags --eval --strict lang/$i.nix > lang/$i.out; then
echo "FAIL: $i should evaluate"
fail=1
elif ! diff lang/$i.out lang/$i.exp; then
@@ -57,7 +60,7 @@ for i in lang/eval-okay-*.nix; do
fi
if test -e lang/$i.exp.xml; then
- if ! nix-instantiate --eval --xml --no-location --strict \
+ if ! expect 0 nix-instantiate --eval --xml --no-location --strict \
lang/$i.nix > lang/$i.out.xml; then
echo "FAIL: $i should evaluate"
fail=1
diff --git a/tests/lang/eval-okay-fromjson.nix b/tests/lang/eval-okay-fromjson.nix
index 102ee82b5..e1c0f86cc 100644
--- a/tests/lang/eval-okay-fromjson.nix
+++ b/tests/lang/eval-okay-fromjson.nix
@@ -1,36 +1,35 @@
-# RFC 7159, section 13.
builtins.fromJSON
''
{
- "Image": {
- "Width": 800,
- "Height": 600,
- "Title": "View from 15th Floor",
- "Thumbnail": {
- "Url": "http://www.example.com/image/481989943",
- "Height": 125,
- "Width": 100
+ "Video": {
+ "Title": "The Penguin Chronicles",
+ "Width": 1920,
+ "Height": 1080,
+ "EmbeddedData": [3.14159, 23493,null, true ,false, -10],
+ "Thumb": {
+ "Url": "http://www.example.com/video/5678931",
+ "Width": 200,
+ "Height": 250
},
- "Animated" : false,
- "IDs": [116, 943, 234, 38793, true ,false,null, -100],
- "Latitude": 37.7668,
- "Longitude": -122.3959
+ "Subtitle" : false,
+ "Latitude": 46.2051,
+ "Longitude": 6.0723
}
}
''
==
- { Image =
- { Width = 800;
- Height = 600;
- Title = "View from 15th Floor";
- Thumbnail =
- { Url = http://www.example.com/image/481989943;
- Height = 125;
- Width = 100;
+ { Video =
+ { Title = "The Penguin Chronicles";
+ Width = 1920;
+ Height = 1080;
+ EmbeddedData = [ 3.14159 23493 null true false (0-10) ];
+ Thumb =
+ { Url = "http://www.example.com/video/5678931";
+ Width = 200;
+ Height = 250;
};
- Animated = false;
- IDs = [ 116 943 234 38793 true false null (0-100) ];
- Latitude = 37.7668;
- Longitude = -122.3959;
+ Subtitle = false;
+ Latitude = 46.2051;
+ Longitude = 6.0723;
};
}
diff --git a/tests/lang/parse-fail-eof-in-string.nix b/tests/lang/parse-fail-eof-in-string.nix
new file mode 100644
index 000000000..19775d2ec
--- /dev/null
+++ b/tests/lang/parse-fail-eof-in-string.nix
@@ -0,0 +1,3 @@
+# https://github.com/NixOS/nix/issues/6562
+# Note that this file must not end with a newline.
+a 1"$ \ No newline at end of file
diff --git a/tests/local.mk b/tests/local.mk
index e3c4ff4eb..5e48ceae1 100644
--- a/tests/local.mk
+++ b/tests/local.mk
@@ -1,6 +1,12 @@
nix_tests = \
- flakes.sh \
- flakes-run.sh \
+ flakes/flakes.sh \
+ flakes/run.sh \
+ flakes/mercurial.sh \
+ flakes/circular.sh \
+ flakes/init.sh \
+ flakes/follow-paths.sh \
+ flakes/bundle.sh \
+ flakes/check.sh \
ca/gc.sh \
gc.sh \
remote-store.sh \
@@ -23,6 +29,7 @@ nix_tests = \
fetchGit.sh \
fetchurl.sh \
fetchPath.sh \
+ fetchTree-file.sh \
simple.sh \
referrers.sh \
optimise-store.sh \
@@ -43,7 +50,7 @@ nix_tests = \
secure-drv-outputs.sh \
restricted.sh \
fetchGitSubmodules.sh \
- flake-searching.sh \
+ flakes/search-root.sh \
ca/duplicate-realisation-in-closure.sh \
readfile-context.sh \
nix-channel.sh \
@@ -79,7 +86,7 @@ nix_tests = \
nix-copy-ssh.sh \
post-hook.sh \
function-trace.sh \
- flake-local-settings.sh \
+ flakes/config.sh \
fmt.sh \
eval-store.sh \
why-depends.sh \
@@ -101,6 +108,7 @@ nix_tests = \
suggestions.sh \
store-ping.sh \
fetchClosure.sh \
+ completions.sh \
impure-derivations.sh
ifeq ($(HAVE_LIBCPUID), 1)
@@ -113,4 +121,8 @@ tests-environment = NIX_REMOTE= $(bash) -e
clean-files += $(d)/common.sh $(d)/config.nix $(d)/ca/config.nix
-test-deps += tests/common.sh tests/config.nix tests/ca/config.nix tests/plugins/libplugintest.$(SO_EXT)
+test-deps += tests/common.sh tests/config.nix tests/ca/config.nix
+
+ifeq ($(BUILD_SHARED_LIBS), 1)
+ test-deps += tests/plugins/libplugintest.$(SO_EXT)
+endif
diff --git a/tests/multiple-outputs.nix b/tests/multiple-outputs.nix
index b915493f7..624a5dade 100644
--- a/tests/multiple-outputs.nix
+++ b/tests/multiple-outputs.nix
@@ -80,4 +80,11 @@ rec {
'';
}).a;
+ e = mkDerivation {
+ name = "multiple-outputs-e";
+ outputs = [ "a" "b" "c" ];
+ meta.outputsToInstall = [ "a" "b" ];
+ buildCommand = "mkdir $a $b $c";
+ };
+
}
diff --git a/tests/nix-copy-closure.nix b/tests/nix-copy-closure.nix
index 1b63a3fca..ba8b2cfc9 100644
--- a/tests/nix-copy-closure.nix
+++ b/tests/nix-copy-closure.nix
@@ -14,7 +14,7 @@ makeTest (let pkgA = pkgs.cowsay; pkgB = pkgs.wget; pkgC = pkgs.hello; pkgD = pk
{ client =
{ config, lib, pkgs, ... }:
{ virtualisation.writableStore = true;
- virtualisation.pathsInNixDB = [ pkgA pkgD.drvPath ];
+ virtualisation.additionalPaths = [ pkgA pkgD.drvPath ];
nix.binaryCaches = lib.mkForce [ ];
};
@@ -22,7 +22,7 @@ makeTest (let pkgA = pkgs.cowsay; pkgB = pkgs.wget; pkgC = pkgs.hello; pkgD = pk
{ config, pkgs, ... }:
{ services.openssh.enable = true;
virtualisation.writableStore = true;
- virtualisation.pathsInNixDB = [ pkgB pkgC ];
+ virtualisation.additionalPaths = [ pkgB pkgC ];
};
};
diff --git a/tests/nix-profile.sh b/tests/nix-profile.sh
index fad62b993..7ba3235fa 100644
--- a/tests/nix-profile.sh
+++ b/tests/nix-profile.sh
@@ -17,6 +17,7 @@ cat > $flake1Dir/flake.nix <<EOF
outputs = { self }: with import ./config.nix; rec {
packages.$system.default = mkDerivation {
name = "profile-test-\${builtins.readFile ./version}";
+ outputs = [ "out" "man" "dev" ];
builder = builtins.toFile "builder.sh"
''
mkdir -p \$out/bin
@@ -26,10 +27,13 @@ cat > $flake1Dir/flake.nix <<EOF
EOF
chmod +x \$out/bin/hello
echo DONE
+ mkdir -p \$man/share/man
+ mkdir -p \$dev/include
'';
__contentAddressed = import ./ca.nix;
outputHashMode = "recursive";
outputHashAlgo = "sha256";
+ meta.outputsToInstall = [ "out" "man" ];
};
};
}
@@ -46,6 +50,8 @@ nix-env -f ./user-envs.nix -i foo-1.0
nix profile list | grep '0 - - .*-foo-1.0'
nix profile install $flake1Dir -L
[[ $($TEST_HOME/.nix-profile/bin/hello) = "Hello World" ]]
+[ -e $TEST_HOME/.nix-profile/share/man ]
+(! [ -e $TEST_HOME/.nix-profile/include ])
nix profile history
nix profile history | grep "packages.$system.default: ∅ -> 1.0"
nix profile diff-closures | grep 'env-manifest.nix: ε → ∅'
@@ -55,7 +61,7 @@ printf NixOS > $flake1Dir/who
printf 2.0 > $flake1Dir/version
nix profile upgrade 1
[[ $($TEST_HOME/.nix-profile/bin/hello) = "Hello NixOS" ]]
-nix profile history | grep "packages.$system.default: 1.0 -> 2.0"
+nix profile history | grep "packages.$system.default: 1.0, 1.0-man -> 2.0, 2.0-man"
# Test 'history', 'diff-closures'.
nix profile diff-closures
@@ -86,7 +92,7 @@ nix profile wipe-history
printf true > $flake1Dir/ca.nix
printf 3.0 > $flake1Dir/version
nix profile upgrade 0
-nix profile history | grep "packages.$system.default: 1.0 -> 3.0"
+nix profile history | grep "packages.$system.default: 1.0, 1.0-man -> 3.0, 3.0-man"
# Test new install of CA package.
nix profile remove 0
@@ -95,3 +101,40 @@ printf Utrecht > $flake1Dir/who
nix profile install $flake1Dir
[[ $($TEST_HOME/.nix-profile/bin/hello) = "Hello Utrecht" ]]
[[ $(nix path-info --json $(realpath $TEST_HOME/.nix-profile/bin/hello) | jq -r .[].ca) =~ fixed:r:sha256: ]]
+
+# Override the outputs.
+nix profile remove 0 1
+nix profile install "$flake1Dir^*"
+[[ $($TEST_HOME/.nix-profile/bin/hello) = "Hello Utrecht" ]]
+[ -e $TEST_HOME/.nix-profile/share/man ]
+[ -e $TEST_HOME/.nix-profile/include ]
+
+printf Nix > $flake1Dir/who
+nix profile upgrade 0
+[[ $($TEST_HOME/.nix-profile/bin/hello) = "Hello Nix" ]]
+[ -e $TEST_HOME/.nix-profile/share/man ]
+[ -e $TEST_HOME/.nix-profile/include ]
+
+nix profile remove 0
+nix profile install "$flake1Dir^man"
+(! [ -e $TEST_HOME/.nix-profile/bin/hello ])
+[ -e $TEST_HOME/.nix-profile/share/man ]
+(! [ -e $TEST_HOME/.nix-profile/include ])
+
+# test priority
+nix profile remove 0
+
+# Make another flake.
+flake2Dir=$TEST_ROOT/flake2
+printf World > $flake1Dir/who
+cp -r $flake1Dir $flake2Dir
+printf World2 > $flake2Dir/who
+
+nix profile install $flake1Dir
+[[ $($TEST_HOME/.nix-profile/bin/hello) = "Hello World" ]]
+nix profile install $flake2Dir --priority 100
+[[ $($TEST_HOME/.nix-profile/bin/hello) = "Hello World" ]]
+nix profile install $flake2Dir --priority 0
+[[ $($TEST_HOME/.nix-profile/bin/hello) = "Hello World2" ]]
+# nix profile install $flake1Dir --priority 100
+# [[ $($TEST_HOME/.nix-profile/bin/hello) = "Hello World" ]]
diff --git a/tests/nix-shell.sh b/tests/nix-shell.sh
index 3241d7a0f..f291c6f79 100644
--- a/tests/nix-shell.sh
+++ b/tests/nix-shell.sh
@@ -102,3 +102,11 @@ source <(nix print-dev-env -f "$shellDotNix" shellDrv)
[[ ${arr2[1]} = $'\n' ]]
[[ ${arr2[2]} = $'x\ny' ]]
[[ $(fun) = blabla ]]
+
+# Test nix-shell with ellipsis and no `inNixShell` argument (for backwards compat with old nixpkgs)
+cat >$TEST_ROOT/shell-ellipsis.nix <<EOF
+{ system ? "x86_64-linux", ... }@args:
+assert (!(args ? inNixShell));
+(import $shellDotNix { }).shellDrv
+EOF
+nix-shell $TEST_ROOT/shell-ellipsis.nix --run "true"
diff --git a/tests/nss-preload.nix b/tests/nss-preload.nix
index 2610d2b30..64b655ba2 100644
--- a/tests/nss-preload.nix
+++ b/tests/nss-preload.nix
@@ -5,6 +5,42 @@ with import (nixpkgs + "/nixos/lib/testing-python.nix") {
extraConfigurations = [ { nixpkgs.overlays = [ overlay ]; } ];
};
+let
+ nix-fetch = pkgs.writeText "fetch.nix" ''
+ derivation {
+      # This derivation is a copy of what is available over at
+ # nix.git:corepkgs/fetchurl.nix
+ builder = "builtin:fetchurl";
+
+ # We're going to fetch data from the http_dns instance created before
+ # we expect the content to be the same as the content available there.
+ # ```
+ # $ nix-hash --type sha256 --to-base32 $(echo "hello world" | sha256sum | cut -d " " -f 1)
+ # 0ix4jahrkll5zg01wandq78jw3ab30q4nscph67rniqg5x7r0j59
+ # ```
+ outputHash = "0ix4jahrkll5zg01wandq78jw3ab30q4nscph67rniqg5x7r0j59";
+ outputHashAlgo = "sha256";
+ outputHashMode = "flat";
+
+ name = "example.com";
+ url = "http://example.com";
+
+ unpack = false;
+ executable = false;
+
+ system = "builtin";
+
+ preferLocalBuild = true;
+
+ impureEnvVars = [
+ "http_proxy" "https_proxy" "ftp_proxy" "all_proxy" "no_proxy"
+ ];
+
+ urls = [ "http://example.com" ];
+ }
+ '';
+in
+
makeTest (
rec {
@@ -68,40 +104,6 @@ rec {
};
};
- nix-fetch = pkgs.writeText "fetch.nix" ''
- derivation {
- # This derivation is an copy from what is available over at
- # nix.git:corepkgs/fetchurl.nix
- builder = "builtin:fetchurl";
-
- # We're going to fetch data from the http_dns instance created before
- # we expect the content to be the same as the content available there.
- # ```
- # $ nix-hash --type sha256 --to-base32 $(echo "hello world" | sha256sum | cut -d " " -f 1)
- # 0ix4jahrkll5zg01wandq78jw3ab30q4nscph67rniqg5x7r0j59
- # ```
- outputHash = "0ix4jahrkll5zg01wandq78jw3ab30q4nscph67rniqg5x7r0j59";
- outputHashAlgo = "sha256";
- outputHashMode = "flat";
-
- name = "example.com";
- url = "http://example.com";
-
- unpack = false;
- executable = false;
-
- system = "builtin";
-
- preferLocalBuild = true;
-
- impureEnvVars = [
- "http_proxy" "https_proxy" "ftp_proxy" "all_proxy" "no_proxy"
- ];
-
- urls = [ "http://example.com" ];
- }
- '';
-
testScript = { nodes, ... }: ''
http_dns.wait_for_unit("nginx")
http_dns.wait_for_open_port(80)
diff --git a/tests/plugins.sh b/tests/plugins.sh
index e22bf4408..6e278ad9d 100644
--- a/tests/plugins.sh
+++ b/tests/plugins.sh
@@ -2,6 +2,11 @@ source common.sh
set -o pipefail
+if [[ $BUILD_SHARED_LIBS != 1 ]]; then
+ echo "plugins are not supported"
+ exit 99
+fi
+
res=$(nix --option setting-set true --option plugin-files $PWD/plugins/libplugintest* eval --expr builtins.anotherNull)
[ "$res"x = "nullx" ]
diff --git a/tests/post-hook.sh b/tests/post-hook.sh
index 049e40749..4eff5f511 100644
--- a/tests/post-hook.sh
+++ b/tests/post-hook.sh
@@ -9,12 +9,12 @@ echo 'require-sigs = false' >> $NIX_CONF_DIR/nix.conf
restartDaemon
-# Build the dependencies and push them to the remote store
+# Build the dependencies and push them to the remote store.
nix-build -o $TEST_ROOT/result dependencies.nix --post-build-hook $PWD/push-to-store.sh
clearStore
-# Ensure that we the remote store contains both the runtime and buildtime
-# closure of what we've just built
+# Ensure that the remote store contains both the runtime and build-time
+# closure of what we've just built.
nix copy --from "$REMOTE_STORE" --no-require-sigs -f dependencies.nix
nix copy --from "$REMOTE_STORE" --no-require-sigs -f dependencies.nix input1_drv
diff --git a/tests/pure-eval.sh b/tests/pure-eval.sh
index 1a4568ea6..b83ab8afe 100644
--- a/tests/pure-eval.sh
+++ b/tests/pure-eval.sh
@@ -30,3 +30,5 @@ nix eval --store dummy:// --write-to $TEST_ROOT/eval-out --expr '{ x = "foo" + "
rm -rf $TEST_ROOT/eval-out
(! nix eval --store dummy:// --write-to $TEST_ROOT/eval-out --expr '{ "." = "bla"; }')
+
+(! nix eval --expr '~/foo')
diff --git a/tests/push-to-store.sh b/tests/push-to-store.sh
index 25352c751..b1495c9e2 100755
--- a/tests/push-to-store.sh
+++ b/tests/push-to-store.sh
@@ -1,6 +1,10 @@
#!/bin/sh
set -x
+set -e
+
+[ -n "$OUT_PATHS" ]
+[ -n "$DRV_PATH" ]
echo Pushing "$OUT_PATHS" to "$REMOTE_STORE"
printf "%s" "$DRV_PATH" | xargs nix copy --to "$REMOTE_STORE" --no-require-sigs
diff --git a/tests/remote-builds.nix b/tests/remote-builds.nix
index b9e7352c0..7b2e6f708 100644
--- a/tests/remote-builds.nix
+++ b/tests/remote-builds.nix
@@ -61,7 +61,7 @@ in
}
];
virtualisation.writableStore = true;
- virtualisation.pathsInNixDB = [ config.system.build.extraUtils ];
+ virtualisation.additionalPaths = [ config.system.build.extraUtils ];
nix.binaryCaches = lib.mkForce [ ];
programs.ssh.extraConfig = "ConnectTimeout 30";
};
diff --git a/tests/repl.sh b/tests/repl.sh
index b6937b9e9..c555560cc 100644
--- a/tests/repl.sh
+++ b/tests/repl.sh
@@ -42,6 +42,11 @@ testRepl () {
echo "$replOutput"
echo "$replOutput" | grep -qs "while evaluating the file" \
|| fail "nix repl --show-trace doesn't show the trace"
+
+ nix repl "${nixArgs[@]}" --option pure-eval true 2>&1 <<< "builtins.currentSystem" \
+ | grep "attribute 'currentSystem' missing"
+ nix repl "${nixArgs[@]}" 2>&1 <<< "builtins.currentSystem" \
+ | grep "$(nix-instantiate --eval -E 'builtins.currentSystem')"
}
# Simple test, try building a drv
@@ -50,15 +55,17 @@ testRepl
testRepl --store "$TEST_ROOT/store?real=$NIX_STORE_DIR"
testReplResponse () {
- local response="$(nix repl <<< "$1")"
- echo "$response" | grep -qs "$2" \
+ local commands="$1"; shift
+ local expectedResponse="$1"; shift
+ local response="$(nix repl "$@" <<< "$commands")"
+ echo "$response" | grep -qs "$expectedResponse" \
|| fail "repl command set:
-$1
+$commands
does not respond with:
-$2
+$expectedResponse
but with:
@@ -71,3 +78,48 @@ testReplResponse '
:a { a = "2"; }
"result: ${a}"
' "result: 2"
+
+testReplResponse '
+drvPath
+' '".*-simple.drv"' \
+$testDir/simple.nix
+
+testReplResponse '
+drvPath
+' '".*-simple.drv"' \
+--file $testDir/simple.nix --experimental-features 'ca-derivations'
+
+testReplResponse '
+drvPath
+' '".*-simple.drv"' \
+--file $testDir/simple.nix --extra-experimental-features 'repl-flake ca-derivations'
+
+mkdir -p flake && cat <<EOF > flake/flake.nix
+{
+ outputs = { self }: {
+ foo = 1;
+ bar.baz = 2;
+
+ changingThing = "beforeChange";
+ };
+}
+EOF
+testReplResponse '
+foo + baz
+' "3" \
+ ./flake ./flake\#bar --experimental-features 'flakes repl-flake'
+
+# Test the `:reload` mechanism with flakes:
+# - Eval `./flake#changingThing`
+# - Modify the flake
+# - Re-eval it
+# - Check that the result has changed
+replResult=$( (
+echo "changingThing"
+sleep 1 # Give the repl time to eval 'changingThing'
+sed -i 's/beforeChange/afterChange/' flake/flake.nix
+echo ":reload"
+echo "changingThing"
+) | nix repl ./flake --experimental-features 'flakes repl-flake')
+echo "$replResult" | grep -qs beforeChange
+echo "$replResult" | grep -qs afterChange
diff --git a/tests/search.sh b/tests/search.sh
index 52e12f381..1a98f5b49 100644
--- a/tests/search.sh
+++ b/tests/search.sh
@@ -28,11 +28,19 @@ nix search -f search.nix '' |grep -q hello
e=$'\x1b' # grep doesn't support \e, \033 or even \x1b
# Multiple overlapping regexes
-(( $(nix search -f search.nix '' 'oo' 'foo' 'oo' | grep "$e\[32;1mfoo$e\\[0;1m" | wc -l) == 1 ))
-(( $(nix search -f search.nix '' 'broken b' 'en bar' | grep "$e\[32;1mbroken bar$e\\[0m" | wc -l) == 1 ))
+(( $(nix search -f search.nix '' 'oo' 'foo' 'oo' | grep -c "$e\[32;1mfoo$e\\[0;1m") == 1 ))
+(( $(nix search -f search.nix '' 'broken b' 'en bar' | grep -c "$e\[32;1mbroken bar$e\\[0m") == 1 ))
# Multiple matches
# Searching for 'o' should yield the 'o' in 'broken bar', the 'oo' in foo and 'o' in hello
-(( $(nix search -f search.nix '' 'o' | grep -Eo "$e\[32;1mo{1,2}$e\[(0|0;1)m" | wc -l) == 3 ))
+(( $(nix search -f search.nix '' 'o' | grep -Eoc "$e\[32;1mo{1,2}$e\[(0|0;1)m") == 3 ))
# Searching for 'b' should yield the 'b' in bar and the two 'b's in 'broken bar'
+# NOTE: This does not work with `grep -c` because it counts the two 'b's in 'broken bar' as one matched line
(( $(nix search -f search.nix '' 'b' | grep -Eo "$e\[32;1mb$e\[(0|0;1)m" | wc -l) == 3 ))
+
+## Tests for --exclude
+(( $(nix search -f search.nix -e hello | grep -c hello) == 0 ))
+
+(( $(nix search -f search.nix foo --exclude 'foo|bar' | grep -Ec 'foo|bar') == 0 ))
+(( $(nix search -f search.nix foo -e foo --exclude bar | grep -Ec 'foo|bar') == 0 ))
+[[ $(nix search -f search.nix -e bar --json | jq -c 'keys') == '["foo","hello"]' ]]
diff --git a/tests/setuid.nix b/tests/setuid.nix
index 35eb304ed..a83b1fc3a 100644
--- a/tests/setuid.nix
+++ b/tests/setuid.nix
@@ -10,12 +10,12 @@ with import (nixpkgs + "/nixos/lib/testing-python.nix") {
makeTest {
name = "setuid";
- machine =
+ nodes.machine =
{ config, lib, pkgs, ... }:
{ virtualisation.writableStore = true;
nix.binaryCaches = lib.mkForce [ ];
nix.nixPath = [ "nixpkgs=${lib.cleanSource pkgs.path}" ];
- virtualisation.pathsInNixDB = [ pkgs.stdenv pkgs.pkgsi686Linux.stdenv ];
+ virtualisation.additionalPaths = [ pkgs.stdenv pkgs.pkgsi686Linux.stdenv ];
};
testScript = { nodes }: ''
diff --git a/tests/shell-hello.nix b/tests/shell-hello.nix
index 77dcbd2a9..3fdd3501d 100644
--- a/tests/shell-hello.nix
+++ b/tests/shell-hello.nix
@@ -3,15 +3,24 @@ with import ./config.nix;
{
hello = mkDerivation {
name = "hello";
+ outputs = [ "out" "dev" ];
+ meta.outputsToInstall = [ "out" ];
buildCommand =
''
- mkdir -p $out/bin
+ mkdir -p $out/bin $dev/bin
+
cat > $out/bin/hello <<EOF
#! ${shell}
who=\$1
echo "Hello \''${who:-World} from $out/bin/hello"
EOF
chmod +x $out/bin/hello
+
+ cat > $dev/bin/hello2 <<EOF
+ #! ${shell}
+ echo "Hello2"
+ EOF
+ chmod +x $dev/bin/hello2
'';
};
}
diff --git a/tests/shell.sh b/tests/shell.sh
index 2b85bb337..6a80e8385 100644
--- a/tests/shell.sh
+++ b/tests/shell.sh
@@ -6,6 +6,10 @@ clearCache
nix shell -f shell-hello.nix hello -c hello | grep 'Hello World'
nix shell -f shell-hello.nix hello -c hello NixOS | grep 'Hello NixOS'
+# Test output selection.
+nix shell -f shell-hello.nix hello^dev -c hello2 | grep 'Hello2'
+nix shell -f shell-hello.nix 'hello^*' -c hello2 | grep 'Hello2'
+
if ! canUseSandbox; then exit 99; fi
chmod -R u+w $TEST_ROOT/store0 || true
diff --git a/tests/sourcehut-flakes.nix b/tests/sourcehut-flakes.nix
index 6a1930904..daa259dd6 100644
--- a/tests/sourcehut-flakes.nix
+++ b/tests/sourcehut-flakes.nix
@@ -59,7 +59,7 @@ let
echo 'ref: refs/heads/master' > $out/HEAD
mkdir -p $out/info
- echo -e '${nixpkgs.rev}\trefs/heads/master' > $out/info/refs
+ echo -e '${nixpkgs.rev}\trefs/heads/master\n${nixpkgs.rev}\trefs/tags/foo-bar' > $out/info/refs
'';
in
@@ -106,7 +106,7 @@ makeTest (
{
virtualisation.writableStore = true;
virtualisation.diskSize = 2048;
- virtualisation.pathsInNixDB = [ pkgs.hello pkgs.fuse ];
+ virtualisation.additionalPaths = [ pkgs.hello pkgs.fuse ];
virtualisation.memorySize = 4096;
nix.binaryCaches = lib.mkForce [ ];
nix.extraOptions = ''
@@ -132,6 +132,17 @@ makeTest (
client.succeed("curl -v https://git.sr.ht/ >&2")
client.succeed("nix registry list | grep nixpkgs")
+ # Test that it resolves HEAD
+ rev = client.succeed("nix flake info sourcehut:~NixOS/nixpkgs --json | jq -r .revision")
+ assert rev.strip() == "${nixpkgs.rev}", "revision mismatch"
+ # Test that it resolves branches
+ rev = client.succeed("nix flake info sourcehut:~NixOS/nixpkgs/master --json | jq -r .revision")
+ assert rev.strip() == "${nixpkgs.rev}", "revision mismatch"
+ # Test that it resolves tags
+ rev = client.succeed("nix flake info sourcehut:~NixOS/nixpkgs/foo-bar --json | jq -r .revision")
+ assert rev.strip() == "${nixpkgs.rev}", "revision mismatch"
+
+ # Registry and pinning test
rev = client.succeed("nix flake info nixpkgs --json | jq -r .revision")
assert rev.strip() == "${nixpkgs.rev}", "revision mismatch"