-rw-r--r--  .github/PULL_REQUEST_TEMPLATE.md | 5
-rw-r--r--  .github/PULL_REQUEST_TEMPLATE/pull_request_template.md | 11
-rw-r--r--  .github/labeler.yml | 6
-rw-r--r--  .github/workflows/ci.yml | 14
-rw-r--r--  .github/workflows/labels.yml | 24
-rw-r--r--  .version | 2
-rw-r--r--  CONTRIBUTING.md | 61
-rw-r--r--  Makefile | 18
-rw-r--r--  Makefile.config.in | 3
-rw-r--r--  configure.ac | 17
-rw-r--r--  default.nix | 13
-rw-r--r--  doc/internal-api/.gitignore | 3
-rw-r--r--  doc/internal-api/doxygen.cfg.in | 63
-rw-r--r--  doc/internal-api/local.mk | 19
-rw-r--r--  doc/manual/generate-manpage.nix | 41
-rw-r--r--  doc/manual/generate-options.nix | 41
-rw-r--r--  doc/manual/local.mk | 115
-rw-r--r--  doc/manual/src/SUMMARY.md.in | 35
-rw-r--r--  doc/manual/src/command-ref/env-common.md | 49
-rw-r--r--  doc/manual/src/command-ref/nix-build.md | 11
-rw-r--r--  doc/manual/src/command-ref/nix-channel.md | 8
-rw-r--r--  doc/manual/src/command-ref/nix-collect-garbage.md | 6
-rw-r--r--  doc/manual/src/command-ref/nix-copy-closure.md | 4
-rw-r--r--  doc/manual/src/command-ref/nix-env.md | 816
-rw-r--r--  doc/manual/src/command-ref/nix-env/delete-generations.md | 46
-rw-r--r--  doc/manual/src/command-ref/nix-env/env-common.md | 6
-rw-r--r--  doc/manual/src/command-ref/nix-env/install.md | 187
-rw-r--r--  doc/manual/src/command-ref/nix-env/list-generations.md | 33
-rw-r--r--  doc/manual/src/command-ref/nix-env/opt-common.md | 35
-rw-r--r--  doc/manual/src/command-ref/nix-env/query.md | 215
-rw-r--r--  doc/manual/src/command-ref/nix-env/rollback.md | 34
-rw-r--r--  doc/manual/src/command-ref/nix-env/set-flag.md | 82
-rw-r--r--  doc/manual/src/command-ref/nix-env/set.md | 30
-rw-r--r--  doc/manual/src/command-ref/nix-env/switch-generation.md | 33
-rw-r--r--  doc/manual/src/command-ref/nix-env/switch-profile.md | 26
-rw-r--r--  doc/manual/src/command-ref/nix-env/uninstall.md | 28
-rw-r--r--  doc/manual/src/command-ref/nix-env/upgrade.md | 141
-rw-r--r--  doc/manual/src/command-ref/nix-hash.md | 46
-rw-r--r--  doc/manual/src/command-ref/nix-instantiate.md | 4
-rw-r--r--  doc/manual/src/command-ref/nix-shell.md | 4
-rw-r--r--  doc/manual/src/command-ref/nix-store.md | 862
-rw-r--r--  doc/manual/src/command-ref/nix-store/add-fixed.md | 35
-rw-r--r--  doc/manual/src/command-ref/nix-store/add.md | 25
-rw-r--r--  doc/manual/src/command-ref/nix-store/delete.md | 33
-rw-r--r--  doc/manual/src/command-ref/nix-store/dump-db.md | 26
-rw-r--r--  doc/manual/src/command-ref/nix-store/dump.md | 40
-rw-r--r--  doc/manual/src/command-ref/nix-store/export.md | 41
-rw-r--r--  doc/manual/src/command-ref/nix-store/gc.md | 72
-rw-r--r--  doc/manual/src/command-ref/nix-store/generate-binary-cache-key.md | 29
-rw-r--r--  doc/manual/src/command-ref/nix-store/import.md | 21
-rw-r--r--  doc/manual/src/command-ref/nix-store/load-db.md | 18
-rw-r--r--  doc/manual/src/command-ref/nix-store/opt-common.md | 36
-rw-r--r--  doc/manual/src/command-ref/nix-store/optimise.md | 40
-rw-r--r--  doc/manual/src/command-ref/nix-store/print-env.md | 31
-rw-r--r--  doc/manual/src/command-ref/nix-store/query.md | 220
-rw-r--r--  doc/manual/src/command-ref/nix-store/read-log.md | 38
-rw-r--r--  doc/manual/src/command-ref/nix-store/realise.md | 118
-rw-r--r--  doc/manual/src/command-ref/nix-store/repair-path.md | 35
-rw-r--r--  doc/manual/src/command-ref/nix-store/restore.md | 18
-rw-r--r--  doc/manual/src/command-ref/nix-store/serve.md | 38
-rw-r--r--  doc/manual/src/command-ref/nix-store/verify-path.md | 29
-rw-r--r--  doc/manual/src/command-ref/nix-store/verify.md | 36
-rw-r--r--  doc/manual/src/command-ref/opt-common.md | 48
-rw-r--r--  doc/manual/src/contributing/hacking.md | 32
-rw-r--r--  doc/manual/src/glossary.md | 24
-rw-r--r--  doc/manual/src/installation/env-variables.md | 11
-rw-r--r--  doc/manual/src/installation/prerequisites-source.md | 5
-rw-r--r--  doc/manual/src/language/advanced-attributes.md | 3
-rw-r--r--  doc/manual/src/language/operators.md | 4
-rw-r--r--  doc/manual/src/language/values.md | 2
-rw-r--r--  doc/manual/src/package-management/s3-substituter.md | 34
-rw-r--r--  doc/manual/src/release-notes/rl-next.md | 23
-rw-r--r--  doc/manual/utils.nix | 42
-rw-r--r--  docker.nix | 12
-rw-r--r--  flake.lock | 17
-rw-r--r--  flake.nix | 111
-rw-r--r--  local.mk | 4
-rw-r--r--  maintainers/README.md | 29
-rw-r--r--  maintainers/release-process.md | 3
-rw-r--r--  misc/launchd/org.nixos.nix-daemon.plist.in | 2
-rwxr-xr-x  mk/debug-test.sh | 2
-rw-r--r--  mk/disable-tests.mk | 12
-rw-r--r--  mk/libraries.mk | 2
-rw-r--r--  mk/patterns.mk | 4
-rwxr-xr-x  mk/run-test.sh | 17
-rw-r--r--  scripts/nix-profile-daemon.sh.in | 2
-rw-r--r--  scripts/nix-profile.sh.in | 2
-rw-r--r--  src/build-remote/build-remote.cc | 6
-rw-r--r--  src/libcmd/command-installable-value.cc | 11
-rw-r--r--  src/libcmd/command-installable-value.hh | 23
-rw-r--r--  src/libcmd/command.cc | 6
-rw-r--r--  src/libcmd/command.hh | 62
-rw-r--r--  src/libcmd/common-eval-args.cc | 8
-rw-r--r--  src/libcmd/common-eval-args.hh | 1
-rw-r--r--  src/libcmd/editor-for.hh | 1
-rw-r--r--  src/libcmd/installable-attr-path.cc | 4
-rw-r--r--  src/libcmd/installable-attr-path.hh | 3
-rw-r--r--  src/libcmd/installable-derived-path.cc | 5
-rw-r--r--  src/libcmd/installable-derived-path.hh | 1
-rw-r--r--  src/libcmd/installable-flake.cc | 29
-rw-r--r--  src/libcmd/installable-flake.hh | 31
-rw-r--r--  src/libcmd/installable-value.cc | 44
-rw-r--r--  src/libcmd/installable-value.hh | 94
-rw-r--r--  src/libcmd/installables.cc | 135
-rw-r--r--  src/libcmd/installables.hh | 173
-rw-r--r--  src/libcmd/legacy.hh | 1
-rw-r--r--  src/libcmd/markdown.hh | 3
-rw-r--r--  src/libcmd/repl.cc | 2
-rw-r--r--  src/libcmd/repl.hh | 1
-rw-r--r--  src/libexpr/attr-path.hh | 1
-rw-r--r--  src/libexpr/attr-set.hh | 1
-rw-r--r--  src/libexpr/eval-cache.hh | 1
-rw-r--r--  src/libexpr/eval-inline.hh | 1
-rw-r--r--  src/libexpr/eval.cc | 28
-rw-r--r--  src/libexpr/eval.hh | 1
-rw-r--r--  src/libexpr/flake/flake.cc | 40
-rw-r--r--  src/libexpr/flake/flake.hh | 7
-rw-r--r--  src/libexpr/flake/flakeref.hh | 1
-rw-r--r--  src/libexpr/flake/lockfile.hh | 1
-rw-r--r--  src/libexpr/function-trace.hh | 1
-rw-r--r--  src/libexpr/get-drvs.hh | 1
-rw-r--r--  src/libexpr/json-to-value.hh | 1
-rw-r--r--  src/libexpr/local.mk | 2
-rw-r--r--  src/libexpr/nixexpr.hh | 1
-rw-r--r--  src/libexpr/parser.y | 8
-rw-r--r--  src/libexpr/primops.cc | 23
-rw-r--r--  src/libexpr/primops.hh | 1
-rw-r--r--  src/libexpr/primops/fetchTree.cc | 64
-rw-r--r--  src/libexpr/symbol-table.hh | 1
-rw-r--r--  src/libexpr/tests/libexpr.hh | 3
-rw-r--r--  src/libexpr/tests/primops.cc | 4
-rw-r--r--  src/libexpr/tests/value/context.hh | 1
-rw-r--r--  src/libexpr/value-to-json.hh | 1
-rw-r--r--  src/libexpr/value-to-xml.cc | 8
-rw-r--r--  src/libexpr/value-to-xml.hh | 1
-rw-r--r--  src/libexpr/value.hh | 1
-rw-r--r--  src/libexpr/value/context.hh | 1
-rw-r--r--  src/libfetchers/attrs.hh | 1
-rw-r--r--  src/libfetchers/cache.hh | 1
-rw-r--r--  src/libfetchers/fetch-settings.hh | 13
-rw-r--r--  src/libfetchers/fetchers.hh | 6
-rw-r--r--  src/libfetchers/git.cc | 2
-rw-r--r--  src/libfetchers/registry.hh | 1
-rw-r--r--  src/libmain/common-args.hh | 1
-rw-r--r--  src/libmain/loggers.hh | 1
-rw-r--r--  src/libmain/progress-bar.cc | 21
-rw-r--r--  src/libmain/progress-bar.hh | 1
-rw-r--r--  src/libmain/shared.cc | 16
-rw-r--r--  src/libmain/shared.hh | 1
-rw-r--r--  src/libstore/binary-cache-store.hh | 35
-rw-r--r--  src/libstore/build-result.hh | 2
-rw-r--r--  src/libstore/build/derivation-goal.cc | 20
-rw-r--r--  src/libstore/build/derivation-goal.hh | 1
-rw-r--r--  src/libstore/build/drv-output-substitution-goal.cc | 23
-rw-r--r--  src/libstore/build/drv-output-substitution-goal.hh | 13
-rw-r--r--  src/libstore/build/goal.cc | 4
-rw-r--r--  src/libstore/build/goal.hh | 3
-rw-r--r--  src/libstore/build/hook-instance.cc | 5
-rw-r--r--  src/libstore/build/hook-instance.hh | 1
-rw-r--r--  src/libstore/build/local-derivation-goal.cc | 104
-rw-r--r--  src/libstore/build/local-derivation-goal.hh | 6
-rw-r--r--  src/libstore/build/personality.hh | 1
-rw-r--r--  src/libstore/build/substitution-goal.hh | 1
-rw-r--r--  src/libstore/build/worker.hh | 1
-rw-r--r--  src/libstore/builtins.hh | 1
-rw-r--r--  src/libstore/builtins/buildenv.hh | 1
-rw-r--r--  src/libstore/content-address.hh | 73
-rw-r--r--  src/libstore/crypto.hh | 1
-rw-r--r--  src/libstore/daemon.cc | 8
-rw-r--r--  src/libstore/daemon.hh | 1
-rw-r--r--  src/libstore/derivations.cc | 4
-rw-r--r--  src/libstore/derivations.hh | 349
-rw-r--r--  src/libstore/derived-path.cc | 2
-rw-r--r--  src/libstore/derived-path.hh | 3
-rw-r--r--  src/libstore/dummy-store.cc | 7
-rw-r--r--  src/libstore/dummy-store.md | 13
-rw-r--r--  src/libstore/export-import.cc | 4
-rw-r--r--  src/libstore/filetransfer.cc | 17
-rw-r--r--  src/libstore/filetransfer.hh | 40
-rw-r--r--  src/libstore/fs-accessor.hh | 1
-rw-r--r--  src/libstore/gc-store.hh | 1
-rw-r--r--  src/libstore/gc.cc | 53
-rw-r--r--  src/libstore/globals.cc | 44
-rw-r--r--  src/libstore/globals.hh | 44
-rw-r--r--  src/libstore/http-binary-cache-store.cc | 9
-rw-r--r--  src/libstore/http-binary-cache-store.md | 8
-rw-r--r--  src/libstore/legacy-ssh-store.cc | 29
-rw-r--r--  src/libstore/legacy-ssh-store.md | 8
-rw-r--r--  src/libstore/local-binary-cache-store.cc | 7
-rw-r--r--  src/libstore/local-binary-cache-store.md | 16
-rw-r--r--  src/libstore/local-fs-store.hh | 17
-rw-r--r--  src/libstore/local-store.cc | 79
-rw-r--r--  src/libstore/local-store.hh | 14
-rw-r--r--  src/libstore/local-store.md | 39
-rw-r--r--  src/libstore/lock.cc | 2
-rw-r--r--  src/libstore/lock.hh | 1
-rw-r--r--  src/libstore/log-store.hh | 1
-rw-r--r--  src/libstore/machines.hh | 1
-rw-r--r--  src/libstore/make-content-addressed.hh | 1
-rw-r--r--  src/libstore/misc.cc | 2
-rw-r--r--  src/libstore/names.hh | 1
-rw-r--r--  src/libstore/nar-accessor.cc | 1
-rw-r--r--  src/libstore/nar-accessor.hh | 1
-rw-r--r--  src/libstore/nar-info-disk-cache.hh | 1
-rw-r--r--  src/libstore/nar-info.hh | 1
-rw-r--r--  src/libstore/optimise-store.cc | 17
-rw-r--r--  src/libstore/outputs-spec.hh | 34
-rw-r--r--  src/libstore/parsed-derivations.hh | 1
-rw-r--r--  src/libstore/path-info.hh | 1
-rw-r--r--  src/libstore/path-regex.hh | 1
-rw-r--r--  src/libstore/path-with-outputs.hh | 1
-rw-r--r--  src/libstore/path.hh | 22
-rw-r--r--  src/libstore/pathlocks.cc | 8
-rw-r--r--  src/libstore/pathlocks.hh | 1
-rw-r--r--  src/libstore/profiles.cc | 34
-rw-r--r--  src/libstore/profiles.hh | 30
-rw-r--r--  src/libstore/realisation.hh | 1
-rw-r--r--  src/libstore/references.cc | 3
-rw-r--r--  src/libstore/references.hh | 1
-rw-r--r--  src/libstore/remote-fs-accessor.hh | 1
-rw-r--r--  src/libstore/remote-store.cc | 4
-rw-r--r--  src/libstore/remote-store.hh | 13
-rw-r--r--  src/libstore/repair-flag.hh | 1
-rw-r--r--  src/libstore/s3-binary-cache-store.cc | 83
-rw-r--r--  src/libstore/s3-binary-cache-store.hh | 1
-rw-r--r--  src/libstore/s3-binary-cache-store.md | 8
-rw-r--r--  src/libstore/s3.hh | 1
-rw-r--r--  src/libstore/serve-protocol.hh | 1
-rw-r--r--  src/libstore/sqlite.hh | 1
-rw-r--r--  src/libstore/ssh-store-config.hh | 29
-rw-r--r--  src/libstore/ssh-store.cc | 24
-rw-r--r--  src/libstore/ssh-store.md | 8
-rw-r--r--  src/libstore/ssh.cc | 30
-rw-r--r--  src/libstore/ssh.hh | 1
-rw-r--r--  src/libstore/store-api.cc | 72
-rw-r--r--  src/libstore/store-api.hh | 713
-rw-r--r--  src/libstore/store-cast.hh | 8
-rw-r--r--  src/libstore/tests/derived-path.hh | 1
-rw-r--r--  src/libstore/tests/libstore.hh | 3
-rw-r--r--  src/libstore/tests/outputs-spec.hh | 1
-rw-r--r--  src/libstore/tests/path.hh | 1
-rw-r--r--  src/libstore/uds-remote-store.cc | 6
-rw-r--r--  src/libstore/uds-remote-store.hh | 11
-rw-r--r--  src/libstore/uds-remote-store.md | 9
-rw-r--r--  src/libstore/worker-protocol.hh | 37
-rw-r--r--  src/libutil/abstract-setting-to-json.hh | 1
-rw-r--r--  src/libutil/ansicolor.hh | 1
-rw-r--r--  src/libutil/archive.cc | 4
-rw-r--r--  src/libutil/archive.hh | 108
-rw-r--r--  src/libutil/args.cc | 38
-rw-r--r--  src/libutil/args.hh | 73
-rw-r--r--  src/libutil/callback.hh | 9
-rw-r--r--  src/libutil/canon-path.cc | 26
-rw-r--r--  src/libutil/canon-path.hh | 99
-rw-r--r--  src/libutil/cgroup.hh | 11
-rw-r--r--  src/libutil/chunked-vector.hh | 21
-rw-r--r--  src/libutil/closure.hh | 3
-rw-r--r--  src/libutil/comparator.hh | 4
-rw-r--r--  src/libutil/compression.hh | 1
-rw-r--r--  src/libutil/compute-levels.hh | 3
-rw-r--r--  src/libutil/config.cc | 63
-rw-r--r--  src/libutil/config.hh | 96
-rw-r--r--  src/libutil/error.cc | 6
-rw-r--r--  src/libutil/error.hh | 19
-rw-r--r--  src/libutil/experimental-features.hh | 3
-rw-r--r--  src/libutil/filesystem.cc | 4
-rw-r--r--  src/libutil/finally.hh | 5
-rw-r--r--  src/libutil/fmt.hh | 37
-rw-r--r--  src/libutil/git.hh | 33
-rw-r--r--  src/libutil/hash.cc | 13
-rw-r--r--  src/libutil/hash.hh | 109
-rw-r--r--  src/libutil/hilite.hh | 13
-rw-r--r--  src/libutil/json-impls.hh | 1
-rw-r--r--  src/libutil/json-utils.hh | 1
-rw-r--r--  src/libutil/logging.cc | 18
-rw-r--r--  src/libutil/logging.hh | 20
-rw-r--r--  src/libutil/lru-cache.hh | 23
-rw-r--r--  src/libutil/monitor-fd.hh | 1
-rw-r--r--  src/libutil/namespaces.hh | 1
-rw-r--r--  src/libutil/pool.hh | 45
-rw-r--r--  src/libutil/ref.hh | 7
-rw-r--r--  src/libutil/regex-combinators.hh | 1
-rw-r--r--  src/libutil/serialise.cc | 4
-rw-r--r--  src/libutil/serialise.hh | 146
-rw-r--r--  src/libutil/split.hh | 11
-rw-r--r--  src/libutil/suggestions.hh | 8
-rw-r--r--  src/libutil/sync.hh | 33
-rw-r--r--  src/libutil/tarfile.hh | 5
-rw-r--r--  src/libutil/tests/canon-path.cc | 73
-rw-r--r--  src/libutil/tests/hash.hh | 1
-rw-r--r--  src/libutil/thread-pool.hh | 44
-rw-r--r--  src/libutil/topo-sort.hh | 1
-rw-r--r--  src/libutil/types.hh | 47
-rw-r--r--  src/libutil/url-parts.hh | 16
-rw-r--r--  src/libutil/url.hh | 6
-rw-r--r--  src/libutil/util.cc | 23
-rw-r--r--  src/libutil/util.hh | 10
-rw-r--r--  src/libutil/xml-writer.hh | 1
-rw-r--r--  src/nix-build/nix-build.cc | 6
-rwxr-xr-x  src/nix-channel/nix-channel.cc | 3
-rw-r--r--  src/nix-collect-garbage/nix-collect-garbage.cc | 5
-rwxr-xr-x  src/nix-copy-closure/nix-copy-closure.cc | 2
-rw-r--r--  src/nix-env/nix-env.cc | 75
-rw-r--r--  src/nix-env/user-env.cc | 4
-rw-r--r--  src/nix-env/user-env.hh | 1
-rw-r--r--  src/nix-store/dotgraph.hh | 1
-rw-r--r--  src/nix-store/graphml.cc | 2
-rw-r--r--  src/nix-store/graphml.hh | 1
-rw-r--r--  src/nix-store/nix-store.cc | 112
-rw-r--r--  src/nix/app.cc | 7
-rw-r--r--  src/nix/build.cc | 49
-rw-r--r--  src/nix/build.md | 2
-rw-r--r--  src/nix/bundle.cc | 7
-rw-r--r--  src/nix/bundle.md | 2
-rw-r--r--  src/nix/cat.cc | 2
-rw-r--r--  src/nix/copy.cc | 2
-rw-r--r--  src/nix/daemon.cc | 6
-rw-r--r--  src/nix/describe-stores.cc | 44
-rw-r--r--  src/nix/develop.cc | 27
-rw-r--r--  src/nix/develop.md | 2
-rw-r--r--  src/nix/diff-closures.cc | 2
-rw-r--r--  src/nix/doctor.cc | 10
-rw-r--r--  src/nix/edit.cc | 6
-rw-r--r--  src/nix/eval.cc | 12
-rw-r--r--  src/nix/eval.md | 2
-rw-r--r--  src/nix/flake.cc | 60
-rw-r--r--  src/nix/flake.md | 47
-rw-r--r--  src/nix/fmt.cc | 6
-rw-r--r--  src/nix/hash.cc | 39
-rw-r--r--  src/nix/help-stores.md | 46
-rw-r--r--  src/nix/log.cc | 4
-rw-r--r--  src/nix/log.md | 3
-rw-r--r--  src/nix/ls.cc | 2
-rw-r--r--  src/nix/main.cc | 105
-rw-r--r--  src/nix/make-content-addressed.cc | 3
-rw-r--r--  src/nix/make-content-addressed.md | 4
-rw-r--r--  src/nix/nar.cc | 1
-rw-r--r--  src/nix/nix.md | 173
-rw-r--r--  src/nix/path-info.md | 2
-rw-r--r--  src/nix/prefetch.cc | 4
-rw-r--r--  src/nix/print-dev-env.md | 2
-rw-r--r--  src/nix/profile-install.md | 2
-rw-r--r--  src/nix/profile.cc | 78
-rw-r--r--  src/nix/profile.md | 20
-rw-r--r--  src/nix/realisation.cc | 15
-rw-r--r--  src/nix/registry.cc | 3
-rw-r--r--  src/nix/repl.cc | 55
-rw-r--r--  src/nix/run.cc | 8
-rw-r--r--  src/nix/run.hh | 1
-rw-r--r--  src/nix/run.md | 2
-rw-r--r--  src/nix/search.cc | 11
-rw-r--r--  src/nix/search.md | 6
-rw-r--r--  src/nix/shell.md | 2
-rw-r--r--  src/nix/show-derivation.cc | 4
-rw-r--r--  src/nix/show-derivation.md | 2
-rw-r--r--  src/nix/sigs.cc | 7
-rw-r--r--  src/nix/store-copy-log.cc | 4
-rw-r--r--  src/nix/store-delete.cc | 2
-rw-r--r--  src/nix/store-delete.md | 2
-rw-r--r--  src/nix/store-dump-path.md | 2
-rw-r--r--  src/nix/store-repair.cc | 2
-rw-r--r--  src/nix/store-repair.md | 2
-rw-r--r--  src/nix/store.cc | 1
-rw-r--r--  src/nix/upgrade-nix.cc | 8
-rw-r--r--  src/nix/upgrade-nix.md | 2
-rw-r--r--  src/nix/verify.md | 2
-rw-r--r--  src/resolve-system-dependencies/resolve-system-dependencies.cc | 8
-rw-r--r--  tests/binary-cache.sh | 24
-rw-r--r--  tests/build-delete.sh | 2
-rw-r--r--  tests/build-dry.sh | 2
-rw-r--r--  tests/build-remote.sh | 35
-rw-r--r--  tests/build.sh | 2
-rw-r--r--  tests/ca/build.sh | 16
-rw-r--r--  tests/check-refs.sh | 10
-rw-r--r--  tests/check-reqs.sh | 4
-rw-r--r--  tests/check.sh | 2
-rw-r--r--  tests/common.sh | 2
-rw-r--r--  tests/common/vars-and-functions.sh.in | 87
-rw-r--r--  tests/compute-levels.sh | 2
-rw-r--r--  tests/db-migration.sh | 10
-rw-r--r--  tests/dependencies.sh | 6
-rw-r--r--  tests/describe-stores.sh | 8
-rw-r--r--  tests/eval-store.sh | 2
-rw-r--r--  tests/experimental-features.sh | 40
-rw-r--r--  tests/export-graph.sh | 2
-rw-r--r--  tests/fetchClosure.sh | 4
-rw-r--r--  tests/fetchGit.sh | 5
-rw-r--r--  tests/fetchGitRefs.sh | 7
-rw-r--r--  tests/fetchGitSubmodules.sh | 5
-rw-r--r--  tests/fetchMercurial.sh | 5
-rw-r--r--  tests/fetchTree-file.sh | 4
-rw-r--r--  tests/fetchurl.sh | 2
-rw-r--r--  tests/flakes/build-paths.sh | 2
-rw-r--r--  tests/flakes/check.sh | 4
-rw-r--r--  tests/flakes/common.sh | 9
-rw-r--r--  tests/flakes/flake-in-submodule.sh | 52
-rw-r--r--  tests/flakes/flakes.sh | 39
-rw-r--r--  tests/flakes/follow-paths.sh | 2
-rw-r--r--  tests/flakes/mercurial.sh | 5
-rw-r--r--  tests/flakes/show.sh | 21
-rw-r--r--  tests/fmt.sh | 2
-rwxr-xr-x  tests/function-trace.sh | 10
-rw-r--r--  tests/gc-runtime.sh | 2
-rw-r--r--  tests/gc.sh | 17
-rw-r--r--  tests/hash.sh | 32
-rw-r--r--  tests/impure-derivations.sh | 2
-rwxr-xr-x  tests/init.sh | 2
-rwxr-xr-x  tests/install-darwin.sh | 2
-rw-r--r--  tests/installer/default.nix | 26
-rw-r--r--  tests/lang.sh | 17
-rw-r--r--  tests/linux-sandbox.sh | 8
-rw-r--r--  tests/local.mk | 8
-rw-r--r--  tests/misc.sh | 12
-rw-r--r--  tests/multiple-outputs.sh | 8
-rw-r--r--  tests/nar-access.sh | 4
-rw-r--r--  tests/nix-channel.sh | 14
-rw-r--r--  tests/nix-shell.sh | 43
-rw-r--r--  tests/nixos/nix-copy.nix | 85
-rw-r--r--  tests/plugins.sh | 5
-rw-r--r--  tests/pure-eval.sh | 2
-rw-r--r--  tests/recursive.sh | 2
-rw-r--r--  tests/repl.sh | 12
-rw-r--r--  tests/restricted.sh | 2
-rw-r--r--  tests/search.sh | 6
-rw-r--r--  tests/shell.sh | 2
-rw-r--r--  tests/tarball.sh | 4
-rw-r--r--  tests/test-infra.sh | 85
-rw-r--r--  tests/timeout.sh | 7
-rw-r--r--  tests/user-envs-migration.sh | 2
-rw-r--r--  tests/user-envs.sh | 50
-rw-r--r--  tests/why-depends.sh | 10
431 files changed, 7246 insertions, 4041 deletions
diff --git a/.github/PULL_REQUEST_TEMPLATE.md b/.github/PULL_REQUEST_TEMPLATE.md
index db69e51db..4488c7b7d 100644
--- a/.github/PULL_REQUEST_TEMPLATE.md
+++ b/.github/PULL_REQUEST_TEMPLATE.md
@@ -23,6 +23,11 @@ Maintainers: tick if completed or explain if not relevant
- unit tests - `src/*/tests`
- integration tests - `tests/nixos/*`
- [ ] documentation in the manual
+ - [ ] documentation in the internal API docs
- [ ] code and comments are self-explanatory
- [ ] commit message explains why the change was made
- [ ] new feature or incompatible change: updated release notes
+
+# Priorities
+
+Add :+1: to [pull requests you find important](https://github.com/NixOS/nix/pulls?q=is%3Aopen+sort%3Areactions-%2B1-desc).
diff --git a/.github/PULL_REQUEST_TEMPLATE/pull_request_template.md b/.github/PULL_REQUEST_TEMPLATE/pull_request_template.md
deleted file mode 100644
index 5311be01f..000000000
--- a/.github/PULL_REQUEST_TEMPLATE/pull_request_template.md
+++ /dev/null
@@ -1,11 +0,0 @@
-**Release Notes**
-Please include relevant [release notes](https://github.com/NixOS/nix/blob/master/doc/manual/src/release-notes/rl-next.md) as needed.
-
-
-**Testing**
-
-If this issue is a regression or something that should block release, please consider including a test either in the [testsuite](https://github.com/NixOS/nix/tree/master/tests) or as a [hydraJob]( https://github.com/NixOS/nix/blob/master/flake.nix#L396) so that it can be part of the [automatic checks](https://hydra.nixos.org/jobset/nix/master).
-
-**Priorities**
-
-Add :+1: to [pull requests you find important](https://github.com/NixOS/nix/pulls?q=is%3Aopen+sort%3Areactions-%2B1-desc).
diff --git a/.github/labeler.yml b/.github/labeler.yml
new file mode 100644
index 000000000..dc502b6d5
--- /dev/null
+++ b/.github/labeler.yml
@@ -0,0 +1,6 @@
+"documentation":
+ - doc/manual/*
+ - src/nix/**/*.md
+
+"tests":
+ - tests/**/*
diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index 325579a5b..c06c77043 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -19,7 +19,7 @@ jobs:
- uses: actions/checkout@v3
with:
fetch-depth: 0
- - uses: cachix/install-nix-action@v19
+ - uses: cachix/install-nix-action@v20
- run: echo CACHIX_NAME="$(echo $GITHUB_REPOSITORY-install-tests | tr "[A-Z]/" "[a-z]-")" >> $GITHUB_ENV
- uses: cachix/cachix-action@v12
if: needs.check_secrets.outputs.cachix == 'true'
@@ -58,7 +58,9 @@ jobs:
with:
fetch-depth: 0
- run: echo CACHIX_NAME="$(echo $GITHUB_REPOSITORY-install-tests | tr "[A-Z]/" "[a-z]-")" >> $GITHUB_ENV
- - uses: cachix/install-nix-action@v19
+ - uses: cachix/install-nix-action@v20
+ with:
+ install_url: https://releases.nixos.org/nix/nix-2.13.3/install
- uses: cachix/cachix-action@v12
with:
name: '${{ env.CACHIX_NAME }}'
@@ -77,7 +79,7 @@ jobs:
steps:
- uses: actions/checkout@v3
- run: echo CACHIX_NAME="$(echo $GITHUB_REPOSITORY-install-tests | tr "[A-Z]/" "[a-z]-")" >> $GITHUB_ENV
- - uses: cachix/install-nix-action@v19
+ - uses: cachix/install-nix-action@v20
with:
install_url: '${{needs.installer.outputs.installerURL}}'
install_options: "--tarball-url-prefix https://${{ env.CACHIX_NAME }}.cachix.org/serve"
@@ -89,6 +91,8 @@ jobs:
- run: exec sh -c "nix-instantiate -E 'builtins.currentTime' --eval"
- run: exec zsh -c "nix-instantiate -E 'builtins.currentTime' --eval"
- run: exec fish -c "nix-instantiate -E 'builtins.currentTime' --eval"
+ - run: exec bash -c "nix-channel --add https://releases.nixos.org/nixos/unstable/nixos-23.05pre466020.60c1d71f2ba nixpkgs"
+ - run: exec bash -c "nix-channel --update && nix-env -iA nixpkgs.hello && hello"
docker_push_image:
needs: [check_secrets, tests]
@@ -102,7 +106,9 @@ jobs:
- uses: actions/checkout@v3
with:
fetch-depth: 0
- - uses: cachix/install-nix-action@v19
+ - uses: cachix/install-nix-action@v20
+ with:
+ install_url: https://releases.nixos.org/nix/nix-2.13.3/install
- run: echo CACHIX_NAME="$(echo $GITHUB_REPOSITORY-install-tests | tr "[A-Z]/" "[a-z]-")" >> $GITHUB_ENV
- run: echo NIX_VERSION="$(nix --experimental-features 'nix-command flakes' eval .\#default.version | tr -d \")" >> $GITHUB_ENV
- uses: cachix/cachix-action@v12
diff --git a/.github/workflows/labels.yml b/.github/workflows/labels.yml
new file mode 100644
index 000000000..5f949ddc5
--- /dev/null
+++ b/.github/workflows/labels.yml
@@ -0,0 +1,24 @@
+name: "Label PR"
+
+on:
+ pull_request_target:
+ types: [edited, opened, synchronize, reopened]
+
+# WARNING:
+# When extending this action, be aware that $GITHUB_TOKEN allows some write
+# access to the GitHub API. This means that it should not evaluate user input in
+# a way that allows code injection.
+
+permissions:
+ contents: read
+ pull-requests: write
+
+jobs:
+ labels:
+ runs-on: ubuntu-latest
+ if: github.repository_owner == 'NixOS'
+ steps:
+ - uses: actions/labeler@v4
+ with:
+ repo-token: ${{ secrets.GITHUB_TOKEN }}
+ sync-labels: true
diff --git a/.version b/.version
index 575a07b9f..c910885a0 100644
--- a/.version
+++ b/.version
@@ -1 +1 @@
-2.14.0
\ No newline at end of file
+2.15.0
\ No newline at end of file
diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
new file mode 100644
index 000000000..1b0ecaf36
--- /dev/null
+++ b/CONTRIBUTING.md
@@ -0,0 +1,61 @@
+# Contributing to Nix
+
+Welcome and thank you for your interest in contributing to Nix!
+We appreciate your support.
+
+Reading and following these guidelines will help us make the contribution process easy and effective for everyone involved.
+
+
+## Report a bug
+
+1. Check on the [GitHub issue tracker](https://github.com/NixOS/nix/issues) if your bug was already reported.
+
+2. If you were not able to find the bug or feature, [open a new issue](https://github.com/NixOS/nix/issues/new/choose).
+
+3. The issue templates will guide you in specifying your issue.
+ The more complete the information you provide, the more likely it can be found by others and the more useful it is in the future.
+ Make sure reported bugs can be reproduced easily.
+
+4. Once submitted, do not expect issues to be picked up or solved right away.
+ The only way to ensure this is to [work on the issue yourself](#making-changes-to-nix).
+
+## Report a security vulnerability
+
+Check out the [security policy](https://github.com/NixOS/nix/security/policy).
+
+## Making changes to Nix
+
+1. Check for [pull requests](https://github.com/NixOS/nix/pulls) that might already cover the contribution you are about to make.
+ There are many open pull requests that might already do what you intend to work on.
+ You can use [labels](https://github.com/NixOS/nix/labels) to filter for relevant topics.
+
+2. Search for related issues that cover what you're going to work on. It could help to mention there that you will work on the issue.
+
+3. Check the [Nix reference manual](https://nixos.org/manual/nix/unstable/contributing/hacking.html) for information on building Nix and running its tests.
+
+ For contributions to the command line interface, please check the [CLI guidelines](https://nixos.org/manual/nix/unstable/contributing/cli-guideline.html).
+
+4. Make your changes!
+
+5. [Create a pull request](https://docs.github.com/en/pull-requests/collaborating-with-pull-requests/proposing-changes-to-your-work-with-pull-requests/creating-a-pull-request) for your changes.
+ * [Mark the pull request as draft](https://docs.github.com/en/pull-requests/collaborating-with-pull-requests/proposing-changes-to-your-work-with-pull-requests/changing-the-stage-of-a-pull-request) if you're not done with the changes.
+ * Make sure to have [a clean history of commits on your branch by using rebase](https://www.digitalocean.com/community/tutorials/how-to-rebase-and-update-a-pull-request).
+ * Link related issues in your pull request to inform interested parties and future contributors about your change.
+ If your pull request closes one or multiple issues, note that in the description using `Closes: #<number>`, so they are closed automatically when your change is merged.
+
+6. Do not expect your pull request to be reviewed immediately.
+ Nix maintainers follow a [structured process for reviews and design decisions](https://github.com/NixOS/nix/tree/master/maintainers#project-board-protocol), which may or may not prioritise your work.
+
+7. If you need additional feedback or help getting your pull request into shape, ask other contributors using [@mentions](https://docs.github.com/en/get-started/writing-on-github/getting-started-with-writing-and-formatting-on-github/basic-writing-and-formatting-syntax#mentioning-people-and-teams).
+
+## Making changes to the Nix manual
+
+The Nix reference manual is hosted on https://nixos.org/manual/nix.
+The underlying source files are located in [`doc/manual/src`](./doc/manual/src).
+For small changes you can [use GitHub to edit these files](https://docs.github.com/en/repositories/working-with-files/managing-files/editing-files).
+For larger changes see the [Nix reference manual](https://nixos.org/manual/nix/unstable/contributing/hacking.html).
+
+## Getting help
+
+Whenever you're stuck or do not know how to proceed, you can always ask for help.
+The appropriate channels to do so can be found on the [NixOS Community](https://nixos.org/community/) page.
diff --git a/Makefile b/Makefile
index 8675c9925..d6b49473a 100644
--- a/Makefile
+++ b/Makefile
@@ -2,13 +2,10 @@ makefiles = \
mk/precompiled-headers.mk \
local.mk \
src/libutil/local.mk \
- src/libutil/tests/local.mk \
src/libstore/local.mk \
- src/libstore/tests/local.mk \
src/libfetchers/local.mk \
src/libmain/local.mk \
src/libexpr/local.mk \
- src/libexpr/tests/local.mk \
src/libcmd/local.mk \
src/nix/local.mk \
src/resolve-system-dependencies/local.mk \
@@ -20,11 +17,22 @@ makefiles = \
misc/launchd/local.mk \
misc/upstart/local.mk \
doc/manual/local.mk \
- tests/local.mk \
- tests/plugins/local.mk
+ doc/internal-api/local.mk
-include Makefile.config
+ifeq ($(tests), yes)
+makefiles += \
+ src/libutil/tests/local.mk \
+ src/libstore/tests/local.mk \
+ src/libexpr/tests/local.mk \
+ tests/local.mk \
+ tests/plugins/local.mk
+else
+makefiles += \
+ mk/disable-tests.mk
+endif
+
OPTIMIZE = 1
ifeq ($(OPTIMIZE), 1)
diff --git a/Makefile.config.in b/Makefile.config.in
index 1c5405c6d..707cfe0e3 100644
--- a/Makefile.config.in
+++ b/Makefile.config.in
@@ -22,6 +22,7 @@ LOWDOWN_LIBS = @LOWDOWN_LIBS@
OPENSSL_LIBS = @OPENSSL_LIBS@
PACKAGE_NAME = @PACKAGE_NAME@
PACKAGE_VERSION = @PACKAGE_VERSION@
+RAPIDCHECK_HEADERS = @RAPIDCHECK_HEADERS@
SHELL = @bash@
SODIUM_LIBS = @SODIUM_LIBS@
SQLITE3_LIBS = @SQLITE3_LIBS@
@@ -45,3 +46,5 @@ sandbox_shell = @sandbox_shell@
storedir = @storedir@
sysconfdir = @sysconfdir@
system = @system@
+tests = @tests@
+internal_api_docs = @internal_api_docs@
diff --git a/configure.ac b/configure.ac
index 09b3651b9..f1f45f868 100644
--- a/configure.ac
+++ b/configure.ac
@@ -145,6 +145,18 @@ if test "x$GCC_ATOMIC_BUILTINS_NEED_LIBATOMIC" = xyes; then
LDFLAGS="-latomic $LDFLAGS"
fi
+# Building without tests is useful for bootstrapping with a smaller footprint
+# or running the tests in a separate derivation. Otherwise, we do compile and
+# run them.
+AC_ARG_ENABLE(tests, AS_HELP_STRING([--disable-tests],[Do not build the tests]),
+ tests=$enableval, tests=yes)
+AC_SUBST(tests)
+
+# Building without API docs is the default as Nix' C++ interfaces are internal and unstable.
+AC_ARG_ENABLE(internal_api_docs, AS_HELP_STRING([--enable-internal-api-docs],[Build API docs for Nix's internal unstable C++ interfaces]),
+ internal_api_docs=$enableval, internal_api_docs=no)
+AC_SUBST(internal_api_docs)
+
# LTO is currently broken with clang for unknown reasons; ld segfaults in the llvm plugin
AC_ARG_ENABLE(lto, AS_HELP_STRING([--enable-lto],[Enable LTO (only supported with GCC) [default=no]]),
lto=$enableval, lto=no)
@@ -270,6 +282,8 @@ if test "$gc" = yes; then
fi
+if test "$tests" = yes; then
+
# Look for gtest.
PKG_CHECK_MODULES([GTEST], [gtest_main])
@@ -277,11 +291,14 @@ PKG_CHECK_MODULES([GTEST], [gtest_main])
# Look for rapidcheck.
# No pkg-config yet, https://github.com/emil-e/rapidcheck/issues/302
AC_LANG_PUSH(C++)
+AC_SUBST(RAPIDCHECK_HEADERS)
+[CXXFLAGS="-I $RAPIDCHECK_HEADERS $CXXFLAGS"]
AC_CHECK_HEADERS([rapidcheck/gtest.h], [], [], [#include <gtest/gtest.h>])
dnl No good for C++ libs with mangled symbols
dnl AC_CHECK_LIB([rapidcheck], [])
AC_LANG_POP(C++)
+fi
# Look for nlohmann/json.
PKG_CHECK_MODULES([NLOHMANN_JSON], [nlohmann_json >= 3.9])
diff --git a/default.nix b/default.nix
index 00ec5b617..2cccff28d 100644
--- a/default.nix
+++ b/default.nix
@@ -1,3 +1,10 @@
-(import (fetchTarball "https://github.com/edolstra/flake-compat/archive/master.tar.gz") {
- src = ./.;
-}).defaultNix
+(import
+ (
+ let lock = builtins.fromJSON (builtins.readFile ./flake.lock); in
+ fetchTarball {
+ url = "https://github.com/edolstra/flake-compat/archive/${lock.nodes.flake-compat.locked.rev}.tar.gz";
+ sha256 = lock.nodes.flake-compat.locked.narHash;
+ }
+ )
+ { src = ./.; }
+).defaultNix
diff --git a/doc/internal-api/.gitignore b/doc/internal-api/.gitignore
new file mode 100644
index 000000000..dab28b6b0
--- /dev/null
+++ b/doc/internal-api/.gitignore
@@ -0,0 +1,3 @@
+/doxygen.cfg
+/html
+/latex
diff --git a/doc/internal-api/doxygen.cfg.in b/doc/internal-api/doxygen.cfg.in
new file mode 100644
index 000000000..8f526536d
--- /dev/null
+++ b/doc/internal-api/doxygen.cfg.in
@@ -0,0 +1,63 @@
+# Doxyfile 1.9.5
+
+# The PROJECT_NAME tag is a single word (or a sequence of words surrounded by
+# double-quotes, unless you are using Doxywizard) that should identify the
+# project for which the documentation is generated. This name is used in the
+# title of most generated pages and in a few other places.
+# The default value is: My Project.
+
+PROJECT_NAME = "Nix"
+
+# The PROJECT_NUMBER tag can be used to enter a project or revision number. This
+# could be handy for archiving the generated documentation or if some version
+# control system is used.
+
+PROJECT_NUMBER = @PACKAGE_VERSION@
+
+# Using the PROJECT_BRIEF tag one can provide an optional one line description
+# for a project that appears at the top of each page and should give viewer a
+# quick idea about the purpose of the project. Keep the description short.
+
+PROJECT_BRIEF = "Nix, the purely functional package manager; unstable internal interfaces"
+
+# If the GENERATE_LATEX tag is set to YES, doxygen will generate LaTeX output.
+# The default value is: YES.
+
+GENERATE_LATEX = NO
+
+# The INPUT tag is used to specify the files and/or directories that contain
+# documented source files. You may enter file names like myfile.cpp or
+# directories like /usr/src/myproject. Separate the files or directories with
+# spaces. See also FILE_PATTERNS and EXTENSION_MAPPING
+# Note: If this tag is empty the current directory is searched.
+
+# FIXME Make this list more maintainable somehow. We could maybe generate this
+# in the Makefile, but we would need to change how `.in` files are preprocessed
+# so they can expand variables despite configure variables.
+
+INPUT = \
+ src/libcmd \
+ src/libexpr \
+ src/libexpr/flake \
+ src/libexpr/tests \
+ src/libexpr/tests/value \
+ src/libexpr/value \
+ src/libfetchers \
+ src/libmain \
+ src/libstore \
+ src/libstore/build \
+ src/libstore/builtins \
+ src/libstore/tests \
+ src/libutil \
+ src/libutil/tests \
+ src/nix \
+ src/nix-env \
+ src/nix-store
+
+# The INCLUDE_PATH tag can be used to specify one or more directories that
+# contain include files that are not input files but should be processed by the
+# preprocessor. Note that the INCLUDE_PATH is not recursive, so the setting of
+# RECURSIVE has no effect here.
+# This tag requires that the tag SEARCH_INCLUDES is set to YES.
+
+INCLUDE_PATH = @RAPIDCHECK_HEADERS@
diff --git a/doc/internal-api/local.mk b/doc/internal-api/local.mk
new file mode 100644
index 000000000..890f341b7
--- /dev/null
+++ b/doc/internal-api/local.mk
@@ -0,0 +1,19 @@
+.PHONY: internal-api-html
+
+ifeq ($(internal_api_docs), yes)
+
+$(docdir)/internal-api/html/index.html $(docdir)/internal-api/latex: $(d)/doxygen.cfg
+ mkdir -p $(docdir)/internal-api
+ { cat $< ; echo "OUTPUT_DIRECTORY=$(docdir)/internal-api" ; } | doxygen -
+
+# Generate the HTML API docs for Nix's unstable internal interfaces.
+internal-api-html: $(docdir)/internal-api/html/index.html
+
+else
+
+# Make a nicer error message
+internal-api-html:
+ @echo "Internal API docs are disabled. Configure with '--enable-internal-api-docs', or avoid calling 'make internal-api-html'."
+ @exit 1
+
+endif
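
The configure.ac hunk earlier in this patch and the local.mk rules above add two opt-in build modes. A minimal usage sketch, not part of the patch itself (the prefix path is only an example):

```sh
# Skip compiling and running the unit/functional test suites (--disable-tests):
./configure --prefix=/tmp/nix --disable-tests
make

# Build the Doxygen docs for the unstable internal C++ interfaces
# (--enable-internal-api-docs plus the internal-api-html target defined above):
./configure --prefix=/tmp/nix --enable-internal-api-docs
make internal-api-html    # output is written under $(docdir)/internal-api/html
```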
diff --git a/doc/manual/generate-manpage.nix b/doc/manual/generate-manpage.nix
index 8c7c4d358..86f2ca567 100644
--- a/doc/manual/generate-manpage.nix
+++ b/doc/manual/generate-manpage.nix
@@ -1,4 +1,4 @@
-{ toplevel }:
+cliDumpStr:
with builtins;
with import ./utils.nix;
@@ -7,6 +7,7 @@ let
showCommand = { command, details, filename, toplevel }:
let
+
result = ''
> **Warning** \
> This program is **experimental** and its interface is subject to change.
@@ -25,6 +26,7 @@ let
${maybeOptions}
'';
+
showSynopsis = command: args:
let
showArgument = arg: "*${arg.label}*" + (if arg ? arity then "" else "...");
@@ -32,6 +34,7 @@ let
in ''
`${command}` [*option*...] ${arguments}
'';
+
maybeSubcommands = if details ? commands && details.commands != {}
then ''
where *subcommand* is one of the following:
@@ -39,26 +42,38 @@ let
${subcommands}
''
else "";
+
subcommands = if length categories > 1
then listCategories
else listSubcommands details.commands;
+
categories = sort (x: y: x.id < y.id) (unique (map (cmd: cmd.category) (attrValues details.commands)));
+
listCategories = concatStrings (map showCategory categories);
+
showCategory = cat: ''
**${toString cat.description}:**
${listSubcommands (filterAttrs (n: v: v.category == cat) details.commands)}
'';
+
listSubcommands = cmds: concatStrings (attrValues (mapAttrs showSubcommand cmds));
+
showSubcommand = name: subcmd: ''
* [`${command} ${name}`](./${appendName filename name}.md) - ${subcmd.description}
'';
- maybeDocumentation = if details ? doc then details.doc else "";
+
+ maybeDocumentation =
+ if details ? doc
+ then replaceStrings ["@stores@"] [storeDocs] details.doc
+ else "";
+
maybeOptions = if details.flags == {} then "" else ''
# Options
${showOptions details.flags toplevel.flags}
'';
+
showOptions = options: commonOptions:
let
allOptions = options // commonOptions;
@@ -98,13 +113,13 @@ let
};
in [ cmd ] ++ concatMap subcommand (attrNames details.commands or {});
- parsedToplevel = builtins.fromJSON toplevel;
-
+ cliDump = builtins.fromJSON cliDumpStr;
+
manpages = processCommand {
command = "nix";
- details = parsedToplevel;
+ details = cliDump.args;
filename = "nix";
- toplevel = parsedToplevel;
+ toplevel = cliDump.args;
};
tableOfContents = let
@@ -112,4 +127,18 @@ let
" - [${page.command}](command-ref/new-cli/${page.name})";
in concatStringsSep "\n" (map showEntry manpages) + "\n";
+ storeDocs =
+ let
+ showStore = name: { settings, doc }:
+ ''
+ ## ${name}
+
+ ${doc}
+
+ **Settings**:
+
+ ${showSettings { useAnchors = false; } settings}
+ '';
+ in concatStrings (attrValues (mapAttrs showStore cliDump.stores));
+
in (listToAttrs manpages) // { "SUMMARY.md" = tableOfContents; }
diff --git a/doc/manual/generate-options.nix b/doc/manual/generate-options.nix
deleted file mode 100644
index a4ec36477..000000000
--- a/doc/manual/generate-options.nix
+++ /dev/null
@@ -1,41 +0,0 @@
-let
- inherit (builtins) attrNames concatStringsSep isAttrs isBool;
- inherit (import ./utils.nix) concatStrings squash splitLines;
-in
-
-optionsInfo:
-let
- showOption = name:
- let
- inherit (optionsInfo.${name}) description documentDefault defaultValue aliases;
- result = squash ''
- - <span id="conf-${name}">[`${name}`](#conf-${name})</span>
-
- ${indent " " body}
- '';
- # separate body to cleanly handle indentation
- body = ''
- ${description}
-
- **Default:** ${showDefault documentDefault defaultValue}
-
- ${showAliases aliases}
- '';
- showDefault = documentDefault: defaultValue:
- if documentDefault then
- # a StringMap value type is specified as a string, but
- # this shows the value type. The empty stringmap is `null` in
- # JSON, but that converts to `{ }` here.
- if defaultValue == "" || defaultValue == [] || isAttrs defaultValue
- then "*empty*"
- else if isBool defaultValue then
- if defaultValue then "`true`" else "`false`"
- else "`${toString defaultValue}`"
- else "*machine-specific*";
- showAliases = aliases:
- if aliases == [] then "" else
- "**Deprecated alias:** ${(concatStringsSep ", " (map (s: "`${s}`") aliases))}";
- indent = prefix: s:
- concatStringsSep "\n" (map (x: if x == "" then x else "${prefix}${x}") (splitLines s));
- in result;
-in concatStrings (map showOption (attrNames optionsInfo))
diff --git a/doc/manual/local.mk b/doc/manual/local.mk
index f43510b6d..df941d460 100644
--- a/doc/manual/local.mk
+++ b/doc/manual/local.mk
@@ -1,17 +1,24 @@
ifeq ($(doc_generate),yes)
MANUAL_SRCS := \
- $(call rwildcard, $(d)/src, *.md) \
- $(call rwildcard, $(d)/src, */*.md)
+ $(call rwildcard, $(d)/src, *.md) \
+ $(call rwildcard, $(d)/src, */*.md)
-# Generate man pages.
man-pages := $(foreach n, \
- nix-env.1 nix-build.1 nix-shell.1 nix-store.1 nix-instantiate.1 \
- nix-collect-garbage.1 \
- nix-prefetch-url.1 nix-channel.1 \
- nix-hash.1 nix-copy-closure.1 \
- nix.conf.5 nix-daemon.8, \
- $(d)/$(n))
+ nix-env.1 nix-store.1 \
+ nix-build.1 nix-shell.1 nix-instantiate.1 \
+ nix-collect-garbage.1 \
+ nix-prefetch-url.1 nix-channel.1 \
+ nix-hash.1 nix-copy-closure.1 \
+ nix.conf.5 nix-daemon.8 \
+, $(d)/$(n))
+
+# man pages for subcommands
+# convert from `$(d)/src/command-ref/nix-{1}/{2}.md` to `$(d)/nix-{1}-{2}.1`
+# FIXME: unify with how nix3-cli man pages are generated
+man-pages += $(foreach subcommand, \
+ $(filter-out %opt-common.md %env-common.md, $(wildcard $(d)/src/command-ref/nix-*/*.md)), \
+ $(d)/$(subst /,-,$(subst $(d)/src/command-ref/,,$(subst .md,.1,$(subcommand)))))
clean-files += $(d)/*.1 $(d)/*.5 $(d)/*.8
@@ -26,9 +33,42 @@ dummy-env = env -i \
nix-eval = $(dummy-env) $(bindir)/nix eval --experimental-features nix-command -I nix/corepkgs=corepkgs --store dummy:// --impure --raw
+# re-implement mdBook's include directive to make it usable for terminal output and for proper @docroot@ substitution
+define process-includes
+ while read -r line; do \
+ set -euo pipefail; \
+ filename="$$(dirname $(1))/$$(sed 's/{{#include \(.*\)}}/\1/'<<< $$line)"; \
+ test -f "$$filename" || ( echo "#include-d file '$$filename' does not exist." >&2; exit 1; ); \
+ matchline="$$(sed 's|/|\\/|g' <<< $$line)"; \
+ sed -i "/$$matchline/r $$filename" $(2); \
+ sed -i "s/$$matchline//" $(2); \
+ done < <(grep '{{#include' $(1))
+endef
+
+$(d)/nix-env-%.1: $(d)/src/command-ref/nix-env/%.md
+ @printf "Title: %s\n\n" "$(subst nix-env-,nix-env --,$$(basename "$@" .1))" > $^.tmp
+ $(render-subcommand)
+
+$(d)/nix-store-%.1: $(d)/src/command-ref/nix-store/%.md
+ @printf -- 'Title: %s\n\n' "$(subst nix-store-,nix-store --,$$(basename "$@" .1))" > $^.tmp
+ $(render-subcommand)
+
+# FIXME: there surely is some more deduplication to be achieved here with even darker Make magic
+define render-subcommand
+ @cat $^ >> $^.tmp
+ @$(call process-includes,$^,$^.tmp)
+ $(trace-gen) lowdown -sT man --nroff-nolinks -M section=1 $^.tmp -o $@
+ @# fix up `lowdown`'s automatic escaping of `--`
+ @# https://github.com/kristapsdz/lowdown/blob/edca6ce6d5336efb147321a43c47a698de41bb7c/entity.c#L202
+ @sed -i 's/\e\[u2013\]/--/' $@
+ @rm $^.tmp
+endef
+
+
$(d)/%.1: $(d)/src/command-ref/%.md
@printf "Title: %s\n\n" "$$(basename $@ .1)" > $^.tmp
@cat $^ >> $^.tmp
+ @$(call process-includes,$^,$^.tmp)
$(trace-gen) lowdown -sT man --nroff-nolinks -M section=1 $^.tmp -o $@
@rm $^.tmp
@@ -45,25 +85,21 @@ $(d)/nix.conf.5: $(d)/src/command-ref/conf-file.md
@rm $^.tmp
$(d)/src/SUMMARY.md: $(d)/src/SUMMARY.md.in $(d)/src/command-ref/new-cli
- $(trace-gen) cat doc/manual/src/SUMMARY.md.in | while IFS= read line; do if [[ $$line = @manpages@ ]]; then cat doc/manual/src/command-ref/new-cli/SUMMARY.md; else echo "$$line"; fi; done > $@.tmp
- @mv $@.tmp $@
+ @cp $< $@
+ @$(call process-includes,$@,$@)
$(d)/src/command-ref/new-cli: $(d)/nix.json $(d)/generate-manpage.nix $(bindir)/nix
- @rm -rf $@
- $(trace-gen) $(nix-eval) --write-to $@.tmp --expr 'import doc/manual/generate-manpage.nix { toplevel = builtins.readFile $<; }'
- @# @docroot@: https://nixos.org/manual/nix/unstable/contributing/hacking.html#docroot-variable
- $(trace-gen) sed -i $@.tmp/*.md -e 's^@docroot@^../..^g'
+ @rm -rf $@ $@.tmp
+ $(trace-gen) $(nix-eval) --write-to $@.tmp --expr 'import doc/manual/generate-manpage.nix (builtins.readFile $<)'
@mv $@.tmp $@
-$(d)/src/command-ref/conf-file.md: $(d)/conf-file.json $(d)/generate-options.nix $(d)/src/command-ref/conf-file-prefix.md $(bindir)/nix
+$(d)/src/command-ref/conf-file.md: $(d)/conf-file.json $(d)/utils.nix $(d)/src/command-ref/conf-file-prefix.md $(bindir)/nix
@cat doc/manual/src/command-ref/conf-file-prefix.md > $@.tmp
- @# @docroot@: https://nixos.org/manual/nix/unstable/contributing/hacking.html#docroot-variable
- $(trace-gen) $(nix-eval) --expr 'import doc/manual/generate-options.nix (builtins.fromJSON (builtins.readFile $<))' \
- | sed -e 's^@docroot@^..^g'>> $@.tmp
+ $(trace-gen) $(nix-eval) --expr '(import doc/manual/utils.nix).showSettings { useAnchors = true; } (builtins.fromJSON (builtins.readFile $<))' >> $@.tmp;
@mv $@.tmp $@
$(d)/nix.json: $(bindir)/nix
- $(trace-gen) $(dummy-env) $(bindir)/nix __dump-args > $@.tmp
+ $(trace-gen) $(dummy-env) $(bindir)/nix __dump-cli > $@.tmp
@mv $@.tmp $@
$(d)/conf-file.json: $(bindir)/nix
@@ -72,9 +108,7 @@ $(d)/conf-file.json: $(bindir)/nix
$(d)/src/language/builtins.md: $(d)/builtins.json $(d)/generate-builtins.nix $(d)/src/language/builtins-prefix.md $(bindir)/nix
@cat doc/manual/src/language/builtins-prefix.md > $@.tmp
- @# @docroot@: https://nixos.org/manual/nix/unstable/contributing/hacking.html#docroot-variable
- $(trace-gen) $(nix-eval) --expr 'import doc/manual/generate-builtins.nix (builtins.fromJSON (builtins.readFile $<))' \
- | sed -e 's^@docroot@^..^g' >> $@.tmp
+ $(trace-gen) $(nix-eval) --expr 'import doc/manual/generate-builtins.nix (builtins.fromJSON (builtins.readFile $<))' >> $@.tmp;
@cat doc/manual/src/language/builtins-suffix.md >> $@.tmp
@mv $@.tmp $@
@@ -83,7 +117,8 @@ $(d)/builtins.json: $(bindir)/nix
@mv $@.tmp $@
# Generate the HTML manual.
-html: $(docdir)/manual/index.html
+.PHONY: manual-html
+manual-html: $(docdir)/manual/index.html
install: $(docdir)/manual/index.html
# Generate 'nix' manpages.
@@ -91,6 +126,8 @@ install: $(mandir)/man1/nix3-manpages
man: doc/manual/generated/man1/nix3-manpages
all: doc/manual/generated/man1/nix3-manpages
+# FIXME: unify with how the other man pages are generated.
+# this one works differently and does not use any of the amenities provided by `/mk/lib.mk`.
$(mandir)/man1/nix3-manpages: doc/manual/generated/man1/nix3-manpages
@mkdir -p $(DESTDIR)$$(dirname $@)
$(trace-install) install -m 0644 $$(dirname $<)/* $(DESTDIR)$$(dirname $@)
@@ -98,21 +135,31 @@ $(mandir)/man1/nix3-manpages: doc/manual/generated/man1/nix3-manpages
doc/manual/generated/man1/nix3-manpages: $(d)/src/command-ref/new-cli
@mkdir -p $(DESTDIR)$$(dirname $@)
$(trace-gen) for i in doc/manual/src/command-ref/new-cli/*.md; do \
- name=$$(basename $$i .md); \
- tmpFile=$$(mktemp); \
- if [[ $$name = SUMMARY ]]; then continue; fi; \
- printf "Title: %s\n\n" "$$name" > $$tmpFile; \
- cat $$i >> $$tmpFile; \
- lowdown -sT man --nroff-nolinks -M section=1 $$tmpFile -o $(DESTDIR)$$(dirname $@)/$$name.1; \
- rm $$tmpFile; \
+ name=$$(basename $$i .md); \
+ tmpFile=$$(mktemp); \
+ if [[ $$name = SUMMARY ]]; then continue; fi; \
+ printf "Title: %s\n\n" "$$name" > $$tmpFile; \
+ cat $$i >> $$tmpFile; \
+ lowdown -sT man --nroff-nolinks -M section=1 $$tmpFile -o $(DESTDIR)$$(dirname $@)/$$name.1; \
+ rm $$tmpFile; \
done
@touch $@
$(docdir)/manual/index.html: $(MANUAL_SRCS) $(d)/book.toml $(d)/anchors.jq $(d)/custom.css $(d)/src/SUMMARY.md $(d)/src/command-ref/new-cli $(d)/src/command-ref/conf-file.md $(d)/src/language/builtins.md
$(trace-gen) \
- set -euo pipefail; \
- RUST_LOG=warn mdbook build doc/manual -d $(DESTDIR)$(docdir)/manual.tmp 2>&1 \
- | { grep -Fv "because fragment resolution isn't implemented" || :; }
+ tmp="$$(mktemp -d)"; \
+ cp -r doc/manual "$$tmp"; \
+ find "$$tmp" -name '*.md' | while read -r file; do \
+ $(call process-includes,$$file,$$file); \
+ done; \
+ find "$$tmp" -name '*.md' | while read -r file; do \
+ docroot="$$(realpath --relative-to="$$(dirname "$$file")" $$tmp/manual/src)"; \
+ sed -i "s,@docroot@,$$docroot,g" "$$file"; \
+ done; \
+ set -euo pipefail; \
+ RUST_LOG=warn mdbook build "$$tmp/manual" -d $(DESTDIR)$(docdir)/manual.tmp 2>&1 \
+ | { grep -Fv "because fragment resolution isn't implemented" || :; }; \
+ rm -rf "$$tmp/manual"
@rm -rf $(DESTDIR)$(docdir)/manual
@mv $(DESTDIR)$(docdir)/manual.tmp/html $(DESTDIR)$(docdir)/manual
@rm -rf $(DESTDIR)$(docdir)/manual.tmp
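
For orientation, the pattern rules above map each per-subcommand Markdown page to its own man page. The example inputs below are files added in this commit; the `man` invocations assume the generated pages are installed alongside the existing ones:

```sh
# doc/manual/src/command-ref/nix-store/gc.md    ->  doc/manual/nix-store-gc.1
# doc/manual/src/command-ref/nix-env/install.md ->  doc/manual/nix-env-install.1
man nix-store-gc
man nix-env-install
```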
diff --git a/doc/manual/src/SUMMARY.md.in b/doc/manual/src/SUMMARY.md.in
index 964091285..4b654567f 100644
--- a/doc/manual/src/SUMMARY.md.in
+++ b/doc/manual/src/SUMMARY.md.in
@@ -44,10 +44,41 @@
- [Common Options](command-ref/opt-common.md)
- [Common Environment Variables](command-ref/env-common.md)
- [Main Commands](command-ref/main-commands.md)
- - [nix-env](command-ref/nix-env.md)
- [nix-build](command-ref/nix-build.md)
- [nix-shell](command-ref/nix-shell.md)
- [nix-store](command-ref/nix-store.md)
+ - [nix-store --add-fixed](command-ref/nix-store/add-fixed.md)
+ - [nix-store --add](command-ref/nix-store/add.md)
+ - [nix-store --delete](command-ref/nix-store/delete.md)
+ - [nix-store --dump-db](command-ref/nix-store/dump-db.md)
+ - [nix-store --dump](command-ref/nix-store/dump.md)
+ - [nix-store --export](command-ref/nix-store/export.md)
+ - [nix-store --gc](command-ref/nix-store/gc.md)
+ - [nix-store --generate-binary-cache-key](command-ref/nix-store/generate-binary-cache-key.md)
+ - [nix-store --import](command-ref/nix-store/import.md)
+ - [nix-store --load-db](command-ref/nix-store/load-db.md)
+ - [nix-store --optimise](command-ref/nix-store/optimise.md)
+ - [nix-store --print-env](command-ref/nix-store/print-env.md)
+ - [nix-store --query](command-ref/nix-store/query.md)
+ - [nix-store --read-log](command-ref/nix-store/read-log.md)
+ - [nix-store --realise](command-ref/nix-store/realise.md)
+ - [nix-store --repair-path](command-ref/nix-store/repair-path.md)
+ - [nix-store --restore](command-ref/nix-store/restore.md)
+ - [nix-store --serve](command-ref/nix-store/serve.md)
+ - [nix-store --verify-path](command-ref/nix-store/verify-path.md)
+ - [nix-store --verify](command-ref/nix-store/verify.md)
+ - [nix-env](command-ref/nix-env.md)
+ - [nix-env --delete-generations](command-ref/nix-env/delete-generations.md)
+ - [nix-env --install](command-ref/nix-env/install.md)
+ - [nix-env --list-generations](command-ref/nix-env/list-generations.md)
+ - [nix-env --query](command-ref/nix-env/query.md)
+ - [nix-env --rollback](command-ref/nix-env/rollback.md)
+ - [nix-env --set-flag](command-ref/nix-env/set-flag.md)
+ - [nix-env --set](command-ref/nix-env/set.md)
+ - [nix-env --switch-generation](command-ref/nix-env/switch-generation.md)
+ - [nix-env --switch-profile](command-ref/nix-env/switch-profile.md)
+ - [nix-env --uninstall](command-ref/nix-env/uninstall.md)
+ - [nix-env --upgrade](command-ref/nix-env/upgrade.md)
- [Utilities](command-ref/utilities.md)
- [nix-channel](command-ref/nix-channel.md)
- [nix-collect-garbage](command-ref/nix-collect-garbage.md)
@@ -57,7 +88,7 @@
- [nix-instantiate](command-ref/nix-instantiate.md)
- [nix-prefetch-url](command-ref/nix-prefetch-url.md)
- [Experimental Commands](command-ref/experimental-commands.md)
-@manpages@
+{{#include ./command-ref/new-cli/SUMMARY.md}}
- [Files](command-ref/files.md)
- [nix.conf](command-ref/conf-file.md)
- [Architecture](architecture/architecture.md)
diff --git a/doc/manual/src/command-ref/env-common.md b/doc/manual/src/command-ref/env-common.md
index c5d38db47..bf00be84f 100644
--- a/doc/manual/src/command-ref/env-common.md
+++ b/doc/manual/src/command-ref/env-common.md
@@ -2,18 +2,29 @@
Most Nix commands interpret the following environment variables:
- - [`IN_NIX_SHELL`]{#env-IN_NIX_SHELL}\
+ - <span id="env-IN_NIX_SHELL">[`IN_NIX_SHELL`](#env-IN_NIX_SHELL)</span>\
Indicator that tells if the current environment was set up by
`nix-shell`. It can have the values `pure` or `impure`.
- - [`NIX_PATH`]{#env-NIX_PATH}\
+ - <span id="env-NIX_PATH">[`NIX_PATH`](#env-NIX_PATH)</span>\
A colon-separated list of directories used to look up the location of Nix
- expressions using [paths](../language/values.md#type-path)
+ expressions using [paths](@docroot@/language/values.md#type-path)
enclosed in angle brackets (i.e., `<path>`),
e.g. `/home/eelco/Dev:/etc/nixos`. It can be extended using the
- [`-I` option](./opt-common.md#opt-I).
+ [`-I` option](@docroot@/command-ref/opt-common.md#opt-I).
- - [`NIX_IGNORE_SYMLINK_STORE`]{#env-NIX_IGNORE_SYMLINK_STORE}\
+ If `NIX_PATH` is not set at all, Nix will fall back to the following list in [impure](@docroot@/command-ref/conf-file.md#conf-pure-eval) and [unrestricted](@docroot@/command-ref/conf-file.md#conf-restrict-eval) evaluation mode:
+
+ 1. `$HOME/.nix-defexpr/channels`
+ 2. `nixpkgs=/nix/var/nix/profiles/per-user/root/channels/nixpkgs`
+ 3. `/nix/var/nix/profiles/per-user/root/channels`
+
+ If `NIX_PATH` is set to an empty string, resolving search paths will always fail.
+ For example, attempting to use `<nixpkgs>` will produce:
+
+ error: file 'nixpkgs' was not found in the Nix search path
+
+ - <span id="env-NIX_IGNORE_SYMLINK_STORE">[`NIX_IGNORE_SYMLINK_STORE`](#env-NIX_IGNORE_SYMLINK_STORE)</span>\
Normally, the Nix store directory (typically `/nix/store`) is not
allowed to contain any symlink components. This is to prevent
“impure” builds. Builders sometimes “canonicalise” paths by
@@ -35,58 +46,58 @@ Most Nix commands interpret the following environment variables:
Consult the mount 8 manual page for details.
- - [`NIX_STORE_DIR`]{#env-NIX_STORE_DIR}\
+ - <span id="env-NIX_STORE_DIR">[`NIX_STORE_DIR`](#env-NIX_STORE_DIR)</span>\
Overrides the location of the Nix store (default `prefix/store`).
- - [`NIX_DATA_DIR`]{#env-NIX_DATA_DIR}\
+ - <span id="env-NIX_DATA_DIR">[`NIX_DATA_DIR`](#env-NIX_DATA_DIR)</span>\
Overrides the location of the Nix static data directory (default
`prefix/share`).
- - [`NIX_LOG_DIR`]{#env-NIX_LOG_DIR}\
+ - <span id="env-NIX_LOG_DIR">[`NIX_LOG_DIR`](#env-NIX_LOG_DIR)</span>\
Overrides the location of the Nix log directory (default
`prefix/var/log/nix`).
- - [`NIX_STATE_DIR`]{#env-NIX_STATE_DIR}\
+ - <span id="env-NIX_STATE_DIR">[`NIX_STATE_DIR`](#env-NIX_STATE_DIR)</span>\
Overrides the location of the Nix state directory (default
`prefix/var/nix`).
- - [`NIX_CONF_DIR`]{#env-NIX_CONF_DIR}\
+ - <span id="env-NIX_CONF_DIR">[`NIX_CONF_DIR`](#env-NIX_CONF_DIR)</span>\
Overrides the location of the system Nix configuration directory
(default `prefix/etc/nix`).
- - [`NIX_CONFIG`]{#env-NIX_CONFIG}\
+ - <span id="env-NIX_CONFIG">[`NIX_CONFIG`](#env-NIX_CONFIG)</span>\
Applies settings from Nix configuration from the environment.
The content is treated as if it was read from a Nix configuration file.
Settings are separated by the newline character.
- - [`NIX_USER_CONF_FILES`]{#env-NIX_USER_CONF_FILES}\
+ - <span id="env-NIX_USER_CONF_FILES">[`NIX_USER_CONF_FILES`](#env-NIX_USER_CONF_FILES)</span>\
Overrides the location of the user Nix configuration files to load
from (defaults to the XDG spec locations). The variable is treated
as a list separated by the `:` token.
- - [`TMPDIR`]{#env-TMPDIR}\
+ - <span id="env-TMPDIR">[`TMPDIR`](#env-TMPDIR)</span>\
Use the specified directory to store temporary files. In particular,
this includes temporary build directories; these can take up
substantial amounts of disk space. The default is `/tmp`.
- - [`NIX_REMOTE`]{#env-NIX_REMOTE}\
+ - <span id="env-NIX_REMOTE">[`NIX_REMOTE`](#env-NIX_REMOTE)</span>\
This variable should be set to `daemon` if you want to use the Nix
daemon to execute Nix operations. This is necessary in [multi-user
- Nix installations](../installation/multi-user.md). If the Nix
+ Nix installations](@docroot@/installation/multi-user.md). If the Nix
daemon's Unix socket is at some non-standard path, this variable
should be set to `unix://path/to/socket`. Otherwise, it should be
left unset.
- - [`NIX_SHOW_STATS`]{#env-NIX_SHOW_STATS}\
+ - <span id="env-NIX_SHOW_STATS">[`NIX_SHOW_STATS`](#env-NIX_SHOW_STATS)</span>\
If set to `1`, Nix will print some evaluation statistics, such as
the number of values allocated.
- - [`NIX_COUNT_CALLS`]{#env-NIX_COUNT_CALLS}\
+ - <span id="env-NIX_COUNT_CALLS">[`NIX_COUNT_CALLS`](#env-NIX_COUNT_CALLS)</span>\
If set to `1`, Nix will print how often functions were called during
Nix expression evaluation. This is useful for profiling your Nix
expressions.
- - [`GC_INITIAL_HEAP_SIZE`]{#env-GC_INITIAL_HEAP_SIZE}\
+ - <span id="env-GC_INITIAL_HEAP_SIZE">[`GC_INITIAL_HEAP_SIZE`](#env-GC_INITIAL_HEAP_SIZE)</span>\
If Nix has been configured to use the Boehm garbage collector, this
variable sets the initial size of the heap in bytes. It defaults to
384 MiB. Setting it to a low value reduces memory consumption, but
@@ -103,4 +114,4 @@ New Nix commands conform to the [XDG Base Directory Specification], and use the
Classic Nix commands can also be made to follow this standard using the [`use-xdg-base-directories`] configuration option.
[XDG Base Directory Specification]: https://specifications.freedesktop.org/basedir-spec/basedir-spec-latest.html
-[`use-xdg-base-directories`]: ../command-ref/conf-file.md#conf-use-xdg-base-directories
\ No newline at end of file
+[`use-xdg-base-directories`]: @docroot@/command-ref/conf-file.md#conf-use-xdg-base-directories
\ No newline at end of file
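
A brief sketch of the `NIX_PATH` behaviour documented in the hunk above (illustrative only; the first command assumes a default channel is present):

```sh
# Unset: Nix falls back to the default search path entries listed above.
unset NIX_PATH
nix-instantiate --eval -E '(import <nixpkgs> {}).lib.version'

# Empty string: search path resolution always fails.
NIX_PATH= nix-instantiate --eval -E '<nixpkgs>'
# error: file 'nixpkgs' was not found in the Nix search path
```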
diff --git a/doc/manual/src/command-ref/nix-build.md b/doc/manual/src/command-ref/nix-build.md
index 937b046b8..44de4cf53 100644
--- a/doc/manual/src/command-ref/nix-build.md
+++ b/doc/manual/src/command-ref/nix-build.md
@@ -38,7 +38,7 @@ directory containing at least a file named `default.nix`.
`nix-build` is essentially a wrapper around
[`nix-instantiate`](nix-instantiate.md) (to translate a high-level Nix
expression to a low-level [store derivation]) and [`nix-store
---realise`](nix-store.md#operation---realise) (to build the store
+--realise`](@docroot@/command-ref/nix-store/realise.md) (to build the store
derivation).
[store derivation]: ../glossary.md#gloss-store-derivation
@@ -51,9 +51,8 @@ derivation).
# Options
-All options not listed here are passed to `nix-store
---realise`, except for `--arg` and `--attr` / `-A` which are passed to
-`nix-instantiate`.
+All options not listed here are passed to `nix-store --realise`,
+except for `--arg` and `--attr` / `-A` which are passed to `nix-instantiate`.
- <span id="opt-no-out-link">[`--no-out-link`](#opt-no-out-link)<span>
@@ -70,7 +69,9 @@ All options not listed here are passed to `nix-store
Change the name of the symlink to the output path created from
`result` to *outlink*.
-The following common options are supported:
+{{#include ./opt-common.md}}
+
+{{#include ./env-common.md}}
# Examples
diff --git a/doc/manual/src/command-ref/nix-channel.md b/doc/manual/src/command-ref/nix-channel.md
index 24353525f..2027cc98d 100644
--- a/doc/manual/src/command-ref/nix-channel.md
+++ b/doc/manual/src/command-ref/nix-channel.md
@@ -45,6 +45,10 @@ Note that `--add` does not automatically perform an update.
The list of subscribed channels is stored in `~/.nix-channels`.
+{{#include ./opt-common.md}}
+
+{{#include ./env-common.md}}
+
# Examples
To subscribe to the Nixpkgs channel and install the GNU Hello package:
@@ -70,7 +74,7 @@ $ nix-instantiate --eval -E '(import <nixpkgs> {}).lib.version'
# Files
- - `/nix/var/nix/profiles/per-user/username/channels`\
+ - `${XDG_STATE_HOME-$HOME/.local/state}/nix/profiles/channels`\
`nix-channel` uses a `nix-env` profile to keep track of previous
versions of the subscribed channels. Every time you run `nix-channel
--update`, a new channel generation (that is, a symlink to the
@@ -79,7 +83,7 @@ $ nix-instantiate --eval -E '(import <nixpkgs> {}).lib.version'
- `~/.nix-defexpr/channels`\
This is a symlink to
- `/nix/var/nix/profiles/per-user/username/channels`. It ensures that
+ `${XDG_STATE_HOME-$HOME/.local/state}/nix/profiles/channels`. It ensures that
`nix-env` can find your channels. In a multi-user installation, you
may also have `~/.nix-defexpr/channels_root`, which links to the
channels of the root user.
diff --git a/doc/manual/src/command-ref/nix-collect-garbage.md b/doc/manual/src/command-ref/nix-collect-garbage.md
index 296165993..51db5fc67 100644
--- a/doc/manual/src/command-ref/nix-collect-garbage.md
+++ b/doc/manual/src/command-ref/nix-collect-garbage.md
@@ -9,7 +9,7 @@
# Description
The command `nix-collect-garbage` is mostly an alias of [`nix-store
---gc`](nix-store.md#operation---gc), that is, it deletes all
+--gc`](@docroot@/command-ref/nix-store/gc.md), that is, it deletes all
unreachable paths in the Nix store to clean up your system. However,
it provides two additional options: `-d` (`--delete-old`), which
deletes all old generations of all profiles in `/nix/var/nix/profiles`
@@ -20,6 +20,10 @@ and `--delete-older-than` *period*, where period is a value such as
of days in all profiles in `/nix/var/nix/profiles` (except for the
generations that were active at that point in time).
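+
+For example, a brief sketch of both forms (the period value is illustrative):
+
+```console
+$ nix-collect-garbage -d
+$ nix-collect-garbage --delete-older-than 30d
+```
+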
+{{#include ./opt-common.md}}
+
+{{#include ./env-common.md}}
+
# Example
To delete from the Nix store everything that is not used by the current
diff --git a/doc/manual/src/command-ref/nix-copy-closure.md b/doc/manual/src/command-ref/nix-copy-closure.md
index cd8e351bb..0801e8246 100644
--- a/doc/manual/src/command-ref/nix-copy-closure.md
+++ b/doc/manual/src/command-ref/nix-copy-closure.md
@@ -63,12 +63,16 @@ authentication, you can avoid typing the passphrase with `ssh-agent`.
- `-v`\
Show verbose output.
+{{#include ./opt-common.md}}
+
# Environment variables
- `NIX_SSHOPTS`\
Additional options to be passed to `ssh` on the command
line.
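+
+For example, to use a non-standard SSH port (the port number and host are
+illustrative):
+
+```console
+$ NIX_SSHOPTS="-p 2222" nix-copy-closure --to alice@example.org $(type -p firefox)
+```
+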
+{{#include ./env-common.md}}
+
# Examples
Copy Firefox with all its dependencies to a remote machine:
diff --git a/doc/manual/src/command-ref/nix-env.md b/doc/manual/src/command-ref/nix-env.md
index f4fa5b50c..42b5bca77 100644
--- a/doc/manual/src/command-ref/nix-env.md
+++ b/doc/manual/src/command-ref/nix-env.md
@@ -4,15 +4,14 @@
# Synopsis
-`nix-env`
+`nix-env` *operation* [*options*] [*arguments…*]
[`--option` *name* *value*]
[`--arg` *name* *value*]
[`--argstr` *name* *value*]
[{`--file` | `-f`} *path*]
- [{`--profile` | `-p`} *path(]
+ [{`--profile` | `-p`} *path*]
[`--system-filter` *system*]
[`--dry-run`]
- *operation* [*options…*] [*arguments…*]
# Description
@@ -24,7 +23,29 @@ environments: different users can have different environments, and
individual users can switch between different environments.
`nix-env` takes exactly one *operation* flag which indicates the
-subcommand to be performed. These are documented below.
+subcommand to be performed. The following operations are available:
+
+- [`--install`](./nix-env/install.md)
+- [`--upgrade`](./nix-env/upgrade.md)
+- [`--uninstall`](./nix-env/uninstall.md)
+- [`--set`](./nix-env/set.md)
+- [`--set-flag`](./nix-env/set-flag.md)
+- [`--query`](./nix-env/query.md)
+- [`--switch-profile`](./nix-env/switch-profile.md)
+- [`--list-generations`](./nix-env/list-generations.md)
+- [`--delete-generations`](./nix-env/delete-generations.md)
+- [`--switch-generation`](./nix-env/switch-generation.md)
+- [`--rollback`](./nix-env/rollback.md)
+
+These pages can be viewed offline:
+
+- `man nix-env-<operation>`.
+
+ Example: `man nix-env-install`
+
+- `nix-env --help --<operation>`
+
+ Example: `nix-env --help --install`
# Selectors
@@ -60,46 +81,6 @@ match. Here are some examples:
Matches any package name containing the strings `firefox` or
`chromium`.
-# Common options
-
-This section lists the options that are common to all operations. These
-options are allowed for every subcommand, though they may not always
-have an effect.
-
- - `--file` / `-f` *path*\
- Specifies the Nix expression (designated below as the *active Nix
- expression*) used by the `--install`, `--upgrade`, and `--query
- --available` operations to obtain derivations. The default is
- `~/.nix-defexpr`.
-
- If the argument starts with `http://` or `https://`, it is
- interpreted as the URL of a tarball that will be downloaded and
- unpacked to a temporary location. The tarball must include a single
- top-level directory containing at least a file named `default.nix`.
-
- - `--profile` / `-p` *path*\
- Specifies the profile to be used by those operations that operate on
- a profile (designated below as the *active profile*). A profile is a
- sequence of user environments called *generations*, one of which is
- the *current generation*.
-
- - `--dry-run`\
- For the `--install`, `--upgrade`, `--uninstall`,
- `--switch-generation`, `--delete-generations` and `--rollback`
- operations, this flag will cause `nix-env` to print what *would* be
- done if this flag had not been specified, without actually doing it.
-
- `--dry-run` also prints out which paths will be
- [substituted](../glossary.md) (i.e., downloaded) and which paths
- will be built from source (because no substitute is available).
-
- - `--system-filter` *system*\
- By default, operations such as `--query
- --available` show derivations matching any platform. This option
- allows you to use derivations for the specified platform *system*.
-
-<!-- end list -->
-
# Files
- `~/.nix-defexpr`\
@@ -145,750 +126,3 @@ have an effect.
symlink points to `prefix/var/nix/profiles/default`. The `PATH`
environment variable should include `~/.nix-profile/bin` for the
user environment to be visible to the user.
-
-# Operation `--install`
-
-## Synopsis
-
-`nix-env` {`--install` | `-i`} *args…*
- [{`--prebuilt-only` | `-b`}]
- [{`--attr` | `-A`}]
- [`--from-expression`] [`-E`]
- [`--from-profile` *path*]
- [`--preserve-installed` | `-P`]
- [`--remove-all` | `-r`]
-
-## Description
-
-The install operation creates a new user environment, based on the
-current generation of the active profile, to which a set of store paths
-described by *args* is added. The arguments *args* map to store paths in
-a number of possible ways:
-
- - By default, *args* is a set of derivation names denoting derivations
- in the active Nix expression. These are realised, and the resulting
- output paths are installed. Currently installed derivations with a
- name equal to the name of a derivation being added are removed
- unless the option `--preserve-installed` is specified.
-
- If there are multiple derivations matching a name in *args* that
- have the same name (e.g., `gcc-3.3.6` and `gcc-4.1.1`), then the
- derivation with the highest *priority* is used. A derivation can
- define a priority by declaring the `meta.priority` attribute. This
- attribute should be a number, with a higher value denoting a lower
- priority. The default priority is `0`.
-
- If there are multiple matching derivations with the same priority,
- then the derivation with the highest version will be installed.
-
- You can force the installation of multiple derivations with the same
- name by being specific about the versions. For instance, `nix-env -i
- gcc-3.3.6 gcc-4.1.1` will install both version of GCC (and will
- probably cause a user environment conflict\!).
-
- - If `--attr` (`-A`) is specified, the arguments are *attribute
- paths* that select attributes from the top-level Nix
- expression. This is faster than using derivation names and
- unambiguous. To find out the attribute paths of available
- packages, use `nix-env -qaP`.
-
- - If `--from-profile` *path* is given, *args* is a set of names
- denoting installed store paths in the profile *path*. This is an
- easy way to copy user environment elements from one profile to
- another.
-
- - If `--from-expression` is given, *args* are Nix
- [functions](../language/constructs.md#functions)
- that are called with the active Nix expression as their single
- argument. The derivations returned by those function calls are
- installed. This allows derivations to be specified in an
- unambiguous way, which is necessary if there are multiple
- derivations with the same name.
-
- - If *args* are [store derivation]s, then these are
- [realised](nix-store.md#operation---realise), and the resulting output paths
- are installed.
-
- [store derivation]: ../glossary.md#gloss-store-derivation
-
- - If *args* are store paths that are not store derivations, then these
- are [realised](nix-store.md#operation---realise) and installed.
-
- - By default all outputs are installed for each derivation. That can
- be reduced by setting `meta.outputsToInstall`.
-
-## Flags
-
- - `--prebuilt-only` / `-b`\
- Use only derivations for which a substitute is registered, i.e.,
- there is a pre-built binary available that can be downloaded in lieu
- of building the derivation. Thus, no packages will be built from
- source.
-
- - `--preserve-installed`; `-P`\
- Do not remove derivations with a name matching one of the
- derivations being installed. Usually, trying to have two versions of
- the same package installed in the same generation of a profile will
- lead to an error in building the generation, due to file name
- clashes between the two versions. However, this is not the case for
- all packages.
-
- - `--remove-all`; `-r`\
- Remove all previously installed packages first. This is equivalent
- to running `nix-env -e '.*'` first, except that everything happens
- in a single transaction.
-
-## Examples
-
-To install a package using a specific attribute path from the active Nix expression:
-
-```console
-$ nix-env -iA gcc40mips
-installing `gcc-4.0.2'
-$ nix-env -iA xorg.xorgserver
-installing `xorg-server-1.2.0'
-```
-
-To install a specific version of `gcc` using the derivation name:
-
-```console
-$ nix-env --install gcc-3.3.2
-installing `gcc-3.3.2'
-uninstalling `gcc-3.1'
-```
-
-Using attribute path for selecting a package is preferred,
-as it is much faster and there will not be multiple matches.
-
-Note the previously installed version is removed, since
-`--preserve-installed` was not specified.
-
-To install an arbitrary version:
-
-```console
-$ nix-env --install gcc
-installing `gcc-3.3.2'
-```
-
-To install all derivations in the Nix expression `foo.nix`:
-
-```console
-$ nix-env -f ~/foo.nix -i '.*'
-```
-
-To copy the store path with symbolic name `gcc` from another profile:
-
-```console
-$ nix-env -i --from-profile /nix/var/nix/profiles/foo gcc
-```
-
-To install a specific [store derivation] (typically created by
-`nix-instantiate`):
-
-```console
-$ nix-env -i /nix/store/fibjb1bfbpm5mrsxc4mh2d8n37sxh91i-gcc-3.4.3.drv
-```
-
-To install a specific output path:
-
-```console
-$ nix-env -i /nix/store/y3cgx0xj1p4iv9x0pnnmdhr8iyg741vk-gcc-3.4.3
-```
-
-To install from a Nix expression specified on the command-line:
-
-```console
-$ nix-env -f ./foo.nix -i -E \
- 'f: (f {system = "i686-linux";}).subversionWithJava'
-```
-
-I.e., this evaluates to `(f: (f {system =
-"i686-linux";}).subversionWithJava) (import ./foo.nix)`, thus selecting
-the `subversionWithJava` attribute from the set returned by calling the
-function defined in `./foo.nix`.
-
-A dry-run tells you which paths will be downloaded or built from source:
-
-```console
-$ nix-env -f '<nixpkgs>' -iA hello --dry-run
-(dry run; not doing anything)
-installing ‘hello-2.10’
-this path will be fetched (0.04 MiB download, 0.19 MiB unpacked):
- /nix/store/wkhdf9jinag5750mqlax6z2zbwhqb76n-hello-2.10
- ...
-```
-
-To install Firefox from the latest revision in the Nixpkgs/NixOS 14.12
-channel:
-
-```console
-$ nix-env -f https://github.com/NixOS/nixpkgs/archive/nixos-14.12.tar.gz -iA firefox
-```
-
-# Operation `--upgrade`
-
-## Synopsis
-
-`nix-env` {`--upgrade` | `-u`} *args*
- [`--lt` | `--leq` | `--eq` | `--always`]
- [{`--prebuilt-only` | `-b`}]
- [{`--attr` | `-A`}]
- [`--from-expression`] [`-E`]
- [`--from-profile` *path*]
- [`--preserve-installed` | `-P`]
-
-## Description
-
-The upgrade operation creates a new user environment, based on the
-current generation of the active profile, in which all store paths are
-replaced for which there are newer versions in the set of paths
-described by *args*. Paths for which there are no newer versions are
-left untouched; this is not an error. It is also not an error if an
-element of *args* matches no installed derivations.
-
-For a description of how *args* is mapped to a set of store paths, see
-[`--install`](#operation---install). If *args* describes multiple
-store paths with the same symbolic name, only the one with the highest
-version is installed.
-
-## Flags
-
- - `--lt`\
- Only upgrade a derivation to newer versions. This is the default.
-
- - `--leq`\
- In addition to upgrading to newer versions, also “upgrade” to
- derivations that have the same version. Version are not a unique
- identification of a derivation, so there may be many derivations
- that have the same version. This flag may be useful to force
- “synchronisation” between the installed and available derivations.
-
- - `--eq`\
- *Only* “upgrade” to derivations that have the same version. This may
- not seem very useful, but it actually is, e.g., when there is a new
- release of Nixpkgs and you want to replace installed applications
- with the same versions built against newer dependencies (to reduce
- the number of dependencies floating around on your system).
-
- - `--always`\
- In addition to upgrading to newer versions, also “upgrade” to
- derivations that have the same or a lower version. I.e., derivations
- may actually be downgraded depending on what is available in the
- active Nix expression.
-
-For the other flags, see `--install`.
-
-## Examples
-
-```console
-$ nix-env --upgrade -A nixpkgs.gcc
-upgrading `gcc-3.3.1' to `gcc-3.4'
-```
-
-When there are no updates available, nothing will happen:
-
-```console
-$ nix-env --upgrade -A nixpkgs.pan
-```
-
-Using `-A` is preferred when possible, as it is faster and unambiguous but
-it is also possible to upgrade to a specific version by matching the derivation name:
-
-```console
-$ nix-env -u gcc-3.3.2 --always
-upgrading `gcc-3.4' to `gcc-3.3.2'
-```
-
-To try to upgrade everything
-(matching packages based on the part of the derivation name without version):
-
-```console
-$ nix-env -u
-upgrading `hello-2.1.2' to `hello-2.1.3'
-upgrading `mozilla-1.2' to `mozilla-1.4'
-```
-
-## Versions
-
-The upgrade operation determines whether a derivation `y` is an upgrade
-of a derivation `x` by looking at their respective `name` attributes.
-The names (e.g., `gcc-3.3.1` are split into two parts: the package name
-(`gcc`), and the version (`3.3.1`). The version part starts after the
-first dash not followed by a letter. `y` is considered an upgrade of `x`
-if their package names match, and the version of `y` is higher than that
-of `x`.
-
-The versions are compared by splitting them into contiguous components
-of numbers and letters. E.g., `3.3.1pre5` is split into `[3, 3, 1,
-"pre", 5]`. These lists are then compared lexicographically (from left
-to right). Corresponding components `a` and `b` are compared as follows.
-If they are both numbers, integer comparison is used. If `a` is an empty
-string and `b` is a number, `a` is considered less than `b`. The special
-string component `pre` (for *pre-release*) is considered to be less than
-other components. String components are considered less than number
-components. Otherwise, they are compared lexicographically (i.e., using
-case-sensitive string comparison).
-
-This is illustrated by the following examples:
-
- 1.0 < 2.3
- 2.1 < 2.3
- 2.3 = 2.3
- 2.5 > 2.3
- 3.1 > 2.3
- 2.3.1 > 2.3
- 2.3.1 > 2.3a
- 2.3pre1 < 2.3
- 2.3pre3 < 2.3pre12
- 2.3a < 2.3c
- 2.3pre1 < 2.3c
- 2.3pre1 < 2.3q
-
-# Operation `--uninstall`
-
-## Synopsis
-
-`nix-env` {`--uninstall` | `-e`} *drvnames…*
-
-## Description
-
-The uninstall operation creates a new user environment, based on the
-current generation of the active profile, from which the store paths
-designated by the symbolic names *drvnames* are removed.
-
-## Examples
-
-```console
-$ nix-env --uninstall gcc
-$ nix-env -e '.*' (remove everything)
-```
-
-# Operation `--set`
-
-## Synopsis
-
-`nix-env` `--set` *drvname*
-
-## Description
-
-The `--set` operation modifies the current generation of a profile so
-that it contains exactly the specified derivation, and nothing else.
-
-## Examples
-
-The following updates a profile such that its current generation will
-contain just Firefox:
-
-```console
-$ nix-env -p /nix/var/nix/profiles/browser --set firefox
-```
-
-# Operation `--set-flag`
-
-## Synopsis
-
-`nix-env` `--set-flag` *name* *value* *drvnames*
-
-## Description
-
-The `--set-flag` operation allows meta attributes of installed packages
-to be modified. There are several attributes that can be usefully
-modified, because they affect the behaviour of `nix-env` or the user
-environment build script:
-
- - `priority` can be changed to resolve filename clashes. The user
- environment build script uses the `meta.priority` attribute of
- derivations to resolve filename collisions between packages. Lower
- priority values denote a higher priority. For instance, the GCC
- wrapper package and the Binutils package in Nixpkgs both have a file
- `bin/ld`, so previously if you tried to install both you would get a
- collision. Now, on the other hand, the GCC wrapper declares a higher
- priority than Binutils, so the former’s `bin/ld` is symlinked in the
- user environment.
-
- - `keep` can be set to `true` to prevent the package from being
- upgraded or replaced. This is useful if you want to hang on to an
- older version of a package.
-
- - `active` can be set to `false` to “disable” the package. That is, no
- symlinks will be generated to the files of the package, but it
- remains part of the profile (so it won’t be garbage-collected). It
- can be set back to `true` to re-enable the package.
-
-## Examples
-
-To prevent the currently installed Firefox from being upgraded:
-
-```console
-$ nix-env --set-flag keep true firefox
-```
-
-After this, `nix-env -u` will ignore Firefox.
-
-To disable the currently installed Firefox, then install a new Firefox
-while the old remains part of the profile:
-
-```console
-$ nix-env -q
-firefox-2.0.0.9 (the current one)
-
-$ nix-env --preserve-installed -i firefox-2.0.0.11
-installing `firefox-2.0.0.11'
-building path(s) `/nix/store/myy0y59q3ig70dgq37jqwg1j0rsapzsl-user-environment'
-collision between `/nix/store/...-firefox-2.0.0.11/bin/firefox'
- and `/nix/store/...-firefox-2.0.0.9/bin/firefox'.
-(i.e., can’t have two active at the same time)
-
-$ nix-env --set-flag active false firefox
-setting flag on `firefox-2.0.0.9'
-
-$ nix-env --preserve-installed -i firefox-2.0.0.11
-installing `firefox-2.0.0.11'
-
-$ nix-env -q
-firefox-2.0.0.11 (the enabled one)
-firefox-2.0.0.9 (the disabled one)
-```
-
-To make files from `binutils` take precedence over files from `gcc`:
-
-```console
-$ nix-env --set-flag priority 5 binutils
-$ nix-env --set-flag priority 10 gcc
-```
-
-# Operation `--query`
-
-## Synopsis
-
-`nix-env` {`--query` | `-q`} *names…*
- [`--installed` | `--available` | `-a`]
- [{`--status` | `-s`}]
- [{`--attr-path` | `-P`}]
- [`--no-name`]
- [{`--compare-versions` | `-c`}]
- [`--system`]
- [`--drv-path`]
- [`--out-path`]
- [`--description`]
- [`--meta`]
- [`--xml`]
- [`--json`]
- [{`--prebuilt-only` | `-b`}]
- [{`--attr` | `-A`} *attribute-path*]
-
-## Description
-
-The query operation displays information about either the store paths
-that are installed in the current generation of the active profile
-(`--installed`), or the derivations that are available for installation
-in the active Nix expression (`--available`). It only prints information
-about derivations whose symbolic name matches one of *names*.
-
-The derivations are sorted by their `name` attributes.
-
-## Source selection
-
-The following flags specify the set of things on which the query
-operates.
-
- - `--installed`\
- The query operates on the store paths that are installed in the
- current generation of the active profile. This is the default.
-
- - `--available`; `-a`\
- The query operates on the derivations that are available in the
- active Nix expression.
-
-## Queries
-
-The following flags specify what information to display about the
-selected derivations. Multiple flags may be specified, in which case the
-information is shown in the order given here. Note that the name of the
-derivation is shown unless `--no-name` is specified.
-
- - `--xml`\
- Print the result in an XML representation suitable for automatic
- processing by other tools. The root element is called `items`, which
- contains a `item` element for each available or installed
- derivation. The fields discussed below are all stored in attributes
- of the `item` elements.
-
- - `--json`\
- Print the result in a JSON representation suitable for automatic
- processing by other tools.
-
- - `--prebuilt-only` / `-b`\
- Show only derivations for which a substitute is registered, i.e.,
- there is a pre-built binary available that can be downloaded in lieu
- of building the derivation. Thus, this shows all packages that
- probably can be installed quickly.
-
- - `--status`; `-s`\
- Print the *status* of the derivation. The status consists of three
- characters. The first is `I` or `-`, indicating whether the
- derivation is currently installed in the current generation of the
- active profile. This is by definition the case for `--installed`,
- but not for `--available`. The second is `P` or `-`, indicating
- whether the derivation is present on the system. This indicates
- whether installation of an available derivation will require the
- derivation to be built. The third is `S` or `-`, indicating whether
- a substitute is available for the derivation.
-
- - `--attr-path`; `-P`\
- Print the *attribute path* of the derivation, which can be used to
- unambiguously select it using the `--attr` option available in
- commands that install derivations like `nix-env --install`. This
- option only works together with `--available`
-
- - `--no-name`\
- Suppress printing of the `name` attribute of each derivation.
-
- - `--compare-versions` / `-c`\
- Compare installed versions to available versions, or vice versa (if
- `--available` is given). This is useful for quickly seeing whether
- upgrades for installed packages are available in a Nix expression. A
- column is added with the following meaning:
-
- - `<` *version*\
- A newer version of the package is available or installed.
-
- - `=` *version*\
- At most the same version of the package is available or
- installed.
-
- - `>` *version*\
- Only older versions of the package are available or installed.
-
- - `- ?`\
- No version of the package is available or installed.
-
- - `--system`\
- Print the `system` attribute of the derivation.
-
- - `--drv-path`\
- Print the path of the [store derivation].
-
- - `--out-path`\
- Print the output path of the derivation.
-
- - `--description`\
- Print a short (one-line) description of the derivation, if
- available. The description is taken from the `meta.description`
- attribute of the derivation.
-
- - `--meta`\
- Print all of the meta-attributes of the derivation. This option is
- only available with `--xml` or `--json`.
-
-## Examples
-
-To show installed packages:
-
-```console
-$ nix-env -q
-bison-1.875c
-docbook-xml-4.2
-firefox-1.0.4
-MPlayer-1.0pre7
-ORBit2-2.8.3
-…
-```
-
-To show available packages:
-
-```console
-$ nix-env -qa
-firefox-1.0.7
-GConf-2.4.0.1
-MPlayer-1.0pre7
-ORBit2-2.8.3
-…
-```
-
-To show the status of available packages:
-
-```console
-$ nix-env -qas
--P- firefox-1.0.7 (not installed but present)
---S GConf-2.4.0.1 (not present, but there is a substitute for fast installation)
---S MPlayer-1.0pre3 (i.e., this is not the installed MPlayer, even though the version is the same!)
-IP- ORBit2-2.8.3 (installed and by definition present)
-…
-```
-
-To show available packages in the Nix expression `foo.nix`:
-
-```console
-$ nix-env -f ./foo.nix -qa
-foo-1.2.3
-```
-
-To compare installed versions to what’s available:
-
-```console
-$ nix-env -qc
-...
-acrobat-reader-7.0 - ? (package is not available at all)
-autoconf-2.59 = 2.59 (same version)
-firefox-1.0.4 < 1.0.7 (a more recent version is available)
-...
-```
-
-To show all packages with “`zip`” in the name:
-
-```console
-$ nix-env -qa '.*zip.*'
-bzip2-1.0.6
-gzip-1.6
-zip-3.0
-…
-```
-
-To show all packages with “`firefox`” or “`chromium`” in the name:
-
-```console
-$ nix-env -qa '.*(firefox|chromium).*'
-chromium-37.0.2062.94
-chromium-beta-38.0.2125.24
-firefox-32.0.3
-firefox-with-plugins-13.0.1
-…
-```
-
-To show all packages in the latest revision of the Nixpkgs repository:
-
-```console
-$ nix-env -f https://github.com/NixOS/nixpkgs/archive/master.tar.gz -qa
-```
-
-# Operation `--switch-profile`
-
-## Synopsis
-
-`nix-env` {`--switch-profile` | `-S`} *path*
-
-## Description
-
-This operation makes *path* the current profile for the user. That is,
-the symlink `~/.nix-profile` is made to point to *path*.
-
-## Examples
-
-```console
-$ nix-env -S ~/my-profile
-```
-
-# Operation `--list-generations`
-
-## Synopsis
-
-`nix-env` `--list-generations`
-
-## Description
-
-This operation print a list of all the currently existing generations
-for the active profile. These may be switched to using the
-`--switch-generation` operation. It also prints the creation date of the
-generation, and indicates the current generation.
-
-## Examples
-
-```console
-$ nix-env --list-generations
- 95 2004-02-06 11:48:24
- 96 2004-02-06 11:49:01
- 97 2004-02-06 16:22:45
- 98 2004-02-06 16:24:33 (current)
-```
-
-# Operation `--delete-generations`
-
-## Synopsis
-
-`nix-env` `--delete-generations` *generations*
-
-## Description
-
-This operation deletes the specified generations of the current profile.
-The generations can be a list of generation numbers, the special value
-`old` to delete all non-current generations, a value such as `30d` to
-delete all generations older than the specified number of days (except
-for the generation that was active at that point in time), or a value
-such as `+5` to keep the last `5` generations ignoring any newer than
-current, e.g., if `30` is the current generation `+5` will delete
-generation `25` and all older generations. Periodically deleting old
-generations is important to make garbage collection effective.
-
-## Examples
-
-```console
-$ nix-env --delete-generations 3 4 8
-```
-
-```console
-$ nix-env --delete-generations +5
-```
-
-```console
-$ nix-env --delete-generations 30d
-```
-
-```console
-$ nix-env -p other_profile --delete-generations old
-```
-
-# Operation `--switch-generation`
-
-## Synopsis
-
-`nix-env` {`--switch-generation` | `-G`} *generation*
-
-## Description
-
-This operation makes generation number *generation* the current
-generation of the active profile. That is, if the `profile` is the path
-to the active profile, then the symlink `profile` is made to point to
-`profile-generation-link`, which is in turn a symlink to the actual user
-environment in the Nix store.
-
-Switching will fail if the specified generation does not exist.
-
-## Examples
-
-```console
-$ nix-env -G 42
-switching from generation 50 to 42
-```
-
-# Operation `--rollback`
-
-## Synopsis
-
-`nix-env` `--rollback`
-
-## Description
-
-This operation switches to the “previous” generation of the active
-profile, that is, the highest numbered generation lower than the current
-generation, if it exists. It is just a convenience wrapper around
-`--list-generations` and `--switch-generation`.
-
-## Examples
-
-```console
-$ nix-env --rollback
-switching from generation 92 to 91
-```
-
-```console
-$ nix-env --rollback
-error: no generation older than the current (91) exists
-```
-
-# Environment variables
-
- - `NIX_PROFILE`\
- Location of the Nix profile. Defaults to the target of the symlink
- `~/.nix-profile`, if it exists, or `/nix/var/nix/profiles/default`
- otherwise.
diff --git a/doc/manual/src/command-ref/nix-env/delete-generations.md b/doc/manual/src/command-ref/nix-env/delete-generations.md
new file mode 100644
index 000000000..6f0af5384
--- /dev/null
+++ b/doc/manual/src/command-ref/nix-env/delete-generations.md
@@ -0,0 +1,46 @@
+# Name
+
+`nix-env --delete-generations` - delete profile generations
+
+# Synopsis
+
+`nix-env` `--delete-generations` *generations*
+
+# Description
+
+This operation deletes the specified generations of the current profile.
+The generations can be a list of generation numbers, the special value
+`old` to delete all non-current generations, a value such as `30d` to
+delete all generations older than the specified number of days (except
+for the generation that was active at that point in time), or a value
+such as `+5` to keep the last `5` generations, ignoring any newer than
+current, e.g., if `30` is the current generation, `+5` will delete
+generation `25` and all older generations. Periodically deleting old
+generations is important to make garbage collection effective.
+
+{{#include ./opt-common.md}}
+
+{{#include ../opt-common.md}}
+
+{{#include ./env-common.md}}
+
+{{#include ../env-common.md}}
+
+# Examples
+
+```console
+$ nix-env --delete-generations 3 4 8
+```
+
+```console
+$ nix-env --delete-generations +5
+```
+
+```console
+$ nix-env --delete-generations 30d
+```
+
+```console
+$ nix-env -p other_profile --delete-generations old
+```
+
diff --git a/doc/manual/src/command-ref/nix-env/env-common.md b/doc/manual/src/command-ref/nix-env/env-common.md
new file mode 100644
index 000000000..735817959
--- /dev/null
+++ b/doc/manual/src/command-ref/nix-env/env-common.md
@@ -0,0 +1,6 @@
+# Environment variables
+
+- `NIX_PROFILE`\
+ Location of the Nix profile. Defaults to the target of the symlink
+ `~/.nix-profile`, if it exists, or `/nix/var/nix/profiles/default`
+ otherwise.
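+
+For example, to query a different profile for a single invocation (the
+profile path is illustrative):
+
+```console
+$ NIX_PROFILE=/nix/var/nix/profiles/per-user/alice/profile nix-env -q
+```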
diff --git a/doc/manual/src/command-ref/nix-env/install.md b/doc/manual/src/command-ref/nix-env/install.md
new file mode 100644
index 000000000..d754accfe
--- /dev/null
+++ b/doc/manual/src/command-ref/nix-env/install.md
@@ -0,0 +1,187 @@
+# Name
+
+`nix-env --install` - add packages to user environment
+
+# Synopsis
+
+`nix-env` {`--install` | `-i`} *args…*
+ [{`--prebuilt-only` | `-b`}]
+ [{`--attr` | `-A`}]
+ [`--from-expression`] [`-E`]
+ [`--from-profile` *path*]
+ [`--preserve-installed` | `-P`]
+ [`--remove-all` | `-r`]
+
+# Description
+
+The install operation creates a new user environment, based on the
+current generation of the active profile, to which a set of store paths
+described by *args* is added. The arguments *args* map to store paths in
+a number of possible ways:
+
+ - By default, *args* is a set of derivation names denoting derivations
+ in the active Nix expression. These are realised, and the resulting
+ output paths are installed. Currently installed derivations with a
+ name equal to the name of a derivation being added are removed
+ unless the option `--preserve-installed` is specified.
+
+ If there are multiple derivations matching a name in *args* (e.g.,
+ `gcc-3.3.6` and `gcc-4.1.1`), then the
+ derivation with the highest *priority* is used. A derivation can
+ define a priority by declaring the `meta.priority` attribute. This
+ attribute should be a number, with a higher value denoting a lower
+ priority. The default priority is `0`.
+
+ If there are multiple matching derivations with the same priority,
+ then the derivation with the highest version will be installed.
+
+ You can force the installation of multiple derivations with the same
+ name by being specific about the versions. For instance, `nix-env -i
+ gcc-3.3.6 gcc-4.1.1` will install both versions of GCC (and will
+ probably cause a user environment conflict\!).
+
+ - If `--attr` (`-A`) is specified, the arguments are *attribute
+ paths* that select attributes from the top-level Nix
+ expression. This is faster than using derivation names and
+ unambiguous. To find out the attribute paths of available
+ packages, use `nix-env -qaP`.
+
+ - If `--from-profile` *path* is given, *args* is a set of names
+ denoting installed store paths in the profile *path*. This is an
+ easy way to copy user environment elements from one profile to
+ another.
+
+ - If `--from-expression` is given, *args* are Nix
+ [functions](@docroot@/language/constructs.md#functions)
+ that are called with the active Nix expression as their single
+ argument. The derivations returned by those function calls are
+ installed. This allows derivations to be specified in an
+ unambiguous way, which is necessary if there are multiple
+ derivations with the same name.
+
+ - If *args* are [store derivations](@docroot@/glossary.md#gloss-store-derivation), then these are
+ [realised](@docroot@/command-ref/nix-store/realise.md), and the resulting output paths
+ are installed.
+
+ - If *args* are store paths that are not store derivations, then these
+ are [realised](@docroot@/command-ref/nix-store/realise.md) and installed.
+
+ - By default all outputs are installed for each derivation. That can
+ be reduced by setting `meta.outputsToInstall`.
+
+# Flags
+
+ - `--prebuilt-only` / `-b`\
+ Use only derivations for which a substitute is registered, i.e.,
+ there is a pre-built binary available that can be downloaded in lieu
+ of building the derivation. Thus, no packages will be built from
+ source.
+
+ - `--preserve-installed` / `-P`\
+ Do not remove derivations with a name matching one of the
+ derivations being installed. Usually, trying to have two versions of
+ the same package installed in the same generation of a profile will
+ lead to an error in building the generation, due to file name
+ clashes between the two versions. However, this is not the case for
+ all packages.
+
+ - `--remove-all` / `-r`\
+ Remove all previously installed packages first. This is equivalent
+ to running `nix-env -e '.*'` first, except that everything happens
+ in a single transaction.
+
+{{#include ./opt-common.md}}
+
+{{#include ../opt-common.md}}
+
+{{#include ./env-common.md}}
+
+{{#include ../env-common.md}}
+
+# Examples
+
+To install a package using a specific attribute path from the active Nix expression:
+
+```console
+$ nix-env -iA gcc40mips
+installing `gcc-4.0.2'
+$ nix-env -iA xorg.xorgserver
+installing `xorg-server-1.2.0'
+```
+
+To install a specific version of `gcc` using the derivation name:
+
+```console
+$ nix-env --install gcc-3.3.2
+installing `gcc-3.3.2'
+uninstalling `gcc-3.1'
+```
+
+Using an attribute path to select a package is preferred,
+as it is much faster and there will not be multiple matches.
+
+Note the previously installed version is removed, since
+`--preserve-installed` was not specified.
+
+To install an arbitrary version:
+
+```console
+$ nix-env --install gcc
+installing `gcc-3.3.2'
+```
+
+To install all derivations in the Nix expression `foo.nix`:
+
+```console
+$ nix-env -f ~/foo.nix -i '.*'
+```
+
+To copy the store path with symbolic name `gcc` from another profile:
+
+```console
+$ nix-env -i --from-profile /nix/var/nix/profiles/foo gcc
+```
+
+To install a specific [store derivation](@docroot@/glossary.md#gloss-store-derivation) (typically created by
+`nix-instantiate`):
+
+```console
+$ nix-env -i /nix/store/fibjb1bfbpm5mrsxc4mh2d8n37sxh91i-gcc-3.4.3.drv
+```
+
+To install a specific output path:
+
+```console
+$ nix-env -i /nix/store/y3cgx0xj1p4iv9x0pnnmdhr8iyg741vk-gcc-3.4.3
+```
+
+To install from a Nix expression specified on the command-line:
+
+```console
+$ nix-env -f ./foo.nix -i -E \
+ 'f: (f {system = "i686-linux";}).subversionWithJava'
+```
+
+I.e., this evaluates to `(f: (f {system =
+"i686-linux";}).subversionWithJava) (import ./foo.nix)`, thus selecting
+the `subversionWithJava` attribute from the set returned by calling the
+function defined in `./foo.nix`.
+
+A dry-run tells you which paths will be downloaded or built from source:
+
+```console
+$ nix-env -f '<nixpkgs>' -iA hello --dry-run
+(dry run; not doing anything)
+installing ‘hello-2.10’
+this path will be fetched (0.04 MiB download, 0.19 MiB unpacked):
+ /nix/store/wkhdf9jinag5750mqlax6z2zbwhqb76n-hello-2.10
+ ...
+```
+
+To install Firefox from the latest revision in the Nixpkgs/NixOS 14.12
+channel:
+
+```console
+$ nix-env -f https://github.com/NixOS/nixpkgs/archive/nixos-14.12.tar.gz -iA firefox
+```
+
diff --git a/doc/manual/src/command-ref/nix-env/list-generations.md b/doc/manual/src/command-ref/nix-env/list-generations.md
new file mode 100644
index 000000000..a4881ece8
--- /dev/null
+++ b/doc/manual/src/command-ref/nix-env/list-generations.md
@@ -0,0 +1,33 @@
+# Name
+
+`nix-env --list-generations` - list profile generations
+
+# Synopsis
+
+`nix-env` `--list-generations`
+
+# Description
+
+This operation prints a list of all the currently existing generations
+for the active profile. These may be switched to using the
+`--switch-generation` operation. It also prints the creation date of the
+generation, and indicates the current generation.
+
+{{#include ./opt-common.md}}
+
+{{#include ../opt-common.md}}
+
+{{#include ./env-common.md}}
+
+{{#include ../env-common.md}}
+
+# Examples
+
+```console
+$ nix-env --list-generations
+ 95 2004-02-06 11:48:24
+ 96 2004-02-06 11:49:01
+ 97 2004-02-06 16:22:45
+ 98 2004-02-06 16:24:33 (current)
+```
+
diff --git a/doc/manual/src/command-ref/nix-env/opt-common.md b/doc/manual/src/command-ref/nix-env/opt-common.md
new file mode 100644
index 000000000..636281b6d
--- /dev/null
+++ b/doc/manual/src/command-ref/nix-env/opt-common.md
@@ -0,0 +1,35 @@
+# Options
+
+The following options are allowed for all `nix-env` operations, but may not always have an effect.
+
+ - `--file` / `-f` *path*\
+ Specifies the Nix expression (designated below as the *active Nix
+ expression*) used by the `--install`, `--upgrade`, and `--query
+ --available` operations to obtain derivations. The default is
+ `~/.nix-defexpr`.
+
+ If the argument starts with `http://` or `https://`, it is
+ interpreted as the URL of a tarball that will be downloaded and
+ unpacked to a temporary location. The tarball must include a single
+ top-level directory containing at least a file named `default.nix`.
+
+ - `--profile` / `-p` *path*\
+ Specifies the profile to be used by those operations that operate on
+ a profile (designated below as the *active profile*). A profile is a
+ sequence of user environments called *generations*, one of which is
+ the *current generation*.
+
+ - `--dry-run`\
+ For the `--install`, `--upgrade`, `--uninstall`,
+ `--switch-generation`, `--delete-generations` and `--rollback`
+ operations, this flag will cause `nix-env` to print what *would* be
+ done if this flag had not been specified, without actually doing it.
+
+ `--dry-run` also prints out which paths will be
+ [substituted](@docroot@/glossary.md) (i.e., downloaded) and which paths
+ will be built from source (because no substitute is available).
+
+ - `--system-filter` *system*\
+ By default, operations such as `--query
+ --available` show derivations matching any platform. This option
+ allows you to use derivations for the specified platform *system*.
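+
+For example, a sketch combining several of these options (the profile path
+and package name are illustrative):
+
+```console
+$ nix-env -f '<nixpkgs>' -p /nix/var/nix/profiles/other-profile -i hello --dry-run
+```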
diff --git a/doc/manual/src/command-ref/nix-env/query.md b/doc/manual/src/command-ref/nix-env/query.md
new file mode 100644
index 000000000..18f0ee210
--- /dev/null
+++ b/doc/manual/src/command-ref/nix-env/query.md
@@ -0,0 +1,215 @@
+# Name
+
+`nix-env --query` - display information about packages
+
+# Synopsis
+
+`nix-env` {`--query` | `-q`} *names…*
+ [`--installed` | `--available` | `-a`]
+ [{`--status` | `-s`}]
+ [{`--attr-path` | `-P`}]
+ [`--no-name`]
+ [{`--compare-versions` | `-c`}]
+ [`--system`]
+ [`--drv-path`]
+ [`--out-path`]
+ [`--description`]
+ [`--meta`]
+ [`--xml`]
+ [`--json`]
+ [{`--prebuilt-only` | `-b`}]
+ [{`--attr` | `-A`} *attribute-path*]
+
+# Description
+
+The query operation displays information about either the store paths
+that are installed in the current generation of the active profile
+(`--installed`), or the derivations that are available for installation
+in the active Nix expression (`--available`). It only prints information
+about derivations whose symbolic name matches one of *names*.
+
+The derivations are sorted by their `name` attributes.
+
+# Source selection
+
+The following flags specify the set of things on which the query
+operates.
+
+ - `--installed`\
+ The query operates on the store paths that are installed in the
+ current generation of the active profile. This is the default.
+
+ - `--available` / `-a`\
+ The query operates on the derivations that are available in the
+ active Nix expression.
+
+# Queries
+
+The following flags specify what information to display about the
+selected derivations. Multiple flags may be specified, in which case the
+information is shown in the order given here. Note that the name of the
+derivation is shown unless `--no-name` is specified.
+
+ - `--xml`\
+ Print the result in an XML representation suitable for automatic
+ processing by other tools. The root element is called `items`, which
+ contains an `item` element for each available or installed
+ derivation. The fields discussed below are all stored in attributes
+ of the `item` elements.
+
+ - `--json`\
+ Print the result in a JSON representation suitable for automatic
+ processing by other tools.
+
+ - `--prebuilt-only` / `-b`\
+ Show only derivations for which a substitute is registered, i.e.,
+ there is a pre-built binary available that can be downloaded in lieu
+ of building the derivation. Thus, this shows all packages that
+ probably can be installed quickly.
+
+ - `--status` / `-s`\
+ Print the *status* of the derivation. The status consists of three
+ characters. The first is `I` or `-`, indicating whether the
+ derivation is currently installed in the current generation of the
+ active profile. This is by definition the case for `--installed`,
+ but not for `--available`. The second is `P` or `-`, indicating
+ whether the derivation is present on the system. This indicates
+ whether installation of an available derivation will require the
+ derivation to be built. The third is `S` or `-`, indicating whether
+ a substitute is available for the derivation.
+
+ - `--attr-path` / `-P`\
+ Print the *attribute path* of the derivation, which can be used to
+ unambiguously select it using the `--attr` option available in
+ commands that install derivations like `nix-env --install`. This
+ option only works together with `--available`.
+
+ - `--no-name`\
+ Suppress printing of the `name` attribute of each derivation.
+
+ - `--compare-versions` / `-c`\
+ Compare installed versions to available versions, or vice versa (if
+ `--available` is given). This is useful for quickly seeing whether
+ upgrades for installed packages are available in a Nix expression. A
+ column is added with the following meaning:
+
+ - `<` *version*\
+ A newer version of the package is available or installed.
+
+ - `=` *version*\
+ At most the same version of the package is available or
+ installed.
+
+ - `>` *version*\
+ Only older versions of the package are available or installed.
+
+ - `- ?`\
+ No version of the package is available or installed.
+
+ - `--system`\
+ Print the `system` attribute of the derivation.
+
+ - `--drv-path`\
+ Print the path of the [store derivation](@docroot@/glossary.md#gloss-store-derivation).
+
+ - `--out-path`\
+ Print the output path of the derivation.
+
+ - `--description`\
+ Print a short (one-line) description of the derivation, if
+ available. The description is taken from the `meta.description`
+ attribute of the derivation.
+
+ - `--meta`\
+ Print all of the meta-attributes of the derivation. This option is
+ only available with `--xml` or `--json`.
+
+{{#include ./opt-common.md}}
+
+{{#include ../opt-common.md}}
+
+{{#include ./env-common.md}}
+
+{{#include ../env-common.md}}
+
+# Examples
+
+To show installed packages:
+
+```console
+$ nix-env -q
+bison-1.875c
+docbook-xml-4.2
+firefox-1.0.4
+MPlayer-1.0pre7
+ORBit2-2.8.3
+…
+```
+
+To show available packages:
+
+```console
+$ nix-env -qa
+firefox-1.0.7
+GConf-2.4.0.1
+MPlayer-1.0pre7
+ORBit2-2.8.3
+…
+```
+
+To show the status of available packages:
+
+```console
+$ nix-env -qas
+-P- firefox-1.0.7 (not installed but present)
+--S GConf-2.4.0.1 (not present, but there is a substitute for fast installation)
+--S MPlayer-1.0pre3 (i.e., this is not the installed MPlayer, even though the version is the same!)
+IP- ORBit2-2.8.3 (installed and by definition present)
+…
+```
+
+To show available packages in the Nix expression `foo.nix`:
+
+```console
+$ nix-env -f ./foo.nix -qa
+foo-1.2.3
+```
+
+To compare installed versions to what’s available:
+
+```console
+$ nix-env -qc
+...
+acrobat-reader-7.0 - ? (package is not available at all)
+autoconf-2.59 = 2.59 (same version)
+firefox-1.0.4 < 1.0.7 (a more recent version is available)
+...
+```
+
+To show all packages with “`zip`” in the name:
+
+```console
+$ nix-env -qa '.*zip.*'
+bzip2-1.0.6
+gzip-1.6
+zip-3.0
+…
+```
+
+To show all packages with “`firefox`” or “`chromium`” in the name:
+
+```console
+$ nix-env -qa '.*(firefox|chromium).*'
+chromium-37.0.2062.94
+chromium-beta-38.0.2125.24
+firefox-32.0.3
+firefox-with-plugins-13.0.1
+…
+```
+
+To show all packages in the latest revision of the Nixpkgs repository:
+
+```console
+$ nix-env -f https://github.com/NixOS/nixpkgs/archive/master.tar.gz -qa
+```
+
diff --git a/doc/manual/src/command-ref/nix-env/rollback.md b/doc/manual/src/command-ref/nix-env/rollback.md
new file mode 100644
index 000000000..1e3958cfc
--- /dev/null
+++ b/doc/manual/src/command-ref/nix-env/rollback.md
@@ -0,0 +1,34 @@
+# Name
+
+`nix-env --rollback` - set user environment to previous generation
+
+# Synopsis
+
+`nix-env` `--rollback`
+
+# Description
+
+This operation switches to the “previous” generation of the active
+profile, that is, the highest numbered generation lower than the current
+generation, if it exists. It is just a convenience wrapper around
+`--list-generations` and `--switch-generation`.
+
+{{#include ./opt-common.md}}
+
+{{#include ../opt-common.md}}
+
+{{#include ./env-common.md}}
+
+{{#include ../env-common.md}}
+
+# Examples
+
+```console
+$ nix-env --rollback
+switching from generation 92 to 91
+```
+
+```console
+$ nix-env --rollback
+error: no generation older than the current (91) exists
+```
diff --git a/doc/manual/src/command-ref/nix-env/set-flag.md b/doc/manual/src/command-ref/nix-env/set-flag.md
new file mode 100644
index 000000000..63f0a0ff9
--- /dev/null
+++ b/doc/manual/src/command-ref/nix-env/set-flag.md
@@ -0,0 +1,82 @@
+# Name
+
+`nix-env --set-flag` - modify meta attributes of installed packages
+
+# Synopsis
+
+`nix-env` `--set-flag` *name* *value* *drvnames*
+
+# Description
+
+The `--set-flag` operation allows meta attributes of installed packages
+to be modified. There are several attributes that can be usefully
+modified, because they affect the behaviour of `nix-env` or the user
+environment build script:
+
+ - `priority` can be changed to resolve filename clashes. The user
+ environment build script uses the `meta.priority` attribute of
+ derivations to resolve filename collisions between packages. Lower
+ priority values denote a higher priority. For instance, the GCC
+ wrapper package and the Binutils package in Nixpkgs both have a file
+ `bin/ld`, so previously if you tried to install both you would get a
+ collision. Now, on the other hand, the GCC wrapper declares a higher
+ priority than Binutils, so the former’s `bin/ld` is symlinked in the
+ user environment.
+
+ - `keep` can be set to `true` to prevent the package from being
+ upgraded or replaced. This is useful if you want to hang on to an
+ older version of a package.
+
+ - `active` can be set to `false` to “disable” the package. That is, no
+ symlinks will be generated to the files of the package, but it
+ remains part of the profile (so it won’t be garbage-collected). It
+ can be set back to `true` to re-enable the package.
+
+{{#include ./opt-common.md}}
+
+{{#include ../opt-common.md}}
+
+{{#include ../env-common.md}}
+
+# Examples
+
+To prevent the currently installed Firefox from being upgraded:
+
+```console
+$ nix-env --set-flag keep true firefox
+```
+
+After this, `nix-env -u` will ignore Firefox.
+
+To disable the currently installed Firefox, then install a new Firefox
+while the old remains part of the profile:
+
+```console
+$ nix-env -q
+firefox-2.0.0.9 (the current one)
+
+$ nix-env --preserve-installed -i firefox-2.0.0.11
+installing `firefox-2.0.0.11'
+building path(s) `/nix/store/myy0y59q3ig70dgq37jqwg1j0rsapzsl-user-environment'
+collision between `/nix/store/...-firefox-2.0.0.11/bin/firefox'
+ and `/nix/store/...-firefox-2.0.0.9/bin/firefox'.
+(i.e., can’t have two active at the same time)
+
+$ nix-env --set-flag active false firefox
+setting flag on `firefox-2.0.0.9'
+
+$ nix-env --preserve-installed -i firefox-2.0.0.11
+installing `firefox-2.0.0.11'
+
+$ nix-env -q
+firefox-2.0.0.11 (the enabled one)
+firefox-2.0.0.9 (the disabled one)
+```
+
+To make files from `binutils` take precedence over files from `gcc`:
+
+```console
+$ nix-env --set-flag priority 5 binutils
+$ nix-env --set-flag priority 10 gcc
+```
+
diff --git a/doc/manual/src/command-ref/nix-env/set.md b/doc/manual/src/command-ref/nix-env/set.md
new file mode 100644
index 000000000..c1cf75739
--- /dev/null
+++ b/doc/manual/src/command-ref/nix-env/set.md
@@ -0,0 +1,30 @@
+# Name
+
+`nix-env --set` - set profile to contain a specified derivation
+
+# Synopsis
+
+`nix-env` `--set` *drvname*
+
+# Description
+
+The `--set` operation modifies the current generation of a profile so
+that it contains exactly the specified derivation, and nothing else.
+
+{{#include ./opt-common.md}}
+
+{{#include ../opt-common.md}}
+
+{{#include ./env-common.md}}
+
+{{#include ../env-common.md}}
+
+# Examples
+
+The following updates a profile such that its current generation will
+contain just Firefox:
+
+```console
+$ nix-env -p /nix/var/nix/profiles/browser --set firefox
+```
+
diff --git a/doc/manual/src/command-ref/nix-env/switch-generation.md b/doc/manual/src/command-ref/nix-env/switch-generation.md
new file mode 100644
index 000000000..e550325c4
--- /dev/null
+++ b/doc/manual/src/command-ref/nix-env/switch-generation.md
@@ -0,0 +1,33 @@
+# Name
+
+`nix-env --switch-generation` - set user environment to given profile generation
+
+# Synopsis
+
+`nix-env` {`--switch-generation` | `-G`} *generation*
+
+# Description
+
+This operation makes generation number *generation* the current
+generation of the active profile. That is, if `profile` is the path
+to the active profile, then the symlink `profile` is made to point to
+`profile-generation-link`, which is in turn a symlink to the actual user
+environment in the Nix store.
+
+Switching will fail if the specified generation does not exist.
+
+{{#include ./opt-common.md}}
+
+{{#include ../opt-common.md}}
+
+{{#include ./env-common.md}}
+
+{{#include ../env-common.md}}
+
+# Examples
+
+```console
+$ nix-env -G 42
+switching from generation 50 to 42
+```
+
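+A sketch of the resulting symlink layout (the profile path is illustrative):
+
+```console
+$ readlink /nix/var/nix/profiles/default
+default-42-link
+```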
diff --git a/doc/manual/src/command-ref/nix-env/switch-profile.md b/doc/manual/src/command-ref/nix-env/switch-profile.md
new file mode 100644
index 000000000..b389e4140
--- /dev/null
+++ b/doc/manual/src/command-ref/nix-env/switch-profile.md
@@ -0,0 +1,26 @@
+# Name
+
+`nix-env --switch-profile` - set user environment to given profile
+
+# Synopsis
+
+`nix-env` {`--switch-profile` | `-S`} *path*
+
+# Description
+
+This operation makes *path* the current profile for the user. That is,
+the symlink `~/.nix-profile` is made to point to *path*.
+
+{{#include ./opt-common.md}}
+
+{{#include ../opt-common.md}}
+
+{{#include ./env-common.md}}
+
+{{#include ../env-common.md}}
+
+# Examples
+
+```console
+$ nix-env -S ~/my-profile
+```
diff --git a/doc/manual/src/command-ref/nix-env/uninstall.md b/doc/manual/src/command-ref/nix-env/uninstall.md
new file mode 100644
index 000000000..e9ec8a15e
--- /dev/null
+++ b/doc/manual/src/command-ref/nix-env/uninstall.md
@@ -0,0 +1,28 @@
+# Name
+
+`nix-env --uninstall` - remove packages from user environment
+
+# Synopsis
+
+`nix-env` {`--uninstall` | `-e`} *drvnames…*
+
+# Description
+
+The uninstall operation creates a new user environment, based on the
+current generation of the active profile, from which the store paths
+designated by the symbolic names *drvnames* are removed.
+
+{{#include ./opt-common.md}}
+
+{{#include ../opt-common.md}}
+
+{{#include ./env-common.md}}
+
+{{#include ../env-common.md}}
+
+# Examples
+
+```console
+$ nix-env --uninstall gcc
+$ nix-env -e '.*' (remove everything)
+```
diff --git a/doc/manual/src/command-ref/nix-env/upgrade.md b/doc/manual/src/command-ref/nix-env/upgrade.md
new file mode 100644
index 000000000..f88ffcbee
--- /dev/null
+++ b/doc/manual/src/command-ref/nix-env/upgrade.md
@@ -0,0 +1,141 @@
+# Name
+
+`nix-env --upgrade` - upgrade packages in user environment
+
+# Synopsis
+
+`nix-env` {`--upgrade` | `-u`} *args*
+ [`--lt` | `--leq` | `--eq` | `--always`]
+ [{`--prebuilt-only` | `-b`}]
+ [{`--attr` | `-A`}]
+ [`--from-expression`] [`-E`]
+ [`--from-profile` *path*]
+ [`--preserve-installed` | `-P`]
+
+# Description
+
+The upgrade operation creates a new user environment, based on the
+current generation of the active profile, in which all store paths are
+replaced for which there are newer versions in the set of paths
+described by *args*. Paths for which there are no newer versions are
+left untouched; this is not an error. It is also not an error if an
+element of *args* matches no installed derivations.
+
+For a description of how *args* is mapped to a set of store paths, see
+[`--install`](./install.md). If *args* describes multiple
+store paths with the same symbolic name, only the one with the highest
+version is installed.
+
+# Flags
+
+ - `--lt`\
+ Only upgrade a derivation to newer versions. This is the default.
+
+ - `--leq`\
+ In addition to upgrading to newer versions, also “upgrade” to
+ derivations that have the same version. Versions are not a unique
+ identification of a derivation, so there may be many derivations
+ that have the same version. This flag may be useful to force
+ “synchronisation” between the installed and available derivations.
+
+ - `--eq`\
+ *Only* “upgrade” to derivations that have the same version. This may
+ not seem very useful, but it actually is, e.g., when there is a new
+ release of Nixpkgs and you want to replace installed applications
+ with the same versions built against newer dependencies (to reduce
+ the number of dependencies floating around on your system).
+
+ - `--always`\
+ In addition to upgrading to newer versions, also “upgrade” to
+ derivations that have the same or a lower version. I.e., derivations
+ may actually be downgraded depending on what is available in the
+ active Nix expression.
+
+ - `--prebuilt-only` / `-b`\
+ Use only derivations for which a substitute is registered, i.e.,
+ there is a pre-built binary available that can be downloaded in lieu
+ of building the derivation. Thus, no packages will be built from
+ source.
+
+ - `--preserve-installed` / `-P`\
+ Do not remove derivations with a name matching one of the
+ derivations being installed. Usually, trying to have two versions of
+ the same package installed in the same generation of a profile will
+ lead to an error in building the generation, due to file name
+ clashes between the two versions. However, this is not the case for
+ all packages.
+
+{{#include ./opt-common.md}}
+
+{{#include ../opt-common.md}}
+
+{{#include ./env-common.md}}
+
+{{#include ../env-common.md}}
+
+# Examples
+
+```console
+$ nix-env --upgrade -A nixpkgs.gcc
+upgrading `gcc-3.3.1' to `gcc-3.4'
+```
+
+When there are no updates available, nothing will happen:
+
+```console
+$ nix-env --upgrade -A nixpkgs.pan
+```
+
+Using `-A` is preferred when possible, as it is faster and unambiguous, but
+it is also possible to upgrade to a specific version by matching the derivation name:
+
+```console
+$ nix-env -u gcc-3.3.2 --always
+upgrading `gcc-3.4' to `gcc-3.3.2'
+```
+
+To try to upgrade everything
+(matching packages based on the part of the derivation name without version):
+
+```console
+$ nix-env -u
+upgrading `hello-2.1.2' to `hello-2.1.3'
+upgrading `mozilla-1.2' to `mozilla-1.4'
+```
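+
+After a new release of Nixpkgs, `--eq` can be used to rebuild everything
+at the same versions against the updated dependencies (a sketch; the
+output depends on what is installed):
+
+```console
+$ nix-env --upgrade --eq
+```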
+
+# Versions
+
+The upgrade operation determines whether a derivation `y` is an upgrade
+of a derivation `x` by looking at their respective `name` attributes.
+The names (e.g., `gcc-3.3.1`) are split into two parts: the package name
+(`gcc`), and the version (`3.3.1`). The version part starts after the
+first dash not followed by a letter. `y` is considered an upgrade of `x`
+if their package names match, and the version of `y` is higher than that
+of `x`.
+
+The versions are compared by splitting them into contiguous components
+of numbers and letters. E.g., `3.3.1pre5` is split into `[3, 3, 1,
+"pre", 5]`. These lists are then compared lexicographically (from left
+to right). Corresponding components `a` and `b` are compared as follows.
+If they are both numbers, integer comparison is used. If `a` is an empty
+string and `b` is a number, `a` is considered less than `b`. The special
+string component `pre` (for *pre-release*) is considered to be less than
+other components. String components are considered less than number
+components. Otherwise, they are compared lexicographically (i.e., using
+case-sensitive string comparison).
+
+This is illustrated by the following examples:
+
+ 1.0 < 2.3
+ 2.1 < 2.3
+ 2.3 = 2.3
+ 2.5 > 2.3
+ 3.1 > 2.3
+ 2.3.1 > 2.3
+ 2.3.1 > 2.3a
+ 2.3pre1 < 2.3
+ 2.3pre3 < 2.3pre12
+ 2.3a < 2.3c
+ 2.3pre1 < 2.3c
+ 2.3pre1 < 2.3q
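+
+The same ordering is also exposed as the `builtins.compareVersions`
+function of the Nix language, which offers a quick way to check how two
+version strings compare (a small sketch; it evaluates to `-1`, `0` or `1`):
+
+```console
+$ nix-instantiate --eval --expr 'builtins.compareVersions "2.3pre1" "2.3"'
+-1
+
+$ nix-instantiate --eval --expr 'builtins.compareVersions "2.3.1" "2.3"'
+1
+```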
+
diff --git a/doc/manual/src/command-ref/nix-hash.md b/doc/manual/src/command-ref/nix-hash.md
index 45f67f1c5..37c8facec 100644
--- a/doc/manual/src/command-ref/nix-hash.md
+++ b/doc/manual/src/command-ref/nix-hash.md
@@ -6,9 +6,7 @@
`nix-hash` [`--flat`] [`--base32`] [`--truncate`] [`--type` *hashAlgo*] *path…*
-`nix-hash` `--to-base16` *hash…*
-
-`nix-hash` `--to-base32` *hash…*
+`nix-hash` [`--to-base16`|`--to-base32`|`--to-base64`|`--to-sri`] [`--type` *hashAlgo*] *hash…*
# Description
@@ -23,7 +21,7 @@ The hash is computed over a *serialisation* of each path: a dump of
the file system tree rooted at the path. This allows directories and
symlinks to be hashed as well as regular files. The dump is in the
*NAR format* produced by [`nix-store
---dump`](nix-store.md#operation---dump). Thus, `nix-hash path`
+--dump`](@docroot@/command-ref/nix-store/dump.md). Thus, `nix-hash path`
yields the same cryptographic hash as `nix-store --dump path |
md5sum`.
@@ -35,11 +33,23 @@ md5sum`.
The result is identical to that produced by the GNU commands
`md5sum` and `sha1sum`.
+ - `--base16`\
+ Print the hash in a hexadecimal representation (default).
+
- `--base32`\
Print the hash in a base-32 representation rather than hexadecimal.
This base-32 representation is more compact and can be used in Nix
expressions (such as in calls to `fetchurl`).
+ - `--base64`\
+    Similar to `--base32`, but print the hash in a base-64 representation,
+ which is more compact than the base-32 one.
+
+ - `--sri`\
+ Print the hash in SRI format with base-64 encoding.
+ The type of hash algorithm will be prepended to the hash string,
+ followed by a hyphen (-) and the base-64 hash body.
+
- `--truncate`\
Truncate hashes longer than 160 bits (such as SHA-256) to 160 bits.
@@ -55,6 +65,14 @@ md5sum`.
Don’t hash anything, but convert the hexadecimal hash representation
*hash* to base-32.
+ - `--to-base64`\
+ Don’t hash anything, but convert the hexadecimal hash representation
+ *hash* to base-64.
+
+ - `--to-sri`\
+ Don’t hash anything, but convert the hexadecimal hash representation
+ *hash* to SRI.
+
# Examples
Computing the same hash as `nix-prefetch-url`:
@@ -81,9 +99,18 @@ $ nix-store --dump test/ | md5sum (for comparison)
$ nix-hash --type sha1 test/
e4fd8ba5f7bbeaea5ace89fe10255536cd60dab6
+$ nix-hash --type sha1 --base16 test/
+e4fd8ba5f7bbeaea5ace89fe10255536cd60dab6
+
$ nix-hash --type sha1 --base32 test/
nvd61k9nalji1zl9rrdfmsmvyyjqpzg4
+$ nix-hash --type sha1 --base64 test/
+5P2Lpfe76upazon+ECVVNs1g2rY=
+
+$ nix-hash --type sha1 --sri test/
+sha1-5P2Lpfe76upazon+ECVVNs1g2rY=
+
$ nix-hash --type sha256 --flat test/
error: reading file `test/': Is a directory
@@ -91,7 +118,7 @@ $ nix-hash --type sha256 --flat test/world
5891b5b522d5df086d0ff0b110fbd9d21bb4fc7163af34d08286a2e846f6be03
```
-Converting between hexadecimal and base-32:
+Converting between hexadecimal, base-32, base-64, and SRI:
```console
$ nix-hash --type sha1 --to-base32 e4fd8ba5f7bbeaea5ace89fe10255536cd60dab6
@@ -99,4 +126,13 @@ nvd61k9nalji1zl9rrdfmsmvyyjqpzg4
$ nix-hash --type sha1 --to-base16 nvd61k9nalji1zl9rrdfmsmvyyjqpzg4
e4fd8ba5f7bbeaea5ace89fe10255536cd60dab6
+
+$ nix-hash --type sha1 --to-base64 e4fd8ba5f7bbeaea5ace89fe10255536cd60dab6
+5P2Lpfe76upazon+ECVVNs1g2rY=
+
+$ nix-hash --type sha1 --to-sri nvd61k9nalji1zl9rrdfmsmvyyjqpzg4
+sha1-5P2Lpfe76upazon+ECVVNs1g2rY=
+
+$ nix-hash --to-base16 sha1-5P2Lpfe76upazon+ECVVNs1g2rY=
+e4fd8ba5f7bbeaea5ace89fe10255536cd60dab6
```
diff --git a/doc/manual/src/command-ref/nix-instantiate.md b/doc/manual/src/command-ref/nix-instantiate.md
index 432fb2608..e55fb2afd 100644
--- a/doc/manual/src/command-ref/nix-instantiate.md
+++ b/doc/manual/src/command-ref/nix-instantiate.md
@@ -76,7 +76,9 @@ standard input.
this option is not enabled, there may be uninstantiated store paths
in the final output.
-<!-- end list -->
+{{#include ./opt-common.md}}
+
+{{#include ./env-common.md}}
# Examples
diff --git a/doc/manual/src/command-ref/nix-shell.md b/doc/manual/src/command-ref/nix-shell.md
index 840bccd25..9f36929f7 100644
--- a/doc/manual/src/command-ref/nix-shell.md
+++ b/doc/manual/src/command-ref/nix-shell.md
@@ -101,7 +101,7 @@ All options not listed here are passed to `nix-store
When a `--pure` shell is started, keep the listed environment
variables.
-The following common options are supported:
+{{#include ./opt-common.md}}
# Environment variables
@@ -110,6 +110,8 @@ The following common options are supported:
`bash` found in `<nixpkgs>`, falling back to the `bash` found in
`PATH` if not found.
+{{#include ./env-common.md}}
+
# Examples
To build the dependencies of the package Pan, and start an interactive
diff --git a/doc/manual/src/command-ref/nix-store.md b/doc/manual/src/command-ref/nix-store.md
index 31fdd7806..c7c5fdd2f 100644
--- a/doc/manual/src/command-ref/nix-store.md
+++ b/doc/manual/src/command-ref/nix-store.md
@@ -13,833 +13,35 @@
The command `nix-store` performs primitive operations on the Nix store.
You generally do not need to run this command manually.
-`nix-store` takes exactly one *operation* flag which indicates the
-subcommand to be performed. These are documented below.
-
-# Common options
-
-This section lists the options that are common to all operations. These
-options are allowed for every subcommand, though they may not always
-have an effect.
-
- - <span id="opt-add-root">[`--add-root`](#opt-add-root)</span> *path*
-
- Causes the result of a realisation (`--realise` and
- `--force-realise`) to be registered as a root of the garbage
- collector. *path* will be created as a symlink to the resulting
- store path. In addition, a uniquely named symlink to *path* will
- be created in `/nix/var/nix/gcroots/auto/`. For instance,
-
- ```console
- $ nix-store --add-root /home/eelco/bla/result -r ...
-
- $ ls -l /nix/var/nix/gcroots/auto
- lrwxrwxrwx 1 ... 2005-03-13 21:10 dn54lcypm8f8... -> /home/eelco/bla/result
-
- $ ls -l /home/eelco/bla/result
- lrwxrwxrwx 1 ... 2005-03-13 21:10 /home/eelco/bla/result -> /nix/store/1r11343n6qd4...-f-spot-0.0.10
- ```
-
- Thus, when `/home/eelco/bla/result` is removed, the GC root in the
- `auto` directory becomes a dangling symlink and will be ignored by
- the collector.
-
- > **Warning**
- >
- > Note that it is not possible to move or rename GC roots, since
- > the symlink in the `auto` directory will still point to the old
- > location.
-
- If there are multiple results, then multiple symlinks will be
- created by sequentially numbering symlinks beyond the first one
- (e.g., `foo`, `foo-2`, `foo-3`, and so on).
-
-# Operation `--realise`
-
-## Synopsis
-
-`nix-store` {`--realise` | `-r`} *paths…* [`--dry-run`]
-
-## Description
-
-The operation `--realise` essentially “builds” the specified store
-paths. Realisation is a somewhat overloaded term:
-
- - If the store path is a *derivation*, realisation ensures that the
- output paths of the derivation are [valid] (i.e.,
- the output path and its closure exist in the file system). This
- can be done in several ways. First, it is possible that the
- outputs are already valid, in which case we are done
- immediately. Otherwise, there may be [substitutes]
- that produce the outputs (e.g., by downloading them). Finally, the
- outputs can be produced by running the build task described
- by the derivation.
-
- - If the store path is not a derivation, realisation ensures that the
- specified path is valid (i.e., it and its closure exist in the file
- system). If the path is already valid, we are done immediately.
- Otherwise, the path and any missing paths in its closure may be
- produced through substitutes. If there are no (successful)
- substitutes, realisation fails.
-
-[valid]: ../glossary.md#gloss-validity
-[substitutes]: ../glossary.md#gloss-substitute
-
-The output path of each derivation is printed on standard output. (For
-non-derivations argument, the argument itself is printed.)
-
-The following flags are available:
-
- - `--dry-run`\
- Print on standard error a description of what packages would be
- built or downloaded, without actually performing the operation.
-
- - `--ignore-unknown`\
- If a non-derivation path does not have a substitute, then silently
- ignore it.
-
- - `--check`\
- This option allows you to check whether a derivation is
- deterministic. It rebuilds the specified derivation and checks
- whether the result is bitwise-identical with the existing outputs,
- printing an error if that’s not the case. The outputs of the
- specified derivation must already exist. When used with `-K`, if an
- output path is not identical to the corresponding output from the
- previous build, the new output path is left in
- `/nix/store/name.check.`
-
-Special exit codes:
-
- - `100`\
- Generic build failure, the builder process returned with a non-zero
- exit code.
-
- - `101`\
- Build timeout, the build was aborted because it did not complete
- within the specified `timeout`.
-
- - `102`\
- Hash mismatch, the build output was rejected because it does not
- match the [`outputHash` attribute of the
- derivation](../language/advanced-attributes.md).
-
- - `104`\
- Not deterministic, the build succeeded in check mode but the
- resulting output is not binary reproducible.
-
-With the `--keep-going` flag it's possible for multiple failures to
-occur, in this case the 1xx status codes are or combined using binary
-or.
-
- 1100100
- ^^^^
- |||`- timeout
- ||`-- output hash mismatch
- |`--- build failure
- `---- not deterministic
-
-## Examples
-
-This operation is typically used to build [store derivation]s produced by
-[`nix-instantiate`](./nix-instantiate.md):
-
-[store derivation]: ../glossary.md#gloss-store-derivation
-
-```console
-$ nix-store -r $(nix-instantiate ./test.nix)
-/nix/store/31axcgrlbfsxzmfff1gyj1bf62hvkby2-aterm-2.3.1
-```
-
-This is essentially what [`nix-build`](nix-build.md) does.
-
-To test whether a previously-built derivation is deterministic:
-
-```console
-$ nix-build '<nixpkgs>' -A hello --check -K
-```
-
-Use [`--read-log`](#operation---read-log) to show the stderr and stdout of a build:
-
-```console
-$ nix-store --read-log $(nix-instantiate ./test.nix)
-```
-
-# Operation `--serve`
-
-## Synopsis
-
-`nix-store` `--serve` [`--write`]
-
-## Description
-
-The operation `--serve` provides access to the Nix store over stdin and
-stdout, and is intended to be used as a means of providing Nix store
-access to a restricted ssh user.
-
-The following flags are available:
-
- - `--write`\
- Allow the connected client to request the realization of
- derivations. In effect, this can be used to make the host act as a
- remote builder.
-
-## Examples
-
-To turn a host into a build server, the `authorized_keys` file can be
-used to provide build access to a given SSH public key:
-
-```console
-$ cat <<EOF >>/root/.ssh/authorized_keys
-command="nice -n20 nix-store --serve --write" ssh-rsa AAAAB3NzaC1yc2EAAAA...
-EOF
-```
-
-# Operation `--gc`
-
-## Synopsis
-
-`nix-store` `--gc` [`--print-roots` | `--print-live` | `--print-dead`] [`--max-freed` *bytes*]
-
-## Description
-
-Without additional flags, the operation `--gc` performs a garbage
-collection on the Nix store. That is, all paths in the Nix store not
-reachable via file system references from a set of “roots”, are deleted.
-
-The following suboperations may be specified:
-
- - `--print-roots`\
- This operation prints on standard output the set of roots used by
- the garbage collector.
-
- - `--print-live`\
- This operation prints on standard output the set of “live” store
- paths, which are all the store paths reachable from the roots. Live
- paths should never be deleted, since that would break consistency —
- it would become possible that applications are installed that
- reference things that are no longer present in the store.
-
- - `--print-dead`\
- This operation prints out on standard output the set of “dead” store
- paths, which is just the opposite of the set of live paths: any path
- in the store that is not live (with respect to the roots) is dead.
-
-By default, all unreachable paths are deleted. The following options
-control what gets deleted and in what order:
-
- - `--max-freed` *bytes*\
- Keep deleting paths until at least *bytes* bytes have been deleted,
- then stop. The argument *bytes* can be followed by the
- multiplicative suffix `K`, `M`, `G` or `T`, denoting KiB, MiB, GiB
- or TiB units.
-
-The behaviour of the collector is also influenced by the
-`keep-outputs` and `keep-derivations` settings in the Nix
-configuration file.
-
-By default, the collector prints the total number of freed bytes when it
-finishes (or when it is interrupted). With `--print-dead`, it prints the
-number of bytes that would be freed.
-
-## Examples
-
-To delete all unreachable paths, just do:
-
-```console
-$ nix-store --gc
-deleting `/nix/store/kq82idx6g0nyzsp2s14gfsc38npai7lf-cairo-1.0.4.tar.gz.drv'
-...
-8825586 bytes freed (8.42 MiB)
-```
-
-To delete at least 100 MiBs of unreachable paths:
-
-```console
-$ nix-store --gc --max-freed $((100 * 1024 * 1024))
-```
-
-# Operation `--delete`
-
-## Synopsis
-
-`nix-store` `--delete` [`--ignore-liveness`] *paths…*
-
-## Description
-
-The operation `--delete` deletes the store paths *paths* from the Nix
-store, but only if it is safe to do so; that is, when the path is not
-reachable from a root of the garbage collector. This means that you can
-only delete paths that would also be deleted by `nix-store --gc`. Thus,
-`--delete` is a more targeted version of `--gc`.
-
-With the option `--ignore-liveness`, reachability from the roots is
-ignored. However, the path still won’t be deleted if there are other
-paths in the store that refer to it (i.e., depend on it).
-
-## Example
-
-```console
-$ nix-store --delete /nix/store/zq0h41l75vlb4z45kzgjjmsjxvcv1qk7-mesa-6.4
-0 bytes freed (0.00 MiB)
-error: cannot delete path `/nix/store/zq0h41l75vlb4z45kzgjjmsjxvcv1qk7-mesa-6.4' since it is still alive
-```
-
-# Operation `--query`
-
-## Synopsis
-
-`nix-store` {`--query` | `-q`}
- {`--outputs` | `--requisites` | `-R` | `--references` |
- `--referrers` | `--referrers-closure` | `--deriver` | `-d` |
- `--graph` | `--tree` | `--binding` *name* | `-b` *name* | `--hash` |
- `--size` | `--roots`}
- [`--use-output`] [`-u`] [`--force-realise`] [`-f`]
- *paths…*
-
-## Description
-
-The operation `--query` displays information about [store path]s.
-The queries are described below. At most one query can be
-specified. The default query is `--outputs`.
-
-The paths *paths* may also be symlinks from outside of the Nix store, to
-the Nix store. In that case, the query is applied to the target of the
-symlink.
-
-## Common query options
-
- - `--use-output`; `-u`\
- For each argument to the query that is a [store derivation], apply the
- query to the output path of the derivation instead.
-
- - `--force-realise`; `-f`\
- Realise each argument to the query first (see [`nix-store
- --realise`](#operation---realise)).
-
-## Queries
-
- - `--outputs`\
- Prints out the [output path]s of the store
- derivations *paths*. These are the paths that will be produced when
- the derivation is built.
-
- - `--requisites`; `-R`\
- Prints out the [closure] of the given *paths*.
-
- This query has one option:
-
- - `--include-outputs`
- Also include the existing output paths of [store derivation]s,
- and their closures.
-
- This query can be used to implement various kinds of deployment. A
- *source deployment* is obtained by distributing the closure of a
- store derivation. A *binary deployment* is obtained by distributing
- the closure of an output path. A *cache deployment* (combined
- source/binary deployment, including binaries of build-time-only
- dependencies) is obtained by distributing the closure of a store
- derivation and specifying the option `--include-outputs`.
-
- - `--references`\
- Prints the set of [references]s of the store paths
- *paths*, that is, their immediate dependencies. (For *all*
- dependencies, use `--requisites`.)
-
- [reference]: ../glossary.md#gloss-reference
-
- - `--referrers`\
- Prints the set of *referrers* of the store paths *paths*, that is,
- the store paths currently existing in the Nix store that refer to
- one of *paths*. Note that contrary to the references, the set of
- referrers is not constant; it can change as store paths are added or
- removed.
-
- - `--referrers-closure`\
- Prints the closure of the set of store paths *paths* under the
- referrers relation; that is, all store paths that directly or
- indirectly refer to one of *paths*. These are all the path currently
- in the Nix store that are dependent on *paths*.
-
- - `--deriver`; `-d`\
- Prints the [deriver] of the store paths *paths*. If
- the path has no deriver (e.g., if it is a source file), or if the
- deriver is not known (e.g., in the case of a binary-only
- deployment), the string `unknown-deriver` is printed.
-
- [deriver]: ../glossary.md#gloss-deriver
-
- - `--graph`\
- Prints the references graph of the store paths *paths* in the format
- of the `dot` tool of AT\&T's [Graphviz
- package](http://www.graphviz.org/). This can be used to visualise
- dependency graphs. To obtain a build-time dependency graph, apply
- this to a store derivation. To obtain a runtime dependency graph,
- apply it to an output path.
-
- - `--tree`\
- Prints the references graph of the store paths *paths* as a nested
- ASCII tree. References are ordered by descending closure size; this
- tends to flatten the tree, making it more readable. The query only
- recurses into a store path when it is first encountered; this
- prevents a blowup of the tree representation of the graph.
-
- - `--graphml`\
- Prints the references graph of the store paths *paths* in the
- [GraphML](http://graphml.graphdrawing.org/) file format. This can be
- used to visualise dependency graphs. To obtain a build-time
- dependency graph, apply this to a [store derivation]. To obtain a
- runtime dependency graph, apply it to an output path.
-
- - `--binding` *name*; `-b` *name*\
- Prints the value of the attribute *name* (i.e., environment
- variable) of the [store derivation]s *paths*. It is an error for a
- derivation to not have the specified attribute.
-
- - `--hash`\
- Prints the SHA-256 hash of the contents of the store paths *paths*
- (that is, the hash of the output of `nix-store --dump` on the given
- paths). Since the hash is stored in the Nix database, this is a fast
- operation.
-
- - `--size`\
- Prints the size in bytes of the contents of the store paths *paths*
- — to be precise, the size of the output of `nix-store --dump` on
- the given paths. Note that the actual disk space required by the
- store paths may be higher, especially on filesystems with large
- cluster sizes.
-
- - `--roots`\
- Prints the garbage collector roots that point, directly or
- indirectly, at the store paths *paths*.
-
-## Examples
-
-Print the closure (runtime dependencies) of the `svn` program in the
-current user environment:
-
-```console
-$ nix-store -qR $(which svn)
-/nix/store/5mbglq5ldqld8sj57273aljwkfvj22mc-subversion-1.1.4
-/nix/store/9lz9yc6zgmc0vlqmn2ipcpkjlmbi51vv-glibc-2.3.4
-...
-```
-
-Print the build-time dependencies of `svn`:
-
-```console
-$ nix-store -qR $(nix-store -qd $(which svn))
-/nix/store/02iizgn86m42q905rddvg4ja975bk2i4-grep-2.5.1.tar.bz2.drv
-/nix/store/07a2bzxmzwz5hp58nf03pahrv2ygwgs3-gcc-wrapper.sh
-/nix/store/0ma7c9wsbaxahwwl04gbw3fcd806ski4-glibc-2.3.4.drv
-... lots of other paths ...
-```
-
-The difference with the previous example is that we ask the closure of
-the derivation (`-qd`), not the closure of the output path that contains
-`svn`.
-
-Show the build-time dependencies as a tree:
-
-```console
-$ nix-store -q --tree $(nix-store -qd $(which svn))
-/nix/store/7i5082kfb6yjbqdbiwdhhza0am2xvh6c-subversion-1.1.4.drv
-+---/nix/store/d8afh10z72n8l1cr5w42366abiblgn54-builder.sh
-+---/nix/store/fmzxmpjx2lh849ph0l36snfj9zdibw67-bash-3.0.drv
-| +---/nix/store/570hmhmx3v57605cqg9yfvvyh0nnb8k8-bash
-| +---/nix/store/p3srsbd8dx44v2pg6nbnszab5mcwx03v-builder.sh
-...
-```
-
-Show all paths that depend on the same OpenSSL library as `svn`:
-
-```console
-$ nix-store -q --referrers $(nix-store -q --binding openssl $(nix-store -qd $(which svn)))
-/nix/store/23ny9l9wixx21632y2wi4p585qhva1q8-sylpheed-1.0.0
-/nix/store/5mbglq5ldqld8sj57273aljwkfvj22mc-subversion-1.1.4
-/nix/store/dpmvp969yhdqs7lm2r1a3gng7pyq6vy4-subversion-1.1.3
-/nix/store/l51240xqsgg8a7yrbqdx1rfzyv6l26fx-lynx-2.8.5
-```
-
-Show all paths that directly or indirectly depend on the Glibc (C
-library) used by `svn`:
-
-```console
-$ nix-store -q --referrers-closure $(ldd $(which svn) | grep /libc.so | awk '{print $3}')
-/nix/store/034a6h4vpz9kds5r6kzb9lhh81mscw43-libgnomeprintui-2.8.2
-/nix/store/15l3yi0d45prm7a82pcrknxdh6nzmxza-gawk-3.1.4
-...
-```
-
-Note that `ldd` is a command that prints out the dynamic libraries used
-by an ELF executable.
-
-Make a picture of the runtime dependency graph of the current user
-environment:
-
-```console
-$ nix-store -q --graph ~/.nix-profile | dot -Tps > graph.ps
-$ gv graph.ps
-```
-
-Show every garbage collector root that points to a store path that
-depends on `svn`:
-
-```console
-$ nix-store -q --roots $(which svn)
-/nix/var/nix/profiles/default-81-link
-/nix/var/nix/profiles/default-82-link
-/nix/var/nix/profiles/per-user/eelco/profile-97-link
-```
-
-# Operation `--add`
-
-## Synopsis
-
-`nix-store` `--add` *paths…*
-
-## Description
-
-The operation `--add` adds the specified paths to the Nix store. It
-prints the resulting paths in the Nix store on standard output.
-
-## Example
-
-```console
-$ nix-store --add ./foo.c
-/nix/store/m7lrha58ph6rcnv109yzx1nk1cj7k7zf-foo.c
-```
-
-# Operation `--add-fixed`
-
-## Synopsis
-
-`nix-store` `--add-fixed` [`--recursive`] *algorithm* *paths…*
-
-## Description
-
-The operation `--add-fixed` adds the specified paths to the Nix store.
-Unlike `--add` paths are registered using the specified hashing
-algorithm, resulting in the same output path as a fixed-output
-derivation. This can be used for sources that are not available from a
-public url or broke since the download expression was written.
-
-This operation has the following options:
-
- - `--recursive`\
- Use recursive instead of flat hashing mode, used when adding
- directories to the store.
-
-## Example
-
-```console
-$ nix-store --add-fixed sha256 ./hello-2.10.tar.gz
-/nix/store/3x7dwzq014bblazs7kq20p9hyzz0qh8g-hello-2.10.tar.gz
-```
-
-# Operation `--verify`
-
-## Synopsis
-
-`nix-store` `--verify` [`--check-contents`] [`--repair`]
-
-## Description
-
-The operation `--verify` verifies the internal consistency of the Nix
-database, and the consistency between the Nix database and the Nix
-store. Any inconsistencies encountered are automatically repaired.
-Inconsistencies are generally the result of the Nix store or database
-being modified by non-Nix tools, or of bugs in Nix itself.
-
-This operation has the following options:
-
- - `--check-contents`\
- Checks that the contents of every valid store path has not been
- altered by computing a SHA-256 hash of the contents and comparing it
- with the hash stored in the Nix database at build time. Paths that
- have been modified are printed out. For large stores,
- `--check-contents` is obviously quite slow.
-
- - `--repair`\
- If any valid path is missing from the store, or (if
- `--check-contents` is given) the contents of a valid path has been
- modified, then try to repair the path by redownloading it. See
- `nix-store --repair-path` for details.
-
-# Operation `--verify-path`
-
-## Synopsis
-
-`nix-store` `--verify-path` *paths…*
-
-## Description
-
-The operation `--verify-path` compares the contents of the given store
-paths to their cryptographic hashes stored in Nix’s database. For every
-changed path, it prints a warning message. The exit status is 0 if no
-path has changed, and 1 otherwise.
-
-## Example
-
-To verify the integrity of the `svn` command and all its dependencies:
-
-```console
-$ nix-store --verify-path $(nix-store -qR $(which svn))
-```
-
-# Operation `--repair-path`
-
-## Synopsis
-
-`nix-store` `--repair-path` *paths…*
-
-## Description
-
-The operation `--repair-path` attempts to “repair” the specified paths
-by redownloading them using the available substituters. If no
-substitutes are available, then repair is not possible.
-
-> **Warning**
->
-> During repair, there is a very small time window during which the old
-> path (if it exists) is moved out of the way and replaced with the new
-> path. If repair is interrupted in between, then the system may be left
-> in a broken state (e.g., if the path contains a critical system
-> component like the GNU C Library).
-
-## Example
-
-```console
-$ nix-store --verify-path /nix/store/dj7a81wsm1ijwwpkks3725661h3263p5-glibc-2.13
-path `/nix/store/dj7a81wsm1ijwwpkks3725661h3263p5-glibc-2.13' was modified!
- expected hash `2db57715ae90b7e31ff1f2ecb8c12ec1cc43da920efcbe3b22763f36a1861588',
- got `481c5aa5483ebc97c20457bb8bca24deea56550d3985cda0027f67fe54b808e4'
-
-$ nix-store --repair-path /nix/store/dj7a81wsm1ijwwpkks3725661h3263p5-glibc-2.13
-fetching path `/nix/store/d7a81wsm1ijwwpkks3725661h3263p5-glibc-2.13'...
-…
-```
-
-# Operation `--dump`
-
-## Synopsis
-
-`nix-store` `--dump` *path*
-
-## Description
-
-The operation `--dump` produces a NAR (Nix ARchive) file containing the
-contents of the file system tree rooted at *path*. The archive is
-written to standard output.
-
-A NAR archive is like a TAR or Zip archive, but it contains only the
-information that Nix considers important. For instance, timestamps are
-elided because all files in the Nix store have their timestamp set to 1
-anyway. Likewise, all permissions are left out except for the execute
-bit, because all files in the Nix store have 444 or 555 permission.
-
-Also, a NAR archive is *canonical*, meaning that “equal” paths always
-produce the same NAR archive. For instance, directory entries are
-always sorted so that the actual on-disk order doesn’t influence the
-result. This means that the cryptographic hash of a NAR dump of a
-path is usable as a fingerprint of the contents of the path. Indeed,
-the hashes of store paths stored in Nix’s database (see `nix-store -q
---hash`) are SHA-256 hashes of the NAR dump of each store path.
-
-NAR archives support filenames of unlimited length and 64-bit file
-sizes. They can contain regular files, directories, and symbolic links,
-but not other types of files (such as device nodes).
-
-A Nix archive can be unpacked using `nix-store
---restore`.
-
-# Operation `--restore`
-
-## Synopsis
-
-`nix-store` `--restore` *path*
-
-## Description
-
-The operation `--restore` unpacks a NAR archive to *path*, which must
-not already exist. The archive is read from standard input.
-
-# Operation `--export`
-
-## Synopsis
-
-`nix-store` `--export` *paths…*
-
-## Description
-
-The operation `--export` writes a serialisation of the specified store
-paths to standard output in a format that can be imported into another
-Nix store with `nix-store --import`. This is like `nix-store
---dump`, except that the NAR archive produced by that command doesn’t
-contain the necessary meta-information to allow it to be imported into
-another Nix store (namely, the set of references of the path).
-
-This command does not produce a *closure* of the specified paths, so if
-a store path references other store paths that are missing in the target
-Nix store, the import will fail. To copy a whole closure, do something
-like:
-
-```console
-$ nix-store --export $(nix-store -qR paths) > out
-```
-
-To import the whole closure again, run:
-
-```console
-$ nix-store --import < out
-```
-
-# Operation `--import`
-
-## Synopsis
-
-`nix-store` `--import`
-
-## Description
-
-The operation `--import` reads a serialisation of a set of store paths
-produced by `nix-store --export` from standard input and adds those
-store paths to the Nix store. Paths that already exist in the Nix store
-are ignored. If a path refers to another path that doesn’t exist in the
-Nix store, the import fails.
-
-# Operation `--optimise`
-
-## Synopsis
-
-`nix-store` `--optimise`
-
-## Description
-
-The operation `--optimise` reduces Nix store disk space usage by finding
-identical files in the store and hard-linking them to each other. It
-typically reduces the size of the store by something like 25-35%. Only
-regular files and symlinks are hard-linked in this manner. Files are
-considered identical when they have the same NAR archive serialisation:
-that is, regular files must have the same contents and permission
-(executable or non-executable), and symlinks must have the same
-contents.
-
-After completion, or when the command is interrupted, a report on the
-achieved savings is printed on standard error.
-
-Use `-vv` or `-vvv` to get some progress indication.
-
-## Example
-
-```console
-$ nix-store --optimise
-hashing files in `/nix/store/qhqx7l2f1kmwihc9bnxs7rc159hsxnf3-gcc-4.1.1'
-...
-541838819 bytes (516.74 MiB) freed by hard-linking 54143 files;
-there are 114486 files with equal contents out of 215894 files in total
-```
-
-# Operation `--read-log`
-
-## Synopsis
-
-`nix-store` {`--read-log` | `-l`} *paths…*
-
-## Description
-
-The operation `--read-log` prints the build log of the specified store
-paths on standard output. The build log is whatever the builder of a
-derivation wrote to standard output and standard error. If a store path
-is not a derivation, the deriver of the store path is used.
-
-Build logs are kept in `/nix/var/log/nix/drvs`. However, there is no
-guarantee that a build log is available for any particular store path.
-For instance, if the path was downloaded as a pre-built binary through a
-substitute, then the log is unavailable.
-
-## Example
-
-```console
-$ nix-store -l $(which ktorrent)
-building /nix/store/dhc73pvzpnzxhdgpimsd9sw39di66ph1-ktorrent-2.2.1
-unpacking sources
-unpacking source archive /nix/store/p8n1jpqs27mgkjw07pb5269717nzf5f8-ktorrent-2.2.1.tar.gz
-ktorrent-2.2.1/
-ktorrent-2.2.1/NEWS
-...
-```
-
-# Operation `--dump-db`
-
-## Synopsis
-
-`nix-store` `--dump-db` [*paths…*]
-
-## Description
-
-The operation `--dump-db` writes a dump of the Nix database to standard
-output. It can be loaded into an empty Nix store using `--load-db`. This
-is useful for making backups and when migrating to different database
-schemas.
-
-By default, `--dump-db` will dump the entire Nix database. When one or
-more store paths is passed, only the subset of the Nix database for
-those store paths is dumped. As with `--export`, the user is responsible
-for passing all the store paths for a closure. See `--export` for an
-example.
-
-# Operation `--load-db`
-
-## Synopsis
-
-`nix-store` `--load-db`
-
-## Description
-
-The operation `--load-db` reads a dump of the Nix database created by
-`--dump-db` from standard input and loads it into the Nix database.
-
-# Operation `--print-env`
-
-## Synopsis
-
-`nix-store` `--print-env` *drvpath*
-
-## Description
-
-The operation `--print-env` prints out the environment of a derivation
-in a format that can be evaluated by a shell. The command line arguments
-of the builder are placed in the variable `_args`.
-
-## Example
-
-```console
-$ nix-store --print-env $(nix-instantiate '<nixpkgs>' -A firefox)
-…
-export src; src='/nix/store/plpj7qrwcz94z2psh6fchsi7s8yihc7k-firefox-12.0.source.tar.bz2'
-export stdenv; stdenv='/nix/store/7c8asx3yfrg5dg1gzhzyq2236zfgibnn-stdenv'
-export system; system='x86_64-linux'
-export _args; _args='-e /nix/store/9krlzvny65gdc8s7kpb6lkx8cd02c25c-default-builder.sh'
-```
-
-# Operation `--generate-binary-cache-key`
-
-## Synopsis
-
-`nix-store` `--generate-binary-cache-key` *key-name* *secret-key-file* *public-key-file*
-
-## Description
-
-This command generates an [Ed25519 key pair](http://ed25519.cr.yp.to/)
-that can be used to create a signed binary cache. It takes three
-mandatory parameters:
-
-1. A key name, such as `cache.example.org-1`, that is used to look up
- keys on the client when it verifies signatures. It can be anything,
- but it’s suggested to use the host name of your cache (e.g.
- `cache.example.org`) with a suffix denoting the number of the key
- (to be incremented every time you need to revoke a key).
-
-2. The file name where the secret key is to be stored.
-
-3. The file name where the public key is to be stored.
+`nix-store` takes exactly one *operation* flag which indicates the subcommand to be performed. The following operations are available:
+
+- [`--realise`](./nix-store/realise.md)
+- [`--serve`](./nix-store/serve.md)
+- [`--gc`](./nix-store/gc.md)
+- [`--delete`](./nix-store/delete.md)
+- [`--query`](./nix-store/query.md)
+- [`--add`](./nix-store/add.md)
+- [`--add-fixed`](./nix-store/add-fixed.md)
+- [`--verify`](./nix-store/verify.md)
+- [`--verify-path`](./nix-store/verify-path.md)
+- [`--repair-path`](./nix-store/repair-path.md)
+- [`--dump`](./nix-store/dump.md)
+- [`--restore`](./nix-store/restore.md)
+- [`--export`](./nix-store/export.md)
+- [`--import`](./nix-store/import.md)
+- [`--optimise`](./nix-store/optimise.md)
+- [`--read-log`](./nix-store/read-log.md)
+- [`--dump-db`](./nix-store/dump-db.md)
+- [`--load-db`](./nix-store/load-db.md)
+- [`--print-env`](./nix-store/print-env.md)
+- [`--generate-binary-cache-key`](./nix-store/generate-binary-cache-key.md)
+
+These pages can be viewed offline:
+
+- `man nix-store-<operation>`.
+
+ Example: `man nix-store-realise`
+
+- `nix-store --help --<operation>`
+
+ Example: `nix-store --help --realise`
diff --git a/doc/manual/src/command-ref/nix-store/add-fixed.md b/doc/manual/src/command-ref/nix-store/add-fixed.md
new file mode 100644
index 000000000..d25db091c
--- /dev/null
+++ b/doc/manual/src/command-ref/nix-store/add-fixed.md
@@ -0,0 +1,35 @@
+# Name
+
+`nix-store --add-fixed` - add paths to store using given hashing algorithm
+
+## Synopsis
+
+`nix-store` `--add-fixed` [`--recursive`] *algorithm* *paths…*
+
+## Description
+
+The operation `--add-fixed` adds the specified paths to the Nix store.
+Unlike `--add`, paths are registered using the specified hashing
+algorithm, resulting in the same output path as a fixed-output
+derivation. This can be used for sources that are not available from a
+public URL or that have broken since the download expression was written.
+
+This operation has the following options:
+
+ - `--recursive`\
+ Use recursive instead of flat hashing mode, used when adding
+ directories to the store.
+
+{{#include ./opt-common.md}}
+
+{{#include ../opt-common.md}}
+
+{{#include ../env-common.md}}
+
+## Example
+
+```console
+$ nix-store --add-fixed sha256 ./hello-2.10.tar.gz
+/nix/store/3x7dwzq014bblazs7kq20p9hyzz0qh8g-hello-2.10.tar.gz
+```
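+
+When adding a directory, `--recursive` selects recursive (NAR-based)
+hashing. A small illustrative sketch (the directory name is made up and
+the resulting store path is elided):
+
+```console
+$ nix-store --add-fixed --recursive sha256 ./my-sources
+/nix/store/…-my-sources
+```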
+
diff --git a/doc/manual/src/command-ref/nix-store/add.md b/doc/manual/src/command-ref/nix-store/add.md
new file mode 100644
index 000000000..87d504cd3
--- /dev/null
+++ b/doc/manual/src/command-ref/nix-store/add.md
@@ -0,0 +1,25 @@
+# Name
+
+`nix-store --add` - add paths to Nix store
+
+# Synopsis
+
+`nix-store` `--add` *paths…*
+
+# Description
+
+The operation `--add` adds the specified paths to the Nix store. It
+prints the resulting paths in the Nix store on standard output.
+
+{{#include ./opt-common.md}}
+
+{{#include ../opt-common.md}}
+
+{{#include ../env-common.md}}
+
+# Example
+
+```console
+$ nix-store --add ./foo.c
+/nix/store/m7lrha58ph6rcnv109yzx1nk1cj7k7zf-foo.c
+```
diff --git a/doc/manual/src/command-ref/nix-store/delete.md b/doc/manual/src/command-ref/nix-store/delete.md
new file mode 100644
index 000000000..550c5ea29
--- /dev/null
+++ b/doc/manual/src/command-ref/nix-store/delete.md
@@ -0,0 +1,33 @@
+# Name
+
+`nix-store --delete` - delete store paths
+
+# Synopsis
+
+`nix-store` `--delete` [`--ignore-liveness`] *paths…*
+
+# Description
+
+The operation `--delete` deletes the store paths *paths* from the Nix
+store, but only if it is safe to do so; that is, when the path is not
+reachable from a root of the garbage collector. This means that you can
+only delete paths that would also be deleted by `nix-store --gc`. Thus,
+`--delete` is a more targeted version of `--gc`.
+
+With the option `--ignore-liveness`, reachability from the roots is
+ignored. However, the path still won’t be deleted if there are other
+paths in the store that refer to it (i.e., depend on it).
+
+{{#include ./opt-common.md}}
+
+{{#include ../opt-common.md}}
+
+{{#include ../env-common.md}}
+
+# Example
+
+```console
+$ nix-store --delete /nix/store/zq0h41l75vlb4z45kzgjjmsjxvcv1qk7-mesa-6.4
+0 bytes freed (0.00 MiB)
+error: cannot delete path `/nix/store/zq0h41l75vlb4z45kzgjjmsjxvcv1qk7-mesa-6.4' since it is still alive
+```
diff --git a/doc/manual/src/command-ref/nix-store/dump-db.md b/doc/manual/src/command-ref/nix-store/dump-db.md
new file mode 100644
index 000000000..b2c77ced0
--- /dev/null
+++ b/doc/manual/src/command-ref/nix-store/dump-db.md
@@ -0,0 +1,26 @@
+# Name
+
+`nix-store --dump-db` - export Nix database
+
+# Synopsis
+
+`nix-store` `--dump-db` [*paths…*]
+
+# Description
+
+The operation `--dump-db` writes a dump of the Nix database to standard
+output. It can be loaded into an empty Nix store using `--load-db`. This
+is useful for making backups and when migrating to different database
+schemas.
+
+By default, `--dump-db` will dump the entire Nix database. When one or
+more store paths are passed, only the subset of the Nix database for
+those store paths is dumped. As with `--export`, the user is responsible
+for passing all the store paths for a closure. See `--export` for an
+example.
+
+{{#include ./opt-common.md}}
+
+{{#include ../opt-common.md}}
+
+{{#include ../env-common.md}}
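+
+# Example
+
+A hedged sketch of backing up the database and later loading the dump
+into a store whose database is empty (the file name is illustrative):
+
+```console
+$ nix-store --dump-db > /tmp/nix-db-dump
+$ nix-store --load-db < /tmp/nix-db-dump
+```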
diff --git a/doc/manual/src/command-ref/nix-store/dump.md b/doc/manual/src/command-ref/nix-store/dump.md
new file mode 100644
index 000000000..62656d599
--- /dev/null
+++ b/doc/manual/src/command-ref/nix-store/dump.md
@@ -0,0 +1,40 @@
+# Name
+
+`nix-store --dump` - write a single path to a Nix Archive
+
+## Synopsis
+
+`nix-store` `--dump` *path*
+
+## Description
+
+The operation `--dump` produces a NAR (Nix ARchive) file containing the
+contents of the file system tree rooted at *path*. The archive is
+written to standard output.
+
+A NAR archive is like a TAR or Zip archive, but it contains only the
+information that Nix considers important. For instance, timestamps are
+elided because all files in the Nix store have their timestamp set to 1
+anyway. Likewise, all permissions are left out except for the execute
+bit, because all files in the Nix store have 444 or 555 permission.
+
+Also, a NAR archive is *canonical*, meaning that “equal” paths always
+produce the same NAR archive. For instance, directory entries are
+always sorted so that the actual on-disk order doesn’t influence the
+result. This means that the cryptographic hash of a NAR dump of a
+path is usable as a fingerprint of the contents of the path. Indeed,
+the hashes of store paths stored in Nix’s database (see `nix-store -q
+--hash`) are SHA-256 hashes of the NAR dump of each store path.
+
+NAR archives support filenames of unlimited length and 64-bit file
+sizes. They can contain regular files, directories, and symbolic links,
+but not other types of files (such as device nodes).
+
+A Nix archive can be unpacked using `nix-store
+--restore`.
+
+{{#include ./opt-common.md}}
+
+{{#include ../opt-common.md}}
+
+{{#include ../env-common.md}}
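+
+## Example
+
+A minimal round-trip sketch: dump a store path to a NAR file and unpack
+it elsewhere (the path comes from building `hello` from a `<nixpkgs>`
+channel, so the exact store path will differ per system):
+
+```console
+$ nix-store --dump $(nix-build '<nixpkgs>' -A hello --no-out-link) > hello.nar
+$ nix-store --restore ./hello-copy < hello.nar
+```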
diff --git a/doc/manual/src/command-ref/nix-store/export.md b/doc/manual/src/command-ref/nix-store/export.md
new file mode 100644
index 000000000..aeea38636
--- /dev/null
+++ b/doc/manual/src/command-ref/nix-store/export.md
@@ -0,0 +1,41 @@
+# Name
+
+`nix-store --export` - export store paths to a Nix Archive
+
+## Synopsis
+
+`nix-store` `--export` *paths…*
+
+## Description
+
+The operation `--export` writes a serialisation of the specified store
+paths to standard output in a format that can be imported into another
+Nix store with `nix-store --import`. This is like `nix-store
+--dump`, except that the NAR archive produced by that command doesn’t
+contain the necessary meta-information to allow it to be imported into
+another Nix store (namely, the set of references of the path).
+
+This command does not produce a *closure* of the specified paths, so if
+a store path references other store paths that are missing in the target
+Nix store, the import will fail.
+
+{{#include ./opt-common.md}}
+
+{{#include ../opt-common.md}}
+
+{{#include ../env-common.md}}
+
+# Examples
+
+To copy a whole closure, do something
+like:
+
+```console
+$ nix-store --export $(nix-store -qR paths) > out
+```
+
+To import the whole closure again, run:
+
+```console
+$ nix-store --import < out
+```
diff --git a/doc/manual/src/command-ref/nix-store/gc.md b/doc/manual/src/command-ref/nix-store/gc.md
new file mode 100644
index 000000000..7be0d559a
--- /dev/null
+++ b/doc/manual/src/command-ref/nix-store/gc.md
@@ -0,0 +1,72 @@
+# Name
+
+`nix-store --gc` - run garbage collection
+
+# Synopsis
+
+`nix-store` `--gc` [`--print-roots` | `--print-live` | `--print-dead`] [`--max-freed` *bytes*]
+
+# Description
+
+Without additional flags, the operation `--gc` performs a garbage
+collection on the Nix store. That is, all paths in the Nix store not
+reachable via file system references from a set of “roots” are deleted.
+
+The following suboperations may be specified:
+
+ - `--print-roots`\
+ This operation prints on standard output the set of roots used by
+ the garbage collector.
+
+ - `--print-live`\
+ This operation prints on standard output the set of “live” store
+ paths, which are all the store paths reachable from the roots. Live
+ paths should never be deleted, since that would break consistency —
+ it would become possible that applications are installed that
+ reference things that are no longer present in the store.
+
+ - `--print-dead`\
+ This operation prints out on standard output the set of “dead” store
+ paths, which is just the opposite of the set of live paths: any path
+ in the store that is not live (with respect to the roots) is dead.
+
+By default, all unreachable paths are deleted. The following options
+control what gets deleted and in what order:
+
+ - `--max-freed` *bytes*\
+ Keep deleting paths until at least *bytes* bytes have been deleted,
+ then stop. The argument *bytes* can be followed by the
+ multiplicative suffix `K`, `M`, `G` or `T`, denoting KiB, MiB, GiB
+ or TiB units.
+
+The behaviour of the collector is also influenced by the
+`keep-outputs` and `keep-derivations` settings in the Nix
+configuration file.
+
+By default, the collector prints the total number of freed bytes when it
+finishes (or when it is interrupted). With `--print-dead`, it prints the
+number of bytes that would be freed.
+
+{{#include ./opt-common.md}}
+
+{{#include ../opt-common.md}}
+
+{{#include ../env-common.md}}
+
+# Examples
+
+To delete all unreachable paths, just do:
+
+```console
+$ nix-store --gc
+deleting `/nix/store/kq82idx6g0nyzsp2s14gfsc38npai7lf-cairo-1.0.4.tar.gz.drv'
+...
+8825586 bytes freed (8.42 MiB)
+```
+
+To delete at least 100 MiBs of unreachable paths:
+
+```console
+$ nix-store --gc --max-freed $((100 * 1024 * 1024))
+```
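+
+To inspect the roots and the dead paths without deleting anything
+(outputs elided, as they depend on the local store):
+
+```console
+$ nix-store --gc --print-roots
+…
+
+$ nix-store --gc --print-dead
+…
+```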
+
diff --git a/doc/manual/src/command-ref/nix-store/generate-binary-cache-key.md b/doc/manual/src/command-ref/nix-store/generate-binary-cache-key.md
new file mode 100644
index 000000000..8085d877b
--- /dev/null
+++ b/doc/manual/src/command-ref/nix-store/generate-binary-cache-key.md
@@ -0,0 +1,29 @@
+# Name
+
+`nix-store --generate-binary-cache-key` - generate key pair to use for a binary cache
+
+## Synopsis
+
+`nix-store` `--generate-binary-cache-key` *key-name* *secret-key-file* *public-key-file*
+
+## Description
+
+This command generates an [Ed25519 key pair](http://ed25519.cr.yp.to/)
+that can be used to create a signed binary cache. It takes three
+mandatory parameters:
+
+1. A key name, such as `cache.example.org-1`, that is used to look up
+ keys on the client when it verifies signatures. It can be anything,
+ but it’s suggested to use the host name of your cache (e.g.
+ `cache.example.org`) with a suffix denoting the number of the key
+ (to be incremented every time you need to revoke a key).
+
+2. The file name where the secret key is to be stored.
+
+3. The file name where the public key is to be stored.
+
+{{#include ./opt-common.md}}
+
+{{#include ../opt-common.md}}
+
+{{#include ../env-common.md}}
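+
+## Example
+
+A sketch of generating a key pair for a cache served from
+`cache.example.org` (the file names are illustrative):
+
+```console
+$ nix-store --generate-binary-cache-key cache.example.org-1 cache-priv-key.pem cache-pub-key.pem
+```
+
+The contents of the public key file can then be distributed to clients,
+e.g. via the `trusted-public-keys` setting.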
diff --git a/doc/manual/src/command-ref/nix-store/import.md b/doc/manual/src/command-ref/nix-store/import.md
new file mode 100644
index 000000000..2711316a7
--- /dev/null
+++ b/doc/manual/src/command-ref/nix-store/import.md
@@ -0,0 +1,21 @@
+# Name
+
+`nix-store --import` - import Nix Archive into the store
+
+# Synopsis
+
+`nix-store` `--import`
+
+# Description
+
+The operation `--import` reads a serialisation of a set of store paths
+produced by `nix-store --export` from standard input and adds those
+store paths to the Nix store. Paths that already exist in the Nix store
+are ignored. If a path refers to another path that doesn’t exist in the
+Nix store, the import fails.
+
+{{#include ./opt-common.md}}
+
+{{#include ../opt-common.md}}
+
+{{#include ../env-common.md}}
diff --git a/doc/manual/src/command-ref/nix-store/load-db.md b/doc/manual/src/command-ref/nix-store/load-db.md
new file mode 100644
index 000000000..e2f438ed6
--- /dev/null
+++ b/doc/manual/src/command-ref/nix-store/load-db.md
@@ -0,0 +1,18 @@
+# Name
+
+`nix-store --load-db` - import Nix database
+
+# Synopsis
+
+`nix-store` `--load-db`
+
+# Description
+
+The operation `--load-db` reads a dump of the Nix database created by
+`--dump-db` from standard input and loads it into the Nix database.
+
+{{#include ./opt-common.md}}
+
+{{#include ../opt-common.md}}
+
+{{#include ../env-common.md}}
diff --git a/doc/manual/src/command-ref/nix-store/opt-common.md b/doc/manual/src/command-ref/nix-store/opt-common.md
new file mode 100644
index 000000000..bf6566555
--- /dev/null
+++ b/doc/manual/src/command-ref/nix-store/opt-common.md
@@ -0,0 +1,36 @@
+# Options
+
+The following options are allowed for all `nix-store` operations, but may not always have an effect.
+
+- <span id="opt-add-root">[`--add-root`](#opt-add-root)</span> *path*
+
+ Causes the result of a realisation (`--realise` and
+ `--force-realise`) to be registered as a root of the garbage
+ collector. *path* will be created as a symlink to the resulting
+ store path. In addition, a uniquely named symlink to *path* will
+ be created in `/nix/var/nix/gcroots/auto/`. For instance,
+
+ ```console
+ $ nix-store --add-root /home/eelco/bla/result -r ...
+
+ $ ls -l /nix/var/nix/gcroots/auto
+ lrwxrwxrwx 1 ... 2005-03-13 21:10 dn54lcypm8f8... -> /home/eelco/bla/result
+
+ $ ls -l /home/eelco/bla/result
+ lrwxrwxrwx 1 ... 2005-03-13 21:10 /home/eelco/bla/result -> /nix/store/1r11343n6qd4...-f-spot-0.0.10
+ ```
+
+ Thus, when `/home/eelco/bla/result` is removed, the GC root in the
+ `auto` directory becomes a dangling symlink and will be ignored by
+ the collector.
+
+ > **Warning**
+ >
+ > Note that it is not possible to move or rename GC roots, since
+ > the symlink in the `auto` directory will still point to the old
+ > location.
+
+ If there are multiple results, then multiple symlinks will be
+ created by sequentially numbering symlinks beyond the first one
+ (e.g., `foo`, `foo-2`, `foo-3`, and so on).
+
diff --git a/doc/manual/src/command-ref/nix-store/optimise.md b/doc/manual/src/command-ref/nix-store/optimise.md
new file mode 100644
index 000000000..dc392aeb8
--- /dev/null
+++ b/doc/manual/src/command-ref/nix-store/optimise.md
@@ -0,0 +1,40 @@
+# Name
+
+`nix-store --optimise` - reduce disk space usage
+
+## Synopsis
+
+`nix-store` `--optimise`
+
+## Description
+
+The operation `--optimise` reduces Nix store disk space usage by finding
+identical files in the store and hard-linking them to each other. It
+typically reduces the size of the store by something like 25-35%. Only
+regular files and symlinks are hard-linked in this manner. Files are
+considered identical when they have the same NAR archive serialisation:
+that is, regular files must have the same contents and permission
+(executable or non-executable), and symlinks must have the same
+contents.
+
+After completion, or when the command is interrupted, a report on the
+achieved savings is printed on standard error.
+
+Use `-vv` or `-vvv` to get some progress indication.
+
+{{#include ./opt-common.md}}
+
+{{#include ../opt-common.md}}
+
+{{#include ../env-common.md}}
+
+## Example
+
+```console
+$ nix-store --optimise
+hashing files in `/nix/store/qhqx7l2f1kmwihc9bnxs7rc159hsxnf3-gcc-4.1.1'
+...
+541838819 bytes (516.74 MiB) freed by hard-linking 54143 files;
+there are 114486 files with equal contents out of 215894 files in total
+```
+
diff --git a/doc/manual/src/command-ref/nix-store/print-env.md b/doc/manual/src/command-ref/nix-store/print-env.md
new file mode 100644
index 000000000..bd2084ef6
--- /dev/null
+++ b/doc/manual/src/command-ref/nix-store/print-env.md
@@ -0,0 +1,31 @@
+# Name
+
+`nix-store --print-env` - print the build environment of a derivation
+
+## Synopsis
+
+`nix-store` `--print-env` *drvpath*
+
+## Description
+
+The operation `--print-env` prints out the environment of a derivation
+in a format that can be evaluated by a shell. The command line arguments
+of the builder are placed in the variable `_args`.
+
+{{#include ./opt-common.md}}
+
+{{#include ../opt-common.md}}
+
+{{#include ../env-common.md}}
+
+## Example
+
+```console
+$ nix-store --print-env $(nix-instantiate '<nixpkgs>' -A firefox)
+…
+export src; src='/nix/store/plpj7qrwcz94z2psh6fchsi7s8yihc7k-firefox-12.0.source.tar.bz2'
+export stdenv; stdenv='/nix/store/7c8asx3yfrg5dg1gzhzyq2236zfgibnn-stdenv'
+export system; system='x86_64-linux'
+export _args; _args='-e /nix/store/9krlzvny65gdc8s7kpb6lkx8cd02c25c-default-builder.sh'
+```
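+
+Since the output is valid shell syntax, it can be loaded into the
+current shell with `eval` (a sketch; the variables shown above then
+become available):
+
+```console
+$ eval "$(nix-store --print-env $(nix-instantiate '<nixpkgs>' -A firefox))"
+$ echo $system
+x86_64-linux
+```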
+
diff --git a/doc/manual/src/command-ref/nix-store/query.md b/doc/manual/src/command-ref/nix-store/query.md
new file mode 100644
index 000000000..9f7dbd3e8
--- /dev/null
+++ b/doc/manual/src/command-ref/nix-store/query.md
@@ -0,0 +1,220 @@
+# Name
+
+`nix-store --query` - display information about store paths
+
+# Synopsis
+
+`nix-store` {`--query` | `-q`}
+ {`--outputs` | `--requisites` | `-R` | `--references` |
+ `--referrers` | `--referrers-closure` | `--deriver` | `-d` |
+ `--graph` | `--tree` | `--binding` *name* | `-b` *name* | `--hash` |
+ `--size` | `--roots`}
+ [`--use-output`] [`-u`] [`--force-realise`] [`-f`]
+ *paths…*
+
+# Description
+
+The operation `--query` displays various bits of information about
+store paths. The queries are described below. At most one query can be
+specified. The default query is `--outputs`.
+
+The paths *paths* may also be symlinks from outside of the Nix store, to
+the Nix store. In that case, the query is applied to the target of the
+symlink.
+
+# Common query options
+
+ - `--use-output`; `-u`\
+ For each argument to the query that is a [store derivation], apply the
+ query to the output path of the derivation instead.
+
+ - `--force-realise`; `-f`\
+ Realise each argument to the query first (see [`nix-store --realise`](./realise.md)).
+
+[store derivation]: @docroot@/glossary.md#gloss-store-derivation
+
+# Queries
+
+ - `--outputs`\
+ Prints out the [output paths] of the store
+ derivations *paths*. These are the paths that will be produced when
+ the derivation is built.
+
+ [output paths]: ../../glossary.md#gloss-output-path
+
+ - `--requisites`; `-R`\
+    Prints out the [closure] of the store paths *paths*.
+
+ [closure]: ../../glossary.md#gloss-closure
+
+ This query has one option:
+
+ - `--include-outputs`
+ Also include the existing output paths of [store derivation]s,
+ and their closures.
+
+ This query can be used to implement various kinds of deployment. A
+ *source deployment* is obtained by distributing the closure of a
+ store derivation. A *binary deployment* is obtained by distributing
+ the closure of an output path. A *cache deployment* (combined
+ source/binary deployment, including binaries of build-time-only
+ dependencies) is obtained by distributing the closure of a store
+ derivation and specifying the option `--include-outputs`.
+
+ - `--references`\
+ Prints the set of [references] of the store paths
+ *paths*, that is, their immediate dependencies. (For *all*
+ dependencies, use `--requisites`.)
+
+ [references]: ../../glossary.md#gloss-reference
+
+ - `--referrers`\
+ Prints the set of *referrers* of the store paths *paths*, that is,
+ the store paths currently existing in the Nix store that refer to
+ one of *paths*. Note that contrary to the references, the set of
+ referrers is not constant; it can change as store paths are added or
+ removed.
+
+ - `--referrers-closure`\
+ Prints the closure of the set of store paths *paths* under the
+ referrers relation; that is, all store paths that directly or
+    indirectly refer to one of *paths*. These are all the paths currently
+ in the Nix store that are dependent on *paths*.
+
+ - `--deriver`; `-d`\
+ Prints the [deriver] of the store paths *paths*. If
+ the path has no deriver (e.g., if it is a source file), or if the
+ deriver is not known (e.g., in the case of a binary-only
+ deployment), the string `unknown-deriver` is printed.
+
+ [deriver]: ../../glossary.md#gloss-deriver
+
+ - `--graph`\
+ Prints the references graph of the store paths *paths* in the format
+ of the `dot` tool of AT\&T's [Graphviz
+ package](http://www.graphviz.org/). This can be used to visualise
+ dependency graphs. To obtain a build-time dependency graph, apply
+ this to a store derivation. To obtain a runtime dependency graph,
+ apply it to an output path.
+
+ - `--tree`\
+ Prints the references graph of the store paths *paths* as a nested
+ ASCII tree. References are ordered by descending closure size; this
+ tends to flatten the tree, making it more readable. The query only
+ recurses into a store path when it is first encountered; this
+ prevents a blowup of the tree representation of the graph.
+
+ - `--graphml`\
+ Prints the references graph of the store paths *paths* in the
+ [GraphML](http://graphml.graphdrawing.org/) file format. This can be
+ used to visualise dependency graphs. To obtain a build-time
+ dependency graph, apply this to a [store derivation]. To obtain a
+ runtime dependency graph, apply it to an output path.
+
+ - `--binding` *name*; `-b` *name*\
+ Prints the value of the attribute *name* (i.e., environment
+ variable) of the [store derivation]s *paths*. It is an error for a
+ derivation to not have the specified attribute.
+
+ - `--hash`\
+ Prints the SHA-256 hash of the contents of the store paths *paths*
+ (that is, the hash of the output of `nix-store --dump` on the given
+ paths). Since the hash is stored in the Nix database, this is a fast
+ operation.
+
+ - `--size`\
+ Prints the size in bytes of the contents of the store paths *paths*
+ — to be precise, the size of the output of `nix-store --dump` on
+ the given paths. Note that the actual disk space required by the
+ store paths may be higher, especially on filesystems with large
+ cluster sizes.
+
+ - `--roots`\
+ Prints the garbage collector roots that point, directly or
+ indirectly, at the store paths *paths*.
+
+{{#include ./opt-common.md}}
+
+{{#include ../opt-common.md}}
+
+{{#include ../env-common.md}}
+
+# Examples
+
+Print the closure (runtime dependencies) of the `svn` program in the
+current user environment:
+
+```console
+$ nix-store -qR $(which svn)
+/nix/store/5mbglq5ldqld8sj57273aljwkfvj22mc-subversion-1.1.4
+/nix/store/9lz9yc6zgmc0vlqmn2ipcpkjlmbi51vv-glibc-2.3.4
+...
+```
+
+Print the build-time dependencies of `svn`:
+
+```console
+$ nix-store -qR $(nix-store -qd $(which svn))
+/nix/store/02iizgn86m42q905rddvg4ja975bk2i4-grep-2.5.1.tar.bz2.drv
+/nix/store/07a2bzxmzwz5hp58nf03pahrv2ygwgs3-gcc-wrapper.sh
+/nix/store/0ma7c9wsbaxahwwl04gbw3fcd806ski4-glibc-2.3.4.drv
+... lots of other paths ...
+```
+
+The difference with the previous example is that we ask the closure of
+the derivation (`-qd`), not the closure of the output path that contains
+`svn`.
+
+Show the build-time dependencies as a tree:
+
+```console
+$ nix-store -q --tree $(nix-store -qd $(which svn))
+/nix/store/7i5082kfb6yjbqdbiwdhhza0am2xvh6c-subversion-1.1.4.drv
++---/nix/store/d8afh10z72n8l1cr5w42366abiblgn54-builder.sh
++---/nix/store/fmzxmpjx2lh849ph0l36snfj9zdibw67-bash-3.0.drv
+| +---/nix/store/570hmhmx3v57605cqg9yfvvyh0nnb8k8-bash
+| +---/nix/store/p3srsbd8dx44v2pg6nbnszab5mcwx03v-builder.sh
+...
+```
+
+Show all paths that depend on the same OpenSSL library as `svn`:
+
+```console
+$ nix-store -q --referrers $(nix-store -q --binding openssl $(nix-store -qd $(which svn)))
+/nix/store/23ny9l9wixx21632y2wi4p585qhva1q8-sylpheed-1.0.0
+/nix/store/5mbglq5ldqld8sj57273aljwkfvj22mc-subversion-1.1.4
+/nix/store/dpmvp969yhdqs7lm2r1a3gng7pyq6vy4-subversion-1.1.3
+/nix/store/l51240xqsgg8a7yrbqdx1rfzyv6l26fx-lynx-2.8.5
+```
+
+Show all paths that directly or indirectly depend on the Glibc (C
+library) used by `svn`:
+
+```console
+$ nix-store -q --referrers-closure $(ldd $(which svn) | grep /libc.so | awk '{print $3}')
+/nix/store/034a6h4vpz9kds5r6kzb9lhh81mscw43-libgnomeprintui-2.8.2
+/nix/store/15l3yi0d45prm7a82pcrknxdh6nzmxza-gawk-3.1.4
+...
+```
+
+Note that `ldd` is a command that prints out the dynamic libraries used
+by an ELF executable.
+
+Make a picture of the runtime dependency graph of the current user
+environment:
+
+```console
+$ nix-store -q --graph ~/.nix-profile | dot -Tps > graph.ps
+$ gv graph.ps
+```
+
+Show every garbage collector root that points to a store path that
+depends on `svn`:
+
+```console
+$ nix-store -q --roots $(which svn)
+/nix/var/nix/profiles/default-81-link
+/nix/var/nix/profiles/default-82-link
+/home/eelco/.local/state/nix/profiles/profile-97-link
+```
+
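+As a further sketch (output omitted; the exact figures differ per system),
+`--size` prints one line per path, so the sizes of a closure can be summed
+to approximate the total space taken up by a path and its runtime
+dependencies:
+
+```console
+$ nix-store -q --size $(nix-store -qR $(which svn)) | awk '{ total += $1 } END { print total }'
+```
+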
diff --git a/doc/manual/src/command-ref/nix-store/read-log.md b/doc/manual/src/command-ref/nix-store/read-log.md
new file mode 100644
index 000000000..4a88e9382
--- /dev/null
+++ b/doc/manual/src/command-ref/nix-store/read-log.md
@@ -0,0 +1,38 @@
+# Name
+
+`nix-store --read-log` - print build log
+
+# Synopsis
+
+`nix-store` {`--read-log` | `-l`} *paths…*
+
+# Description
+
+The operation `--read-log` prints the build log of the specified store
+paths on standard output. The build log is whatever the builder of a
+derivation wrote to standard output and standard error. If a store path
+is not a derivation, the deriver of the store path is used.
+
+Build logs are kept in `/nix/var/log/nix/drvs`. However, there is no
+guarantee that a build log is available for any particular store path.
+For instance, if the path was downloaded as a pre-built binary through a
+substitute, then the log is unavailable.
+
+{{#include ./opt-common.md}}
+
+{{#include ../opt-common.md}}
+
+{{#include ../env-common.md}}
+
+# Example
+
+```console
+$ nix-store -l $(which ktorrent)
+building /nix/store/dhc73pvzpnzxhdgpimsd9sw39di66ph1-ktorrent-2.2.1
+unpacking sources
+unpacking source archive /nix/store/p8n1jpqs27mgkjw07pb5269717nzf5f8-ktorrent-2.2.1.tar.gz
+ktorrent-2.2.1/
+ktorrent-2.2.1/NEWS
+...
+```
+
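+Since a non-derivation path is resolved to its deriver, an output path can
+be passed as well. A sketch, assuming the path was built locally so that a
+log exists:
+
+```console
+$ nix-store -l $(nix-build '<nixpkgs>' -A hello --no-out-link)
+```
+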
diff --git a/doc/manual/src/command-ref/nix-store/realise.md b/doc/manual/src/command-ref/nix-store/realise.md
new file mode 100644
index 000000000..f61a20100
--- /dev/null
+++ b/doc/manual/src/command-ref/nix-store/realise.md
@@ -0,0 +1,118 @@
+# Name
+
+`nix-store --realise` - realise specified store paths
+
+# Synopsis
+
+`nix-store` {`--realise` | `-r`} *paths…* [`--dry-run`]
+
+# Description
+
+The operation `--realise` essentially “builds” the specified store
+paths. Realisation is a somewhat overloaded term:
+
+ - If the store path is a *derivation*, realisation ensures that the
+ output paths of the derivation are [valid] (i.e.,
+ the output path and its closure exist in the file system). This
+ can be done in several ways. First, it is possible that the
+ outputs are already valid, in which case we are done
+ immediately. Otherwise, there may be [substitutes]
+ that produce the outputs (e.g., by downloading them). Finally, the
+ outputs can be produced by running the build task described
+ by the derivation.
+
+ - If the store path is not a derivation, realisation ensures that the
+ specified path is valid (i.e., it and its closure exist in the file
+ system). If the path is already valid, we are done immediately.
+ Otherwise, the path and any missing paths in its closure may be
+ produced through substitutes. If there are no (successful)
+ substitutes, realisation fails.
+
+[valid]: @docroot@/glossary.md#gloss-validity
+[substitutes]: @docroot@/glossary.md#gloss-substitute
+
+The output path of each derivation is printed on standard output. (For
+non-derivation arguments, the argument itself is printed.)
+
+The following flags are available:
+
+ - `--dry-run`\
+ Print on standard error a description of what packages would be
+ built or downloaded, without actually performing the operation.
+
+ - `--ignore-unknown`\
+ If a non-derivation path does not have a substitute, then silently
+ ignore it.
+
+ - `--check`\
+ This option allows you to check whether a derivation is
+ deterministic. It rebuilds the specified derivation and checks
+ whether the result is bitwise-identical with the existing outputs,
+ printing an error if that’s not the case. The outputs of the
+ specified derivation must already exist. When used with `-K`, if an
+ output path is not identical to the corresponding output from the
+ previous build, the new output path is left in
+ `/nix/store/name.check`.
+
+Special exit codes:
+
+ - `100`\
+ Generic build failure, the builder process returned with a non-zero
+ exit code.
+
+ - `101`\
+ Build timeout, the build was aborted because it did not complete
+ within the specified `timeout`.
+
+ - `102`\
+ Hash mismatch, the build output was rejected because it does not
+ match the [`outputHash` attribute of the
+ derivation](@docroot@/language/advanced-attributes.md).
+
+ - `104`\
+ Not deterministic, the build succeeded in check mode but the
+ resulting output is not binary reproducible.
+
+With the `--keep-going` flag it's possible for multiple failures to
+occur. In this case the 1xx status codes are combined using binary OR:
+
+ 1100100
+ ^^^^
+ |||`- timeout
+ ||`-- output hash mismatch
+ |`--- build failure
+ `---- not deterministic
+
+
+{{#include ./opt-common.md}}
+
+{{#include ../opt-common.md}}
+
+{{#include ../env-common.md}}
+
+# Examples
+
+This operation is typically used to build [store derivation]s produced by
+[`nix-instantiate`](@docroot@/command-ref/nix-instantiate.md):
+
+[store derivation]: @docroot@/glossary.md#gloss-store-derivation
+
+```console
+$ nix-store -r $(nix-instantiate ./test.nix)
+/nix/store/31axcgrlbfsxzmfff1gyj1bf62hvkby2-aterm-2.3.1
+```
+
+This is essentially what [`nix-build`](@docroot@/command-ref/nix-build.md) does.
+
+To test whether a previously-built derivation is deterministic:
+
+```console
+$ nix-build '<nixpkgs>' -A hello --check -K
+```
+
+Use [`nix-store --read-log`](./read-log.md) to show the stderr and stdout of a build:
+
+```console
+$ nix-store --read-log $(nix-instantiate ./test.nix)
+```
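+
+To see what a realisation would build or download without actually doing
+it, the `--dry-run` flag described above can be added. A sketch; the report
+depends on what is already present in the store:
+
+```console
+$ nix-store -r --dry-run $(nix-instantiate ./test.nix)
+```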
diff --git a/doc/manual/src/command-ref/nix-store/repair-path.md b/doc/manual/src/command-ref/nix-store/repair-path.md
new file mode 100644
index 000000000..9c3d9f7cd
--- /dev/null
+++ b/doc/manual/src/command-ref/nix-store/repair-path.md
@@ -0,0 +1,35 @@
+# Name
+
+`nix-store --repair-path` - re-download path from substituter
+
+# Synopsis
+
+`nix-store` `--repair-path` *paths…*
+
+# Description
+
+The operation `--repair-path` attempts to “repair” the specified paths
+by redownloading them using the available substituters. If no
+substitutes are available, then repair is not possible.
+
+> **Warning**
+>
+> During repair, there is a very small time window during which the old
+> path (if it exists) is moved out of the way and replaced with the new
+> path. If repair is interrupted in between, then the system may be left
+> in a broken state (e.g., if the path contains a critical system
+> component like the GNU C Library).
+
+# Example
+
+```console
+$ nix-store --verify-path /nix/store/dj7a81wsm1ijwwpkks3725661h3263p5-glibc-2.13
+path `/nix/store/dj7a81wsm1ijwwpkks3725661h3263p5-glibc-2.13' was modified!
+ expected hash `2db57715ae90b7e31ff1f2ecb8c12ec1cc43da920efcbe3b22763f36a1861588',
+ got `481c5aa5483ebc97c20457bb8bca24deea56550d3985cda0027f67fe54b808e4'
+
+$ nix-store --repair-path /nix/store/dj7a81wsm1ijwwpkks3725661h3263p5-glibc-2.13
+fetching path `/nix/store/dj7a81wsm1ijwwpkks3725661h3263p5-glibc-2.13'...
+…
+```
+
diff --git a/doc/manual/src/command-ref/nix-store/restore.md b/doc/manual/src/command-ref/nix-store/restore.md
new file mode 100644
index 000000000..fcba43df4
--- /dev/null
+++ b/doc/manual/src/command-ref/nix-store/restore.md
@@ -0,0 +1,18 @@
+# Name
+
+`nix-store --restore` - extract a Nix archive
+
+## Synopsis
+
+`nix-store` `--restore` *path*
+
+## Description
+
+The operation `--restore` unpacks a NAR archive to *path*, which must
+not already exist. The archive is read from standard input.
+
+{{#include ./opt-common.md}}
+
+{{#include ../opt-common.md}}
+
+{{#include ../env-common.md}}
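+
+## Example
+
+A minimal sketch pairing `--restore` with [`nix-store --dump`](./dump.md);
+the archive name and the destination directory are only illustrative:
+
+```console
+$ nix-store --dump ./original > archive.nar
+$ nix-store --restore ./copy < archive.nar
+```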
diff --git a/doc/manual/src/command-ref/nix-store/serve.md b/doc/manual/src/command-ref/nix-store/serve.md
new file mode 100644
index 000000000..0f90f65ae
--- /dev/null
+++ b/doc/manual/src/command-ref/nix-store/serve.md
@@ -0,0 +1,38 @@
+# Name
+
+`nix-store --serve` - serve local Nix store over SSH
+
+# Synopsis
+
+`nix-store` `--serve` [`--write`]
+
+# Description
+
+The operation `--serve` provides access to the Nix store over stdin and
+stdout, and is intended to be used as a means of providing Nix store
+access to a restricted SSH user.
+
+The following flags are available:
+
+ - `--write`\
+ Allow the connected client to request the realization of
+ derivations. In effect, this can be used to make the host act as a
+ remote builder.
+
+{{#include ./opt-common.md}}
+
+{{#include ../opt-common.md}}
+
+{{#include ../env-common.md}}
+
+# Examples
+
+To turn a host into a build server, the `authorized_keys` file can be
+used to provide build access to a given SSH public key:
+
+```console
+$ cat <<EOF >>/root/.ssh/authorized_keys
+command="nice -n20 nix-store --serve --write" ssh-rsa AAAAB3NzaC1yc2EAAAA...
+EOF
+```
+
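+On the client side, a host set up this way can be used with tools that speak
+the legacy SSH store protocol, such as `nix-copy-closure`. A sketch; the
+user and host names are hypothetical:
+
+```console
+$ nix-copy-closure --to nix-ssh@buildhost $(which svn)
+```
+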
diff --git a/doc/manual/src/command-ref/nix-store/verify-path.md b/doc/manual/src/command-ref/nix-store/verify-path.md
new file mode 100644
index 000000000..59ffe92a3
--- /dev/null
+++ b/doc/manual/src/command-ref/nix-store/verify-path.md
@@ -0,0 +1,29 @@
+# Name
+
+`nix-store --verify-path` - check path contents against Nix database
+
+## Synopsis
+
+`nix-store` `--verify-path` *paths…*
+
+## Description
+
+The operation `--verify-path` compares the contents of the given store
+paths to their cryptographic hashes stored in Nix’s database. For every
+changed path, it prints a warning message. The exit status is 0 if no
+path has changed, and 1 otherwise.
+
+{{#include ./opt-common.md}}
+
+{{#include ../opt-common.md}}
+
+{{#include ../env-common.md}}
+
+## Example
+
+To verify the integrity of the `svn` command and all its dependencies:
+
+```console
+$ nix-store --verify-path $(nix-store -qR $(which svn))
+```
+
diff --git a/doc/manual/src/command-ref/nix-store/verify.md b/doc/manual/src/command-ref/nix-store/verify.md
new file mode 100644
index 000000000..2695b3361
--- /dev/null
+++ b/doc/manual/src/command-ref/nix-store/verify.md
@@ -0,0 +1,36 @@
+# Name
+
+`nix-store --verify` - check Nix database for consistency
+
+# Synopsis
+
+`nix-store` `--verify` [`--check-contents`] [`--repair`]
+
+# Description
+
+The operation `--verify` verifies the internal consistency of the Nix
+database, and the consistency between the Nix database and the Nix
+store. Any inconsistencies encountered are automatically repaired.
+Inconsistencies are generally the result of the Nix store or database
+being modified by non-Nix tools, or of bugs in Nix itself.
+
+This operation has the following options:
+
+ - `--check-contents`\
+ Checks that the contents of every valid store path have not been
+ altered by computing a SHA-256 hash of the contents and comparing it
+ with the hash stored in the Nix database at build time. Paths that
+ have been modified are printed out. For large stores,
+ `--check-contents` is obviously quite slow.
+
+ - `--repair`\
+ If any valid path is missing from the store, or (if
+ `--check-contents` is given) the contents of a valid path have been
+ modified, then try to repair the path by redownloading it. See
+ `nix-store --repair-path` for details.
+
+{{#include ./opt-common.md}}
+
+{{#include ../opt-common.md}}
+
+{{#include ../env-common.md}}
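+
+# Example
+
+To run a full consistency check, including hashing the contents of every
+valid path (which can take a long time on large stores):
+
+```console
+$ nix-store --verify --check-contents
+```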
diff --git a/doc/manual/src/command-ref/opt-common.md b/doc/manual/src/command-ref/opt-common.md
index e612c416f..a23b87e4e 100644
--- a/doc/manual/src/command-ref/opt-common.md
+++ b/doc/manual/src/command-ref/opt-common.md
@@ -2,13 +2,13 @@
Most Nix commands accept the following command-line options:
- - [`--help`]{#opt-help}\
+ - <span id="opt-help">[`--help`](#opt-help)</span>\
Prints out a summary of the command syntax and exits.
- - [`--version`]{#opt-version}\
+ - <span id="opt-version">[`--version`](#opt-version)</span>\
Prints out the Nix version number on standard output and exits.
- - [`--verbose`]{#opt-verbose} / `-v`\
+ - <span id="opt-verbose">[`--verbose`](#opt-verbose)</span> / `-v`\
Increases the level of verbosity of diagnostic messages printed on
standard error. For each Nix operation, the information printed on
standard output is well-defined; any diagnostic information is
@@ -37,14 +37,14 @@ Most Nix commands accept the following command-line options:
- 5\
“Vomit”: print vast amounts of debug information.
- - [`--quiet`]{#opt-quiet}\
+ - <span id="opt-quiet">[`--quiet`](#opt-quiet)</span>\
Decreases the level of verbosity of diagnostic messages printed on
standard error. This is the inverse option to `-v` / `--verbose`.
This option may be specified repeatedly. See the previous verbosity
levels list.
- - [`--log-format`]{#opt-log-format} *format*\
+ - <span id="opt-log-format">[`--log-format`](#opt-log-format)</span> *format*\
This option can be used to change the output of the log format, with
*format* being one of:
@@ -66,14 +66,14 @@ Most Nix commands accept the following command-line options:
- bar-with-logs\
Display the raw logs, with the progress bar at the bottom.
- - [`--no-build-output`]{#opt-no-build-output} / `-Q`\
+ - <span id="opt-no-build-output">[`--no-build-output`](#opt-no-build-output)</span> / `-Q`\
By default, output written by builders to standard output and
standard error is echoed to the Nix command's standard error. This
option suppresses this behaviour. Note that the builder's standard
output and error are always written to a log file in
`prefix/nix/var/log/nix`.
- - [`--max-jobs`]{#opt-max-jobs} / `-j` *number*\
+ - <span id="opt-max-jobs">[`--max-jobs`](#opt-max-jobs)</span> / `-j` *number*\
Sets the maximum number of build jobs that Nix will perform in
parallel to the specified number. Specify `auto` to use the number
of CPUs in the system. The default is specified by the `max-jobs`
@@ -83,7 +83,7 @@ Most Nix commands accept the following command-line options:
Setting it to `0` disallows building on the local machine, which is
useful when you want builds to happen only on remote builders.
- - [`--cores`]{#opt-cores}\
+ - <span id="opt-cores">[`--cores`](#opt-cores)</span>\
Sets the value of the `NIX_BUILD_CORES` environment variable in
the invocation of builders. Builders can use this variable at
their discretion to control the maximum amount of parallelism. For
@@ -94,18 +94,18 @@ Most Nix commands accept the following command-line options:
means that the builder should use all available CPU cores in the
system.
- - [`--max-silent-time`]{#opt-max-silent-time}\
+ - <span id="opt-max-silent-time">[`--max-silent-time`](#opt-max-silent-time)</span>\
Sets the maximum number of seconds that a builder can go without
producing any data on standard output or standard error. The
default is specified by the `max-silent-time` configuration
setting. `0` means no time-out.
- - [`--timeout`]{#opt-timeout}\
+ - <span id="opt-timeout">[`--timeout`](#opt-timeout)</span>\
Sets the maximum number of seconds that a builder can run. The
default is specified by the `timeout` configuration setting. `0`
means no timeout.
- - [`--keep-going`]{#opt-keep-going} / `-k`\
+ - <span id="opt-keep-going">[`--keep-going`](#opt-keep-going)</span> / `-k`\
Keep going in case of failed builds, to the greatest extent
possible. That is, if building an input of some derivation fails,
Nix will still build the other inputs, but not the derivation
@@ -113,13 +113,13 @@ Most Nix commands accept the following command-line options:
for builds of substitutes), possibly killing builds in progress (in
case of parallel or distributed builds).
- - [`--keep-failed`]{#opt-keep-failed} / `-K`\
+ - <span id="opt-keep-failed">[`--keep-failed`](#opt-keep-failed)</span> / `-K`\
Specifies that in case of a build failure, the temporary directory
(usually in `/tmp`) in which the build takes place should not be
deleted. The path of the build directory is printed as an
informational message.
- - [`--fallback`]{#opt-fallback}\
+ - <span id="opt-fallback">[`--fallback`](#opt-fallback)</span>\
Whenever Nix attempts to build a derivation for which substitutes
are known for each output path, but realising the output paths
through the substitutes fails, fall back on building the derivation.
@@ -134,18 +134,18 @@ Most Nix commands accept the following command-line options:
failure in obtaining the substitutes to lead to a full build from
source (with the related consumption of resources).
- - [`--readonly-mode`]{#opt-readonly-mode}\
+ - <span id="opt-readonly-mode">[`--readonly-mode`](#opt-readonly-mode)</span>\
When this option is used, no attempt is made to open the Nix
database. Most Nix operations do need database access, so those
operations will fail.
- - [`--arg`]{#opt-arg} *name* *value*\
+ - <span id="opt-arg">[`--arg`](#opt-arg)</span> *name* *value*\
This option is accepted by `nix-env`, `nix-instantiate`,
`nix-shell` and `nix-build`. When evaluating Nix expressions, the
expression evaluator will automatically try to call functions that
it encounters. It can automatically call functions for which every
argument has a [default
- value](../language/constructs.md#functions) (e.g.,
+ value](@docroot@/language/constructs.md#functions) (e.g.,
`{ argName ? defaultValue }: ...`). With `--arg`, you can also
call functions that have arguments without a default value (or
override a default value). That is, if the evaluator encounters a
@@ -164,26 +164,26 @@ Most Nix commands accept the following command-line options:
So if you call this Nix expression (e.g., when you do `nix-env -iA
pkgname`), the function will be called automatically using the
- value [`builtins.currentSystem`](../language/builtins.md) for
+ value [`builtins.currentSystem`](@docroot@/language/builtins.md) for
the `system` argument. You can override this using `--arg`, e.g.,
`nix-env -iA pkgname --arg system \"i686-freebsd\"`. (Note that
since the argument is a Nix string literal, you have to escape the
quotes.)
- - [`--argstr`]{#opt-argstr} *name* *value*\
+ - <span id="opt-argstr">[`--argstr`](#opt-argstr)</span> *name* *value*\
This option is like `--arg`, only the value is not a Nix
expression but a string. So instead of `--arg system
\"i686-linux\"` (the outer quotes are to keep the shell happy) you
can say `--argstr system i686-linux`.
- - [`--attr`]{#opt-attr} / `-A` *attrPath*\
+ - <span id="opt-attr">[`--attr`](#opt-attr)</span> / `-A` *attrPath*\
Select an attribute from the top-level Nix expression being
evaluated. (`nix-env`, `nix-instantiate`, `nix-build` and
`nix-shell` only.) The *attribute path* *attrPath* is a sequence
of attribute names separated by dots. For instance, given a
top-level Nix expression *e*, the attribute path `xorg.xorgserver`
would cause the expression `e.xorg.xorgserver` to be used. See
- [`nix-env --install`](nix-env.md#operation---install) for some
+ [`nix-env --install`](@docroot@/command-ref/nix-env/install.md) for some
concrete examples.
In addition to attribute names, you can also specify array indices.
@@ -191,7 +191,7 @@ Most Nix commands accept the following command-line options:
attribute of the fourth element of the array in the `foo` attribute
of the top-level expression.
- - [`--expr`]{#opt-expr} / `-E`\
+ - <span id="opt-expr">[`--expr`](#opt-expr)</span> / `-E`\
Interpret the command line arguments as a list of Nix expressions to
be parsed and evaluated, rather than as a list of file names of Nix
expressions. (`nix-instantiate`, `nix-build` and `nix-shell` only.)
@@ -202,17 +202,17 @@ Most Nix commands accept the following command-line options:
use, give your expression to the `nix-shell -p` convenience flag
instead.
- - [`-I`]{#opt-I} *path*\
+ - <span id="opt-I">[`-I`](#opt-I)</span> *path*\
Add a path to the Nix expression search path. This option may be
given multiple times. See the `NIX_PATH` environment variable for
information on the semantics of the Nix search path. Paths added
through `-I` take precedence over `NIX_PATH`.
- - [`--option`]{#opt-option} *name* *value*\
+ - <span id="opt-option">[`--option`](#opt-option)</span> *name* *value*\
Set the Nix configuration option *name* to *value*. This overrides
settings in the Nix configuration file (see nix.conf5).
- - [`--repair`]{#opt-repair}\
+ - <span id="opt-repair">[`--repair`](#opt-repair)</span>\
Fix corrupted or missing store paths by redownloading or rebuilding
them. Note that this is slow because it requires computing a
cryptographic hash of the contents of every path in the closure of
diff --git a/doc/manual/src/contributing/hacking.md b/doc/manual/src/contributing/hacking.md
index 3869c37a4..ca69f076a 100644
--- a/doc/manual/src/contributing/hacking.md
+++ b/doc/manual/src/contributing/hacking.md
@@ -389,3 +389,35 @@ If a broken link occurs in a snippet that was inserted into multiple generated f
If the `@docroot@` literal appears in an error message from the `mdbook-linkcheck` tool, the `@docroot@` replacement needs to be applied to the generated source file that mentions it.
See existing `@docroot@` logic in the [Makefile].
Regular markdown files used for the manual have a base path of their own and they can use relative paths instead of `@docroot@`.
+
+## API documentation
+
+Doxygen API documentation is [available
+online](https://hydra.nixos.org/job/nix/master/internal-api-docs/latest/download-by-type/doc/internal-api-docs). You
+can also build and view it yourself:
+
+```console
+# nix build .#hydraJobs.internal-api-docs
+# xdg-open ./result/share/doc/nix/internal-api/html/index.html
+```
+
+or inside a `nix develop` shell by running:
+
+```console
+# make internal-api-html
+# xdg-open ./outputs/doc/share/doc/nix/internal-api/html/index.html
+```
+
+## Coverage analysis
+
+A coverage analysis report is [available
+online](https://hydra.nixos.org/job/nix/master/coverage/latest/download-by-type/report/coverage). You
+can build it yourself:
+
+```console
+# nix build .#hydraJobs.coverage
+# xdg-open ./result/coverage/index.html
+```
+
+Metrics about the change in line/function coverage over time are also
+[available](https://hydra.nixos.org/job/nix/master/coverage#tabs-charts).
diff --git a/doc/manual/src/glossary.md b/doc/manual/src/glossary.md
index d0aff34e2..b56d857d1 100644
--- a/doc/manual/src/glossary.md
+++ b/doc/manual/src/glossary.md
@@ -31,7 +31,7 @@
This means either running the `builder` executable as specified in the corresponding [derivation] or fetching a pre-built [store object] from a [substituter].
- See [`nix-build`](./command-ref/nix-build.md) and [`nix-store --realise`](./command-ref/nix-store.md#operation---realise).
+ See [`nix-build`](./command-ref/nix-build.md) and [`nix-store --realise`](@docroot@/command-ref/nix-store/realise.md).
See [`nix build`](./command-ref/new-cli/nix3-build.md) (experimental).
@@ -135,14 +135,13 @@
then be built.
- [reference]{#gloss-reference}\
- A store path `P` is said to have a reference to a store path `Q` if
- the store object at `P` contains the path `Q` somewhere. The
- *references* of a store path are the set of store paths to which it
- has a reference.
+ A [store object] `O` is said to have a *reference* to a store object `P` if a [store path] to `P` appears in the contents of `O`.
- A derivation can reference other derivations and sources (but not
- output paths), whereas an output path only references other output
- paths.
+ Store objects can refer to both other store objects and themselves.
+ References from a store object to itself are called *self-references*.
+ References other than a self-reference must not form a cycle.
+
+ [reference]: #gloss-reference
- [reachable]{#gloss-reachable}\
A store path `Q` is reachable from another store path `P` if `Q`
@@ -159,8 +158,8 @@
files could be missing. The command `nix-store -qR` prints out
closures of store paths.
- As an example, if the store object at path `P` contains a reference
- to path `Q`, then `Q` is in the closure of `P`. Further, if `Q`
+ As an example, if the [store object] at path `P` contains a [reference]
+ to a store object at path `Q`, then `Q` is in the closure of `P`. Further, if `Q`
references `R` then `R` is also in the closure of `P`.
[closure]: #gloss-closure
@@ -193,6 +192,11 @@
A symlink to the current *user environment* of a user, e.g.,
`/nix/var/nix/profiles/default`.
+ - [installable]{#gloss-installable}\
+ Something that can be realised in the Nix store.
+
+ See [installables](./command-ref/new-cli/nix.md#installables) in the [`nix` commands](./command-ref/new-cli/nix.md) (experimental) documentation for details.
+
- [NAR]{#gloss-nar}\
A *N*ix *AR*chive. This is a serialisation of a path in the Nix
store. It can contain regular files, directories and symbolic
diff --git a/doc/manual/src/installation/env-variables.md b/doc/manual/src/installation/env-variables.md
index fb8155a80..db98f52ff 100644
--- a/doc/manual/src/installation/env-variables.md
+++ b/doc/manual/src/installation/env-variables.md
@@ -42,14 +42,11 @@ export NIX_SSL_CERT_FILE=/etc/ssl/my-certificate-bundle.crt
> You must not add the export and then do the install, as the Nix
> installer will detect the presence of Nix configuration, and abort.
-## `NIX_SSL_CERT_FILE` with macOS and the Nix daemon
+If you use the Nix daemon, you should also add the following to
+`/etc/nix/nix.conf`:
-On macOS you must specify the environment variable for the Nix daemon
-service, then restart it:
-
-```console
-$ sudo launchctl setenv NIX_SSL_CERT_FILE /etc/ssl/my-certificate-bundle.crt
-$ sudo launchctl kickstart -k system/org.nixos.nix-daemon
+```
+ssl-cert-file = /etc/ssl/my-certificate-bundle.crt
```
## Proxy Environment Variables
diff --git a/doc/manual/src/installation/prerequisites-source.md b/doc/manual/src/installation/prerequisites-source.md
index 6f4eb3008..5a708f11b 100644
--- a/doc/manual/src/installation/prerequisites-source.md
+++ b/doc/manual/src/installation/prerequisites-source.md
@@ -71,3 +71,8 @@
<http://libcpuid.sourceforge.net>.
This is an optional dependency and can be disabled
by providing a `--disable-cpuid` to the `configure` script.
+
+ - Unless `./configure --disable-tests` is specified, GoogleTest (GTest) and
+ RapidCheck are required, which are available at
+ <https://google.github.io/googletest/> and
+ <https://github.com/emil-e/rapidcheck> respectively.
diff --git a/doc/manual/src/language/advanced-attributes.md b/doc/manual/src/language/advanced-attributes.md
index 5a63236e5..3e8c48890 100644
--- a/doc/manual/src/language/advanced-attributes.md
+++ b/doc/manual/src/language/advanced-attributes.md
@@ -198,8 +198,7 @@ Derivations can declare some infrequently used optional attributes.
- `"recursive"`\
The hash is computed over the NAR archive dump of the output
- (i.e., the result of [`nix-store
- --dump`](../command-ref/nix-store.md#operation---dump)). In
+ (i.e., the result of [`nix-store --dump`](@docroot@/command-ref/nix-store/dump.md)). In
this case, the output can be anything, including a directory
tree.
diff --git a/doc/manual/src/language/operators.md b/doc/manual/src/language/operators.md
index 90b325597..a07d976ad 100644
--- a/doc/manual/src/language/operators.md
+++ b/doc/manual/src/language/operators.md
@@ -43,8 +43,8 @@ If the attribute doesn’t exist, return *value* if provided, otherwise abort ev
An attribute path is a dot-separated list of attribute names.
An attribute name can be an identifier or a string.
-> *attrpath* = *name* [ `.` *name* ]...
-> *name* = *identifier* | *string*
+> *attrpath* = *name* [ `.` *name* ]... \
+> *name* = *identifier* | *string* \
> *identifier* ~ `[a-zA-Z_][a-zA-Z0-9_'-]*`
[Attribute selection]: #attribute-selection
diff --git a/doc/manual/src/language/values.md b/doc/manual/src/language/values.md
index 3973518ca..c85124278 100644
--- a/doc/manual/src/language/values.md
+++ b/doc/manual/src/language/values.md
@@ -205,7 +205,7 @@ You can use arbitrary double-quoted strings as attribute names:
```
```nix
-let bar = "bar";
+let bar = "bar"; in
{ "foo ${bar}" = 123; }."foo ${bar}"
```
diff --git a/doc/manual/src/package-management/s3-substituter.md b/doc/manual/src/package-management/s3-substituter.md
index 30f2b2e11..d8a1d9105 100644
--- a/doc/manual/src/package-management/s3-substituter.md
+++ b/doc/manual/src/package-management/s3-substituter.md
@@ -1,41 +1,11 @@
# Serving a Nix store via S3
-Nix has built-in support for storing and fetching store paths from
+Nix has [built-in support](@docroot@/command-ref/new-cli/nix3-help-stores.md#s3-binary-cache-store)
+for storing and fetching store paths from
Amazon S3 and S3-compatible services. This uses the same *binary*
cache mechanism that Nix usually uses to fetch prebuilt binaries from
[cache.nixos.org](https://cache.nixos.org/).
-The following options can be specified as URL parameters to the S3 URL:
-
- - `profile`\
- The name of the AWS configuration profile to use. By default Nix
- will use the `default` profile.
-
- - `region`\
- The region of the S3 bucket. `us–east-1` by default.
-
- If your bucket is not in `us–east-1`, you should always explicitly
- specify the region parameter.
-
- - `endpoint`\
- The URL to your S3-compatible service, for when not using Amazon S3.
- Do not specify this value if you're using Amazon S3.
-
- > **Note**
- >
- > This endpoint must support HTTPS and will use path-based
- > addressing instead of virtual host based addressing.
-
- - `scheme`\
- The scheme used for S3 requests, `https` (default) or `http`. This
- option allows you to disable HTTPS for binary caches which don't
- support it.
-
- > **Note**
- >
- > HTTPS should be used if the cache might contain sensitive
- > information.
-
In this example we will use the bucket named `example-nix-cache`.
## Anonymous Reads to your S3-compatible binary cache
diff --git a/doc/manual/src/release-notes/rl-next.md b/doc/manual/src/release-notes/rl-next.md
index 091d12b7e..ae159de8f 100644
--- a/doc/manual/src/release-notes/rl-next.md
+++ b/doc/manual/src/release-notes/rl-next.md
@@ -1,5 +1,16 @@
# Release X.Y (202?-??-??)
+* Commands which take installables on the command line can now read them from the standard input if
+ passed the `--stdin` flag. This is primarily useful when you have a large number of paths which
+ exceed the OS argument length limit.
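+
+ For example (a sketch, assuming `paths.txt` lists one store path per line):
+
+ ```console
+ $ nix path-info --stdin < paths.txt
+ ```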
+
+* The `nix-hash` command now supports Base64 and SRI. Use the flags `--base64`
+ or `--sri` to specify the format of the output hash as Base64 or SRI, and `--to-base64`
+ or `--to-sri` to convert a hash to Base64 or SRI format, respectively.
+
+ As the choice of hash formats is no longer binary, the `--base16` flag is also added
+ to explicitly specify the Base16 format, which is still the default.
+
* The special handling of an [installable](../command-ref/new-cli/nix.md#installables) with `.drv` suffix being interpreted as all of the given [store derivation](../glossary.md#gloss-store-derivation)'s output paths is removed, and instead taken as the literal store path that it represents.
The new `^` syntax for store paths introduced in Nix 2.13 allows explicitly referencing output paths of a derivation.
@@ -16,3 +27,15 @@
$ nix path-info /nix/store/gzaflydcr6sb3567hap9q6srzx8ggdgg-glibc-2.33-78.drv^*
```
provides information about each of its outputs.
+
+* The experimental command `nix describe-stores` has been removed.
+
+* Nix stores and their settings are now documented in [`nix help-stores`](@docroot@/command-ref/new-cli/nix3-help-stores.md).
+
+* Documentation for the operations of `nix-store` and `nix-env` is now available on separate pages of the manual.
+ They include all common options that can be specified and common environment variables that affect these commands.
+
+ These pages can be viewed offline with `man` using
+
+ * `man nix-store-<operation>` and `man nix-env-<operation>`
+ * `nix-store --help --<operation>` and `nix-env --help --<operation>`.
diff --git a/doc/manual/utils.nix b/doc/manual/utils.nix
index d0643ef46..5eacce0dd 100644
--- a/doc/manual/utils.nix
+++ b/doc/manual/utils.nix
@@ -38,4 +38,46 @@ rec {
filterAttrs = pred: set:
listToAttrs (concatMap (name: let v = set.${name}; in if pred name v then [(nameValuePair name v)] else []) (attrNames set));
+
+ showSetting = { useAnchors }: name: { description, documentDefault, defaultValue, aliases, value }:
+ let
+ result = squash ''
+ - ${if useAnchors
+ then ''<span id="conf-${name}">[`${name}`](#conf-${name})</span>''
+ else ''`${name}`''}
+
+ ${indent " " body}
+ '';
+
+ # separate body to cleanly handle indentation
+ body = ''
+ ${description}
+
+ **Default:** ${showDefault documentDefault defaultValue}
+
+ ${showAliases aliases}
+ '';
+
+ showDefault = documentDefault: defaultValue:
+ if documentDefault then
+ # a StringMap value type is specified as a string, but
+ # this shows the value type. The empty stringmap is `null` in
+ # JSON, but that converts to `{ }` here.
+ if defaultValue == "" || defaultValue == [] || isAttrs defaultValue
+ then "*empty*"
+ else if isBool defaultValue then
+ if defaultValue then "`true`" else "`false`"
+ else "`${toString defaultValue}`"
+ else "*machine-specific*";
+
+ showAliases = aliases:
+ if aliases == [] then "" else
+ "**Deprecated alias:** ${(concatStringsSep ", " (map (s: "`${s}`") aliases))}";
+
+ indent = prefix: s:
+ concatStringsSep "\n" (map (x: if x == "" then x else "${prefix}${x}") (splitLines s));
+
+ in result;
+
+ showSettings = args: settingsInfo: concatStrings (attrValues (mapAttrs (showSetting args) settingsInfo));
}
diff --git a/docker.nix b/docker.nix
index 203a06b53..52199af66 100644
--- a/docker.nix
+++ b/docker.nix
@@ -8,6 +8,7 @@
, extraPkgs ? []
, maxLayers ? 100
, nixConf ? {}
+, flake-registry ? null
}:
let
defaultPkgs = with pkgs; [
@@ -247,7 +248,16 @@ let
mkdir -p $out/bin $out/usr/bin
ln -s ${pkgs.coreutils}/bin/env $out/usr/bin/env
ln -s ${pkgs.bashInteractive}/bin/bash $out/bin/sh
- '';
+
+ '' + (lib.optionalString (flake-registry != null) ''
+ nixCacheDir="/root/.cache/nix"
+ mkdir -p $out$nixCacheDir
+ globalFlakeRegistryPath="$nixCacheDir/flake-registry.json"
+ ln -s ${flake-registry}/flake-registry.json $out$globalFlakeRegistryPath
+ mkdir -p $out/nix/var/nix/gcroots/auto
+ rootName=$(${pkgs.nix}/bin/nix --extra-experimental-features nix-command hash file --type sha1 --base32 <(echo -n $globalFlakeRegistryPath))
+ ln -s $globalFlakeRegistryPath $out/nix/var/nix/gcroots/auto/$rootName
+ '');
in
pkgs.dockerTools.buildLayeredImageWithNixDb {
diff --git a/flake.lock b/flake.lock
index 4490b5ead..1d2aab5ed 100644
--- a/flake.lock
+++ b/flake.lock
@@ -1,5 +1,21 @@
{
"nodes": {
+ "flake-compat": {
+ "flake": false,
+ "locked": {
+ "lastModified": 1673956053,
+ "narHash": "sha256-4gtG9iQuiKITOjNQQeQIpoIB6b16fm+504Ch3sNKLd8=",
+ "owner": "edolstra",
+ "repo": "flake-compat",
+ "rev": "35bb57c0c8d8b62bbfd284272c928ceb64ddbde9",
+ "type": "github"
+ },
+ "original": {
+ "owner": "edolstra",
+ "repo": "flake-compat",
+ "type": "github"
+ }
+ },
"lowdown-src": {
"flake": false,
"locked": {
@@ -50,6 +66,7 @@
},
"root": {
"inputs": {
+ "flake-compat": "flake-compat",
"lowdown-src": "lowdown-src",
"nixpkgs": "nixpkgs",
"nixpkgs-regression": "nixpkgs-regression"
diff --git a/flake.nix b/flake.nix
index 563a46d65..dc64bdfcf 100644
--- a/flake.nix
+++ b/flake.nix
@@ -4,8 +4,9 @@
inputs.nixpkgs.url = "github:NixOS/nixpkgs/nixos-22.11-small";
inputs.nixpkgs-regression.url = "github:NixOS/nixpkgs/215d4d0fd80ca5163643b03a33fde804a29cc1e2";
inputs.lowdown-src = { url = "github:kristapsdz/lowdown"; flake = false; };
+ inputs.flake-compat = { url = "github:edolstra/flake-compat"; flake = false; };
- outputs = { self, nixpkgs, nixpkgs-regression, lowdown-src }:
+ outputs = { self, nixpkgs, nixpkgs-regression, lowdown-src, flake-compat }:
let
inherit (nixpkgs) lib;
@@ -89,9 +90,7 @@
});
configureFlags =
- [
- "CXXFLAGS=-I${lib.getDev rapidcheck}/extras/gtest/include"
- ] ++ lib.optionals stdenv.isLinux [
+ lib.optionals stdenv.isLinux [
"--with-boost=${boost}/lib"
"--with-sandbox-shell=${sh}/bin/busybox"
]
@@ -99,6 +98,14 @@
"LDFLAGS=-fuse-ld=gold"
];
+ testConfigureFlags = [
+ "RAPIDCHECK_HEADERS=${lib.getDev rapidcheck}/extras/gtest/include"
+ ];
+
+ internalApiDocsConfigureFlags = [
+ "--enable-internal-api-docs"
+ ];
+
nativeBuildDeps =
[
buildPackages.bison
@@ -124,13 +131,20 @@
libarchive
boost
lowdown-nix
- gtest
- rapidcheck
]
++ lib.optionals stdenv.isLinux [libseccomp]
++ lib.optional (stdenv.isLinux || stdenv.isDarwin) libsodium
++ lib.optional stdenv.hostPlatform.isx86_64 libcpuid;
+ checkDeps = [
+ gtest
+ rapidcheck
+ ];
+
+ internalApiDocsDeps = [
+ buildPackages.doxygen
+ ];
+
awsDeps = lib.optional (stdenv.isLinux || stdenv.isDarwin)
(aws-sdk-cpp.override {
apis = ["s3" "transfer"];
@@ -200,7 +214,7 @@
VERSION_SUFFIX = versionSuffix;
nativeBuildInputs = nativeBuildDeps;
- buildInputs = buildDeps ++ awsDeps;
+ buildInputs = buildDeps ++ awsDeps ++ checkDeps;
propagatedBuildInputs = propagatedDeps;
enableParallelBuilding = true;
@@ -305,12 +319,18 @@
};
let
canRunInstalled = currentStdenv.buildPlatform.canExecute currentStdenv.hostPlatform;
- in currentStdenv.mkDerivation {
+
+ sourceByRegexInverted = rxs: origSrc: final.lib.cleanSourceWith {
+ filter = (path: type:
+ let relPath = final.lib.removePrefix (toString origSrc + "/") (toString path);
+ in ! lib.any (re: builtins.match re relPath != null) rxs);
+ src = origSrc;
+ };
+ in currentStdenv.mkDerivation (finalAttrs: {
name = "nix-${version}";
inherit version;
- src = self;
-
+ src = sourceByRegexInverted [ "tests/nixos/.*" "tests/installer/.*" ] self;
VERSION_SUFFIX = versionSuffix;
outputs = [ "out" "dev" "doc" ];
@@ -318,7 +338,8 @@
nativeBuildInputs = nativeBuildDeps;
buildInputs = buildDeps
# There have been issues building these dependencies
- ++ lib.optionals (currentStdenv.hostPlatform == currentStdenv.buildPlatform) awsDeps;
+ ++ lib.optionals (currentStdenv.hostPlatform == currentStdenv.buildPlatform) awsDeps
+ ++ lib.optionals finalAttrs.doCheck checkDeps;
propagatedBuildInputs = propagatedDeps;
@@ -348,6 +369,8 @@
configureFlags = configureFlags ++
[ "--sysconfdir=/etc" ] ++
lib.optional stdenv.hostPlatform.isStatic "--enable-embedded-sandbox-shell" ++
+ [ (lib.enableFeature finalAttrs.doCheck "tests") ] ++
+ lib.optionals finalAttrs.doCheck testConfigureFlags ++
lib.optional (!canRunInstalled) "--disable-doc-gen";
enableParallelBuilding = true;
@@ -361,6 +384,10 @@
postInstall = ''
mkdir -p $doc/nix-support
echo "doc manual $doc/share/doc/nix/manual" >> $doc/nix-support/hydra-build-products
+ ${lib.optionalString currentStdenv.hostPlatform.isStatic ''
+ mkdir -p $out/nix-support
+ echo "file binary-dist $out/bin/nix" >> $out/nix-support/hydra-build-products
+ ''}
${lib.optionalString currentStdenv.isDarwin ''
install_name_tool \
-change ${boost}/lib/libboost_context.dylib \
@@ -369,8 +396,9 @@
''}
'';
- doInstallCheck = true;
+ doInstallCheck = finalAttrs.doCheck;
installCheckFlags = "sysconfdir=$(out)/etc";
+ installCheckTarget = "installcheck"; # work around buggy detection in stdenv
separateDebugInfo = !currentStdenv.hostPlatform.isStatic;
@@ -411,7 +439,7 @@
});
meta.platforms = lib.platforms.unix;
- };
+ });
lowdown-nix = with final; currentStdenv.mkDerivation rec {
name = "lowdown-0.9.0";
@@ -444,8 +472,6 @@
};
in {
- inherit nixpkgsFor;
-
# A Nixpkgs overlay that overrides the 'nix' and
# 'nix.perl-bindings' packages.
overlays.default = overlayFor (p: p.stdenv);
@@ -462,6 +488,14 @@
buildNoGc = forAllSystems (system: self.packages.${system}.nix.overrideAttrs (a: { configureFlags = (a.configureFlags or []) ++ ["--enable-gc=no"];}));
+ buildNoTests = forAllSystems (system:
+ self.packages.${system}.nix.overrideAttrs (a: {
+ doCheck =
+ assert ! a?dontCheck;
+ false;
+ })
+ );
+
# Perl bindings for various platforms.
perlBindings = forAllSystems (system: nixpkgsFor.${system}.native.nix.perl-bindings);
@@ -496,25 +530,48 @@
src = self;
- configureFlags = [
- "CXXFLAGS=-I${lib.getDev pkgs.rapidcheck}/extras/gtest/include"
- ];
+ configureFlags = testConfigureFlags;
enableParallelBuilding = true;
nativeBuildInputs = nativeBuildDeps;
- buildInputs = buildDeps ++ propagatedDeps ++ awsDeps;
+ buildInputs = buildDeps ++ propagatedDeps ++ awsDeps ++ checkDeps;
dontInstall = false;
doInstallCheck = true;
+ installCheckTarget = "installcheck"; # work around buggy detection in stdenv
lcovFilter = [ "*/boost/*" "*-tab.*" ];
- # We call `dot', and even though we just use it to
- # syntax-check generated dot files, it still requires some
- # fonts. So provide those.
- FONTCONFIG_FILE = texFunctions.fontsConf;
+ hardeningDisable = ["fortify"];
+ };
+
+ # API docs for Nix's unstable internal C++ interfaces.
+ internal-api-docs =
+ with nixpkgsFor.x86_64-linux.native;
+ with commonDeps { inherit pkgs; };
+
+ stdenv.mkDerivation {
+ pname = "nix-internal-api-docs";
+ inherit version;
+
+ src = self;
+
+ configureFlags = testConfigureFlags ++ internalApiDocsConfigureFlags;
+
+ nativeBuildInputs = nativeBuildDeps;
+ buildInputs = buildDeps ++ propagatedDeps
+ ++ awsDeps ++ checkDeps ++ internalApiDocsDeps;
+
+ dontBuild = true;
+
+ installTargets = [ "internal-api-html" ];
+
+ postInstall = ''
+ mkdir -p $out/nix-support
+ echo "doc internal-api-docs $out/share/doc/nix/internal-api/html" >> $out/nix-support/hydra-build-products
+ '';
};
# System tests.
@@ -524,6 +581,8 @@
tests.nix-copy-closure = runNixOSTestFor "x86_64-linux" ./tests/nixos/nix-copy-closure.nix;
+ tests.nix-copy = runNixOSTestFor "x86_64-linux" ./tests/nixos/nix-copy.nix;
+
tests.nssPreload = runNixOSTestFor "x86_64-linux" ./tests/nixos/nss-preload.nix;
tests.githubFlakes = runNixOSTestFor "x86_64-linux" ./tests/nixos/github-flakes.nix;
@@ -634,9 +693,11 @@
nativeBuildInputs = nativeBuildDeps
++ (lib.optionals stdenv.cc.isClang [ pkgs.bear pkgs.clang-tools ]);
- buildInputs = buildDeps ++ propagatedDeps ++ awsDeps;
+ buildInputs = buildDeps ++ propagatedDeps
+ ++ awsDeps ++ checkDeps ++ internalApiDocsDeps;
- inherit configureFlags;
+ configureFlags = configureFlags
+ ++ testConfigureFlags ++ internalApiDocsConfigureFlags;
enableParallelBuilding = true;
diff --git a/local.mk b/local.mk
index 6a7074e8e..6951c179e 100644
--- a/local.mk
+++ b/local.mk
@@ -1,6 +1,8 @@
clean-files += Makefile.config
-GLOBAL_CXXFLAGS += -Wno-deprecated-declarations
+GLOBAL_CXXFLAGS += -Wno-deprecated-declarations -Werror=switch
+# Allow switch-enum to be overridden for files that do not support it, usually because of dependency headers.
+ERROR_SWITCH_ENUM = -Werror=switch-enum
$(foreach i, config.h $(wildcard src/lib*/*.hh), \
$(eval $(call install-file-in, $(i), $(includedir)/nix, 0644)))
diff --git a/maintainers/README.md b/maintainers/README.md
index 08d197c1b..36d40a0ab 100644
--- a/maintainers/README.md
+++ b/maintainers/README.md
@@ -2,7 +2,30 @@
## Motivation
-The goal of the team is to help other people to contribute to Nix.
+The team's main responsibility is to set a direction for the development of Nix and ensure that the code is in good shape.
+
+We aim to achieve this by improving the contributor experience and attracting more maintainers – that is, by helping other people contribute to Nix and eventually take responsibility – in order to scale the development process to match users' needs.
+
+### Objectives
+
+- It is obvious what is worthwhile to work on.
+- It is easy to find the right place in the code to make a change.
+- It is clear what is expected of a pull request.
+- It is predictable how to get a change merged and released.
+
+### Tasks
+
+- Establish, communicate, and maintain a technical roadmap
+- Improve documentation targeted at contributors
+ - Record architecture and design decisions
+ - Elaborate contribution guides and abide by them
+ - Define and assert quality criteria for contributions
+- Maintain the issue tracker and triage pull requests
+- Help contributors succeed with pull requests that address roadmap milestones
+- Manage the release lifecycle
+- Regularly publish reports on work done
+- Engage with third parties in the interest of the project
+- Ensure the required maintainer capacity for all of the above
## Members
@@ -11,6 +34,7 @@ The goal of the team is to help other people to contribute to Nix.
- Valentin Gagarin (@fricklerhandwerk)
- Thomas Bereknyei (@tomberek)
- Robert Hensing (@roberth)
+- John Ericson (@Ericson2314)
## Meeting protocol
@@ -45,6 +69,7 @@ Issues on the board progress through the following states:
2. [security](https://github.com/NixOS/nix/labels/security)
3. [regression](https://github.com/NixOS/nix/labels/regression)
4. [bug](https://github.com/NixOS/nix/issues?q=is%3Aopen+label%3Abug+sort%3Areactions-%2B1-desc)
+ 5. [tests of existing functionality](https://github.com/NixOS/nix/issues?q=is%3Aopen+label%3Atests+-label%3Afeature+sort%3Areactions-%2B1-desc)
- [oldest pull requests](https://github.com/NixOS/nix/pulls?q=is%3Apr+is%3Aopen+sort%3Acreated-asc)
- [most popular pull requests](https://github.com/NixOS/nix/pulls?q=is%3Apr+is%3Aopen+sort%3Areactions-%2B1-desc)
@@ -67,7 +92,7 @@ Issues on the board progress through the following states:
Contributors who took the time to implement concrete change proposals should not wait indefinitely.
- - Prioritise fixing bugs over documentation, improvements or new features
+ - Prioritise fixing bugs and testing over documentation, improvements or new features
The team values stability and accessibility higher than raw functionality.
diff --git a/maintainers/release-process.md b/maintainers/release-process.md
index b52d218f5..ec9e96489 100644
--- a/maintainers/release-process.md
+++ b/maintainers/release-process.md
@@ -123,6 +123,7 @@ release:
`/home/eelco/Dev/nixpkgs-pristine`.
TODO: trigger nixos.org netlify: https://docs.netlify.com/configure-builds/build-hooks/
+
* Prepare for the next point release by editing `.version` to
e.g.
@@ -152,7 +153,7 @@ release:
from the previous milestone, and close the previous milestone. Set
the date for the next milestone 6 weeks from now.
-* Create a backport label
+* Create a backport label.
* Post an [announcement on Discourse](https://discourse.nixos.org/c/announcements/8), including the contents of
`rl-$VERSION.md`.
diff --git a/misc/launchd/org.nixos.nix-daemon.plist.in b/misc/launchd/org.nixos.nix-daemon.plist.in
index 5fa489b20..e1470cf99 100644
--- a/misc/launchd/org.nixos.nix-daemon.plist.in
+++ b/misc/launchd/org.nixos.nix-daemon.plist.in
@@ -4,8 +4,6 @@
<dict>
<key>EnvironmentVariables</key>
<dict>
- <key>NIX_SSL_CERT_FILE</key>
- <string>/nix/var/nix/profiles/default/etc/ssl/certs/ca-bundle.crt</string>
<key>OBJC_DISABLE_INITIALIZE_FORK_SAFETY</key>
<string>YES</string>
</dict>
diff --git a/mk/debug-test.sh b/mk/debug-test.sh
index 6299e68a0..b5b628ecd 100755
--- a/mk/debug-test.sh
+++ b/mk/debug-test.sh
@@ -1,6 +1,6 @@
#!/usr/bin/env bash
-set -eu
+set -eu -o pipefail
test=$1
diff --git a/mk/disable-tests.mk b/mk/disable-tests.mk
new file mode 100644
index 000000000..f72f84412
--- /dev/null
+++ b/mk/disable-tests.mk
@@ -0,0 +1,12 @@
+# This file is only active for `./configure --disable-tests`.
+# Running `make check` or `make installcheck` would indicate a mistake in the
+# caller.
+
+installcheck:
+ @echo "Tests are disabled. Configure without '--disable-tests', or avoid calling 'make installcheck'."
+ @exit 1
+
+# This currently has little effect.
+check:
+ @echo "Tests are disabled. Configure without '--disable-tests', or avoid calling 'make check'."
+ @exit 1
diff --git a/mk/libraries.mk b/mk/libraries.mk
index 02e4d47f9..1bc73d7f7 100644
--- a/mk/libraries.mk
+++ b/mk/libraries.mk
@@ -126,7 +126,7 @@ define build-library
$(1)_PATH := $$(_d)/$$($(1)_NAME).a
$$($(1)_PATH): $$($(1)_OBJS) | $$(_d)/
- +$$(trace-ld) $(LD) -Ur -o $$(_d)/$$($(1)_NAME).o $$^
+ $$(trace-ld) $(LD) $$(ifndef $(HOST_DARWIN),-U) -r -o $$(_d)/$$($(1)_NAME).o $$^
$$(trace-ar) $(AR) crs $$@ $$(_d)/$$($(1)_NAME).o
$(1)_LDFLAGS_USE += $$($(1)_PATH) $$($(1)_LDFLAGS) $$(foreach lib, $$($(1)_LIBS), $$($$(lib)_LDFLAGS_USE))
diff --git a/mk/patterns.mk b/mk/patterns.mk
index 86a724806..c81150260 100644
--- a/mk/patterns.mk
+++ b/mk/patterns.mk
@@ -1,10 +1,10 @@
$(buildprefix)%.o: %.cc
@mkdir -p "$(dir $@)"
- $(trace-cxx) $(CXX) -o $@ -c $< $(CPPFLAGS) $(GLOBAL_CXXFLAGS_PCH) $(GLOBAL_CXXFLAGS) $(CXXFLAGS) $($@_CXXFLAGS) -MMD -MF $(call filename-to-dep, $@) -MP
+ $(trace-cxx) $(CXX) -o $@ -c $< $(CPPFLAGS) $(GLOBAL_CXXFLAGS_PCH) $(GLOBAL_CXXFLAGS) $(CXXFLAGS) $($@_CXXFLAGS) $(ERROR_SWITCH_ENUM) -MMD -MF $(call filename-to-dep, $@) -MP
$(buildprefix)%.o: %.cpp
@mkdir -p "$(dir $@)"
- $(trace-cxx) $(CXX) -o $@ -c $< $(CPPFLAGS) $(GLOBAL_CXXFLAGS_PCH) $(GLOBAL_CXXFLAGS) $(CXXFLAGS) $($@_CXXFLAGS) -MMD -MF $(call filename-to-dep, $@) -MP
+ $(trace-cxx) $(CXX) -o $@ -c $< $(CPPFLAGS) $(GLOBAL_CXXFLAGS_PCH) $(GLOBAL_CXXFLAGS) $(CXXFLAGS) $($@_CXXFLAGS) $(ERROR_SWITCH_ENUM) -MMD -MF $(call filename-to-dep, $@) -MP
$(buildprefix)%.o: %.c
@mkdir -p "$(dir $@)"
diff --git a/mk/run-test.sh b/mk/run-test.sh
index 219c8577f..1a1d65930 100755
--- a/mk/run-test.sh
+++ b/mk/run-test.sh
@@ -1,6 +1,6 @@
#!/usr/bin/env bash
-set -u
+set -eu -o pipefail
red=""
green=""
@@ -22,24 +22,11 @@ fi
run_test () {
(init_test 2>/dev/null > /dev/null)
- log="$(run_test_proper 2>&1)"
- status=$?
+ log="$(run_test_proper 2>&1)" && status=0 || status=$?
}
run_test
-# Hack: Retry the test if it fails with “unexpected EOF reading a line” as these
-# appear randomly without anyone knowing why.
-# See https://github.com/NixOS/nix/issues/3605 for more info
-if [[ $status -ne 0 && $status -ne 99 && \
- "$(uname)" == "Darwin" && \
- "$log" =~ "unexpected EOF reading a line" \
-]]; then
- echo "$post_run_msg [${yellow}FAIL$normal] (possibly flaky, so will be retried)"
- echo "$log" | sed 's/^/ /'
- run_test
-fi
-
if [ $status -eq 0 ]; then
echo "$post_run_msg [${green}PASS$normal]"
elif [ $status -eq 99 ]; then
diff --git a/scripts/nix-profile-daemon.sh.in b/scripts/nix-profile-daemon.sh.in
index 80774f1f2..8cfd3149e 100644
--- a/scripts/nix-profile-daemon.sh.in
+++ b/scripts/nix-profile-daemon.sh.in
@@ -8,7 +8,7 @@ if [ -n "${XDG_STATE_HOME-}" ]; then
else
NIX_LINK_NEW=$HOME/.local/state/nix/profile
fi
-if ! [ -e "$NIX_LINK" ]; then
+if [ -e "$NIX_LINK_NEW" ]; then
NIX_LINK="$NIX_LINK_NEW"
else
if [ -t 2 ] && [ -e "$NIX_LINK_NEW" ]; then
diff --git a/scripts/nix-profile.sh.in b/scripts/nix-profile.sh.in
index e8af91211..c4d60cf37 100644
--- a/scripts/nix-profile.sh.in
+++ b/scripts/nix-profile.sh.in
@@ -8,7 +8,7 @@ if [ -n "$HOME" ] && [ -n "$USER" ]; then
else
NIX_LINK_NEW="$HOME/.local/state/nix/profile"
fi
- if ! [ -e "$NIX_LINK" ]; then
+ if [ -e "$NIX_LINK_NEW" ]; then
NIX_LINK="$NIX_LINK_NEW"
else
if [ -t 2 ] && [ -e "$NIX_LINK_NEW" ]; then
diff --git a/src/build-remote/build-remote.cc b/src/build-remote/build-remote.cc
index 174435e7c..cfc4baaca 100644
--- a/src/build-remote/build-remote.cc
+++ b/src/build-remote/build-remote.cc
@@ -219,7 +219,7 @@ static int main_build_remote(int argc, char * * argv)
% concatStringsSep<StringSet>(", ", m.supportedFeatures)
% concatStringsSep<StringSet>(", ", m.mandatoryFeatures);
- printMsg(couldBuildLocally ? lvlChatty : lvlWarn, error);
+ printMsg(couldBuildLocally ? lvlChatty : lvlWarn, error.str());
std::cerr << "# decline\n";
}
@@ -305,7 +305,7 @@ connected:
std::set<Realisation> missingRealisations;
StorePathSet missingPaths;
- if (settings.isExperimentalFeatureEnabled(Xp::CaDerivations) && !drv.type().hasKnownOutputPaths()) {
+ if (experimentalFeatureSettings.isEnabled(Xp::CaDerivations) && !drv.type().hasKnownOutputPaths()) {
for (auto & outputName : wantedOutputs) {
auto thisOutputHash = outputHashes.at(outputName);
auto thisOutputId = DrvOutput{ thisOutputHash, outputName };
@@ -337,7 +337,7 @@ connected:
for (auto & realisation : missingRealisations) {
// Should hold, because if the feature isn't enabled the set
// of missing realisations should be empty
- settings.requireExperimentalFeature(Xp::CaDerivations);
+ experimentalFeatureSettings.require(Xp::CaDerivations);
store->registerDrvOutput(realisation);
}
diff --git a/src/libcmd/command-installable-value.cc b/src/libcmd/command-installable-value.cc
new file mode 100644
index 000000000..d7581534b
--- /dev/null
+++ b/src/libcmd/command-installable-value.cc
@@ -0,0 +1,11 @@
+#include "command-installable-value.hh"
+
+namespace nix {
+
+void InstallableValueCommand::run(ref<Store> store, ref<Installable> installable)
+{
+ auto installableValue = InstallableValue::require(installable);
+ run(store, installableValue);
+}
+
+}
diff --git a/src/libcmd/command-installable-value.hh b/src/libcmd/command-installable-value.hh
new file mode 100644
index 000000000..7880d4119
--- /dev/null
+++ b/src/libcmd/command-installable-value.hh
@@ -0,0 +1,23 @@
+#pragma once
+///@file
+
+#include "installable-value.hh"
+#include "command.hh"
+
+namespace nix {
+
+/**
+ * An InstallableCommand where the single positional argument must be an
+ * InstallableValue in particular.
+ */
+struct InstallableValueCommand : InstallableCommand
+{
+ /**
+ * Entry point to this command
+ */
+ virtual void run(ref<Store> store, ref<InstallableValue> installable) = 0;
+
+ void run(ref<Store> store, ref<Installable> installable) override;
+};
+
+}
diff --git a/src/libcmd/command.cc b/src/libcmd/command.cc
index ab51c229d..bedf11e2c 100644
--- a/src/libcmd/command.cc
+++ b/src/libcmd/command.cc
@@ -165,7 +165,7 @@ BuiltPathsCommand::BuiltPathsCommand(bool recursive)
});
}
-void BuiltPathsCommand::run(ref<Store> store)
+void BuiltPathsCommand::run(ref<Store> store, Installables && installables)
{
BuiltPaths paths;
if (all) {
@@ -211,7 +211,7 @@ void StorePathsCommand::run(ref<Store> store, BuiltPaths && paths)
run(store, std::move(sorted));
}
-void StorePathCommand::run(ref<Store> store, std::vector<StorePath> && storePaths)
+void StorePathCommand::run(ref<Store> store, StorePaths && storePaths)
{
if (storePaths.size() != 1)
throw UsageError("this command requires exactly one store path");
@@ -246,7 +246,7 @@ void MixProfile::updateProfile(const BuiltPaths & buildables)
{
if (!profile) return;
- std::vector<StorePath> result;
+ StorePaths result;
for (auto & buildable : buildables) {
std::visit(overloaded {
diff --git a/src/libcmd/command.hh b/src/libcmd/command.hh
index b6d554aab..96236b987 100644
--- a/src/libcmd/command.hh
+++ b/src/libcmd/command.hh
@@ -1,6 +1,7 @@
#pragma once
+///@file
-#include "installables.hh"
+#include "installable-value.hh"
#include "args.hh"
#include "common-eval-args.hh"
#include "path.hh"
@@ -18,17 +19,21 @@ class EvalState;
struct Pos;
class Store;
+static constexpr Command::Category catHelp = -1;
static constexpr Command::Category catSecondary = 100;
static constexpr Command::Category catUtility = 101;
static constexpr Command::Category catNixInstallation = 102;
-static constexpr auto installablesCategory = "Options that change the interpretation of installables";
+static constexpr auto installablesCategory = "Options that change the interpretation of [installables](@docroot@/command-ref/new-cli/nix.md#installables)";
struct NixMultiCommand : virtual MultiCommand, virtual Command
{
nlohmann::json toJSON() override;
};
+// For the overloaded run methods
+#pragma GCC diagnostic ignored "-Woverloaded-virtual"
+
/* A command that requires a Nix store. */
struct StoreCommand : virtual Command
{
@@ -97,10 +102,10 @@ struct SourceExprCommand : virtual Args, MixFlakeOptions
SourceExprCommand();
- std::vector<std::shared_ptr<Installable>> parseInstallables(
+ Installables parseInstallables(
ref<Store> store, std::vector<std::string> ss);
- std::shared_ptr<Installable> parseInstallable(
+ ref<Installable> parseInstallable(
ref<Store> store, const std::string & installable);
virtual Strings getDefaultFlakeAttrPaths();
@@ -115,34 +120,43 @@ struct MixReadOnlyOption : virtual Args
MixReadOnlyOption();
};
-/* A command that operates on a list of "installables", which can be
- store paths, attribute paths, Nix expressions, etc. */
-struct InstallablesCommand : virtual Args, SourceExprCommand
+/* Like InstallablesCommand but the installables are not loaded */
+struct RawInstallablesCommand : virtual Args, SourceExprCommand
{
- std::vector<std::shared_ptr<Installable>> installables;
+ RawInstallablesCommand();
- InstallablesCommand();
+ virtual void run(ref<Store> store, std::vector<std::string> && rawInstallables) = 0;
+
+ void run(ref<Store> store) override;
- void prepare() override;
- Installables load();
+ // FIXME make const after CmdRepl's override is fixed up
+ virtual void applyDefaultInstallables(std::vector<std::string> & rawInstallables);
- virtual bool useDefaultInstallables() { return true; }
+ bool readFromStdIn = false;
std::vector<std::string> getFlakesForCompletion() override;
-protected:
+private:
- std::vector<std::string> _installables;
+ std::vector<std::string> rawInstallables;
+};
+/* A command that operates on a list of "installables", which can be
+ store paths, attribute paths, Nix expressions, etc. */
+struct InstallablesCommand : RawInstallablesCommand
+{
+ virtual void run(ref<Store> store, Installables && installables) = 0;
+
+ void run(ref<Store> store, std::vector<std::string> && rawInstallables) override;
};
/* A command that operates on exactly one "installable" */
struct InstallableCommand : virtual Args, SourceExprCommand
{
- std::shared_ptr<Installable> installable;
-
InstallableCommand();
- void prepare() override;
+ virtual void run(ref<Store> store, ref<Installable> installable) = 0;
+
+ void run(ref<Store> store) override;
std::vector<std::string> getFlakesForCompletion() override
{
@@ -177,22 +191,18 @@ public:
BuiltPathsCommand(bool recursive = false);
- using StoreCommand::run;
-
virtual void run(ref<Store> store, BuiltPaths && paths) = 0;
- void run(ref<Store> store) override;
+ void run(ref<Store> store, Installables && installables) override;
- bool useDefaultInstallables() override { return !all; }
+ void applyDefaultInstallables(std::vector<std::string> & rawInstallables) override;
};
struct StorePathsCommand : public BuiltPathsCommand
{
StorePathsCommand(bool recursive = false);
- using BuiltPathsCommand::run;
-
- virtual void run(ref<Store> store, std::vector<StorePath> && storePaths) = 0;
+ virtual void run(ref<Store> store, StorePaths && storePaths) = 0;
void run(ref<Store> store, BuiltPaths && paths) override;
};
@@ -200,11 +210,9 @@ struct StorePathsCommand : public BuiltPathsCommand
/* A command that operates on exactly one store path. */
struct StorePathCommand : public StorePathsCommand
{
- using StorePathsCommand::run;
-
virtual void run(ref<Store> store, const StorePath & storePath) = 0;
- void run(ref<Store> store, std::vector<StorePath> && storePaths) override;
+ void run(ref<Store> store, StorePaths && storePaths) override;
};
/* A helper class for registering commands globally. */
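Similarly, a sketch of a single-store-path command under the tightened `StorePaths` override chain; `CmdShowPath` is hypothetical.

```cpp
#include "command.hh"
#include <iostream>

using namespace nix;

struct CmdShowPath : StorePathCommand
{
    std::string description() override
    {
        return "print the store path of the given installable";
    }

    void run(ref<Store> store, const StorePath & storePath) override
    {
        // StorePathCommand::run(ref<Store>, StorePaths &&) has already
        // checked that exactly one store path was given.
        std::cout << store->printStorePath(storePath) << std::endl;
    }
};
```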
diff --git a/src/libcmd/common-eval-args.cc b/src/libcmd/common-eval-args.cc
index 908127b4d..5b6477c82 100644
--- a/src/libcmd/common-eval-args.cc
+++ b/src/libcmd/common-eval-args.cc
@@ -136,7 +136,11 @@ MixEvalArgs::MixEvalArgs()
addFlag({
.longName = "eval-store",
- .description = "The Nix store to use for evaluations.",
+ .description =
+ R"(
+ The [URL of the Nix store](@docroot@/command-ref/new-cli/nix3-help-stores.md#store-url-format)
+ to use for evaluation, i.e. to store derivations (`.drv` files) and inputs referenced by them.
+ )",
.category = category,
.labels = {"store-url"},
.handler = {&evalStoreUrl},
@@ -166,7 +170,7 @@ Path lookupFileArg(EvalState & state, std::string_view s)
}
else if (hasPrefix(s, "flake:")) {
- settings.requireExperimentalFeature(Xp::Flakes);
+ experimentalFeatureSettings.require(Xp::Flakes);
auto flakeRef = parseFlakeRef(std::string(s.substr(6)), {}, true, false);
auto storePath = flakeRef.resolve(state.store).fetchTree(state.store).first.storePath;
return state.store->toRealPath(storePath);
diff --git a/src/libcmd/common-eval-args.hh b/src/libcmd/common-eval-args.hh
index 1ec800613..b69db11dd 100644
--- a/src/libcmd/common-eval-args.hh
+++ b/src/libcmd/common-eval-args.hh
@@ -1,4 +1,5 @@
#pragma once
+///@file
#include "args.hh"
diff --git a/src/libcmd/editor-for.hh b/src/libcmd/editor-for.hh
index 8fbd08792..f752bd849 100644
--- a/src/libcmd/editor-for.hh
+++ b/src/libcmd/editor-for.hh
@@ -1,4 +1,5 @@
#pragma once
+///@file
#include "types.hh"
diff --git a/src/libcmd/installable-attr-path.cc b/src/libcmd/installable-attr-path.cc
index d9377f0d6..cf513126d 100644
--- a/src/libcmd/installable-attr-path.cc
+++ b/src/libcmd/installable-attr-path.cc
@@ -87,6 +87,10 @@ DerivedPathsWithInfo InstallableAttrPath::toDerivedPaths()
.drvPath = drvPath,
.outputs = outputs,
},
+ .info = make_ref<ExtraPathInfoValue>(ExtraPathInfoValue::Value {
+ /* FIXME: reconsider backwards compatibility above
+ so we can fill in this info. */
+ }),
});
return res;
diff --git a/src/libcmd/installable-attr-path.hh b/src/libcmd/installable-attr-path.hh
index c06132ec8..e9f0c33da 100644
--- a/src/libcmd/installable-attr-path.hh
+++ b/src/libcmd/installable-attr-path.hh
@@ -1,3 +1,6 @@
+#pragma once
+///@file
+
#include "globals.hh"
#include "installable-value.hh"
#include "outputs-spec.hh"
diff --git a/src/libcmd/installable-derived-path.cc b/src/libcmd/installable-derived-path.cc
index 729dc7d31..6ecf54b7c 100644
--- a/src/libcmd/installable-derived-path.cc
+++ b/src/libcmd/installable-derived-path.cc
@@ -10,7 +10,10 @@ std::string InstallableDerivedPath::what() const
DerivedPathsWithInfo InstallableDerivedPath::toDerivedPaths()
{
- return {{.path = derivedPath, .info = {} }};
+ return {{
+ .path = derivedPath,
+ .info = make_ref<ExtraPathInfo>(),
+ }};
}
std::optional<StorePath> InstallableDerivedPath::getStorePath()
diff --git a/src/libcmd/installable-derived-path.hh b/src/libcmd/installable-derived-path.hh
index 042878b91..e0b4f18b3 100644
--- a/src/libcmd/installable-derived-path.hh
+++ b/src/libcmd/installable-derived-path.hh
@@ -1,4 +1,5 @@
#pragma once
+///@file
#include "installables.hh"
diff --git a/src/libcmd/installable-flake.cc b/src/libcmd/installable-flake.cc
index 60a97deaf..a3352af76 100644
--- a/src/libcmd/installable-flake.cc
+++ b/src/libcmd/installable-flake.cc
@@ -101,7 +101,8 @@ DerivedPathsWithInfo InstallableFlake::toDerivedPaths()
return {{
.path = DerivedPath::Opaque {
.path = std::move(storePath),
- }
+ },
+ .info = make_ref<ExtraPathInfo>(),
}};
}
@@ -113,7 +114,8 @@ DerivedPathsWithInfo InstallableFlake::toDerivedPaths()
return {{
.path = DerivedPath::Opaque {
.path = std::move(*storePath),
- }
+ },
+ .info = make_ref<ExtraPathInfo>(),
}};
} else
throw Error("flake output attribute '%s' evaluates to the string '%s' which is not a store path", attrPath, s);
@@ -160,13 +162,16 @@ DerivedPathsWithInfo InstallableFlake::toDerivedPaths()
},
}, extendedOutputsSpec.raw()),
},
- .info = {
- .priority = priority,
- .originalRef = flakeRef,
- .resolvedRef = getLockedFlake()->flake.lockedRef,
- .attrPath = attrPath,
- .extendedOutputsSpec = extendedOutputsSpec,
- }
+ .info = make_ref<ExtraPathInfoFlake>(
+ ExtraPathInfoValue::Value {
+ .priority = priority,
+ .attrPath = attrPath,
+ .extendedOutputsSpec = extendedOutputsSpec,
+ },
+ ExtraPathInfoFlake::Flake {
+ .originalRef = flakeRef,
+ .resolvedRef = getLockedFlake()->flake.lockedRef,
+ }),
}};
}
@@ -178,8 +183,7 @@ std::pair<Value *, PosIdx> InstallableFlake::toValue(EvalState & state)
std::vector<ref<eval_cache::AttrCursor>>
InstallableFlake::getCursors(EvalState & state)
{
- auto evalCache = openEvalCache(state,
- std::make_shared<flake::LockedFlake>(lockFlake(state, flakeRef, lockFlags)));
+ auto evalCache = openEvalCache(state, getLockedFlake());
auto root = evalCache->getRoot();
@@ -213,6 +217,7 @@ std::shared_ptr<flake::LockedFlake> InstallableFlake::getLockedFlake() const
{
if (!_lockedFlake) {
flake::LockFlags lockFlagsApplyConfig = lockFlags;
+ // FIXME why this side effect?
lockFlagsApplyConfig.applyNixConfig = true;
_lockedFlake = std::make_shared<flake::LockedFlake>(lockFlake(*state, flakeRef, lockFlagsApplyConfig));
}
@@ -230,7 +235,7 @@ FlakeRef InstallableFlake::nixpkgsFlakeRef() const
}
}
- return Installable::nixpkgsFlakeRef();
+ return InstallableValue::nixpkgsFlakeRef();
}
}
diff --git a/src/libcmd/installable-flake.hh b/src/libcmd/installable-flake.hh
index c75765086..afe64d977 100644
--- a/src/libcmd/installable-flake.hh
+++ b/src/libcmd/installable-flake.hh
@@ -1,9 +1,34 @@
#pragma once
+///@file
#include "installable-value.hh"
namespace nix {
+/**
+ * Extra info about a \ref DerivedPath "derived path" that ultimately
+ * come from a Flake.
+ *
+ * Invariant: every ExtraPathInfo gotten from an InstallableFlake should
+ * be possible to downcast to an ExtraPathInfoFlake.
+ */
+struct ExtraPathInfoFlake : ExtraPathInfoValue
+{
+ /**
+ * Extra struct to get around C++ designated initializer limitations
+ */
+ struct Flake {
+ FlakeRef originalRef;
+ FlakeRef resolvedRef;
+ };
+
+ Flake flake;
+
+ ExtraPathInfoFlake(Value && v, Flake && f)
+ : ExtraPathInfoValue(std::move(v)), flake(f)
+ { }
+};
+
struct InstallableFlake : InstallableValue
{
FlakeRef flakeRef;
@@ -33,8 +58,10 @@ struct InstallableFlake : InstallableValue
std::pair<Value *, PosIdx> toValue(EvalState & state) override;
- /* Get a cursor to every attrpath in getActualAttrPaths()
- that exists. However if none exists, throw an exception. */
+ /**
+ * Get a cursor to every attrpath in getActualAttrPaths() that
+ * exists. However if none exists, throw an exception.
+ */
std::vector<ref<eval_cache::AttrCursor>>
getCursors(EvalState & state) override;
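The call-site change above follows the construction pattern sketched below; the concrete values and the surrounding `flakeRef`/`lockedRef` variables are illustrative assumptions.

```cpp
// The nested Value and Flake structs exist only to keep designated
// initializers usable across the class hierarchy.
auto info = make_ref<ExtraPathInfoFlake>(
    ExtraPathInfoValue::Value {
        .priority = 5,                                // optional "build env" priority
        .attrPath = "packages.x86_64-linux.default",  // illustrative attribute path
    },
    ExtraPathInfoFlake::Flake {
        .originalRef = flakeRef,
        .resolvedRef = lockedRef,
    });
```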
diff --git a/src/libcmd/installable-value.cc b/src/libcmd/installable-value.cc
new file mode 100644
index 000000000..30f80edb2
--- /dev/null
+++ b/src/libcmd/installable-value.cc
@@ -0,0 +1,44 @@
+#include "installable-value.hh"
+#include "eval-cache.hh"
+
+namespace nix {
+
+std::vector<ref<eval_cache::AttrCursor>>
+InstallableValue::getCursors(EvalState & state)
+{
+ auto evalCache =
+ std::make_shared<nix::eval_cache::EvalCache>(std::nullopt, state,
+ [&]() { return toValue(state).first; });
+ return {evalCache->getRoot()};
+}
+
+ref<eval_cache::AttrCursor>
+InstallableValue::getCursor(EvalState & state)
+{
+ /* Although getCursors should return at least one element, in case it doesn't,
+ bound check to avoid an undefined behavior for vector[0] */
+ return getCursors(state).at(0);
+}
+
+static UsageError nonValueInstallable(Installable & installable)
+{
+ return UsageError("installable '%s' does not correspond to a Nix language value", installable.what());
+}
+
+InstallableValue & InstallableValue::require(Installable & installable)
+{
+ auto * castedInstallable = dynamic_cast<InstallableValue *>(&installable);
+ if (!castedInstallable)
+ throw nonValueInstallable(installable);
+ return *castedInstallable;
+}
+
+ref<InstallableValue> InstallableValue::require(ref<Installable> installable)
+{
+ auto castedInstallable = installable.dynamic_pointer_cast<InstallableValue>();
+ if (!castedInstallable)
+ throw nonValueInstallable(*installable);
+ return ref { castedInstallable };
+}
+
+}
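A sketch of the intended calling pattern for the helpers above; `evalSomething` is a hypothetical caller.

```cpp
// Given any installable, require() either downcasts to
// ref<InstallableValue> or throws the UsageError defined above.
void evalSomething(EvalState & state, ref<Installable> installable)
{
    auto installableValue = InstallableValue::require(installable);
    auto [value, pos] = installableValue->toValue(state);
    // ... evaluate or inspect `value` here ...
    (void) value; (void) pos;
}
```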
diff --git a/src/libcmd/installable-value.hh b/src/libcmd/installable-value.hh
index c6cdc4797..bfb3bfeed 100644
--- a/src/libcmd/installable-value.hh
+++ b/src/libcmd/installable-value.hh
@@ -1,14 +1,108 @@
#pragma once
+///@file
#include "installables.hh"
+#include "flake/flake.hh"
namespace nix {
+struct DrvInfo;
+struct SourceExprCommand;
+
+namespace eval_cache { class EvalCache; class AttrCursor; }
+
+struct App
+{
+ std::vector<DerivedPath> context;
+ Path program;
+ // FIXME: add args, sandbox settings, metadata, ...
+};
+
+struct UnresolvedApp
+{
+ App unresolved;
+ App resolve(ref<Store> evalStore, ref<Store> store);
+};
+
+/**
+ * Extra info about a \ref DerivedPath "derived path" that ultimately
+ * come from a Nix language value.
+ *
+ * Invariant: every ExtraPathInfo gotten from an InstallableValue should
+ * be possible to downcast to an ExtraPathInfoValue.
+ */
+struct ExtraPathInfoValue : ExtraPathInfo
+{
+ /**
+ * Extra struct to get around C++ designated initializer limitations
+ */
+ struct Value {
+ /**
+ * An optional priority for use with "build envs". See Package
+ */
+ std::optional<NixInt> priority;
+
+ /**
+ * The attribute path associated with this value. The idea is
+ * that an installable referring to a value typically refers to
+ * a larger value, from which we project a smaller value out
+ * with this.
+ */
+ std::string attrPath;
+
+ /**
+ * \todo merge with DerivedPath's 'outputs' field?
+ */
+ ExtendedOutputsSpec extendedOutputsSpec;
+ };
+
+ Value value;
+
+ ExtraPathInfoValue(Value && v)
+ : value(v)
+ { }
+
+ virtual ~ExtraPathInfoValue() = default;
+};
+
+/**
+ * An Installable which corresponds to a Nix language value, in addition to
+ * a collection of \ref DerivedPath "derived paths".
+ */
struct InstallableValue : Installable
{
ref<EvalState> state;
InstallableValue(ref<EvalState> state) : state(state) {}
+
+ virtual ~InstallableValue() { }
+
+ virtual std::pair<Value *, PosIdx> toValue(EvalState & state) = 0;
+
+ /**
+ * Get a cursor to each value this Installable could refer to.
+ * However if none exists, throw exception instead of returning
+ * empty vector.
+ */
+ virtual std::vector<ref<eval_cache::AttrCursor>>
+ getCursors(EvalState & state);
+
+ /**
+ * Get the first and most preferred cursor this Installable could
+ * refer to, or throw an exception if none exists.
+ */
+ virtual ref<eval_cache::AttrCursor>
+ getCursor(EvalState & state);
+
+ UnresolvedApp toApp(EvalState & state);
+
+ virtual FlakeRef nixpkgsFlakeRef() const
+ {
+ return FlakeRef::fromAttrs({{"type","indirect"}, {"id", "nixpkgs"}});
+ }
+
+ static InstallableValue & require(Installable & installable);
+ static ref<InstallableValue> require(ref<Installable> installable);
};
}
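A sketch of how a command can get back the value-level fields from the now-generic `ref<ExtraPathInfo>`, relying on the invariant documented above; `info` is assumed to come from an InstallableValue's toDerivedPaths().

```cpp
// The virtual destructor on ExtraPathInfo makes this dynamic_cast
// well-formed; it yields nullptr for info from non-value installables.
if (auto * valueInfo = dynamic_cast<ExtraPathInfoValue *>(&*info))
    std::cout << "attribute path: " << valueInfo->value.attrPath << std::endl;
```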
diff --git a/src/libcmd/installables.cc b/src/libcmd/installables.cc
index 90f001902..32ae46d9f 100644
--- a/src/libcmd/installables.cc
+++ b/src/libcmd/installables.cc
@@ -103,6 +103,28 @@ MixFlakeOptions::MixFlakeOptions()
});
addFlag({
+ .longName = "reference-lock-file",
+ .description = "Read the given lock file instead of `flake.lock` within the top-level flake.",
+ .category = category,
+ .labels = {"flake-lock-path"},
+ .handler = {[&](std::string lockFilePath) {
+ lockFlags.referenceLockFilePath = lockFilePath;
+ }},
+ .completer = completePath
+ });
+
+ addFlag({
+ .longName = "output-lock-file",
+ .description = "Write the given lock file instead of `flake.lock` within the top-level flake.",
+ .category = category,
+ .labels = {"flake-lock-path"},
+ .handler = {[&](std::string lockFilePath) {
+ lockFlags.outputLockFilePath = lockFilePath;
+ }},
+ .completer = completePath
+ });
+
+ addFlag({
.longName = "inputs-from",
.description = "Use the inputs of the specified flake as registry entries.",
.category = category,
@@ -153,7 +175,7 @@ SourceExprCommand::SourceExprCommand()
.longName = "file",
.shortName = 'f',
.description =
- "Interpret installables as attribute paths relative to the Nix expression stored in *file*. "
+ "Interpret [*installables*](@docroot@/command-ref/new-cli/nix.md#installables) as attribute paths relative to the Nix expression stored in *file*. "
"If *file* is the character -, then a Nix expression will be read from standard input. "
"Implies `--impure`.",
.category = installablesCategory,
@@ -164,7 +186,7 @@ SourceExprCommand::SourceExprCommand()
addFlag({
.longName = "expr",
- .description = "Interpret installables as attribute paths relative to the Nix expression *expr*.",
+ .description = "Interpret [*installables*](@docroot@/command-ref/new-cli/nix.md#installables) as attribute paths relative to the Nix expression *expr*.",
.category = installablesCategory,
.labels = {"expr"},
.handler = {&expr}
@@ -332,7 +354,7 @@ void completeFlakeRefWithFragment(
void completeFlakeRef(ref<Store> store, std::string_view prefix)
{
- if (!settings.isExperimentalFeatureEnabled(Xp::Flakes))
+ if (!experimentalFeatureSettings.isEnabled(Xp::Flakes))
return;
if (prefix == "")
@@ -364,23 +386,6 @@ DerivedPathWithInfo Installable::toDerivedPath()
return std::move(buildables[0]);
}
-std::vector<ref<eval_cache::AttrCursor>>
-Installable::getCursors(EvalState & state)
-{
- auto evalCache =
- std::make_shared<nix::eval_cache::EvalCache>(std::nullopt, state,
- [&]() { return toValue(state).first; });
- return {evalCache->getRoot()};
-}
-
-ref<eval_cache::AttrCursor>
-Installable::getCursor(EvalState & state)
-{
- /* Although getCursors should return at least one element, in case it doesn't,
- bound check to avoid an undefined behavior for vector[0] */
- return getCursors(state).at(0);
-}
-
static StorePath getDeriver(
ref<Store> store,
const Installable & i,
@@ -422,10 +427,10 @@ ref<eval_cache::EvalCache> openEvalCache(
});
}
-std::vector<std::shared_ptr<Installable>> SourceExprCommand::parseInstallables(
+Installables SourceExprCommand::parseInstallables(
ref<Store> store, std::vector<std::string> ss)
{
- std::vector<std::shared_ptr<Installable>> result;
+ Installables result;
if (file || expr) {
if (file && expr)
@@ -451,7 +456,7 @@ std::vector<std::shared_ptr<Installable>> SourceExprCommand::parseInstallables(
for (auto & s : ss) {
auto [prefix, extendedOutputsSpec] = ExtendedOutputsSpec::parse(s);
result.push_back(
- std::make_shared<InstallableAttrPath>(
+ make_ref<InstallableAttrPath>(
InstallableAttrPath::parse(
state, *this, vFile, prefix, extendedOutputsSpec)));
}
@@ -468,7 +473,7 @@ std::vector<std::shared_ptr<Installable>> SourceExprCommand::parseInstallables(
if (prefix.find('/') != std::string::npos) {
try {
- result.push_back(std::make_shared<InstallableDerivedPath>(
+ result.push_back(make_ref<InstallableDerivedPath>(
InstallableDerivedPath::parse(store, prefix, extendedOutputsSpec)));
continue;
} catch (BadStorePath &) {
@@ -480,7 +485,7 @@ std::vector<std::shared_ptr<Installable>> SourceExprCommand::parseInstallables(
try {
auto [flakeRef, fragment] = parseFlakeRefWithFragment(std::string { prefix }, absPath("."));
- result.push_back(std::make_shared<InstallableFlake>(
+ result.push_back(make_ref<InstallableFlake>(
this,
getEvalState(),
std::move(flakeRef),
@@ -501,7 +506,7 @@ std::vector<std::shared_ptr<Installable>> SourceExprCommand::parseInstallables(
return result;
}
-std::shared_ptr<Installable> SourceExprCommand::parseInstallable(
+ref<Installable> SourceExprCommand::parseInstallable(
ref<Store> store, const std::string & installable)
{
auto installables = parseInstallables(store, {installable});
@@ -513,7 +518,7 @@ std::vector<BuiltPathWithResult> Installable::build(
ref<Store> evalStore,
ref<Store> store,
Realise mode,
- const std::vector<std::shared_ptr<Installable>> & installables,
+ const Installables & installables,
BuildMode bMode)
{
std::vector<BuiltPathWithResult> res;
@@ -522,11 +527,11 @@ std::vector<BuiltPathWithResult> Installable::build(
return res;
}
-std::vector<std::pair<std::shared_ptr<Installable>, BuiltPathWithResult>> Installable::build2(
+std::vector<std::pair<ref<Installable>, BuiltPathWithResult>> Installable::build2(
ref<Store> evalStore,
ref<Store> store,
Realise mode,
- const std::vector<std::shared_ptr<Installable>> & installables,
+ const Installables & installables,
BuildMode bMode)
{
if (mode == Realise::Nothing)
@@ -534,8 +539,8 @@ std::vector<std::pair<std::shared_ptr<Installable>, BuiltPathWithResult>> Instal
struct Aux
{
- ExtraPathInfo info;
- std::shared_ptr<Installable> installable;
+ ref<ExtraPathInfo> info;
+ ref<Installable> installable;
};
std::vector<DerivedPath> pathsToBuild;
@@ -548,7 +553,7 @@ std::vector<std::pair<std::shared_ptr<Installable>, BuiltPathWithResult>> Instal
}
}
- std::vector<std::pair<std::shared_ptr<Installable>, BuiltPathWithResult>> res;
+ std::vector<std::pair<ref<Installable>, BuiltPathWithResult>> res;
switch (mode) {
@@ -620,7 +625,7 @@ BuiltPaths Installable::toBuiltPaths(
ref<Store> store,
Realise mode,
OperateOn operateOn,
- const std::vector<std::shared_ptr<Installable>> & installables)
+ const Installables & installables)
{
if (operateOn == OperateOn::Output) {
BuiltPaths res;
@@ -642,7 +647,7 @@ StorePathSet Installable::toStorePaths(
ref<Store> evalStore,
ref<Store> store,
Realise mode, OperateOn operateOn,
- const std::vector<std::shared_ptr<Installable>> & installables)
+ const Installables & installables)
{
StorePathSet outPaths;
for (auto & path : toBuiltPaths(evalStore, store, mode, operateOn, installables)) {
@@ -656,7 +661,7 @@ StorePath Installable::toStorePath(
ref<Store> evalStore,
ref<Store> store,
Realise mode, OperateOn operateOn,
- std::shared_ptr<Installable> installable)
+ ref<Installable> installable)
{
auto paths = toStorePaths(evalStore, store, mode, operateOn, {installable});
@@ -668,7 +673,7 @@ StorePath Installable::toStorePath(
StorePathSet Installable::toDerivations(
ref<Store> store,
- const std::vector<std::shared_ptr<Installable>> & installables,
+ const Installables & installables,
bool useDeriver)
{
StorePathSet drvPaths;
@@ -692,36 +697,55 @@ StorePathSet Installable::toDerivations(
return drvPaths;
}
-InstallablesCommand::InstallablesCommand()
+RawInstallablesCommand::RawInstallablesCommand()
{
+ addFlag({
+ .longName = "stdin",
+ .description = "Read installables from the standard input.",
+ .handler = {&readFromStdIn, true}
+ });
+
expectArgs({
.label = "installables",
- .handler = {&_installables},
+ .handler = {&rawInstallables},
.completer = {[&](size_t, std::string_view prefix) {
completeInstallable(prefix);
}}
});
}
-void InstallablesCommand::prepare()
+void RawInstallablesCommand::applyDefaultInstallables(std::vector<std::string> & rawInstallables)
{
- installables = load();
+ if (rawInstallables.empty()) {
+ // FIXME: commands like "nix profile install" should not have a
+ // default, probably.
+ rawInstallables.push_back(".");
+ }
}
-Installables InstallablesCommand::load()
+void RawInstallablesCommand::run(ref<Store> store)
{
- if (_installables.empty() && useDefaultInstallables())
- // FIXME: commands like "nix profile install" should not have a
- // default, probably.
- _installables.push_back(".");
- return parseInstallables(getStore(), _installables);
+ if (readFromStdIn && !isatty(STDIN_FILENO)) {
+ std::string word;
+ while (std::cin >> word) {
+ rawInstallables.emplace_back(std::move(word));
+ }
+ }
+
+ applyDefaultInstallables(rawInstallables);
+ run(store, std::move(rawInstallables));
+}
+
+std::vector<std::string> RawInstallablesCommand::getFlakesForCompletion()
+{
+ applyDefaultInstallables(rawInstallables);
+ return rawInstallables;
}
-std::vector<std::string> InstallablesCommand::getFlakesForCompletion()
+void InstallablesCommand::run(ref<Store> store, std::vector<std::string> && rawInstallables)
{
- if (_installables.empty() && useDefaultInstallables())
- return {"."};
- return _installables;
+ auto installables = parseInstallables(store, rawInstallables);
+ run(store, std::move(installables));
}
InstallableCommand::InstallableCommand()
@@ -737,9 +761,16 @@ InstallableCommand::InstallableCommand()
});
}
-void InstallableCommand::prepare()
+void InstallableCommand::run(ref<Store> store)
+{
+ auto installable = parseInstallable(store, _installable);
+ run(store, std::move(installable));
+}
+
+void BuiltPathsCommand::applyDefaultInstallables(std::vector<std::string> & rawInstallables)
{
- installable = parseInstallable(getStore(), _installable);
+ if (rawInstallables.empty() && !all)
+ rawInstallables.push_back(".");
}
}
diff --git a/src/libcmd/installables.hh b/src/libcmd/installables.hh
index be77fdc81..42d6c7c7c 100644
--- a/src/libcmd/installables.hh
+++ b/src/libcmd/installables.hh
@@ -1,12 +1,11 @@
#pragma once
+///@file
#include "util.hh"
#include "path.hh"
#include "outputs-spec.hh"
#include "derived-path.hh"
-#include "eval.hh"
#include "store-api.hh"
-#include "flake/flake.hh"
#include "build-result.hh"
#include <optional>
@@ -14,122 +13,156 @@
namespace nix {
struct DrvInfo;
-struct SourceExprCommand;
-
-namespace eval_cache { class EvalCache; class AttrCursor; }
-
-struct App
-{
- std::vector<DerivedPath> context;
- Path program;
- // FIXME: add args, sandbox settings, metadata, ...
-};
-
-struct UnresolvedApp
-{
- App unresolved;
- App resolve(ref<Store> evalStore, ref<Store> store);
-};
enum class Realise {
- /* Build the derivation. Postcondition: the
- derivation outputs exist. */
+ /**
+ * Build the derivation.
+ *
+ * Postcondition: the derivation outputs exist.
+ */
Outputs,
- /* Don't build the derivation. Postcondition: the store derivation
- exists. */
+ /**
+ * Don't build the derivation.
+ *
+ * Postcondition: the store derivation exists.
+ */
Derivation,
- /* Evaluate in dry-run mode. Postcondition: nothing. */
- // FIXME: currently unused, but could be revived if we can
- // evaluate derivations in-memory.
+ /**
+ * Evaluate in dry-run mode.
+ *
+ * Postcondition: nothing.
+ *
+ * \todo currently unused, but could be revived if we can evaluate
+ * derivations in-memory.
+ */
Nothing
};
-/* How to handle derivations in commands that operate on store paths. */
+/**
+ * How to handle derivations in commands that operate on store paths.
+ */
enum class OperateOn {
- /* Operate on the output path. */
+ /**
+ * Operate on the output path.
+ */
Output,
- /* Operate on the .drv path. */
+ /**
+ * Operate on the .drv path.
+ */
Derivation
};
+/**
+ * Extra info about a DerivedPath
+ *
+ * Yes, this is empty, but that is intended. It will be sub-classed by
+ * the subclasses of Installable to allow those to provide more info.
+ * Certain commands will make use of this info.
+ */
struct ExtraPathInfo
{
- std::optional<NixInt> priority;
- std::optional<FlakeRef> originalRef;
- std::optional<FlakeRef> resolvedRef;
- std::optional<std::string> attrPath;
- // FIXME: merge with DerivedPath's 'outputs' field?
- std::optional<ExtendedOutputsSpec> extendedOutputsSpec;
+ virtual ~ExtraPathInfo() = default;
};
-/* A derived path with any additional info that commands might
- need from the derivation. */
+/**
+ * A DerivedPath with \ref ExtraPathInfo "any additional info" that
+ * commands might need from the derivation.
+ */
struct DerivedPathWithInfo
{
DerivedPath path;
- ExtraPathInfo info;
+ ref<ExtraPathInfo> info;
};
+/**
+ * Like DerivedPathWithInfo but extending BuiltPath with \ref
+ * ExtraPathInfo "extra info" and also possibly the \ref BuildResult
+ * "result of building".
+ */
struct BuiltPathWithResult
{
BuiltPath path;
- ExtraPathInfo info;
+ ref<ExtraPathInfo> info;
std::optional<BuildResult> result;
};
+/**
+ * Shorthand, for less typing and helping us keep the choice of
+ * collection in sync.
+ */
typedef std::vector<DerivedPathWithInfo> DerivedPathsWithInfo;
+struct Installable;
+
+/**
+ * Shorthand, for less typing and helping us keep the choice of
+ * collection in sync.
+ */
+typedef std::vector<ref<Installable>> Installables;
+
+/**
+ * Installables are the main positional arguments for the Nix
+ * command line.
+ *
+ * This base class is very flexible, and just assumes that the
+ * Installable refers to a collection of \ref DerivedPath "derived paths" with
+ * \ref ExtraPathInfo "extra info".
+ */
struct Installable
{
virtual ~Installable() { }
+ /**
+ * What Installable is this?
+ *
+ * Prints back valid CLI syntax that would result in this same
+ * installable. It doesn't need to be exactly what the user wrote,
+ * just something that means the same thing.
+ */
virtual std::string what() const = 0;
+ /**
+ * Get the collection of \ref DerivedPathWithInfo "derived paths
+ * with info" that this \ref Installable installable denotes.
+ *
+ * This is the main method of this class
+ */
virtual DerivedPathsWithInfo toDerivedPaths() = 0;
+ /**
+ * A convenience wrapper of the above for when we expect an
+ * installable to produce a single \ref DerivedPath "derived path"
+ * only.
+ *
+ * If no or multiple \ref DerivedPath "derived paths" are produced,
+ * an error is raised.
+ */
DerivedPathWithInfo toDerivedPath();
- UnresolvedApp toApp(EvalState & state);
-
- virtual std::pair<Value *, PosIdx> toValue(EvalState & state)
- {
- throw Error("argument '%s' cannot be evaluated", what());
- }
-
- /* Return a value only if this installable is a store path or a
- symlink to it. */
+ /**
+ * Return a value only if this installable is a store path or a
+ * symlink to it.
+ *
+ * \todo should we move this to InstallableDerivedPath? It is only
+ * supposed to work there anyways. Can always downcast.
+ */
virtual std::optional<StorePath> getStorePath()
{
return {};
}
- /* Get a cursor to each value this Installable could refer to. However
- if none exists, throw exception instead of returning empty vector. */
- virtual std::vector<ref<eval_cache::AttrCursor>>
- getCursors(EvalState & state);
-
- /* Get the first and most preferred cursor this Installable could refer
- to, or throw an exception if none exists. */
- virtual ref<eval_cache::AttrCursor>
- getCursor(EvalState & state);
-
- virtual FlakeRef nixpkgsFlakeRef() const
- {
- return FlakeRef::fromAttrs({{"type","indirect"}, {"id", "nixpkgs"}});
- }
-
static std::vector<BuiltPathWithResult> build(
ref<Store> evalStore,
ref<Store> store,
Realise mode,
- const std::vector<std::shared_ptr<Installable>> & installables,
+ const Installables & installables,
BuildMode bMode = bmNormal);
- static std::vector<std::pair<std::shared_ptr<Installable>, BuiltPathWithResult>> build2(
+ static std::vector<std::pair<ref<Installable>, BuiltPathWithResult>> build2(
ref<Store> evalStore,
ref<Store> store,
Realise mode,
- const std::vector<std::shared_ptr<Installable>> & installables,
+ const Installables & installables,
BuildMode bMode = bmNormal);
static std::set<StorePath> toStorePaths(
@@ -137,18 +170,18 @@ struct Installable
ref<Store> store,
Realise mode,
OperateOn operateOn,
- const std::vector<std::shared_ptr<Installable>> & installables);
+ const Installables & installables);
static StorePath toStorePath(
ref<Store> evalStore,
ref<Store> store,
Realise mode,
OperateOn operateOn,
- std::shared_ptr<Installable> installable);
+ ref<Installable> installable);
static std::set<StorePath> toDerivations(
ref<Store> store,
- const std::vector<std::shared_ptr<Installable>> & installables,
+ const Installables & installables,
bool useDeriver = false);
static BuiltPaths toBuiltPaths(
@@ -156,9 +189,7 @@ struct Installable
ref<Store> store,
Realise mode,
OperateOn operateOn,
- const std::vector<std::shared_ptr<Installable>> & installables);
+ const Installables & installables);
};
-typedef std::vector<std::shared_ptr<Installable>> Installables;
-
}
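A sketch of the ref-based collection in use; `evalStore`, `store` and `installables` are assumed to come from the surrounding command.

```cpp
// Installables is now std::vector<ref<Installable>>, so elements can
// never be null and no nullptr checks are needed.
auto storePaths = Installable::toStorePaths(
    evalStore, store, Realise::Outputs, OperateOn::Output, installables);
for (auto & p : storePaths)
    std::cout << store->printStorePath(p) << std::endl;
```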
diff --git a/src/libcmd/legacy.hh b/src/libcmd/legacy.hh
index f503b0da3..357500a4d 100644
--- a/src/libcmd/legacy.hh
+++ b/src/libcmd/legacy.hh
@@ -1,4 +1,5 @@
#pragma once
+///@file
#include <functional>
#include <map>
diff --git a/src/libcmd/markdown.hh b/src/libcmd/markdown.hh
index 78320fcf5..a04d32a4f 100644
--- a/src/libcmd/markdown.hh
+++ b/src/libcmd/markdown.hh
@@ -1,3 +1,6 @@
+#pragma once
+///@file
+
#include "types.hh"
namespace nix {
diff --git a/src/libcmd/repl.cc b/src/libcmd/repl.cc
index 29c838ec0..57848a5d3 100644
--- a/src/libcmd/repl.cc
+++ b/src/libcmd/repl.cc
@@ -1026,6 +1026,8 @@ std::ostream & NixRepl::printValue(std::ostream & str, Value & v, unsigned int m
str << v.fpoint;
break;
+ case nThunk:
+ case nExternal:
default:
str << ANSI_RED "«unknown»" ANSI_NORMAL;
break;
diff --git a/src/libcmd/repl.hh b/src/libcmd/repl.hh
index dfccc93e7..731c8e6db 100644
--- a/src/libcmd/repl.hh
+++ b/src/libcmd/repl.hh
@@ -1,4 +1,5 @@
#pragma once
+///@file
#include "eval.hh"
diff --git a/src/libexpr/attr-path.hh b/src/libexpr/attr-path.hh
index 117e0051b..d0d05b1a1 100644
--- a/src/libexpr/attr-path.hh
+++ b/src/libexpr/attr-path.hh
@@ -1,4 +1,5 @@
#pragma once
+///@file
#include "eval.hh"
diff --git a/src/libexpr/attr-set.hh b/src/libexpr/attr-set.hh
index dcc73b506..3fe54408b 100644
--- a/src/libexpr/attr-set.hh
+++ b/src/libexpr/attr-set.hh
@@ -1,4 +1,5 @@
#pragma once
+///@file
#include "nixexpr.hh"
#include "symbol-table.hh"
diff --git a/src/libexpr/eval-cache.hh b/src/libexpr/eval-cache.hh
index c93e55b93..c90882edc 100644
--- a/src/libexpr/eval-cache.hh
+++ b/src/libexpr/eval-cache.hh
@@ -1,4 +1,5 @@
#pragma once
+///@file
#include "sync.hh"
#include "hash.hh"
diff --git a/src/libexpr/eval-inline.hh b/src/libexpr/eval-inline.hh
index f0da688db..f8ddd2acc 100644
--- a/src/libexpr/eval-inline.hh
+++ b/src/libexpr/eval-inline.hh
@@ -1,4 +1,5 @@
#pragma once
+///@file
#include "eval.hh"
diff --git a/src/libexpr/eval.cc b/src/libexpr/eval.cc
index 21fc4d0fe..7f2065656 100644
--- a/src/libexpr/eval.cc
+++ b/src/libexpr/eval.cc
@@ -8,6 +8,7 @@
#include "eval-inline.hh"
#include "filetransfer.hh"
#include "function-trace.hh"
+#include "profiles.hh"
#include <algorithm>
#include <chrono>
@@ -172,7 +173,17 @@ void Value::print(const SymbolTable & symbols, std::ostream & str,
case tFloat:
str << fpoint;
break;
+ case tBlackhole:
+ // Although we know for sure that it's going to be an infinite recursion
+ // when this value is accessed _in the current context_, it's likely
+ // that the user will misinterpret a simpler «infinite recursion» output
+ // as a definitive statement about the value, while in fact it may be
+ // a valid value after `builtins.trace` and perhaps some other steps
+ // have completed.
+ str << "«potential infinite recursion»";
+ break;
default:
+ printError("Nix evaluator internal error: Value::print(): invalid value type %1%", internalType);
abort();
}
}
@@ -228,6 +239,9 @@ std::string_view showType(ValueType type)
std::string showType(const Value & v)
{
+ // Allow selecting a subset of enum values
+ #pragma GCC diagnostic push
+ #pragma GCC diagnostic ignored "-Wswitch-enum"
switch (v.internalType) {
case tString: return v.string.context ? "a string with context" : "a string";
case tPrimOp:
@@ -241,16 +255,21 @@ std::string showType(const Value & v)
default:
return std::string(showType(v.type()));
}
+ #pragma GCC diagnostic pop
}
PosIdx Value::determinePos(const PosIdx pos) const
{
+ // Allow selecting a subset of enum values
+ #pragma GCC diagnostic push
+ #pragma GCC diagnostic ignored "-Wswitch-enum"
switch (internalType) {
case tAttrs: return attrs->pos;
case tLambda: return lambda.fun->pos;
case tApp: return app.left->determinePos(pos);
default: return pos;
}
+ #pragma GCC diagnostic pop
}
bool Value::isTrivial() const
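The same diagnostics pattern recurs in flake.cc and primops.cc below; in isolation it looks like this (the switch body is illustrative).

```cpp
// Silence -Wswitch-enum only around switches that intentionally handle a
// subset of the enum and funnel everything else through `default`.
#pragma GCC diagnostic push
#pragma GCC diagnostic ignored "-Wswitch-enum"
switch (v.type()) {
    case nInt:
        /* handle integers */
        break;
    default:
        /* everything else */
        break;
}
#pragma GCC diagnostic pop
```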
@@ -368,7 +387,7 @@ void initGC()
size = (pageSize * pages) / 4; // 25% of RAM
if (size > maxSize) size = maxSize;
#endif
- debug(format("setting initial heap size to %1% bytes") % size);
+ debug("setting initial heap size to %1% bytes", size);
GC_expand_hp(size);
}
@@ -609,7 +628,7 @@ Path EvalState::checkSourcePath(const Path & path_)
}
/* Resolve symlinks. */
- debug(format("checking access to '%s'") % abspath);
+ debug("checking access to '%s'", abspath);
Path path = canonPath(abspath, true);
for (auto & i : *allowedPaths) {
@@ -2326,6 +2345,7 @@ bool EvalState::eqValues(Value & v1, Value & v2, const PosIdx pos, std::string_v
case nFloat:
return v1.fpoint == v2.fpoint;
+ case nThunk: // Must not be left by forceValue
default:
error("cannot compare %1% with %2%", showType(v1), showType(v2)).withTrace(pos, errorCtx).debugThrow<EvalError>();
}
@@ -2491,8 +2511,8 @@ Strings EvalSettings::getDefaultNixPath()
if (!evalSettings.restrictEval && !evalSettings.pureEval) {
add(settings.useXDGBaseDirectories ? getStateDir() + "/nix/defexpr/channels" : getHome() + "/.nix-defexpr/channels");
- add(settings.nixStateDir + "/profiles/per-user/root/channels/nixpkgs", "nixpkgs");
- add(settings.nixStateDir + "/profiles/per-user/root/channels");
+ add(rootChannelsDir() + "/nixpkgs", "nixpkgs");
+ add(rootChannelsDir());
}
return res;
diff --git a/src/libexpr/eval.hh b/src/libexpr/eval.hh
index e4d5906bd..a1b54951e 100644
--- a/src/libexpr/eval.hh
+++ b/src/libexpr/eval.hh
@@ -1,4 +1,5 @@
#pragma once
+///@file
#include "attr-set.hh"
#include "types.hh"
diff --git a/src/libexpr/flake/flake.cc b/src/libexpr/flake/flake.cc
index 336eb274d..ac396236f 100644
--- a/src/libexpr/flake/flake.cc
+++ b/src/libexpr/flake/flake.cc
@@ -125,6 +125,9 @@ static FlakeInput parseFlakeInput(EvalState & state,
follows.insert(follows.begin(), lockRootPath.begin(), lockRootPath.end());
input.follows = follows;
} else {
+ // Allow selecting a subset of enum values
+ #pragma GCC diagnostic push
+ #pragma GCC diagnostic ignored "-Wswitch-enum"
switch (attr.value->type()) {
case nString:
attrs.emplace(state.symbols[attr.name], attr.value->string.s);
@@ -139,6 +142,7 @@ static FlakeInput parseFlakeInput(EvalState & state,
throw TypeError("flake input attribute '%s' is %s while a string, Boolean, or integer is expected",
state.symbols[attr.name], showType(*attr.value));
}
+ #pragma GCC diagnostic pop
}
} catch (Error & e) {
e.addTrace(
@@ -320,7 +324,7 @@ LockedFlake lockFlake(
const FlakeRef & topRef,
const LockFlags & lockFlags)
{
- settings.requireExperimentalFeature(Xp::Flakes);
+ experimentalFeatureSettings.require(Xp::Flakes);
FlakeCache flakeCache;
@@ -334,10 +338,14 @@ LockedFlake lockFlake(
}
try {
+ if (!fetchSettings.allowDirty && lockFlags.referenceLockFilePath) {
+ throw Error("reference lock file was provided, but the `allow-dirty` setting is set to false");
+ }
// FIXME: symlink attack
auto oldLockFile = LockFile::read(
- flake.sourceInfo->actualPath + "/" + flake.lockedRef.subdir + "/flake.lock");
+ lockFlags.referenceLockFilePath.value_or(
+ flake.sourceInfo->actualPath + "/" + flake.lockedRef.subdir + "/flake.lock"));
debug("old lock file: %s", oldLockFile);
@@ -619,13 +627,20 @@ LockedFlake lockFlake(
debug("new lock file: %s", newLockFile);
+ auto relPath = (topRef.subdir == "" ? "" : topRef.subdir + "/") + "flake.lock";
+ auto sourcePath = topRef.input.getSourcePath();
+ auto outputLockFilePath = sourcePath ? std::optional{*sourcePath + "/" + relPath} : std::nullopt;
+ if (lockFlags.outputLockFilePath) {
+ outputLockFilePath = lockFlags.outputLockFilePath;
+ }
+
/* Check whether we need to / can write the new lock file. */
- if (!(newLockFile == oldLockFile)) {
+ if (newLockFile != oldLockFile || lockFlags.outputLockFilePath) {
auto diff = LockFile::diff(oldLockFile, newLockFile);
if (lockFlags.writeLockFile) {
- if (auto sourcePath = topRef.input.getSourcePath()) {
+ if (outputLockFilePath) {
if (auto unlockedInput = newLockFile.isUnlocked()) {
if (fetchSettings.warnDirty)
warn("will not write lock file of flake '%s' because it has an unlocked input ('%s')", topRef, *unlockedInput);
@@ -633,25 +648,24 @@ LockedFlake lockFlake(
if (!lockFlags.updateLockFile)
throw Error("flake '%s' requires lock file changes but they're not allowed due to '--no-update-lock-file'", topRef);
- auto relPath = (topRef.subdir == "" ? "" : topRef.subdir + "/") + "flake.lock";
-
- auto path = *sourcePath + "/" + relPath;
-
- bool lockFileExists = pathExists(path);
+ bool lockFileExists = pathExists(*outputLockFilePath);
if (lockFileExists) {
auto s = chomp(diff);
if (s.empty())
- warn("updating lock file '%s'", path);
+ warn("updating lock file '%s'", *outputLockFilePath);
else
- warn("updating lock file '%s':\n%s", path, s);
+ warn("updating lock file '%s':\n%s", *outputLockFilePath, s);
} else
- warn("creating lock file '%s'", path);
+ warn("creating lock file '%s'", *outputLockFilePath);
- newLockFile.write(path);
+ newLockFile.write(*outputLockFilePath);
std::optional<std::string> commitMessage = std::nullopt;
if (lockFlags.commitLockFile) {
+ if (lockFlags.outputLockFilePath) {
+ throw Error("--commit-lock-file and --output-lock-file are currently incompatible");
+ }
std::string cm;
cm = fetchSettings.commitLockFileSummary.get();
diff --git a/src/libexpr/flake/flake.hh b/src/libexpr/flake/flake.hh
index 10301d8aa..b6f710288 100644
--- a/src/libexpr/flake/flake.hh
+++ b/src/libexpr/flake/flake.hh
@@ -1,4 +1,5 @@
#pragma once
+///@file
#include "types.hh"
#include "flakeref.hh"
@@ -117,6 +118,12 @@ struct LockFlags
/* Whether to commit changes to flake.lock. */
bool commitLockFile = false;
+ /* The path to a lock file to read instead of the `flake.lock` file in the top-level flake */
+ std::optional<std::string> referenceLockFilePath;
+
+ /* The path to a lock file to write to instead of the `flake.lock` file in the top-level flake */
+ std::optional<Path> outputLockFilePath;
+
/* Flake inputs to be overridden. */
std::map<InputPath, FlakeRef> inputOverrides;
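A sketch of how the two new fields are meant to be used from C++ (the `--reference-lock-file` and `--output-lock-file` flags above set them); the paths and surrounding variables are illustrative.

```cpp
// Read one lock file, write another, without touching the flake's own
// flake.lock. Paths are placeholders.
flake::LockFlags flags;
flags.referenceLockFilePath = "/tmp/upstream-flake.lock"; // read instead of flake.lock
flags.outputLockFilePath = "/tmp/proposed-flake.lock";    // write instead of flake.lock
// lockFlake(state, topRef, flags) then honours both overrides and rejects
// the combination with --commit-lock-file (see flake.cc above).
```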
diff --git a/src/libexpr/flake/flakeref.hh b/src/libexpr/flake/flakeref.hh
index c4142fc20..23d19adb1 100644
--- a/src/libexpr/flake/flakeref.hh
+++ b/src/libexpr/flake/flakeref.hh
@@ -1,4 +1,5 @@
#pragma once
+///@file
#include "types.hh"
#include "hash.hh"
diff --git a/src/libexpr/flake/lockfile.hh b/src/libexpr/flake/lockfile.hh
index 02e9bdfbc..6512509c5 100644
--- a/src/libexpr/flake/lockfile.hh
+++ b/src/libexpr/flake/lockfile.hh
@@ -1,4 +1,5 @@
#pragma once
+///@file
#include "flakeref.hh"
diff --git a/src/libexpr/function-trace.hh b/src/libexpr/function-trace.hh
index e9a2526bd..91439b0aa 100644
--- a/src/libexpr/function-trace.hh
+++ b/src/libexpr/function-trace.hh
@@ -1,4 +1,5 @@
#pragma once
+///@file
#include "eval.hh"
diff --git a/src/libexpr/get-drvs.hh b/src/libexpr/get-drvs.hh
index bbd2d3c47..51ef7782a 100644
--- a/src/libexpr/get-drvs.hh
+++ b/src/libexpr/get-drvs.hh
@@ -1,4 +1,5 @@
#pragma once
+///@file
#include "eval.hh"
#include "path.hh"
diff --git a/src/libexpr/json-to-value.hh b/src/libexpr/json-to-value.hh
index 84bec4eba..3b8ec000f 100644
--- a/src/libexpr/json-to-value.hh
+++ b/src/libexpr/json-to-value.hh
@@ -1,4 +1,5 @@
#pragma once
+///@file
#include "eval.hh"
diff --git a/src/libexpr/local.mk b/src/libexpr/local.mk
index 2171e769b..d243b9cec 100644
--- a/src/libexpr/local.mk
+++ b/src/libexpr/local.mk
@@ -46,3 +46,5 @@ $(foreach i, $(wildcard src/libexpr/flake/*.hh), \
$(d)/primops.cc: $(d)/imported-drv-to-derivation.nix.gen.hh $(d)/primops/derivation.nix.gen.hh $(d)/fetchurl.nix.gen.hh
$(d)/flake/flake.cc: $(d)/flake/call-flake.nix.gen.hh
+
+src/libexpr/primops/fromTOML.o: ERROR_SWITCH_ENUM =
diff --git a/src/libexpr/nixexpr.hh b/src/libexpr/nixexpr.hh
index 4a81eaa47..4079a7b24 100644
--- a/src/libexpr/nixexpr.hh
+++ b/src/libexpr/nixexpr.hh
@@ -1,4 +1,5 @@
#pragma once
+///@file
#include <map>
#include <vector>
diff --git a/src/libexpr/parser.y b/src/libexpr/parser.y
index dec5818fc..97e615c37 100644
--- a/src/libexpr/parser.y
+++ b/src/libexpr/parser.y
@@ -469,7 +469,7 @@ expr_simple
new ExprString(std::move(path))});
}
| URI {
- static bool noURLLiterals = settings.isExperimentalFeatureEnabled(Xp::NoUrlLiterals);
+ static bool noURLLiterals = experimentalFeatureSettings.isEnabled(Xp::NoUrlLiterals);
if (noURLLiterals)
throw ParseError({
.msg = hintfmt("URL literals are disabled"),
@@ -732,7 +732,7 @@ Expr * EvalState::parseExprFromString(std::string s, const Path & basePath)
Expr * EvalState::parseStdin()
{
- //Activity act(*logger, lvlTalkative, format("parsing standard input"));
+ //Activity act(*logger, lvlTalkative, "parsing standard input");
auto buffer = drainFD(0);
// drainFD should have left some extra space for terminators
buffer.append("\0\0", 2);
@@ -816,7 +816,7 @@ std::pair<bool, std::string> EvalState::resolveSearchPathElem(const SearchPathEl
}
else if (hasPrefix(elem.second, "flake:")) {
- settings.requireExperimentalFeature(Xp::Flakes);
+ experimentalFeatureSettings.require(Xp::Flakes);
auto flakeRef = parseFlakeRef(elem.second.substr(6), {}, true, false);
debug("fetching flake search path element '%s''", elem.second);
auto storePath = flakeRef.resolve(store).fetchTree(store).first.storePath;
@@ -835,7 +835,7 @@ std::pair<bool, std::string> EvalState::resolveSearchPathElem(const SearchPathEl
}
}
- debug(format("resolved search path element '%s' to '%s'") % elem.second % res.second);
+ debug("resolved search path element '%s' to '%s'", elem.second, res.second);
searchPathResolved[elem.second] = res;
return res;
diff --git a/src/libexpr/primops.cc b/src/libexpr/primops.cc
index fb7fc3ddb..f1bce2fb6 100644
--- a/src/libexpr/primops.cc
+++ b/src/libexpr/primops.cc
@@ -254,9 +254,16 @@ static RegisterPrimOp primop_import({
.args = {"path"},
// TODO turn "normal path values" into link below
.doc = R"(
- Load, parse and return the Nix expression in the file *path*. If
- *path* is a directory, the file ` default.nix ` in that directory
- is loaded. Evaluation aborts if the file doesn’t exist or contains
+ Load, parse and return the Nix expression in the file *path*.
+
+ The value *path* can be a path, a string, or an attribute set with an
+ `__toString` attribute or an `outPath` attribute (as derivations or flake
+ inputs typically have).
+
+ If *path* is a directory, the file `default.nix` in that directory
+ is loaded.
+
+ Evaluation aborts if the file doesn’t exist or contains
an incorrect Nix expression. `import` implements Nix’s module
system: you can put any Nix expression (such as a set or a
function) in a separate file, and use it from Nix expressions in
@@ -570,6 +577,9 @@ struct CompareValues
return v1->integer < v2->fpoint;
if (v1->type() != v2->type())
state.error("cannot compare %s with %s", showType(*v1), showType(*v2)).debugThrow<EvalError>();
+ // Allow selecting a subset of enum values
+ #pragma GCC diagnostic push
+ #pragma GCC diagnostic ignored "-Wswitch-enum"
switch (v1->type()) {
case nInt:
return v1->integer < v2->integer;
@@ -592,6 +602,7 @@ struct CompareValues
}
default:
state.error("cannot compare %s with %s; values of that type are incomparable", showType(*v1), showType(*v2)).debugThrow<EvalError>();
+ #pragma GCC diagnostic pop
}
} catch (Error & e) {
if (!errorCtx.empty())
@@ -1141,13 +1152,13 @@ drvName, Bindings * attrs, Value & v)
if (i->name == state.sContentAddressed) {
contentAddressed = state.forceBool(*i->value, noPos, context_below);
if (contentAddressed)
- settings.requireExperimentalFeature(Xp::CaDerivations);
+ experimentalFeatureSettings.require(Xp::CaDerivations);
}
else if (i->name == state.sImpure) {
isImpure = state.forceBool(*i->value, noPos, context_below);
if (isImpure)
- settings.requireExperimentalFeature(Xp::ImpureDerivations);
+ experimentalFeatureSettings.require(Xp::ImpureDerivations);
}
/* The `args' attribute is special: it supplies the
@@ -4114,7 +4125,7 @@ void EvalState::createBaseEnv()
if (RegisterPrimOp::primOps)
for (auto & primOp : *RegisterPrimOp::primOps)
if (!primOp.experimentalFeature
- || settings.isExperimentalFeatureEnabled(*primOp.experimentalFeature))
+ || experimentalFeatureSettings.isEnabled(*primOp.experimentalFeature))
{
addPrimOp({
.fun = primOp.fun,
diff --git a/src/libexpr/primops.hh b/src/libexpr/primops.hh
index 1cfb4356b..1c5ce219f 100644
--- a/src/libexpr/primops.hh
+++ b/src/libexpr/primops.hh
@@ -1,4 +1,5 @@
#pragma once
+///@file
#include "eval.hh"
diff --git a/src/libexpr/primops/fetchTree.cc b/src/libexpr/primops/fetchTree.cc
index c9faf3ffb..0d0e00fa5 100644
--- a/src/libexpr/primops/fetchTree.cc
+++ b/src/libexpr/primops/fetchTree.cc
@@ -190,7 +190,7 @@ static void fetchTree(
static void prim_fetchTree(EvalState & state, const PosIdx pos, Value * * args, Value & v)
{
- settings.requireExperimentalFeature(Xp::Flakes);
+ experimentalFeatureSettings.require(Xp::Flakes);
fetchTree(state, pos, args, v, std::nullopt, FetchTreeParams { .allowNameArgument = false });
}
@@ -353,36 +353,44 @@ static RegisterPrimOp primop_fetchGit({
of the repo at that URL is fetched. Otherwise, it can be an
attribute with the following attributes (all except `url` optional):
- - url\
- The URL of the repo.
+ - `url`
- - name\
- The name of the directory the repo should be exported to in the
- store. Defaults to the basename of the URL.
+ The URL of the repo.
- - rev\
- The git revision to fetch. Defaults to the tip of `ref`.
+ - `name` (default: *basename of the URL*)
- - ref\
- The git ref to look for the requested revision under. This is
- often a branch or tag name. Defaults to `HEAD`.
+ The name of the directory the repo should be exported to in the store.
- By default, the `ref` value is prefixed with `refs/heads/`. As
- of Nix 2.3.0 Nix will not prefix `refs/heads/` if `ref` starts
- with `refs/`.
+ - `rev` (default: *the tip of `ref`*)
- - submodules\
- A Boolean parameter that specifies whether submodules should be
- checked out. Defaults to `false`.
+ The [Git revision] to fetch.
+ This is typically a commit hash.
- - shallow\
- A Boolean parameter that specifies whether fetching a shallow clone
- is allowed. Defaults to `false`.
+ [Git revision]: https://git-scm.com/docs/git-rev-parse#_specifying_revisions
- - allRefs\
- Whether to fetch all refs of the repository. With this argument being
- true, it's possible to load a `rev` from *any* `ref` (by default only
- `rev`s from the specified `ref` are supported).
+ - `ref` (default: `HEAD`)
+
+ The [Git reference] under which to look for the requested revision.
+ This is often a branch or tag name.
+
+ [Git reference]: https://git-scm.com/book/en/v2/Git-Internals-Git-References
+
+ By default, the `ref` value is prefixed with `refs/heads/`.
+ As of 2.3.0, Nix will not prefix `refs/heads/` if `ref` starts with `refs/`.
+
+ - `submodules` (default: `false`)
+
+ A Boolean parameter that specifies whether submodules should be checked out.
+
+ - `shallow` (default: `false`)
+
+ A Boolean parameter that specifies whether fetching a shallow clone is allowed.
+
+ - `allRefs`
+
+ Whether to fetch all references of the repository.
+ With this argument being true, it's possible to load a `rev` from *any* `ref`
+ (by default only `rev`s from the specified `ref` are supported).
Here are some examples of how to use `fetchGit`.
@@ -473,10 +481,10 @@ static RegisterPrimOp primop_fetchGit({
builtins.fetchGit ./work-dir
```
- If the URL points to a local directory, and no `ref` or `rev` is
- given, `fetchGit` will use the current content of the checked-out
- files, even if they are not committed or added to Git's index. It will
- only consider files added to the Git repository, as listed by `git ls-files`.
+ If the URL points to a local directory, and no `ref` or `rev` is
+ given, `fetchGit` will use the current content of the checked-out
+ files, even if they are not committed or added to Git's index. It will
+ only consider files added to the Git repository, as listed by `git ls-files`.
)",
.fun = prim_fetchGit,
});
diff --git a/src/libexpr/symbol-table.hh b/src/libexpr/symbol-table.hh
index 288c15602..c97a0a2db 100644
--- a/src/libexpr/symbol-table.hh
+++ b/src/libexpr/symbol-table.hh
@@ -1,4 +1,5 @@
#pragma once
+///@file
#include <list>
#include <map>
diff --git a/src/libexpr/tests/libexpr.hh b/src/libexpr/tests/libexpr.hh
index 8534d9567..69c932f05 100644
--- a/src/libexpr/tests/libexpr.hh
+++ b/src/libexpr/tests/libexpr.hh
@@ -1,3 +1,6 @@
+#pragma once
+///@file
+
#include <gtest/gtest.h>
#include <gmock/gmock.h>
diff --git a/src/libexpr/tests/primops.cc b/src/libexpr/tests/primops.cc
index e1d3ac503..ce3b5d11f 100644
--- a/src/libexpr/tests/primops.cc
+++ b/src/libexpr/tests/primops.cc
@@ -15,8 +15,8 @@ namespace nix {
return oss.str();
}
- void log(Verbosity lvl, const FormatOrString & fs) override {
- oss << fs.s << std::endl;
+ void log(Verbosity lvl, std::string_view s) override {
+ oss << s << std::endl;
}
void logEI(const ErrorInfo & ei) override {
diff --git a/src/libexpr/tests/value/context.hh b/src/libexpr/tests/value/context.hh
index 54d21760e..c0bc97ba3 100644
--- a/src/libexpr/tests/value/context.hh
+++ b/src/libexpr/tests/value/context.hh
@@ -1,4 +1,5 @@
#pragma once
+///@file
#include <rapidcheck/gen/Arbitrary.h>
diff --git a/src/libexpr/value-to-json.hh b/src/libexpr/value-to-json.hh
index 22f26b790..713356c7f 100644
--- a/src/libexpr/value-to-json.hh
+++ b/src/libexpr/value-to-json.hh
@@ -1,4 +1,5 @@
#pragma once
+///@file
#include "nixexpr.hh"
#include "eval.hh"
diff --git a/src/libexpr/value-to-xml.cc b/src/libexpr/value-to-xml.cc
index 3f6222768..341c8922f 100644
--- a/src/libexpr/value-to-xml.cc
+++ b/src/libexpr/value-to-xml.cc
@@ -26,8 +26,8 @@ static void posToXML(EvalState & state, XMLAttrs & xmlAttrs, const Pos & pos)
{
if (auto path = std::get_if<Path>(&pos.origin))
xmlAttrs["path"] = *path;
- xmlAttrs["line"] = (format("%1%") % pos.line).str();
- xmlAttrs["column"] = (format("%1%") % pos.column).str();
+ xmlAttrs["line"] = fmt("%1%", pos.line);
+ xmlAttrs["column"] = fmt("%1%", pos.column);
}
@@ -64,7 +64,7 @@ static void printValueAsXML(EvalState & state, bool strict, bool location,
switch (v.type()) {
case nInt:
- doc.writeEmptyElement("int", singletonAttrs("value", (format("%1%") % v.integer).str()));
+ doc.writeEmptyElement("int", singletonAttrs("value", fmt("%1%", v.integer)));
break;
case nBool:
@@ -156,7 +156,7 @@ static void printValueAsXML(EvalState & state, bool strict, bool location,
break;
case nFloat:
- doc.writeEmptyElement("float", singletonAttrs("value", (format("%1%") % v.fpoint).str()));
+ doc.writeEmptyElement("float", singletonAttrs("value", fmt("%1%", v.fpoint)));
break;
case nThunk:
diff --git a/src/libexpr/value-to-xml.hh b/src/libexpr/value-to-xml.hh
index 506f32b6b..ace7ead0f 100644
--- a/src/libexpr/value-to-xml.hh
+++ b/src/libexpr/value-to-xml.hh
@@ -1,4 +1,5 @@
#pragma once
+///@file
#include "nixexpr.hh"
#include "eval.hh"
diff --git a/src/libexpr/value.hh b/src/libexpr/value.hh
index 508dbe218..bfae4ee94 100644
--- a/src/libexpr/value.hh
+++ b/src/libexpr/value.hh
@@ -1,4 +1,5 @@
#pragma once
+///@file
#include <cassert>
diff --git a/src/libexpr/value/context.hh b/src/libexpr/value/context.hh
index 721563cba..d467b4f1d 100644
--- a/src/libexpr/value/context.hh
+++ b/src/libexpr/value/context.hh
@@ -1,4 +1,5 @@
#pragma once
+///@file
#include "util.hh"
#include "comparator.hh"
diff --git a/src/libfetchers/attrs.hh b/src/libfetchers/attrs.hh
index e41037633..1a14bb023 100644
--- a/src/libfetchers/attrs.hh
+++ b/src/libfetchers/attrs.hh
@@ -1,4 +1,5 @@
#pragma once
+///@file
#include "types.hh"
diff --git a/src/libfetchers/cache.hh b/src/libfetchers/cache.hh
index 3763ee2a6..ae398d040 100644
--- a/src/libfetchers/cache.hh
+++ b/src/libfetchers/cache.hh
@@ -1,4 +1,5 @@
#pragma once
+///@file
#include "fetchers.hh"
diff --git a/src/libfetchers/fetch-settings.hh b/src/libfetchers/fetch-settings.hh
index 7049dea30..6108a179c 100644
--- a/src/libfetchers/fetch-settings.hh
+++ b/src/libfetchers/fetch-settings.hh
@@ -1,4 +1,5 @@
#pragma once
+///@file
#include "types.hh"
#include "config.hh"
@@ -75,21 +76,25 @@ struct FetchSettings : public Config
Path or URI of the global flake registry.
When empty, disables the global flake registry.
- )"};
+ )",
+ {}, true, Xp::Flakes};
Setting<bool> useRegistries{this, true, "use-registries",
- "Whether to use flake registries to resolve flake references."};
+ "Whether to use flake registries to resolve flake references.",
+ {}, true, Xp::Flakes};
Setting<bool> acceptFlakeConfig{this, false, "accept-flake-config",
- "Whether to accept nix configuration from a flake without prompting."};
+ "Whether to accept nix configuration from a flake without prompting.",
+ {}, true, Xp::Flakes};
Setting<std::string> commitLockFileSummary{
this, "", "commit-lockfile-summary",
R"(
The commit summary to use when committing changed flake lock files. If
empty, the summary is generated based on the action performed.
- )"};
+ )",
+ {}, true, Xp::Flakes};
};
// FIXME: don't use a global variable.
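The trailing constructor arguments added above follow the general `Setting` pattern sketched below; the field name is illustrative and this would sit inside a `Config` subclass such as `FetchSettings`.

```cpp
// The extra arguments bind a setting to an experimental feature, so it
// only takes effect when that feature is enabled.
Setting<bool> exampleFlakeSetting{
    this, false, "example-flake-setting",
    "An illustrative setting that is only meaningful with flakes.",
    {},          // aliases
    true,        // documentDefault
    Xp::Flakes}; // associated experimental feature
```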
diff --git a/src/libfetchers/fetchers.hh b/src/libfetchers/fetchers.hh
index 17da37f47..acdecea57 100644
--- a/src/libfetchers/fetchers.hh
+++ b/src/libfetchers/fetchers.hh
@@ -1,4 +1,5 @@
#pragma once
+///@file
#include "types.hh"
#include "hash.hh"
@@ -63,6 +64,11 @@ public:
one that contains a commit hash or content hash. */
bool isLocked() const { return locked; }
+ /* Check whether the input carries all the info necessary
+ for cache insertion and substitution.
+ These fields are used to uniquely identify cached trees
+ within the "tarball TTL" window without necessarily
+ indicating that the input's origin is unchanged. */
bool hasAllInfo() const;
bool operator ==(const Input & other) const;
diff --git a/src/libfetchers/git.cc b/src/libfetchers/git.cc
index 309a143f5..1da8c9609 100644
--- a/src/libfetchers/git.cc
+++ b/src/libfetchers/git.cc
@@ -266,7 +266,7 @@ struct GitInputScheme : InputScheme
for (auto & [name, value] : url.query) {
if (name == "rev" || name == "ref")
attrs.emplace(name, value);
- else if (name == "shallow" || name == "submodules")
+ else if (name == "shallow" || name == "submodules" || name == "allRefs")
attrs.emplace(name, Explicit<bool> { value == "1" });
else
url2.query.emplace(name, value);
diff --git a/src/libfetchers/registry.hh b/src/libfetchers/registry.hh
index 260a2c460..f57ab1e6b 100644
--- a/src/libfetchers/registry.hh
+++ b/src/libfetchers/registry.hh
@@ -1,4 +1,5 @@
#pragma once
+///@file
#include "types.hh"
#include "fetchers.hh"
diff --git a/src/libmain/common-args.hh b/src/libmain/common-args.hh
index f180d83ce..e7ed0d934 100644
--- a/src/libmain/common-args.hh
+++ b/src/libmain/common-args.hh
@@ -1,4 +1,5 @@
#pragma once
+///@file
#include "args.hh"
diff --git a/src/libmain/loggers.hh b/src/libmain/loggers.hh
index f3c759193..e5721420c 100644
--- a/src/libmain/loggers.hh
+++ b/src/libmain/loggers.hh
@@ -1,4 +1,5 @@
#pragma once
+///@file
#include "types.hh"
diff --git a/src/libmain/progress-bar.cc b/src/libmain/progress-bar.cc
index e9205a5e5..6600ec177 100644
--- a/src/libmain/progress-bar.cc
+++ b/src/libmain/progress-bar.cc
@@ -72,6 +72,7 @@ private:
uint64_t corruptedPaths = 0, untrustedPaths = 0;
bool active = true;
+ bool paused = false;
bool haveUpdate = true;
};
@@ -120,16 +121,28 @@ public:
updateThread.join();
}
+ void pause() override {
+ state_.lock()->paused = true;
+ writeToStderr("\r\e[K");
+ }
+
+ void resume() override {
+ state_.lock()->paused = false;
+ writeToStderr("\r\e[K");
+ state_.lock()->haveUpdate = true;
+ updateCV.notify_one();
+ }
+
bool isVerbose() override
{
return printBuildLogs;
}
- void log(Verbosity lvl, const FormatOrString & fs) override
+ void log(Verbosity lvl, std::string_view s) override
{
if (lvl > verbosity) return;
auto state(state_.lock());
- log(*state, lvl, fs.s);
+ log(*state, lvl, s);
}
void logEI(const ErrorInfo & ei) override
@@ -142,7 +155,7 @@ public:
log(*state, ei.level, oss.str());
}
- void log(State & state, Verbosity lvl, const std::string & s)
+ void log(State & state, Verbosity lvl, std::string_view s)
{
if (state.active) {
writeToStderr("\r\e[K" + filterANSIEscapes(s, !isTTY) + ANSI_NORMAL "\n");
@@ -339,7 +352,7 @@ public:
auto nextWakeup = std::chrono::milliseconds::max();
state.haveUpdate = false;
- if (!state.active) return nextWakeup;
+ if (state.paused || !state.active) return nextWakeup;
std::string line;
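
The new `pause()`/`resume()` overrides above clear the current line and suppress redraws while paused. A hedged usage sketch, assuming the base `Logger` declares these virtuals (they are overridden here) and the global `logger` pointer from `logging.hh` points at the active logger:

```cpp
// Hypothetical helper: bracket an interactive prompt so the progress bar
// does not redraw over the user's input. Assumes nix::logger exposes the
// virtual pause()/resume() overridden above.
#include "logging.hh"

#include <iostream>
#include <string>

static std::string askUser(const std::string & question)
{
    nix::logger->pause();               // erase the bar, stop updates
    std::cout << question << ' ' << std::flush;
    std::string answer;
    std::getline(std::cin, answer);
    nix::logger->resume();              // redraw on the next update
    return answer;
}
```
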
diff --git a/src/libmain/progress-bar.hh b/src/libmain/progress-bar.hh
index 3a76f8448..c3c6e3833 100644
--- a/src/libmain/progress-bar.hh
+++ b/src/libmain/progress-bar.hh
@@ -1,4 +1,5 @@
#pragma once
+///@file
#include "logging.hh"
diff --git a/src/libmain/shared.cc b/src/libmain/shared.cc
index d4871a8e2..37664c065 100644
--- a/src/libmain/shared.cc
+++ b/src/libmain/shared.cc
@@ -84,8 +84,18 @@ void printMissing(ref<Store> store, const StorePathSet & willBuild,
downloadSizeMiB,
narSizeMiB);
}
- for (auto & i : willSubstitute)
- printMsg(lvl, " %s", store->printStorePath(i));
+ std::vector<const StorePath *> willSubstituteSorted = {};
+ std::for_each(willSubstitute.begin(), willSubstitute.end(),
+ [&](const StorePath &p) { willSubstituteSorted.push_back(&p); });
+ std::sort(willSubstituteSorted.begin(), willSubstituteSorted.end(),
+ [](const StorePath *lhs, const StorePath *rhs) {
+ if (lhs->name() == rhs->name())
+ return lhs->to_string() < rhs->to_string();
+ else
+ return lhs->name() < rhs->name();
+ });
+ for (auto p : willSubstituteSorted)
+ printMsg(lvl, " %s", store->printStorePath(*p));
}
if (!unknown.empty()) {
@@ -347,7 +357,7 @@ void parseCmdLine(const std::string & programName, const Strings & args,
void printVersion(const std::string & programName)
{
- std::cout << format("%1% (Nix) %2%") % programName % nixVersion << std::endl;
+ std::cout << fmt("%1% (Nix) %2%", programName, nixVersion) << std::endl;
if (verbosity > lvlInfo) {
Strings cfg;
#if HAVE_BOEHMGC
diff --git a/src/libmain/shared.hh b/src/libmain/shared.hh
index 1715374a6..d915a4a65 100644
--- a/src/libmain/shared.hh
+++ b/src/libmain/shared.hh
@@ -1,4 +1,5 @@
#pragma once
+///@file
#include "util.hh"
#include "args.hh"
diff --git a/src/libstore/binary-cache-store.hh b/src/libstore/binary-cache-store.hh
index abd92a83c..5e52d7844 100644
--- a/src/libstore/binary-cache-store.hh
+++ b/src/libstore/binary-cache-store.hh
@@ -1,4 +1,5 @@
#pragma once
+///@file
#include "crypto.hh"
#include "store-api.hh"
@@ -16,17 +17,33 @@ struct BinaryCacheStoreConfig : virtual StoreConfig
{
using StoreConfig::StoreConfig;
- const Setting<std::string> compression{(StoreConfig*) this, "xz", "compression", "NAR compression method ('xz', 'bzip2', 'gzip', 'zstd', or 'none')"};
- const Setting<bool> writeNARListing{(StoreConfig*) this, false, "write-nar-listing", "whether to write a JSON file listing the files in each NAR"};
- const Setting<bool> writeDebugInfo{(StoreConfig*) this, false, "index-debug-info", "whether to index DWARF debug info files by build ID"};
- const Setting<Path> secretKeyFile{(StoreConfig*) this, "", "secret-key", "path to secret key used to sign the binary cache"};
- const Setting<Path> localNarCache{(StoreConfig*) this, "", "local-nar-cache", "path to a local cache of NARs"};
+ const Setting<std::string> compression{(StoreConfig*) this, "xz", "compression",
+ "NAR compression method (`xz`, `bzip2`, `gzip`, `zstd`, or `none`)."};
+
+ const Setting<bool> writeNARListing{(StoreConfig*) this, false, "write-nar-listing",
+ "Whether to write a JSON file that lists the files in each NAR."};
+
+ const Setting<bool> writeDebugInfo{(StoreConfig*) this, false, "index-debug-info",
+ R"(
+ Whether to index DWARF debug info files by build ID. This allows [`dwarffs`](https://github.com/edolstra/dwarffs) to
+ fetch debug info on demand.
+ )"};
+
+ const Setting<Path> secretKeyFile{(StoreConfig*) this, "", "secret-key",
+ "Path to the secret key used to sign the binary cache."};
+
+ const Setting<Path> localNarCache{(StoreConfig*) this, "", "local-nar-cache",
+ "Path to a local cache of NARs fetched from this binary cache, used by commands such as `nix store cat`."};
+
const Setting<bool> parallelCompression{(StoreConfig*) this, false, "parallel-compression",
- "enable multi-threading compression for NARs, available for xz and zstd only currently"};
+ "Enable multi-threaded compression of NARs. This is currently only available for `xz` and `zstd`."};
+
const Setting<int> compressionLevel{(StoreConfig*) this, -1, "compression-level",
- "specify 'preset level' of compression to be used with NARs: "
- "meaning and accepted range of values depends on compression method selected, "
- "other than -1 which we reserve to indicate Nix defaults should be used"};
+ R"(
+ The *preset level* to be used when compressing NARs.
+ The meaning and accepted values depend on the compression method selected.
+ `-1` specifies that the default compression level should be used.
+ )"};
};
class BinaryCacheStore : public virtual BinaryCacheStoreConfig,
diff --git a/src/libstore/build-result.hh b/src/libstore/build-result.hh
index a5749cf33..a12c599d9 100644
--- a/src/libstore/build-result.hh
+++ b/src/libstore/build-result.hh
@@ -1,4 +1,5 @@
#pragma once
+///@file
#include "realisation.hh"
#include "derived-path.hh"
@@ -52,6 +53,7 @@ struct BuildResult
case LogLimitExceeded: return "LogLimitExceeded";
case NotDeterministic: return "NotDeterministic";
case ResolvesToAlreadyValid: return "ResolvesToAlreadyValid";
+ case NoSubstituters: return "NoSubstituters";
default: return "Unknown";
};
}();
diff --git a/src/libstore/build/derivation-goal.cc b/src/libstore/build/derivation-goal.cc
index 2021d0023..26faf8c8e 100644
--- a/src/libstore/build/derivation-goal.cc
+++ b/src/libstore/build/derivation-goal.cc
@@ -199,10 +199,10 @@ void DerivationGoal::haveDerivation()
parsedDrv = std::make_unique<ParsedDerivation>(drvPath, *drv);
if (!drv->type().hasKnownOutputPaths())
- settings.requireExperimentalFeature(Xp::CaDerivations);
+ experimentalFeatureSettings.require(Xp::CaDerivations);
if (!drv->type().isPure()) {
- settings.requireExperimentalFeature(Xp::ImpureDerivations);
+ experimentalFeatureSettings.require(Xp::ImpureDerivations);
for (auto & [outputName, output] : drv->outputs) {
auto randomPath = StorePath::random(outputPathName(drv->name, outputName));
@@ -336,7 +336,7 @@ void DerivationGoal::gaveUpOnSubstitution()
for (auto & i : dynamic_cast<Derivation *>(drv.get())->inputDrvs) {
/* Ensure that pure, non-fixed-output derivations don't
depend on impure derivations. */
- if (settings.isExperimentalFeatureEnabled(Xp::ImpureDerivations) && drv->type().isPure() && !drv->type().isFixed()) {
+ if (experimentalFeatureSettings.isEnabled(Xp::ImpureDerivations) && drv->type().isPure() && !drv->type().isFixed()) {
auto inputDrv = worker.evalStore.readDerivation(i.first);
if (!inputDrv.type().isPure())
throw Error("pure derivation '%s' depends on impure derivation '%s'",
@@ -477,7 +477,7 @@ void DerivationGoal::inputsRealised()
ca.fixed
/* Can optionally resolve if fixed, which is good
for avoiding unnecessary rebuilds. */
- ? settings.isExperimentalFeatureEnabled(Xp::CaDerivations)
+ ? experimentalFeatureSettings.isEnabled(Xp::CaDerivations)
/* Must resolve if floating and there are any inputs
drvs. */
: true);
@@ -488,7 +488,7 @@ void DerivationGoal::inputsRealised()
}, drvType.raw());
if (resolveDrv && !fullDrv.inputDrvs.empty()) {
- settings.requireExperimentalFeature(Xp::CaDerivations);
+ experimentalFeatureSettings.require(Xp::CaDerivations);
/* We will be able to resolve this derivation based on the
now-known results of dependencies. If so, we become a
@@ -732,7 +732,7 @@ void replaceValidPath(const Path & storePath, const Path & tmpPath)
tmpPath (the replacement), so we have to move it out of the
way first. We'd better not be interrupted here, because if
we're repairing (say) Glibc, we end up with a broken system. */
- Path oldPath = (format("%1%.old-%2%-%3%") % storePath % getpid() % random()).str();
+ Path oldPath = fmt("%1%.old-%2%-%3%", storePath, getpid(), random());
if (pathExists(storePath))
movePath(storePath, oldPath);
@@ -911,7 +911,11 @@ void DerivationGoal::buildDone()
msg += line;
msg += "\n";
}
- msg += fmt("For full logs, run '" ANSI_BOLD "nix log %s" ANSI_NORMAL "'.",
+ auto nixLogCommand = experimentalFeatureSettings.isEnabled(Xp::NixCommand)
+ ? "nix log"
+ : "nix-store -l";
+ msg += fmt("For full logs, run '" ANSI_BOLD "%s %s" ANSI_NORMAL "'.",
+ nixLogCommand,
worker.store.printStorePath(drvPath));
}
@@ -1352,7 +1356,7 @@ std::pair<bool, DrvOutputs> DerivationGoal::checkPathValidity()
};
}
auto drvOutput = DrvOutput{info.outputHash, i.first};
- if (settings.isExperimentalFeatureEnabled(Xp::CaDerivations)) {
+ if (experimentalFeatureSettings.isEnabled(Xp::CaDerivations)) {
if (auto real = worker.store.queryRealisation(drvOutput)) {
info.known = {
.path = real->outPath,
diff --git a/src/libstore/build/derivation-goal.hh b/src/libstore/build/derivation-goal.hh
index 707e38b4b..f43ce22af 100644
--- a/src/libstore/build/derivation-goal.hh
+++ b/src/libstore/build/derivation-goal.hh
@@ -1,4 +1,5 @@
#pragma once
+///@file
#include "parsed-derivations.hh"
#include "lock.hh"
diff --git a/src/libstore/build/drv-output-substitution-goal.cc b/src/libstore/build/drv-output-substitution-goal.cc
index b7f7b5ab1..b30957c84 100644
--- a/src/libstore/build/drv-output-substitution-goal.cc
+++ b/src/libstore/build/drv-output-substitution-goal.cc
@@ -61,20 +61,25 @@ void DrvOutputSubstitutionGoal::tryNext()
// FIXME: Make async
// outputInfo = sub->queryRealisation(id);
- outPipe.create();
- promise = decltype(promise)();
+
+ /* The callback of the curl download below can outlive `this` (if
+ some other error occurs), so it must not touch `this`. So put
+ the shared state in a separate refcounted object. */
+ downloadState = std::make_shared<DownloadState>();
+ downloadState->outPipe.create();
sub->queryRealisation(
- id, { [&](std::future<std::shared_ptr<const Realisation>> res) {
+ id,
+ { [downloadState(downloadState)](std::future<std::shared_ptr<const Realisation>> res) {
try {
- Finally updateStats([this]() { outPipe.writeSide.close(); });
- promise.set_value(res.get());
+ Finally updateStats([&]() { downloadState->outPipe.writeSide.close(); });
+ downloadState->promise.set_value(res.get());
} catch (...) {
- promise.set_exception(std::current_exception());
+ downloadState->promise.set_exception(std::current_exception());
}
} });
- worker.childStarted(shared_from_this(), {outPipe.readSide.get()}, true, false);
+ worker.childStarted(shared_from_this(), {downloadState->outPipe.readSide.get()}, true, false);
state = &DrvOutputSubstitutionGoal::realisationFetched;
}
@@ -84,7 +89,7 @@ void DrvOutputSubstitutionGoal::realisationFetched()
worker.childTerminated(this);
try {
- outputInfo = promise.get_future().get();
+ outputInfo = downloadState->promise.get_future().get();
} catch (std::exception & e) {
printError(e.what());
substituterFailed = true;
@@ -155,7 +160,7 @@ void DrvOutputSubstitutionGoal::work()
void DrvOutputSubstitutionGoal::handleEOF(int fd)
{
- if (fd == outPipe.readSide.get()) worker.wakeUp(shared_from_this());
+ if (fd == downloadState->outPipe.readSide.get()) worker.wakeUp(shared_from_this());
}
diff --git a/src/libstore/build/drv-output-substitution-goal.hh b/src/libstore/build/drv-output-substitution-goal.hh
index 948dbda8f..3b6620b76 100644
--- a/src/libstore/build/drv-output-substitution-goal.hh
+++ b/src/libstore/build/drv-output-substitution-goal.hh
@@ -1,4 +1,5 @@
#pragma once
+///@file
#include "store-api.hh"
#include "goal.hh"
@@ -16,7 +17,7 @@ class Worker;
// 2. Substitute the corresponding output path
// 3. Register the output info
class DrvOutputSubstitutionGoal : public Goal {
-private:
+
// The drv output we're trying to substitute
DrvOutput id;
@@ -30,9 +31,13 @@ private:
/* The current substituter. */
std::shared_ptr<Store> sub;
- Pipe outPipe;
- std::thread thr;
- std::promise<std::shared_ptr<const Realisation>> promise;
+ struct DownloadState
+ {
+ Pipe outPipe;
+ std::promise<std::shared_ptr<const Realisation>> promise;
+ };
+
+ std::shared_ptr<DownloadState> downloadState;
/* Whether a substituter failed. */
bool substituterFailed = false;
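
The comment in the `.cc` hunk above explains why the pipe and promise move into a refcounted `DownloadState`: the curl callback can outlive the goal. A self-contained sketch of the same pattern, with illustrative names and independent of the Nix classes:

```cpp
// Minimal illustration of the pattern above: state shared with an async
// callback lives behind a shared_ptr captured by value, so it stays valid
// even if the object that started the operation is destroyed first.
#include <functional>
#include <future>
#include <memory>
#include <string>

struct SharedState
{
    std::promise<std::string> promise;
};

class Requester
{
    std::shared_ptr<SharedState> state;

public:
    std::future<std::string> start(
        std::function<void(std::function<void(std::string)>)> asyncOp)
    {
        state = std::make_shared<SharedState>();
        auto st = state;                       // captured by value below
        asyncOp([st](std::string result) {
            st->promise.set_value(std::move(result));  // safe even if *this is gone
        });
        return st->promise.get_future();
    }
};
```
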
diff --git a/src/libstore/build/goal.cc b/src/libstore/build/goal.cc
index 58e805f55..d59b94797 100644
--- a/src/libstore/build/goal.cc
+++ b/src/libstore/build/goal.cc
@@ -78,9 +78,9 @@ void Goal::amDone(ExitCode result, std::optional<Error> ex)
}
-void Goal::trace(const FormatOrString & fs)
+void Goal::trace(std::string_view s)
{
- debug("%1%: %2%", name, fs.s);
+ debug("%1%: %2%", name, s);
}
}
diff --git a/src/libstore/build/goal.hh b/src/libstore/build/goal.hh
index 35121c5d9..924a8bbd5 100644
--- a/src/libstore/build/goal.hh
+++ b/src/libstore/build/goal.hh
@@ -1,4 +1,5 @@
#pragma once
+///@file
#include "types.hh"
#include "store-api.hh"
@@ -88,7 +89,7 @@ struct Goal : public std::enable_shared_from_this<Goal>
abort();
}
- void trace(const FormatOrString & fs);
+ void trace(std::string_view s);
std::string getName()
{
diff --git a/src/libstore/build/hook-instance.cc b/src/libstore/build/hook-instance.cc
index cb58a1f02..075ad554f 100644
--- a/src/libstore/build/hook-instance.cc
+++ b/src/libstore/build/hook-instance.cc
@@ -35,7 +35,10 @@ HookInstance::HookInstance()
/* Fork the hook. */
pid = startProcess([&]() {
- commonChildInit(fromHook);
+ if (dup2(fromHook.writeSide.get(), STDERR_FILENO) == -1)
+ throw SysError("cannot pipe standard error into log file");
+
+ commonChildInit();
if (chdir("/") == -1) throw SysError("changing into /");
diff --git a/src/libstore/build/hook-instance.hh b/src/libstore/build/hook-instance.hh
index 9e8cff128..6bf60b297 100644
--- a/src/libstore/build/hook-instance.hh
+++ b/src/libstore/build/hook-instance.hh
@@ -1,4 +1,5 @@
#pragma once
+///@file
#include "logging.hh"
#include "serialise.hh"
diff --git a/src/libstore/build/local-derivation-goal.cc b/src/libstore/build/local-derivation-goal.cc
index a961d8eed..e22180670 100644
--- a/src/libstore/build/local-derivation-goal.cc
+++ b/src/libstore/build/local-derivation-goal.cc
@@ -292,7 +292,7 @@ void LocalDerivationGoal::closeReadPipes()
if (hook) {
DerivationGoal::closeReadPipes();
} else
- builderOut.readSide = -1;
+ builderOut.close();
}
@@ -413,7 +413,7 @@ void LocalDerivationGoal::startBuilder()
)
{
#if __linux__
- settings.requireExperimentalFeature(Xp::Cgroups);
+ experimentalFeatureSettings.require(Xp::Cgroups);
auto cgroupFS = getCgroupFS();
if (!cgroupFS)
@@ -650,7 +650,7 @@ void LocalDerivationGoal::startBuilder()
/* Clean up the chroot directory automatically. */
autoDelChroot = std::make_shared<AutoDelete>(chrootRootDir);
- printMsg(lvlChatty, format("setting up chroot environment in '%1%'") % chrootRootDir);
+ printMsg(lvlChatty, "setting up chroot environment in '%1%'", chrootRootDir);
// FIXME: make this 0700
if (mkdir(chrootRootDir.c_str(), buildUser && buildUser->getUIDCount() != 1 ? 0755 : 0750) == -1)
@@ -753,8 +753,7 @@ void LocalDerivationGoal::startBuilder()
throw Error("home directory '%1%' exists; please remove it to assure purity of builds without sandboxing", homeDir);
if (useChroot && settings.preBuildHook != "" && dynamic_cast<Derivation *>(drv.get())) {
- printMsg(lvlChatty, format("executing pre-build hook '%1%'")
- % settings.preBuildHook);
+ printMsg(lvlChatty, "executing pre-build hook '%1%'", settings.preBuildHook);
auto args = useChroot ? Strings({worker.store.printStorePath(drvPath), chrootRootDir}) :
Strings({ worker.store.printStorePath(drvPath) });
enum BuildHookState {
@@ -803,15 +802,13 @@ void LocalDerivationGoal::startBuilder()
/* Create the log file. */
Path logFile = openLogFile();
- /* Create a pipe to get the output of the builder. */
- //builderOut.create();
-
- builderOut.readSide = posix_openpt(O_RDWR | O_NOCTTY);
- if (!builderOut.readSide)
+ /* Create a pseudoterminal to get the output of the builder. */
+ builderOut = posix_openpt(O_RDWR | O_NOCTTY);
+ if (!builderOut)
throw SysError("opening pseudoterminal master");
// FIXME: not thread-safe, use ptsname_r
- std::string slaveName(ptsname(builderOut.readSide.get()));
+ std::string slaveName = ptsname(builderOut.get());
if (buildUser) {
if (chmod(slaveName.c_str(), 0600))
@@ -822,34 +819,34 @@ void LocalDerivationGoal::startBuilder()
}
#if __APPLE__
else {
- if (grantpt(builderOut.readSide.get()))
+ if (grantpt(builderOut.get()))
throw SysError("granting access to pseudoterminal slave");
}
#endif
- #if 0
- // Mount the pt in the sandbox so that the "tty" command works.
- // FIXME: this doesn't work with the new devpts in the sandbox.
- if (useChroot)
- dirsInChroot[slaveName] = {slaveName, false};
- #endif
-
- if (unlockpt(builderOut.readSide.get()))
+ if (unlockpt(builderOut.get()))
throw SysError("unlocking pseudoterminal");
- builderOut.writeSide = open(slaveName.c_str(), O_RDWR | O_NOCTTY);
- if (!builderOut.writeSide)
- throw SysError("opening pseudoterminal slave");
+ /* Open the slave side of the pseudoterminal and use it as stderr. */
+ auto openSlave = [&]()
+ {
+ AutoCloseFD builderOut = open(slaveName.c_str(), O_RDWR | O_NOCTTY);
+ if (!builderOut)
+ throw SysError("opening pseudoterminal slave");
- // Put the pt into raw mode to prevent \n -> \r\n translation.
- struct termios term;
- if (tcgetattr(builderOut.writeSide.get(), &term))
- throw SysError("getting pseudoterminal attributes");
+ // Put the pt into raw mode to prevent \n -> \r\n translation.
+ struct termios term;
+ if (tcgetattr(builderOut.get(), &term))
+ throw SysError("getting pseudoterminal attributes");
- cfmakeraw(&term);
+ cfmakeraw(&term);
- if (tcsetattr(builderOut.writeSide.get(), TCSANOW, &term))
- throw SysError("putting pseudoterminal into raw mode");
+ if (tcsetattr(builderOut.get(), TCSANOW, &term))
+ throw SysError("putting pseudoterminal into raw mode");
+
+ if (dup2(builderOut.get(), STDERR_FILENO) == -1)
+ throw SysError("cannot pipe standard error into log file");
+ };
buildResult.startTime = time(0);
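
The hunk above replaces the builder pipe with a pseudoterminal whose slave side is opened in the child, put into raw mode, and dup2'ed onto stderr. A stand-alone sketch of that POSIX sequence, with error handling reduced to perror and early return:

```cpp
// Stand-alone sketch of the pseudoterminal setup used above: allocate the
// master, open the slave, disable \n -> \r\n translation, then make the
// slave the process's stderr.
#include <cstdio>
#include <fcntl.h>
#include <stdlib.h>     // posix_openpt, grantpt, unlockpt, ptsname
#include <termios.h>
#include <unistd.h>

int main()
{
    int master = posix_openpt(O_RDWR | O_NOCTTY);
    if (master < 0) { perror("posix_openpt"); return 1; }

    if (grantpt(master) || unlockpt(master)) { perror("pt setup"); return 1; }

    const char * slaveName = ptsname(master);          // FIXME: not thread-safe
    int slave = open(slaveName, O_RDWR | O_NOCTTY);
    if (slave < 0) { perror("open slave"); return 1; }

    termios term;
    if (tcgetattr(slave, &term)) { perror("tcgetattr"); return 1; }
    cfmakeraw(&term);                                  // raw mode: no \n -> \r\n
    if (tcsetattr(slave, TCSANOW, &term)) { perror("tcsetattr"); return 1; }

    if (dup2(slave, STDERR_FILENO) == -1) { perror("dup2"); return 1; }

    fprintf(stderr, "this line now travels through the pty\n");
    return 0;
}
```
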
@@ -898,7 +895,16 @@ void LocalDerivationGoal::startBuilder()
usingUserNamespace = userNamespacesSupported();
+ Pipe sendPid;
+ sendPid.create();
+
Pid helper = startProcess([&]() {
+ sendPid.readSide.close();
+
+ /* We need to open the slave early, before
+ CLONE_NEWUSER. Otherwise we get EPERM when running as
+ root. */
+ openSlave();
/* Drop additional groups here because we can't do it
after we've created the new user namespace. FIXME:
@@ -920,11 +926,12 @@ void LocalDerivationGoal::startBuilder()
pid_t child = startProcess([&]() { runChild(); }, options);
- writeFull(builderOut.writeSide.get(),
- fmt("%d %d\n", usingUserNamespace, child));
+ writeFull(sendPid.writeSide.get(), fmt("%d\n", child));
_exit(0);
});
+ sendPid.writeSide.close();
+
if (helper.wait() != 0)
throw Error("unable to start build process");
@@ -936,10 +943,9 @@ void LocalDerivationGoal::startBuilder()
userNamespaceSync.writeSide = -1;
});
- auto ss = tokenizeString<std::vector<std::string>>(readLine(builderOut.readSide.get()));
- assert(ss.size() == 2);
- usingUserNamespace = ss[0] == "1";
- pid = string2Int<pid_t>(ss[1]).value();
+ auto ss = tokenizeString<std::vector<std::string>>(readLine(sendPid.readSide.get()));
+ assert(ss.size() == 1);
+ pid = string2Int<pid_t>(ss[0]).value();
if (usingUserNamespace) {
/* Set the UID/GID mapping of the builder's user namespace
@@ -994,21 +1000,21 @@ void LocalDerivationGoal::startBuilder()
#endif
{
pid = startProcess([&]() {
+ openSlave();
runChild();
});
}
/* parent */
pid.setSeparatePG(true);
- builderOut.writeSide = -1;
- worker.childStarted(shared_from_this(), {builderOut.readSide.get()}, true, true);
+ worker.childStarted(shared_from_this(), {builderOut.get()}, true, true);
/* Check if setting up the build environment failed. */
std::vector<std::string> msgs;
while (true) {
std::string msg = [&]() {
try {
- return readLine(builderOut.readSide.get());
+ return readLine(builderOut.get());
} catch (Error & e) {
auto status = pid.wait();
e.addTrace({}, "while waiting for the build environment for '%s' to initialize (%s, previous messages: %s)",
@@ -1020,7 +1026,7 @@ void LocalDerivationGoal::startBuilder()
}();
if (msg.substr(0, 1) == "\2") break;
if (msg.substr(0, 1) == "\1") {
- FdSource source(builderOut.readSide.get());
+ FdSource source(builderOut.get());
auto ex = readError(source);
ex.addTrace({}, "while setting up the build environment");
throw ex;
@@ -1104,7 +1110,7 @@ void LocalDerivationGoal::initEnv()
env["NIX_STORE"] = worker.store.storeDir;
/* The maximum number of cores to utilize for parallel building. */
- env["NIX_BUILD_CORES"] = (format("%d") % settings.buildCores).str();
+ env["NIX_BUILD_CORES"] = fmt("%d", settings.buildCores);
initTmpDir();
@@ -1155,10 +1161,10 @@ void LocalDerivationGoal::writeStructuredAttrs()
writeFile(tmpDir + "/.attrs.sh", rewriteStrings(jsonSh, inputRewrites));
chownToBuilder(tmpDir + "/.attrs.sh");
- env["NIX_ATTRS_SH_FILE"] = tmpDir + "/.attrs.sh";
+ env["NIX_ATTRS_SH_FILE"] = tmpDirInSandbox + "/.attrs.sh";
writeFile(tmpDir + "/.attrs.json", rewriteStrings(json.dump(), inputRewrites));
chownToBuilder(tmpDir + "/.attrs.json");
- env["NIX_ATTRS_JSON_FILE"] = tmpDir + "/.attrs.json";
+ env["NIX_ATTRS_JSON_FILE"] = tmpDirInSandbox + "/.attrs.json";
}
}
@@ -1414,7 +1420,7 @@ struct RestrictedStore : public virtual RestrictedStoreConfig, public virtual Lo
void LocalDerivationGoal::startDaemon()
{
- settings.requireExperimentalFeature(Xp::RecursiveNix);
+ experimentalFeatureSettings.require(Xp::RecursiveNix);
Store::Params params;
params["path-info-cache-size"] = "0";
@@ -1650,7 +1656,7 @@ void LocalDerivationGoal::runChild()
try { /* child */
- commonChildInit(builderOut);
+ commonChildInit();
try {
setupSeccomp();
@@ -2063,7 +2069,7 @@ void LocalDerivationGoal::runChild()
/* The tmpDir in scope points at the temporary build directory for our derivation. Some packages try different mechanisms
to find temporary directories, so we want to open up a broader place for them to dump their files, if needed. */
- Path globalTmpDir = canonPath(getEnv("TMPDIR").value_or("/tmp"), true);
+ Path globalTmpDir = canonPath(getEnvNonEmpty("TMPDIR").value_or("/tmp"), true);
/* They don't like trailing slashes on subpath directives */
if (globalTmpDir.back() == '/') globalTmpDir.pop_back();
@@ -2274,7 +2280,7 @@ DrvOutputs LocalDerivationGoal::registerOutputs()
bool discardReferences = false;
if (auto structuredAttrs = parsedDrv->getStructuredAttrs()) {
if (auto udr = get(*structuredAttrs, "unsafeDiscardReferences")) {
- settings.requireExperimentalFeature(Xp::DiscardReferences);
+ experimentalFeatureSettings.require(Xp::DiscardReferences);
if (auto output = get(*udr, outputName)) {
if (!output->is_boolean())
throw Error("attribute 'unsafeDiscardReferences.\"%s\"' of derivation '%s' must be a Boolean", outputName, drvPath.to_string());
@@ -2694,7 +2700,7 @@ DrvOutputs LocalDerivationGoal::registerOutputs()
},
.outPath = newInfo.path
};
- if (settings.isExperimentalFeatureEnabled(Xp::CaDerivations)
+ if (experimentalFeatureSettings.isEnabled(Xp::CaDerivations)
&& drv->type().isPure())
{
signRealisation(thisRealisation);
@@ -2892,7 +2898,7 @@ void LocalDerivationGoal::deleteTmpDir(bool force)
bool LocalDerivationGoal::isReadDesc(int fd)
{
return (hook && DerivationGoal::isReadDesc(fd)) ||
- (!hook && fd == builderOut.readSide.get());
+ (!hook && fd == builderOut.get());
}
diff --git a/src/libstore/build/local-derivation-goal.hh b/src/libstore/build/local-derivation-goal.hh
index 34c4e9187..1c4b4e3fe 100644
--- a/src/libstore/build/local-derivation-goal.hh
+++ b/src/libstore/build/local-derivation-goal.hh
@@ -1,4 +1,5 @@
#pragma once
+///@file
#include "derivation-goal.hh"
#include "local-store.hh"
@@ -24,8 +25,9 @@ struct LocalDerivationGoal : public DerivationGoal
/* The path of the temporary directory in the sandbox. */
Path tmpDirInSandbox;
- /* Pipe for the builder's standard output/error. */
- Pipe builderOut;
+ /* Master side of the pseudoterminal used for the builder's
+ standard output/error. */
+ AutoCloseFD builderOut;
/* Pipe for synchronising updates to the builder namespaces. */
Pipe userNamespaceSync;
diff --git a/src/libstore/build/personality.hh b/src/libstore/build/personality.hh
index 30e4f4062..91b730fab 100644
--- a/src/libstore/build/personality.hh
+++ b/src/libstore/build/personality.hh
@@ -1,4 +1,5 @@
#pragma once
+///@file
#include <string>
diff --git a/src/libstore/build/substitution-goal.hh b/src/libstore/build/substitution-goal.hh
index a73f8e666..1add9eb14 100644
--- a/src/libstore/build/substitution-goal.hh
+++ b/src/libstore/build/substitution-goal.hh
@@ -1,4 +1,5 @@
#pragma once
+///@file
#include "lock.hh"
#include "store-api.hh"
diff --git a/src/libstore/build/worker.hh b/src/libstore/build/worker.hh
index 6d68d3cf1..d840b3b3f 100644
--- a/src/libstore/build/worker.hh
+++ b/src/libstore/build/worker.hh
@@ -1,4 +1,5 @@
#pragma once
+///@file
#include "types.hh"
#include "lock.hh"
diff --git a/src/libstore/builtins.hh b/src/libstore/builtins.hh
index 66597e456..d201fb3ac 100644
--- a/src/libstore/builtins.hh
+++ b/src/libstore/builtins.hh
@@ -1,4 +1,5 @@
#pragma once
+///@file
#include "derivations.hh"
diff --git a/src/libstore/builtins/buildenv.hh b/src/libstore/builtins/buildenv.hh
index a018de3af..0923c2adb 100644
--- a/src/libstore/builtins/buildenv.hh
+++ b/src/libstore/builtins/buildenv.hh
@@ -1,4 +1,5 @@
#pragma once
+///@file
#include "derivations.hh"
#include "store-api.hh"
diff --git a/src/libstore/content-address.hh b/src/libstore/content-address.hh
index f6a6f5140..19fdfc1eb 100644
--- a/src/libstore/content-address.hh
+++ b/src/libstore/content-address.hh
@@ -1,48 +1,80 @@
#pragma once
+///@file
#include <variant>
#include "hash.hh"
namespace nix {
+/**
+ * An enumeration of the ways we can serialize file system objects.
+ */
enum struct FileIngestionMethod : uint8_t {
+ /**
+ * Flat-file hashing. Directly ingests the contents of a single file.
+ */
Flat = false,
+ /**
+ * Recursive (or NAR) hashing. Serializes the file-system object in Nix
+ * Archive format and ingest that
+ */
Recursive = true
};
+/**
+ * Somewhat obscure, used by \ref Derivation derivations and
+ * `builtins.toFile` currently.
+ */
struct TextHash {
+ /**
+ * Hash of the contents of the text/file.
+ */
Hash hash;
};
-/// Pair of a hash, and how the file system was ingested
+/**
+ * For paths computed by makeFixedOutputPath.
+ */
struct FixedOutputHash {
+ /**
+ * How the file system objects are serialized
+ */
FileIngestionMethod method;
+ /**
+ * Hash of that serialization
+ */
Hash hash;
+
std::string printMethodAlgo() const;
};
-/*
- We've accumulated several types of content-addressed paths over the years;
- fixed-output derivations support multiple hash algorithms and serialisation
- methods (flat file vs NAR). Thus, ‘ca’ has one of the following forms:
-
- * ‘text:sha256:<sha256 hash of file contents>’: For paths
- computed by makeTextPath() / addTextToStore().
-
- * ‘fixed:<r?>:<ht>:<h>’: For paths computed by
- makeFixedOutputPath() / addToStore().
-*/
+/**
+ * We've accumulated several types of content-addressed paths over the
+ * years; fixed-output derivations support multiple hash algorithms and
+ * serialisation methods (flat file vs NAR). Thus, ‘ca’ has one of the
+ * following forms:
+ *
+ * - ‘text:sha256:<sha256 hash of file contents>’: For paths
+ * computed by Store::makeTextPath() / Store::addTextToStore().
+ *
+ * - ‘fixed:<r?>:<ht>:<h>’: For paths computed by
+ * Store::makeFixedOutputPath() / Store::addToStore().
+ */
typedef std::variant<
- TextHash, // for paths computed by makeTextPath() / addTextToStore
- FixedOutputHash // for path computed by makeFixedOutputPath
+ TextHash,
+ FixedOutputHash
> ContentAddress;
-/* Compute the prefix to the hash algorithm which indicates how the files were
- ingested. */
+/**
+ * Compute the prefix to the hash algorithm which indicates how the
+ * files were ingested.
+ */
std::string makeFileIngestionPrefix(const FileIngestionMethod m);
-/* Compute the content-addressability assertion (ValidPathInfo::ca)
- for paths created by makeFixedOutputPath() / addToStore(). */
+/**
+ * Compute the content-addressability assertion (ValidPathInfo::ca) for
+ * paths created by Store::makeFixedOutputPath() / Store::addToStore().
+ */
std::string makeFixedOutputCA(FileIngestionMethod method, const Hash & hash);
std::string renderContentAddress(ContentAddress ca);
@@ -65,6 +97,11 @@ struct FixedOutputHashMethod {
HashType hashType;
};
+/**
+ * Ways of content addressing but not a complete ContentAddress.
+ *
+ * A ContentAddress without a Hash.
+ */
typedef std::variant<
TextHashMethod,
FixedOutputHashMethod
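
The rewritten comment above spells out the two textual forms a content address can take. A hedged sketch that renders both, assuming `hashString()` from `hash.hh`; `renderContentAddress()` and the struct layouts are as declared in this hunk:

```cpp
// Hedged sketch: render the ‘text:…’ and ‘fixed:…’ forms described above.
// Assumes hashString() from hash.hh; everything else is declared in this hunk.
#include "content-address.hh"
#include "hash.hh"

#include <iostream>

int main()
{
    using namespace nix;

    auto h = hashString(htSHA256, "hello world");

    // ‘text:sha256:<hash>’
    std::cout << renderContentAddress(ContentAddress { TextHash { .hash = h } }) << "\n";

    // ‘fixed:r:sha256:<hash>’ (recursive/NAR ingestion)
    std::cout << renderContentAddress(ContentAddress {
        FixedOutputHash { .method = FileIngestionMethod::Recursive, .hash = h }
    }) << "\n";
}
```
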
diff --git a/src/libstore/crypto.hh b/src/libstore/crypto.hh
index 03f85c103..a98f2a3b8 100644
--- a/src/libstore/crypto.hh
+++ b/src/libstore/crypto.hh
@@ -1,4 +1,5 @@
#pragma once
+///@file
#include "types.hh"
diff --git a/src/libstore/daemon.cc b/src/libstore/daemon.cc
index 5e6fd011f..656ad4587 100644
--- a/src/libstore/daemon.cc
+++ b/src/libstore/daemon.cc
@@ -67,12 +67,12 @@ struct TunnelLogger : public Logger
state->pendingMsgs.push_back(s);
}
- void log(Verbosity lvl, const FormatOrString & fs) override
+ void log(Verbosity lvl, std::string_view s) override
{
if (lvl > verbosity) return;
StringSink buf;
- buf << STDERR_NEXT << (fs.s + "\n");
+ buf << STDERR_NEXT << (s + "\n");
enqueueMsg(buf.s);
}
@@ -231,10 +231,10 @@ struct ClientSettings
try {
if (name == "ssh-auth-sock") // obsolete
;
- else if (name == settings.experimentalFeatures.name) {
+ else if (name == experimentalFeatureSettings.experimentalFeatures.name) {
// We don’t want to forward the experimental features to
// the daemon, as that could cause some pretty weird stuff
- if (parseFeatures(tokenizeString<StringSet>(value)) != settings.experimentalFeatures.get())
+ if (parseFeatures(tokenizeString<StringSet>(value)) != experimentalFeatureSettings.experimentalFeatures.get())
debug("Ignoring the client-specified experimental features");
} else if (name == settings.pluginFiles.name) {
if (tokenizeString<Paths>(value) != settings.pluginFiles.get())
diff --git a/src/libstore/daemon.hh b/src/libstore/daemon.hh
index 8c765615c..67340a05b 100644
--- a/src/libstore/daemon.hh
+++ b/src/libstore/daemon.hh
@@ -1,4 +1,5 @@
#pragma once
+///@file
#include "serialise.hh"
#include "store-api.hh"
diff --git a/src/libstore/derivations.cc b/src/libstore/derivations.cc
index 05dc9a3cc..06cc69056 100644
--- a/src/libstore/derivations.cc
+++ b/src/libstore/derivations.cc
@@ -221,7 +221,7 @@ static DerivationOutput parseDerivationOutput(const Store & store,
}
const auto hashType = parseHashType(hashAlgo);
if (hash == "impure") {
- settings.requireExperimentalFeature(Xp::ImpureDerivations);
+ experimentalFeatureSettings.require(Xp::ImpureDerivations);
assert(pathS == "");
return DerivationOutput::Impure {
.method = std::move(method),
@@ -236,7 +236,7 @@ static DerivationOutput parseDerivationOutput(const Store & store,
},
};
} else {
- settings.requireExperimentalFeature(Xp::CaDerivations);
+ experimentalFeatureSettings.require(Xp::CaDerivations);
assert(pathS == "");
return DerivationOutput::CAFloating {
.method = std::move(method),
diff --git a/src/libstore/derivations.hh b/src/libstore/derivations.hh
index 8456b29e7..e12bd2119 100644
--- a/src/libstore/derivations.hh
+++ b/src/libstore/derivations.hh
@@ -1,4 +1,5 @@
#pragma once
+///@file
#include "path.hh"
#include "types.hh"
@@ -17,42 +18,72 @@ class Store;
/* Abstract syntax of derivations. */
-/* The traditional non-fixed-output derivation type. */
+/**
+ * The traditional non-fixed-output derivation type.
+ */
struct DerivationOutputInputAddressed
{
StorePath path;
};
-/* Fixed-output derivations, whose output paths are content addressed
- according to that fixed output. */
+/**
+ * Fixed-output derivations, whose output paths are content
+ * addressed according to that fixed output.
+ */
struct DerivationOutputCAFixed
{
- FixedOutputHash hash; /* hash used for expected hash computation */
+ /**
+ * hash used for expected hash computation
+ */
+ FixedOutputHash hash;
+
+ /**
+ * Return the \ref StorePath "store path" corresponding to this output
+ *
+ * @param drvName The name of the derivation this is an output of, without the `.drv`.
+ * @param outputName The name of this output.
+ */
StorePath path(const Store & store, std::string_view drvName, std::string_view outputName) const;
};
-/* Floating-output derivations, whose output paths are content addressed, but
- not fixed, and so are dynamically calculated from whatever the output ends
- up being. */
+/**
+ * Floating-output derivations, whose output paths are content
+ * addressed, but not fixed, and so are dynamically calculated from
+ * whatever the output ends up being.
+ */
struct DerivationOutputCAFloating
{
- /* information used for expected hash computation */
+ /**
+ * How the file system objects will be serialized for hashing
+ */
FileIngestionMethod method;
+
+ /**
+ * How the serialization will be hashed
+ */
HashType hashType;
};
-/* Input-addressed output which depends on a (CA) derivation whose hash isn't
- * known yet.
+/**
+ * Input-addressed output which depends on a (CA) derivation whose hash
+ * isn't known yet.
*/
struct DerivationOutputDeferred {};
-/* Impure output which is moved to a content-addressed location (like
- CAFloating) but isn't registered as a realization.
+/**
+ * Impure output which is moved to a content-addressed location (like
+ * CAFloating) but isn't registered as a realization.
*/
struct DerivationOutputImpure
{
- /* information used for expected hash computation */
+ /**
+ * How the file system objects will be serialized for hashing
+ */
FileIngestionMethod method;
+
+ /**
+ * How the serialization will be hashed
+ */
HashType hashType;
};
@@ -64,6 +95,9 @@ typedef std::variant<
DerivationOutputImpure
> _DerivationOutputRaw;
+/**
+ * A single output of a BasicDerivation (and Derivation).
+ */
struct DerivationOutput : _DerivationOutputRaw
{
using Raw = _DerivationOutputRaw;
@@ -75,9 +109,12 @@ struct DerivationOutput : _DerivationOutputRaw
using Deferred = DerivationOutputDeferred;
using Impure = DerivationOutputImpure;
- /* Note, when you use this function you should make sure that you're passing
- the right derivation name. When in doubt, you should use the safer
- interface provided by BasicDerivation::outputsAndOptPaths */
+ /**
+ * \note when you use this function you should make sure that you're
+ * passing the right derivation name. When in doubt, you should use
+ * the safer interface provided by
+ * BasicDerivation::outputsAndOptPaths
+ */
std::optional<StorePath> path(const Store & store, std::string_view drvName, std::string_view outputName) const;
inline const Raw & raw() const {
@@ -92,26 +129,61 @@ struct DerivationOutput : _DerivationOutputRaw
typedef std::map<std::string, DerivationOutput> DerivationOutputs;
-/* These are analogues to the previous DerivationOutputs data type, but they
- also contains, for each output, the (optional) store path in which it would
- be written. To calculate values of these types, see the corresponding
- functions in BasicDerivation */
+/**
+ * These are analogues to the previous DerivationOutputs data type,
+ * but they also contain, for each output, the (optional) store
+ * path in which it would be written. To calculate values of these
+ * types, see the corresponding functions in BasicDerivation.
+ */
typedef std::map<std::string, std::pair<DerivationOutput, std::optional<StorePath>>>
DerivationOutputsAndOptPaths;
-/* For inputs that are sub-derivations, we specify exactly which
- output IDs we are interested in. */
+/**
+ * For inputs that are sub-derivations, we specify exactly which
+ * output IDs we are interested in.
+ */
typedef std::map<StorePath, StringSet> DerivationInputs;
+/**
+ * Input-addressed derivation types
+ */
struct DerivationType_InputAddressed {
+ /**
+ * True iff the derivation type can't be determined statically,
+ * for instance because it (transitively) depends on a content-addressed
+ * derivation.
+ */
bool deferred;
};
+/**
+ * Content-addressed derivation types
+ */
struct DerivationType_ContentAddressed {
+ /**
+ * Whether the derivation should be built safely inside a sandbox.
+ */
bool sandboxed;
+ /**
+ * Whether the derivation's outputs' content-addresses are "fixed"
+ * or "floating.
+ *
+ * - Fixed: content-addresses are written down as part of the
+ * derivation itself. If the outputs don't end up matching the
+ * build fails.
+ *
+ * - Floating: content-addresses are not written down, we do not
+ * know them until we perform the build.
+ */
bool fixed;
};
+/**
+ * Impure derivation type
+ *
+ * This is similar at build time to the content-addressed, non-sandboxed,
+ * non-fixed type, but has some restrictions on its usage.
+ */
struct DerivationType_Impure {
};
@@ -128,30 +200,38 @@ struct DerivationType : _DerivationTypeRaw {
using ContentAddressed = DerivationType_ContentAddressed;
using Impure = DerivationType_Impure;
- /* Do the outputs of the derivation have paths calculated from their content,
- or from the derivation itself? */
+ /**
+ * Do the outputs of the derivation have paths calculated from their
+ * content, or from the derivation itself?
+ */
bool isCA() const;
- /* Is the content of the outputs fixed a-priori via a hash? Never true for
- non-CA derivations. */
+ /**
+ * Is the content of the outputs fixed <em>a priori</em> via a hash?
+ * Never true for non-CA derivations.
+ */
bool isFixed() const;
- /* Whether the derivation is fully sandboxed. If false, the
- sandbox is opened up, e.g. the derivation has access to the
- network. Note that whether or not we actually sandbox the
- derivation is controlled separately. Always true for non-CA
- derivations. */
+ /**
+ * Whether the derivation is fully sandboxed. If false, the sandbox
+ * is opened up, e.g. the derivation has access to the network. Note
+ * that whether or not we actually sandbox the derivation is
+ * controlled separately. Always true for non-CA derivations.
+ */
bool isSandboxed() const;
- /* Whether the derivation is expected to produce the same result
- every time, and therefore it only needs to be built once. This
- is only false for derivations that have the attribute '__impure
- = true'. */
+ /**
+ * Whether the derivation is expected to produce the same result
+ * every time, and therefore it only needs to be built once. This is
+ * only false for derivations that have the attribute '__impure =
+ * true'.
+ */
bool isPure() const;
- /* Does the derivation knows its own output paths?
- Only true when there's no floating-ca derivation involved in the
- closure, or if fixed output.
+ /**
+ * Does the derivation know its own output paths?
+ * Only true when there's no floating-ca derivation involved in the
+ * closure, or if fixed output.
*/
bool hasKnownOutputPaths() const;
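
The predicates documented above classify a derivation by how its outputs are addressed. A small illustrative sketch of branching on them; the ordering matters, since fixed-output derivations are also content-addressed:

```cpp
// Illustrative only: classify a derivation with the predicates documented
// above. `drv` is assumed to come from readDerivation() or similar.
#include "derivations.hh"

#include <iostream>

void describe(const nix::BasicDerivation & drv)
{
    auto type = drv.type();

    if (!type.isPure())
        std::cout << "impure: not expected to produce the same result each time\n";
    else if (type.isFixed())
        std::cout << "fixed-output: the content hash is declared up front\n";
    else if (type.isCA())
        std::cout << "floating CA: output paths follow from the build result\n";
    else
        std::cout << "input-addressed: output paths follow from the derivation\n";

    if (!type.hasKnownOutputPaths())
        std::cout << "(output paths cannot be computed statically)\n";
}
```
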
@@ -175,15 +255,21 @@ struct BasicDerivation
bool isBuiltin() const;
- /* Return true iff this is a fixed-output derivation. */
+ /**
+ * Return true iff this is a fixed-output derivation.
+ */
DerivationType type() const;
- /* Return the output names of a derivation. */
+ /**
+ * Return the output names of a derivation.
+ */
StringSet outputNames() const;
- /* Calculates the maps that contains all the DerivationOutputs, but
- augmented with knowledge of the Store paths they would be written
- into. */
+ /**
+ * Calculates the map that contains all the DerivationOutputs, but
+ * augmented with knowledge of the Store paths they would be written
+ * into.
+ */
DerivationOutputsAndOptPaths outputsAndOptPaths(const Store & store) const;
static std::string_view nameFromPath(const StorePath & storePath);
@@ -191,23 +277,33 @@ struct BasicDerivation
struct Derivation : BasicDerivation
{
- DerivationInputs inputDrvs; /* inputs that are sub-derivations */
+ /**
+ * inputs that are sub-derivations
+ */
+ DerivationInputs inputDrvs;
- /* Print a derivation. */
+ /**
+ * Print a derivation.
+ */
std::string unparse(const Store & store, bool maskOutputs,
std::map<std::string, StringSet> * actualInputs = nullptr) const;
- /* Return the underlying basic derivation but with these changes:
-
- 1. Input drvs are emptied, but the outputs of them that were used are
- added directly to input sources.
-
- 2. Input placeholders are replaced with realized input store paths. */
+ /**
+ * Return the underlying basic derivation but with these changes:
+ *
+ * 1. Input drvs are emptied, but the outputs of them that were used
+ * are added directly to input sources.
+ *
+ * 2. Input placeholders are replaced with realized input store
+ * paths.
+ */
std::optional<BasicDerivation> tryResolve(Store & store) const;
- /* Like the above, but instead of querying the Nix database for
- realisations, uses a given mapping from input derivation paths
- + output names to actual output store paths. */
+ /**
+ * Like the above, but instead of querying the Nix database for
+ * realisations, uses a given mapping from input derivation paths +
+ * output names to actual output store paths.
+ */
std::optional<BasicDerivation> tryResolve(
Store & store,
const std::map<std::pair<StorePath, std::string>, StorePath> & inputDrvOutputs) const;
@@ -222,81 +318,108 @@ struct Derivation : BasicDerivation
class Store;
-/* Write a derivation to the Nix store, and return its path. */
+/**
+ * Write a derivation to the Nix store, and return its path.
+ */
StorePath writeDerivation(Store & store,
const Derivation & drv,
RepairFlag repair = NoRepair,
bool readOnly = false);
-/* Read a derivation from a file. */
+/**
+ * Read a derivation from a file.
+ */
Derivation parseDerivation(const Store & store, std::string && s, std::string_view name);
-// FIXME: remove
+/**
+ * \todo Remove.
+ *
+ * Use Path::isDerivation instead.
+ */
bool isDerivation(std::string_view fileName);
-/* Calculate the name that will be used for the store path for this
- output.
-
- This is usually <drv-name>-<output-name>, but is just <drv-name> when
- the output name is "out". */
+/**
+ * Calculate the name that will be used for the store path for this
+ * output.
+ *
+ * This is usually <drv-name>-<output-name>, but is just <drv-name> when
+ * the output name is "out".
+ */
std::string outputPathName(std::string_view drvName, std::string_view outputName);
-// The hashes modulo of a derivation.
-//
-// Each output is given a hash, although in practice only the content-addressed
-// derivations (fixed-output or not) will have a different hash for each
-// output.
+/**
+ * The hashes modulo of a derivation.
+ *
+ * Each output is given a hash, although in practice only the content-addressed
+ * derivations (fixed-output or not) will have a different hash for each
+ * output.
+ */
struct DrvHash {
+ /**
+ * Map from output names to hashes
+ */
std::map<std::string, Hash> hashes;
enum struct Kind : bool {
- // Statically determined derivations.
- // This hash will be directly used to compute the output paths
+ /**
+ * Statically determined derivations.
+ * This hash will be directly used to compute the output paths
+ */
Regular,
- // Floating-output derivations (and their reverse dependencies).
+
+ /**
+ * Floating-output derivations (and their reverse dependencies).
+ */
Deferred,
};
+ /**
+ * The kind of derivation this is, simplified for just "derivation hash
+ * modulo" purposes.
+ */
Kind kind;
};
void operator |= (DrvHash::Kind & self, const DrvHash::Kind & other) noexcept;
-/* Returns hashes with the details of fixed-output subderivations
- expunged.
-
- A fixed-output derivation is a derivation whose outputs have a
- specified content hash and hash algorithm. (Currently they must have
- exactly one output (`out'), which is specified using the `outputHash'
- and `outputHashAlgo' attributes, but the algorithm doesn't assume
- this.) We don't want changes to such derivations to propagate upwards
- through the dependency graph, changing output paths everywhere.
-
- For instance, if we change the url in a call to the `fetchurl'
- function, we do not want to rebuild everything depending on it---after
- all, (the hash of) the file being downloaded is unchanged. So the
- *output paths* should not change. On the other hand, the *derivation
- paths* should change to reflect the new dependency graph.
-
- For fixed-output derivations, this returns a map from the name of
- each output to its hash, unique up to the output's contents.
-
- For regular derivations, it returns a single hash of the derivation
- ATerm, after subderivations have been likewise expunged from that
- derivation.
+/**
+ * Returns hashes with the details of fixed-output subderivations
+ * expunged.
+ *
+ * A fixed-output derivation is a derivation whose outputs have a
+ * specified content hash and hash algorithm. (Currently they must have
+ * exactly one output (`out'), which is specified using the `outputHash'
+ * and `outputHashAlgo' attributes, but the algorithm doesn't assume
+ * this.) We don't want changes to such derivations to propagate upwards
+ * through the dependency graph, changing output paths everywhere.
+ *
+ * For instance, if we change the url in a call to the `fetchurl'
+ * function, we do not want to rebuild everything depending on it---after
+ * all, (the hash of) the file being downloaded is unchanged. So the
+ * *output paths* should not change. On the other hand, the *derivation
+ * paths* should change to reflect the new dependency graph.
+ *
+ * For fixed-output derivations, this returns a map from the name of
+ * each output to its hash, unique up to the output's contents.
+ *
+ * For regular derivations, it returns a single hash of the derivation
+ * ATerm, after subderivations have been likewise expunged from that
+ * derivation.
*/
DrvHash hashDerivationModulo(Store & store, const Derivation & drv, bool maskOutputs);
-/*
- Return a map associating each output to a hash that uniquely identifies its
- derivation (modulo the self-references).
-
- FIXME: what is the Hash in this map?
+/**
+ * Return a map associating each output to a hash that uniquely identifies its
+ * derivation (modulo the self-references).
+ *
+ * \todo What is the Hash in this map?
*/
std::map<std::string, Hash> staticOutputHashes(Store & store, const Derivation & drv);
-/* Memoisation of hashDerivationModulo(). */
+/**
+ * Memoisation of hashDerivationModulo().
+ */
typedef std::map<StorePath, DrvHash> DrvHashes;
// FIXME: global, though at least thread-safe.
@@ -308,21 +431,25 @@ struct Sink;
Source & readDerivation(Source & in, const Store & store, BasicDerivation & drv, std::string_view name);
void writeDerivation(Sink & out, const Store & store, const BasicDerivation & drv);
-/* This creates an opaque and almost certainly unique string
- deterministically from the output name.
-
- It is used as a placeholder to allow derivations to refer to their
- own outputs without needing to use the hash of a derivation in
- itself, making the hash near-impossible to calculate. */
+/**
+ * This creates an opaque and almost certainly unique string
+ * deterministically from the output name.
+ *
+ * It is used as a placeholder to allow derivations to refer to their
+ * own outputs without needing to use the hash of a derivation in
+ * itself, making the hash near-impossible to calculate.
+ */
std::string hashPlaceholder(const std::string_view outputName);
-/* This creates an opaque and almost certainly unique string
- deterministically from a derivation path and output name.
-
- It is used as a placeholder to allow derivations to refer to
- content-addressed paths whose content --- and thus the path
- themselves --- isn't yet known. This occurs when a derivation has a
- dependency which is a CA derivation. */
+/**
+ * This creates an opaque and almost certainly unique string
+ * deterministically from a derivation path and output name.
+ *
+ * It is used as a placeholder to allow derivations to refer to
+ * content-addressed paths whose content --- and thus the paths
+ * themselves --- isn't yet known. This occurs when a derivation has a
+ * dependency which is a CA derivation.
+ */
std::string downstreamPlaceholder(const Store & store, const StorePath & drvPath, std::string_view outputName);
extern const Hash impureOutputHash;
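
The comments above describe the two placeholder helpers that stand in for output paths that cannot be known yet. A hedged sketch calling both, with `store` and `drvPath` assumed to come from elsewhere:

```cpp
// Hedged sketch of the placeholder helpers documented above.
// hashPlaceholder() needs only an output name; downstreamPlaceholder() also
// needs the store and the path of the (CA) input derivation.
#include "derivations.hh"

#include <iostream>

void showPlaceholders(const nix::Store & store, const nix::StorePath & drvPath)
{
    // Placeholder for one of *this* derivation's own outputs.
    std::cout << nix::hashPlaceholder("out") << "\n";

    // Placeholder for the "out" output of a content-addressed dependency.
    std::cout << nix::downstreamPlaceholder(store, drvPath, "out") << "\n";
}
```
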
diff --git a/src/libstore/derived-path.cc b/src/libstore/derived-path.cc
index e0d86a42f..e5f0f1b33 100644
--- a/src/libstore/derived-path.cc
+++ b/src/libstore/derived-path.cc
@@ -105,7 +105,7 @@ RealisedPath::Set BuiltPath::toRealisedPaths(Store & store) const
auto drvHashes =
staticOutputHashes(store, store.readDerivation(p.drvPath));
for (auto& [outputName, outputPath] : p.outputs) {
- if (settings.isExperimentalFeatureEnabled(
+ if (experimentalFeatureSettings.isEnabled(
Xp::CaDerivations)) {
auto drvOutput = get(drvHashes, outputName);
if (!drvOutput)
diff --git a/src/libstore/derived-path.hh b/src/libstore/derived-path.hh
index 9e0cce377..2155776b1 100644
--- a/src/libstore/derived-path.hh
+++ b/src/libstore/derived-path.hh
@@ -1,4 +1,5 @@
#pragma once
+///@file
#include "util.hh"
#include "path.hh"
@@ -105,7 +106,7 @@ using _BuiltPathRaw = std::variant<
>;
/**
- * A built path. Similar to a `DerivedPath`, but enriched with the corresponding
+ * A built path. Similar to a DerivedPath, but enriched with the corresponding
* output path(s).
*/
struct BuiltPath : _BuiltPathRaw {
diff --git a/src/libstore/dummy-store.cc b/src/libstore/dummy-store.cc
index b4fbe0b70..16e5fafd7 100644
--- a/src/libstore/dummy-store.cc
+++ b/src/libstore/dummy-store.cc
@@ -7,6 +7,13 @@ struct DummyStoreConfig : virtual StoreConfig {
using StoreConfig::StoreConfig;
const std::string name() override { return "Dummy Store"; }
+
+ std::string doc() override
+ {
+ return
+ #include "dummy-store.md"
+ ;
+ }
};
struct DummyStore : public virtual DummyStoreConfig, public virtual Store
diff --git a/src/libstore/dummy-store.md b/src/libstore/dummy-store.md
new file mode 100644
index 000000000..eb7b4ba0d
--- /dev/null
+++ b/src/libstore/dummy-store.md
@@ -0,0 +1,13 @@
+R"(
+
+**Store URL format**: `dummy://`
+
+This store type represents a store that contains no store paths and
+cannot be written to. It's useful when you want to use the Nix
+evaluator when no actual Nix store exists, e.g.
+
+```console
+# nix eval --store dummy:// --expr '1 + 2'
+```
+
+)"
diff --git a/src/libstore/export-import.cc b/src/libstore/export-import.cc
index 9875da909..4eb838b68 100644
--- a/src/libstore/export-import.cc
+++ b/src/libstore/export-import.cc
@@ -16,7 +16,7 @@ void Store::exportPaths(const StorePathSet & paths, Sink & sink)
//logger->incExpected(doneLabel, sorted.size());
for (auto & path : sorted) {
- //Activity act(*logger, lvlInfo, format("exporting path '%s'") % path);
+ //Activity act(*logger, lvlInfo, "exporting path '%s'", path);
sink << 1;
exportPath(path, sink);
//logger->incProgress(doneLabel);
@@ -71,7 +71,7 @@ StorePaths Store::importPaths(Source & source, CheckSigsFlag checkSigs)
auto path = parseStorePath(readString(source));
- //Activity act(*logger, lvlInfo, format("importing path '%s'") % info.path);
+ //Activity act(*logger, lvlInfo, "importing path '%s'", info.path);
auto references = worker_proto::read(*this, source, Phantom<StorePathSet> {});
auto deriver = readString(source);
diff --git a/src/libstore/filetransfer.cc b/src/libstore/filetransfer.cc
index 1c8676a59..2346accbe 100644
--- a/src/libstore/filetransfer.cc
+++ b/src/libstore/filetransfer.cc
@@ -88,6 +88,10 @@ struct curlFileTransfer : public FileTransfer
{request.uri}, request.parentAct)
, callback(std::move(callback))
, finalSink([this](std::string_view data) {
+ if (errorSink) {
+ (*errorSink)(data);
+ }
+
if (this->request.dataCallback) {
auto httpStatus = getHTTPStatus();
@@ -163,8 +167,6 @@ struct curlFileTransfer : public FileTransfer
}
}
- if (errorSink)
- (*errorSink)({(char *) contents, realSize});
(*decompressionSink)({(char *) contents, realSize});
return realSize;
@@ -183,7 +185,7 @@ struct curlFileTransfer : public FileTransfer
{
size_t realSize = size * nmemb;
std::string line((char *) contents, realSize);
- printMsg(lvlVomit, format("got header for '%s': %s") % request.uri % trim(line));
+ printMsg(lvlVomit, "got header for '%s': %s", request.uri, trim(line));
static std::regex statusLine("HTTP/[^ ]+ +[0-9]+(.*)", std::regex::extended | std::regex::icase);
std::smatch match;
if (std::regex_match(line, match, statusLine)) {
@@ -207,7 +209,7 @@ struct curlFileTransfer : public FileTransfer
long httpStatus = 0;
curl_easy_getinfo(req, CURLINFO_RESPONSE_CODE, &httpStatus);
if (result.etag == request.expectedETag && httpStatus == 200) {
- debug(format("shutting down on 200 HTTP response with expected ETag"));
+ debug("shutting down on 200 HTTP response with expected ETag");
return 0;
}
} else if (name == "content-encoding")
@@ -316,7 +318,7 @@ struct curlFileTransfer : public FileTransfer
if (request.verifyTLS) {
if (settings.caFile != "")
- curl_easy_setopt(req, CURLOPT_CAINFO, settings.caFile.c_str());
+ curl_easy_setopt(req, CURLOPT_CAINFO, settings.caFile.get().c_str());
} else {
curl_easy_setopt(req, CURLOPT_SSL_VERIFYPEER, 0);
curl_easy_setopt(req, CURLOPT_SSL_VERIFYHOST, 0);
@@ -405,6 +407,10 @@ struct curlFileTransfer : public FileTransfer
err = Misc;
} else {
// Don't bother retrying on certain cURL errors either
+
+ // Allow selecting a subset of enum values
+ #pragma GCC diagnostic push
+ #pragma GCC diagnostic ignored "-Wswitch-enum"
switch (code) {
case CURLE_FAILED_INIT:
case CURLE_URL_MALFORMAT:
@@ -425,6 +431,7 @@ struct curlFileTransfer : public FileTransfer
default: // Shut up warnings
break;
}
+ #pragma GCC diagnostic pop
}
attempt++;
diff --git a/src/libstore/filetransfer.hh b/src/libstore/filetransfer.hh
index 07d58f53a..378c6ff78 100644
--- a/src/libstore/filetransfer.hh
+++ b/src/libstore/filetransfer.hh
@@ -1,4 +1,5 @@
#pragma once
+///@file
#include "types.hh"
#include "hash.hh"
@@ -87,39 +88,56 @@ struct FileTransfer
{
virtual ~FileTransfer() { }
- /* Enqueue a data transfer request, returning a future to the result of
- the download. The future may throw a FileTransferError
- exception. */
+ /**
+ * Enqueue a data transfer request, returning a future to the result of
+ * the download. The future may throw a FileTransferError
+ * exception.
+ */
virtual void enqueueFileTransfer(const FileTransferRequest & request,
Callback<FileTransferResult> callback) = 0;
std::future<FileTransferResult> enqueueFileTransfer(const FileTransferRequest & request);
- /* Synchronously download a file. */
+ /**
+ * Synchronously download a file.
+ */
FileTransferResult download(const FileTransferRequest & request);
- /* Synchronously upload a file. */
+ /**
+ * Synchronously upload a file.
+ */
FileTransferResult upload(const FileTransferRequest & request);
- /* Download a file, writing its data to a sink. The sink will be
- invoked on the thread of the caller. */
+ /**
+ * Download a file, writing its data to a sink. The sink will be
+ * invoked on the thread of the caller.
+ */
void download(FileTransferRequest && request, Sink & sink);
enum Error { NotFound, Forbidden, Misc, Transient, Interrupted };
};
-/* Return a shared FileTransfer object. Using this object is preferred
- because it enables connection reuse and HTTP/2 multiplexing. */
+/**
+ * @return a shared FileTransfer object.
+ *
+ * Using this object is preferred because it enables connection reuse
+ * and HTTP/2 multiplexing.
+ */
ref<FileTransfer> getFileTransfer();
-/* Return a new FileTransfer object. */
+/**
+ * @return a new FileTransfer object
+ *
+ * Prefer getFileTransfer() to this; see its docs for why.
+ */
ref<FileTransfer> makeFileTransfer();
class FileTransferError : public Error
{
public:
FileTransfer::Error error;
- std::optional<std::string> response; // intentionally optional
+ /// intentionally optional
+ std::optional<std::string> response;
template<typename... Args>
FileTransferError(FileTransfer::Error error, std::optional<std::string> response, const Args & ... args);
diff --git a/src/libstore/fs-accessor.hh b/src/libstore/fs-accessor.hh
index c825e84f2..1c98a42d7 100644
--- a/src/libstore/fs-accessor.hh
+++ b/src/libstore/fs-accessor.hh
@@ -1,4 +1,5 @@
#pragma once
+///@file
#include "types.hh"
diff --git a/src/libstore/gc-store.hh b/src/libstore/gc-store.hh
index b3cbbad74..17f043a63 100644
--- a/src/libstore/gc-store.hh
+++ b/src/libstore/gc-store.hh
@@ -1,4 +1,5 @@
#pragma once
+///@file
#include "store-api.hh"
diff --git a/src/libstore/gc.cc b/src/libstore/gc.cc
index 996f26a95..0038ec802 100644
--- a/src/libstore/gc.cc
+++ b/src/libstore/gc.cc
@@ -34,8 +34,7 @@ static void makeSymlink(const Path & link, const Path & target)
createDirs(dirOf(link));
/* Create the new symlink. */
- Path tempLink = (format("%1%.tmp-%2%-%3%")
- % link % getpid() % random()).str();
+ Path tempLink = fmt("%1%.tmp-%2%-%3%", link, getpid(), random());
createSymlink(target, tempLink);
/* Atomically replace the old one. */
@@ -197,7 +196,7 @@ void LocalStore::findTempRoots(Roots & tempRoots, bool censor)
pid_t pid = std::stoi(i.name);
- debug(format("reading temporary root file '%1%'") % path);
+ debug("reading temporary root file '%1%'", path);
AutoCloseFD fd(open(path.c_str(), O_CLOEXEC | O_RDWR, 0666));
if (!fd) {
/* It's okay if the file has disappeared. */
@@ -263,7 +262,7 @@ void LocalStore::findRoots(const Path & path, unsigned char type, Roots & roots)
target = absPath(target, dirOf(path));
if (!pathExists(target)) {
if (isInDir(path, stateDir + "/" + gcRootsDir + "/auto")) {
- printInfo(format("removing stale link from '%1%' to '%2%'") % path % target);
+ printInfo("removing stale link from '%1%' to '%2%'", path, target);
unlink(path.c_str());
}
} else {
@@ -372,29 +371,29 @@ void LocalStore::findRuntimeRoots(Roots & roots, bool censor)
while (errno = 0, ent = readdir(procDir.get())) {
checkInterrupt();
if (std::regex_match(ent->d_name, digitsRegex)) {
- readProcLink(fmt("/proc/%s/exe" ,ent->d_name), unchecked);
- readProcLink(fmt("/proc/%s/cwd", ent->d_name), unchecked);
-
- auto fdStr = fmt("/proc/%s/fd", ent->d_name);
- auto fdDir = AutoCloseDir(opendir(fdStr.c_str()));
- if (!fdDir) {
- if (errno == ENOENT || errno == EACCES)
- continue;
- throw SysError("opening %1%", fdStr);
- }
- struct dirent * fd_ent;
- while (errno = 0, fd_ent = readdir(fdDir.get())) {
- if (fd_ent->d_name[0] != '.')
- readProcLink(fmt("%s/%s", fdStr, fd_ent->d_name), unchecked);
- }
- if (errno) {
- if (errno == ESRCH)
- continue;
- throw SysError("iterating /proc/%1%/fd", ent->d_name);
- }
- fdDir.reset();
-
try {
+ readProcLink(fmt("/proc/%s/exe" ,ent->d_name), unchecked);
+ readProcLink(fmt("/proc/%s/cwd", ent->d_name), unchecked);
+
+ auto fdStr = fmt("/proc/%s/fd", ent->d_name);
+ auto fdDir = AutoCloseDir(opendir(fdStr.c_str()));
+ if (!fdDir) {
+ if (errno == ENOENT || errno == EACCES)
+ continue;
+ throw SysError("opening %1%", fdStr);
+ }
+ struct dirent * fd_ent;
+ while (errno = 0, fd_ent = readdir(fdDir.get())) {
+ if (fd_ent->d_name[0] != '.')
+ readProcLink(fmt("%s/%s", fdStr, fd_ent->d_name), unchecked);
+ }
+ if (errno) {
+ if (errno == ESRCH)
+ continue;
+ throw SysError("iterating /proc/%1%/fd", ent->d_name);
+ }
+ fdDir.reset();
+
auto mapFile = fmt("/proc/%s/maps", ent->d_name);
auto mapLines = tokenizeString<std::vector<std::string>>(readFile(mapFile), "\n");
for (const auto & line : mapLines) {
@@ -863,7 +862,7 @@ void LocalStore::collectGarbage(const GCOptions & options, GCResults & results)
continue;
}
- printMsg(lvlTalkative, format("deleting unused link '%1%'") % path);
+ printMsg(lvlTalkative, "deleting unused link '%1%'", path);
if (unlink(path.c_str()) == -1)
throw SysError("deleting '%1%'", path);
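
Many hunks in this diff perform the same mechanical change: replacing the lazily applied boost-style `format("…") % x` chain with the variadic `fmt()` / `debug()` / `printMsg()` call style. A rough standalone sketch of the two spellings, assuming Boost.Format is available; `myFmt` is an illustrative stand-in, not the real helper:

```cpp
#include <boost/format.hpp>
#include <iostream>
#include <string>

// Hypothetical stand-in for a variadic fmt(): feed each argument to a
// boost::format object, mirroring what the old call sites spelled out
// by hand with operator%.
template<typename... Args>
std::string myFmt(const std::string & pattern, const Args & ... args)
{
    boost::format f(pattern);
    (f % ... % args);                 // C++17 fold expression
    return f.str();
}

int main()
{
    std::string link = "/nix/var/nix/gcroots/foo";
    // Old style, as removed by this diff:
    std::string a = (boost::format("%1%.tmp-%2%-%3%") % link % 123 % 456).str();
    // New style, as introduced by this diff:
    std::string b = myFmt("%1%.tmp-%2%-%3%", link, 123, 456);
    std::cout << (a == b) << "\n";    // prints 1
}
```
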
diff --git a/src/libstore/globals.cc b/src/libstore/globals.cc
index 8e33a3dec..823b4af74 100644
--- a/src/libstore/globals.cc
+++ b/src/libstore/globals.cc
@@ -30,28 +30,23 @@ static GlobalConfig::Register rSettings(&settings);
Settings::Settings()
: nixPrefix(NIX_PREFIX)
- , nixStore(canonPath(getEnv("NIX_STORE_DIR").value_or(getEnv("NIX_STORE").value_or(NIX_STORE_DIR))))
- , nixDataDir(canonPath(getEnv("NIX_DATA_DIR").value_or(NIX_DATA_DIR)))
- , nixLogDir(canonPath(getEnv("NIX_LOG_DIR").value_or(NIX_LOG_DIR)))
- , nixStateDir(canonPath(getEnv("NIX_STATE_DIR").value_or(NIX_STATE_DIR)))
- , nixConfDir(canonPath(getEnv("NIX_CONF_DIR").value_or(NIX_CONF_DIR)))
+ , nixStore(canonPath(getEnvNonEmpty("NIX_STORE_DIR").value_or(getEnvNonEmpty("NIX_STORE").value_or(NIX_STORE_DIR))))
+ , nixDataDir(canonPath(getEnvNonEmpty("NIX_DATA_DIR").value_or(NIX_DATA_DIR)))
+ , nixLogDir(canonPath(getEnvNonEmpty("NIX_LOG_DIR").value_or(NIX_LOG_DIR)))
+ , nixStateDir(canonPath(getEnvNonEmpty("NIX_STATE_DIR").value_or(NIX_STATE_DIR)))
+ , nixConfDir(canonPath(getEnvNonEmpty("NIX_CONF_DIR").value_or(NIX_CONF_DIR)))
, nixUserConfFiles(getUserConfigFiles())
- , nixBinDir(canonPath(getEnv("NIX_BIN_DIR").value_or(NIX_BIN_DIR)))
+ , nixBinDir(canonPath(getEnvNonEmpty("NIX_BIN_DIR").value_or(NIX_BIN_DIR)))
, nixManDir(canonPath(NIX_MAN_DIR))
- , nixDaemonSocketFile(canonPath(getEnv("NIX_DAEMON_SOCKET_PATH").value_or(nixStateDir + DEFAULT_SOCKET_PATH)))
+ , nixDaemonSocketFile(canonPath(getEnvNonEmpty("NIX_DAEMON_SOCKET_PATH").value_or(nixStateDir + DEFAULT_SOCKET_PATH)))
{
buildUsersGroup = getuid() == 0 ? "nixbld" : "";
lockCPU = getEnv("NIX_AFFINITY_HACK") == "1";
allowSymlinkedStore = getEnv("NIX_IGNORE_SYMLINK_STORE") == "1";
- caFile = getEnv("NIX_SSL_CERT_FILE").value_or(getEnv("SSL_CERT_FILE").value_or(""));
- if (caFile == "") {
- for (auto & fn : {"/etc/ssl/certs/ca-certificates.crt", "/nix/var/nix/profiles/default/etc/ssl/certs/ca-bundle.crt"})
- if (pathExists(fn)) {
- caFile = fn;
- break;
- }
- }
+ auto sslOverride = getEnv("NIX_SSL_CERT_FILE").value_or(getEnv("SSL_CERT_FILE").value_or(""));
+ if (sslOverride != "")
+ caFile = sslOverride;
/* Backwards compatibility. */
auto s = getEnv("NIX_REMOTE_SYSTEMS");
@@ -166,18 +161,6 @@ StringSet Settings::getDefaultExtraPlatforms()
return extraPlatforms;
}
-bool Settings::isExperimentalFeatureEnabled(const ExperimentalFeature & feature)
-{
- auto & f = experimentalFeatures.get();
- return std::find(f.begin(), f.end(), feature) != f.end();
-}
-
-void Settings::requireExperimentalFeature(const ExperimentalFeature & feature)
-{
- if (!isExperimentalFeatureEnabled(feature))
- throw MissingExperimentalFeature(feature);
-}
-
bool Settings::isWSL1()
{
struct utsname utsbuf;
@@ -187,6 +170,13 @@ bool Settings::isWSL1()
return hasSuffix(utsbuf.release, "-Microsoft");
}
+Path Settings::getDefaultSSLCertFile()
+{
+ for (auto & fn : {"/etc/ssl/certs/ca-certificates.crt", "/nix/var/nix/profiles/default/etc/ssl/certs/ca-bundle.crt"})
+ if (pathExists(fn)) return fn;
+ return "";
+}
+
const std::string nixVersion = PACKAGE_VERSION;
NLOHMANN_JSON_SERIALIZE_ENUM(SandboxMode, {
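
The net effect of the `caFile` / `getDefaultSSLCertFile` change is a two-step lookup: an environment override wins, otherwise the first existing well-known bundle is used. A self-contained sketch of that policy (helper names are illustrative, not the Nix API):

```cpp
#include <cstdlib>
#include <filesystem>
#include <string>

// Illustrative restatement of the CA-file selection after this change:
// NIX_SSL_CERT_FILE / SSL_CERT_FILE take precedence, otherwise fall back
// to the first existing default bundle.
static std::string defaultSslCertFile()
{
    for (auto fn : {"/etc/ssl/certs/ca-certificates.crt",
                    "/nix/var/nix/profiles/default/etc/ssl/certs/ca-bundle.crt"})
        if (std::filesystem::exists(fn)) return fn;
    return "";
}

std::string resolveCaFile()
{
    for (auto var : {"NIX_SSL_CERT_FILE", "SSL_CERT_FILE"})
        if (auto v = std::getenv(var); v && *v) return v;
    return defaultSslCertFile();
}
```
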
diff --git a/src/libstore/globals.hh b/src/libstore/globals.hh
index 93086eaf8..9b04c9e78 100644
--- a/src/libstore/globals.hh
+++ b/src/libstore/globals.hh
@@ -1,9 +1,9 @@
#pragma once
+///@file
#include "types.hh"
#include "config.hh"
#include "util.hh"
-#include "experimental-features.hh"
#include <map>
#include <limits>
@@ -64,6 +64,8 @@ class Settings : public Config {
bool isWSL1();
+ Path getDefaultSSLCertFile();
+
public:
Settings();
@@ -97,7 +99,12 @@ public:
Path nixDaemonSocketFile;
Setting<std::string> storeUri{this, getEnv("NIX_REMOTE").value_or("auto"), "store",
- "The default Nix store to use."};
+ R"(
+ The [URL of the Nix store](@docroot@/command-ref/new-cli/nix3-help-stores.md#store-url-format)
+ to use for most operations.
+ See [`nix help-stores`](@docroot@/command-ref/new-cli/nix3-help-stores.md)
+ for supported store types and settings.
+ )"};
Setting<bool> keepFailed{this, false, "keep-failed",
"Whether to keep temporary directories of failed builds."};
@@ -678,8 +685,9 @@ public:
Strings{"https://cache.nixos.org/"},
"substituters",
R"(
- A list of URLs of substituters, separated by whitespace. Substituters
- are tried based on their Priority value, which each substituter can set
+ A list of [URLs of Nix stores](@docroot@/command-ref/new-cli/nix3-help-stores.md#store-url-format)
+ to be used as substituters, separated by whitespace.
+ Substituters are tried based on their Priority value, which each substituter can set
independently. Lower value means higher priority.
The default is `https://cache.nixos.org`, with a Priority of 40.
@@ -697,7 +705,8 @@ public:
Setting<StringSet> trustedSubstituters{
this, {}, "trusted-substituters",
R"(
- A list of URLs of substituters, separated by whitespace. These are
+ A list of [URLs of Nix stores](@docroot@/command-ref/new-cli/nix3-help-stores.md#store-url-format),
+ separated by whitespace. These are
not used by default, but can be enabled by users of the Nix daemon
by specifying `--option substituters urls` on the command
line. Unprivileged users are only allowed to pass a subset of the
@@ -826,8 +835,22 @@ public:
> `.netrc`.
)"};
- /* Path to the SSL CA file used */
- Path caFile;
+ Setting<Path> caFile{
+ this, getDefaultSSLCertFile(), "ssl-cert-file",
+ R"(
+ The path of a file containing CA certificates used to
+ authenticate `https://` downloads. Nix by default will use
+ the first of the following files that exists:
+
+ 1. `/etc/ssl/certs/ca-certificates.crt`
+ 2. `/nix/var/nix/profiles/default/etc/ssl/certs/ca-bundle.crt`
+
+ The path can be overridden by the following environment
+ variables, in order of precedence:
+
+ 1. `NIX_SSL_CERT_FILE`
+ 2. `SSL_CERT_FILE`
+ )"};
#if __linux__
Setting<bool> filterSyscalls{
@@ -932,13 +955,6 @@ public:
are loaded as plugins (non-recursively).
)"};
- Setting<std::set<ExperimentalFeature>> experimentalFeatures{this, {}, "experimental-features",
- "Experimental Nix features to enable."};
-
- bool isExperimentalFeatureEnabled(const ExperimentalFeature &);
-
- void requireExperimentalFeature(const ExperimentalFeature &);
-
Setting<size_t> narBufferSize{this, 32 * 1024 * 1024, "nar-buffer-size",
"Maximum size of NARs before spilling them to disk."};
diff --git a/src/libstore/http-binary-cache-store.cc b/src/libstore/http-binary-cache-store.cc
index 1479822a9..238fd1d98 100644
--- a/src/libstore/http-binary-cache-store.cc
+++ b/src/libstore/http-binary-cache-store.cc
@@ -12,7 +12,14 @@ struct HttpBinaryCacheStoreConfig : virtual BinaryCacheStoreConfig
{
using BinaryCacheStoreConfig::BinaryCacheStoreConfig;
- const std::string name() override { return "Http Binary Cache Store"; }
+ const std::string name() override { return "HTTP Binary Cache Store"; }
+
+ std::string doc() override
+ {
+ return
+ #include "http-binary-cache-store.md"
+ ;
+ }
};
class HttpBinaryCacheStore : public virtual HttpBinaryCacheStoreConfig, public virtual BinaryCacheStore
diff --git a/src/libstore/http-binary-cache-store.md b/src/libstore/http-binary-cache-store.md
new file mode 100644
index 000000000..20c26d0c2
--- /dev/null
+++ b/src/libstore/http-binary-cache-store.md
@@ -0,0 +1,8 @@
+R"(
+
+**Store URL format**: `http://...`, `https://...`
+
+This store allows a binary cache to be accessed via the HTTP
+protocol.
+
+)"
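
The new `doc()` overrides rely on a small trick visible in the file above: each store's `.md` file begins with `R"(` and ends with `)"`, so `#include`-ing it inside a return statement splices the whole document in as a C++ raw string literal. A standalone sketch of the same mechanism, with a hypothetical file name:

```cpp
// hello-doc.md — note the raw-string delimiters on the first and last line:
//
//   R"(
//   **Hello store** — documentation text goes here.
//   )"

#include <string>

std::string doc()
{
    return
        #include "hello-doc.md"
        ;
}
```
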
diff --git a/src/libstore/legacy-ssh-store.cc b/src/libstore/legacy-ssh-store.cc
index 2c9dd2680..98322b045 100644
--- a/src/libstore/legacy-ssh-store.cc
+++ b/src/libstore/legacy-ssh-store.cc
@@ -1,3 +1,4 @@
+#include "ssh-store-config.hh"
#include "archive.hh"
#include "pool.hh"
#include "remote-store.hh"
@@ -12,17 +13,24 @@
namespace nix {
-struct LegacySSHStoreConfig : virtual StoreConfig
+struct LegacySSHStoreConfig : virtual CommonSSHStoreConfig
{
- using StoreConfig::StoreConfig;
- const Setting<int> maxConnections{(StoreConfig*) this, 1, "max-connections", "maximum number of concurrent SSH connections"};
- const Setting<Path> sshKey{(StoreConfig*) this, "", "ssh-key", "path to an SSH private key"};
- const Setting<std::string> sshPublicHostKey{(StoreConfig*) this, "", "base64-ssh-public-host-key", "The public half of the host's SSH key"};
- const Setting<bool> compress{(StoreConfig*) this, false, "compress", "whether to compress the connection"};
- const Setting<Path> remoteProgram{(StoreConfig*) this, "nix-store", "remote-program", "path to the nix-store executable on the remote system"};
- const Setting<std::string> remoteStore{(StoreConfig*) this, "", "remote-store", "URI of the store on the remote system"};
-
- const std::string name() override { return "Legacy SSH Store"; }
+ using CommonSSHStoreConfig::CommonSSHStoreConfig;
+
+ const Setting<Path> remoteProgram{(StoreConfig*) this, "nix-store", "remote-program",
+ "Path to the `nix-store` executable on the remote machine."};
+
+ const Setting<int> maxConnections{(StoreConfig*) this, 1, "max-connections",
+ "Maximum number of concurrent SSH connections."};
+
+ const std::string name() override { return "SSH Store"; }
+
+ std::string doc() override
+ {
+ return
+ #include "legacy-ssh-store.md"
+ ;
+ }
};
struct LegacySSHStore : public virtual LegacySSHStoreConfig, public virtual Store
@@ -51,6 +59,7 @@ struct LegacySSHStore : public virtual LegacySSHStoreConfig, public virtual Stor
LegacySSHStore(const std::string & scheme, const std::string & host, const Params & params)
: StoreConfig(params)
+ , CommonSSHStoreConfig(params)
, LegacySSHStoreConfig(params)
, Store(params)
, host(host)
diff --git a/src/libstore/legacy-ssh-store.md b/src/libstore/legacy-ssh-store.md
new file mode 100644
index 000000000..043acebd6
--- /dev/null
+++ b/src/libstore/legacy-ssh-store.md
@@ -0,0 +1,8 @@
+R"(
+
+**Store URL format**: `ssh://[username@]hostname`
+
+This store type allows limited access to a remote store on another
+machine via SSH.
+
+)"
diff --git a/src/libstore/local-binary-cache-store.cc b/src/libstore/local-binary-cache-store.cc
index f20b1fa02..e5ee6fc15 100644
--- a/src/libstore/local-binary-cache-store.cc
+++ b/src/libstore/local-binary-cache-store.cc
@@ -11,6 +11,13 @@ struct LocalBinaryCacheStoreConfig : virtual BinaryCacheStoreConfig
using BinaryCacheStoreConfig::BinaryCacheStoreConfig;
const std::string name() override { return "Local Binary Cache Store"; }
+
+ std::string doc() override
+ {
+ return
+ #include "local-binary-cache-store.md"
+ ;
+ }
};
class LocalBinaryCacheStore : public virtual LocalBinaryCacheStoreConfig, public virtual BinaryCacheStore
diff --git a/src/libstore/local-binary-cache-store.md b/src/libstore/local-binary-cache-store.md
new file mode 100644
index 000000000..93fddc840
--- /dev/null
+++ b/src/libstore/local-binary-cache-store.md
@@ -0,0 +1,16 @@
+R"(
+
+**Store URL format**: `file://`*path*
+
+This store allows reading and writing a binary cache stored in *path*
+in the local filesystem. If *path* does not exist, it will be created.
+
+For example, the following builds or downloads `nixpkgs#hello` into
+the local store and then copies it to the binary cache in
+`/tmp/binary-cache`:
+
+```
+# nix copy --to file:///tmp/binary-cache nixpkgs#hello
+```
+
+)"
diff --git a/src/libstore/local-fs-store.hh b/src/libstore/local-fs-store.hh
index 947707341..1e7f31a09 100644
--- a/src/libstore/local-fs-store.hh
+++ b/src/libstore/local-fs-store.hh
@@ -1,4 +1,5 @@
#pragma once
+///@file
#include "store-api.hh"
#include "gc-store.hh"
@@ -9,20 +10,28 @@ namespace nix {
struct LocalFSStoreConfig : virtual StoreConfig
{
using StoreConfig::StoreConfig;
+
// FIXME: the (StoreConfig*) cast works around a bug in gcc that causes
// it to omit the call to the Setting constructor. Clang works fine
// either way.
+
const PathSetting rootDir{(StoreConfig*) this, true, "",
- "root", "directory prefixed to all other paths"};
+ "root",
+ "Directory prefixed to all other paths."};
+
const PathSetting stateDir{(StoreConfig*) this, false,
rootDir != "" ? rootDir + "/nix/var/nix" : settings.nixStateDir,
- "state", "directory where Nix will store state"};
+ "state",
+ "Directory where Nix will store state."};
+
const PathSetting logDir{(StoreConfig*) this, false,
rootDir != "" ? rootDir + "/nix/var/log/nix" : settings.nixLogDir,
- "log", "directory where Nix will store state"};
+ "log",
+ "directory where Nix will store log files."};
+
const PathSetting realStoreDir{(StoreConfig*) this, false,
rootDir != "" ? rootDir + "/nix/store" : storeDir, "real",
- "physical path to the Nix store"};
+ "Physical path of the Nix store."};
};
class LocalFSStore : public virtual LocalFSStoreConfig,
diff --git a/src/libstore/local-store.cc b/src/libstore/local-store.cc
index 82edaa9bf..f58d90895 100644
--- a/src/libstore/local-store.cc
+++ b/src/libstore/local-store.cc
@@ -44,6 +44,13 @@
namespace nix {
+std::string LocalStoreConfig::doc()
+{
+ return
+ #include "local-store.md"
+ ;
+}
+
struct LocalStore::State::Stmts {
/* Some precompiled SQLite statements. */
SQLiteStmt RegisterValidPath;
@@ -280,7 +287,7 @@ LocalStore::LocalStore(const Params & params)
else if (curSchema == 0) { /* new store */
curSchema = nixSchemaVersion;
openDB(*state, true);
- writeFile(schemaPath, (format("%1%") % nixSchemaVersion).str(), 0666, true);
+ writeFile(schemaPath, fmt("%1%", nixSchemaVersion), 0666, true);
}
else if (curSchema < nixSchemaVersion) {
@@ -329,14 +336,14 @@ LocalStore::LocalStore(const Params & params)
txn.commit();
}
- writeFile(schemaPath, (format("%1%") % nixSchemaVersion).str(), 0666, true);
+ writeFile(schemaPath, fmt("%1%", nixSchemaVersion), 0666, true);
lockFile(globalLock.get(), ltRead, true);
}
else openDB(*state, false);
- if (settings.isExperimentalFeatureEnabled(Xp::CaDerivations)) {
+ if (experimentalFeatureSettings.isEnabled(Xp::CaDerivations)) {
migrateCASchema(state->db, dbDir + "/ca-schema", globalLock);
}
@@ -366,7 +373,7 @@ LocalStore::LocalStore(const Params & params)
state->stmts->QueryPathFromHashPart.create(state->db,
"select path from ValidPaths where path >= ? limit 1;");
state->stmts->QueryValidPaths.create(state->db, "select path from ValidPaths");
- if (settings.isExperimentalFeatureEnabled(Xp::CaDerivations)) {
+ if (experimentalFeatureSettings.isEnabled(Xp::CaDerivations)) {
state->stmts->RegisterRealisedOutput.create(state->db,
R"(
insert into Realisations (drvPath, outputName, outputPath, signatures)
@@ -413,6 +420,13 @@ LocalStore::LocalStore(const Params & params)
}
+LocalStore::LocalStore(std::string scheme, std::string path, const Params & params)
+ : LocalStore(params)
+{
+ throw UnimplementedError("LocalStore");
+}
+
+
AutoCloseFD LocalStore::openGCLock()
{
Path fnGCLock = stateDir + "/gc.lock";
@@ -754,7 +768,7 @@ void LocalStore::checkDerivationOutputs(const StorePath & drvPath, const Derivat
void LocalStore::registerDrvOutput(const Realisation & info, CheckSigsFlag checkSigs)
{
- settings.requireExperimentalFeature(Xp::CaDerivations);
+ experimentalFeatureSettings.require(Xp::CaDerivations);
if (checkSigs == NoCheckSigs || !realisationIsUntrusted(info))
registerDrvOutput(info);
else
@@ -763,7 +777,7 @@ void LocalStore::registerDrvOutput(const Realisation & info, CheckSigsFlag check
void LocalStore::registerDrvOutput(const Realisation & info)
{
- settings.requireExperimentalFeature(Xp::CaDerivations);
+ experimentalFeatureSettings.require(Xp::CaDerivations);
retrySQLite<void>([&]() {
auto state(_state.lock());
if (auto oldR = queryRealisation_(*state, info.id)) {
@@ -1052,7 +1066,7 @@ LocalStore::queryPartialDerivationOutputMap(const StorePath & path_)
return outputs;
});
- if (!settings.isExperimentalFeatureEnabled(Xp::CaDerivations))
+ if (!experimentalFeatureSettings.isEnabled(Xp::CaDerivations))
return outputs;
auto drv = readInvalidDerivation(path);
@@ -1120,54 +1134,6 @@ StorePathSet LocalStore::querySubstitutablePaths(const StorePathSet & paths)
}
-// FIXME: move this, it's not specific to LocalStore.
-void LocalStore::querySubstitutablePathInfos(const StorePathCAMap & paths, SubstitutablePathInfos & infos)
-{
- if (!settings.useSubstitutes) return;
- for (auto & sub : getDefaultSubstituters()) {
- for (auto & path : paths) {
- if (infos.count(path.first))
- // Choose first succeeding substituter.
- continue;
-
- auto subPath(path.first);
-
- // Recompute store path so that we can use a different store root.
- if (path.second) {
- subPath = makeFixedOutputPathFromCA(path.first.name(), *path.second);
- if (sub->storeDir == storeDir)
- assert(subPath == path.first);
- if (subPath != path.first)
- debug("replaced path '%s' with '%s' for substituter '%s'", printStorePath(path.first), sub->printStorePath(subPath), sub->getUri());
- } else if (sub->storeDir != storeDir) continue;
-
- debug("checking substituter '%s' for path '%s'", sub->getUri(), sub->printStorePath(subPath));
- try {
- auto info = sub->queryPathInfo(subPath);
-
- if (sub->storeDir != storeDir && !(info->isContentAddressed(*sub) && info->references.empty()))
- continue;
-
- auto narInfo = std::dynamic_pointer_cast<const NarInfo>(
- std::shared_ptr<const ValidPathInfo>(info));
- infos.insert_or_assign(path.first, SubstitutablePathInfo{
- info->deriver,
- info->references,
- narInfo ? narInfo->fileSize : 0,
- info->narSize});
- } catch (InvalidPath &) {
- } catch (SubstituterDisabled &) {
- } catch (Error & e) {
- if (settings.tryFallback)
- logError(e.info());
- else
- throw;
- }
- }
- }
-}
-
-
void LocalStore::registerValidPath(const ValidPathInfo & info)
{
registerValidPaths({{info.path, info}});
@@ -1560,7 +1526,7 @@ void LocalStore::invalidatePathChecked(const StorePath & path)
bool LocalStore::verifyStore(bool checkContents, RepairFlag repair)
{
- printInfo(format("reading the Nix store..."));
+ printInfo("reading the Nix store...");
bool errors = false;
@@ -1950,5 +1916,6 @@ std::optional<std::string> LocalStore::getVersion()
return nixVersion;
}
+static RegisterStoreImplementation<LocalStore, LocalStoreConfig> regLocalStore;
} // namespace nix
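
The new `regLocalStore` line uses the static-registration idiom the store backends share: a file-scope `RegisterStoreImplementation<Store, Config>` object whose constructor adds a factory to the global registry, keyed on the schemes reported by `uriSchemes()`. A generic sketch of that idiom — the registry, base class, and `DemoStore` here are illustrative, not the Nix classes:

```cpp
#include <functional>
#include <map>
#include <memory>
#include <set>
#include <string>

struct StoreBase { virtual ~StoreBase() = default; };

using Factory = std::function<std::unique_ptr<StoreBase>(const std::string & uri)>;

// Global scheme -> factory registry.
inline std::map<std::string, Factory> & registry()
{
    static std::map<std::string, Factory> r;
    return r;
}

// Constructing one of these at namespace scope registers T before main().
template<typename T>
struct RegisterStoreImpl
{
    RegisterStoreImpl()
    {
        for (auto & scheme : T::uriSchemes())
            registry()[scheme] = [](const std::string & uri) {
                return std::make_unique<T>(uri);
            };
    }
};

struct DemoStore : StoreBase
{
    explicit DemoStore(const std::string &) {}
    static std::set<std::string> uriSchemes() { return {"demo"}; }
};

static RegisterStoreImpl<DemoStore> regDemoStore;
```
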
diff --git a/src/libstore/local-store.hh b/src/libstore/local-store.hh
index a84eb7c26..1b668b6fd 100644
--- a/src/libstore/local-store.hh
+++ b/src/libstore/local-store.hh
@@ -1,4 +1,5 @@
#pragma once
+///@file
#include "sqlite.hh"
@@ -38,11 +39,13 @@ struct LocalStoreConfig : virtual LocalFSStoreConfig
Setting<bool> requireSigs{(StoreConfig*) this,
settings.requireSigs,
- "require-sigs", "whether store paths should have a trusted signature on import"};
+ "require-sigs",
+ "Whether store paths copied into this store should have a trusted signature."};
const std::string name() override { return "Local Store"; }
-};
+ std::string doc() override;
+};
class LocalStore : public virtual LocalStoreConfig, public virtual LocalFSStore, public virtual GcStore
{
@@ -100,9 +103,13 @@ public:
/* Initialise the local store, upgrading the schema if
necessary. */
LocalStore(const Params & params);
+ LocalStore(std::string scheme, std::string path, const Params & params);
~LocalStore();
+ static std::set<std::string> uriSchemes()
+ { return {}; }
+
/* Implementations of abstract store API methods. */
std::string getUri() override;
@@ -127,9 +134,6 @@ public:
StorePathSet querySubstitutablePaths(const StorePathSet & paths) override;
- void querySubstitutablePathInfos(const StorePathCAMap & paths,
- SubstitutablePathInfos & infos) override;
-
bool pathInfoIsUntrusted(const ValidPathInfo &) override;
bool realisationIsUntrusted(const Realisation & ) override;
diff --git a/src/libstore/local-store.md b/src/libstore/local-store.md
new file mode 100644
index 000000000..8174df839
--- /dev/null
+++ b/src/libstore/local-store.md
@@ -0,0 +1,39 @@
+R"(
+
+**Store URL format**: `local`, *root*
+
+This store type accesses a Nix store in the local filesystem directly
+(i.e. not via the Nix daemon). *root* is an absolute path that is
+prefixed to other directories such as the Nix store directory. The
+store pseudo-URL `local` denotes a store that uses `/` as its root
+directory.
+
+A store that uses a *root* other than `/` is called a *chroot
+store*. With such stores, the store directory is "logically" still
+`/nix/store`, so programs stored in them can only be built and
+executed by `chroot`-ing into *root*. Chroot stores only support
+building and running on Linux when [`mount namespaces`](https://man7.org/linux/man-pages/man7/mount_namespaces.7.html) and [`user namespaces`](https://man7.org/linux/man-pages/man7/user_namespaces.7.html) are
+enabled.
+
+For example, the following uses `/tmp/root` as the chroot environment
+to build or download `nixpkgs#hello` and then execute it:
+
+```console
+# nix run --store /tmp/root nixpkgs#hello
+Hello, world!
+```
+
+Here, the "physical" store location is `/tmp/root/nix/store`, and
+Nix's store metadata is in `/tmp/root/nix/var/nix/db`.
+
+It is also possible, but not recommended, to change the "logical"
+location of the Nix store from its default of `/nix/store`. This makes
+it impossible to use default substituters such as
+`https://cache.nixos.org/`, and thus you may have to build everything
+locally. Here is an example:
+
+```console
+# nix build --store 'local?store=/tmp/my-nix/store&state=/tmp/my-nix/state&log=/tmp/my-nix/log' nixpkgs#hello
+```
+
+)"
diff --git a/src/libstore/lock.cc b/src/libstore/lock.cc
index 4fe1fcf56..7202a64b3 100644
--- a/src/libstore/lock.cc
+++ b/src/libstore/lock.cc
@@ -129,7 +129,7 @@ struct AutoUserLock : UserLock
useUserNamespace = false;
#endif
- settings.requireExperimentalFeature(Xp::AutoAllocateUids);
+ experimentalFeatureSettings.require(Xp::AutoAllocateUids);
assert(settings.startId > 0);
assert(settings.uidCount % maxIdsPerBuild == 0);
assert((uint64_t) settings.startId + (uint64_t) settings.uidCount <= std::numeric_limits<uid_t>::max());
diff --git a/src/libstore/lock.hh b/src/libstore/lock.hh
index 7f1934510..7be3ba314 100644
--- a/src/libstore/lock.hh
+++ b/src/libstore/lock.hh
@@ -1,4 +1,5 @@
#pragma once
+///@file
#include "types.hh"
diff --git a/src/libstore/log-store.hh b/src/libstore/log-store.hh
index e4d95bab6..7aeec73b2 100644
--- a/src/libstore/log-store.hh
+++ b/src/libstore/log-store.hh
@@ -1,4 +1,5 @@
#pragma once
+///@file
#include "store-api.hh"
diff --git a/src/libstore/machines.hh b/src/libstore/machines.hh
index 834626de9..1adeaf1f0 100644
--- a/src/libstore/machines.hh
+++ b/src/libstore/machines.hh
@@ -1,4 +1,5 @@
#pragma once
+///@file
#include "types.hh"
diff --git a/src/libstore/make-content-addressed.hh b/src/libstore/make-content-addressed.hh
index c4a66ed41..2ce6ec7bc 100644
--- a/src/libstore/make-content-addressed.hh
+++ b/src/libstore/make-content-addressed.hh
@@ -1,4 +1,5 @@
#pragma once
+///@file
#include "store-api.hh"
diff --git a/src/libstore/misc.cc b/src/libstore/misc.cc
index b28768459..89148d415 100644
--- a/src/libstore/misc.cc
+++ b/src/libstore/misc.cc
@@ -326,7 +326,7 @@ OutputPathMap resolveDerivedPath(Store & store, const DerivedPath::Built & bfd,
throw Error(
"the derivation '%s' doesn't have an output named '%s'",
store.printStorePath(bfd.drvPath), output);
- if (settings.isExperimentalFeatureEnabled(Xp::CaDerivations)) {
+ if (experimentalFeatureSettings.isEnabled(Xp::CaDerivations)) {
DrvOutput outputId { *outputHash, output };
auto realisation = store.queryRealisation(outputId);
if (!realisation)
diff --git a/src/libstore/names.hh b/src/libstore/names.hh
index 3977fc6cc..d82b99bb4 100644
--- a/src/libstore/names.hh
+++ b/src/libstore/names.hh
@@ -1,4 +1,5 @@
#pragma once
+///@file
#include <memory>
diff --git a/src/libstore/nar-accessor.cc b/src/libstore/nar-accessor.cc
index 9a0003588..f0dfcb19b 100644
--- a/src/libstore/nar-accessor.cc
+++ b/src/libstore/nar-accessor.cc
@@ -275,6 +275,7 @@ json listNar(ref<FSAccessor> accessor, const Path & path, bool recurse)
obj["type"] = "symlink";
obj["target"] = accessor->readLink(path);
break;
+ case FSAccessor::Type::tMissing:
default:
throw Error("path '%s' does not exist in NAR", path);
}
diff --git a/src/libstore/nar-accessor.hh b/src/libstore/nar-accessor.hh
index 7d998ae0b..940e537b6 100644
--- a/src/libstore/nar-accessor.hh
+++ b/src/libstore/nar-accessor.hh
@@ -1,4 +1,5 @@
#pragma once
+///@file
#include <functional>
diff --git a/src/libstore/nar-info-disk-cache.hh b/src/libstore/nar-info-disk-cache.hh
index 4877f56d8..c596f2d71 100644
--- a/src/libstore/nar-info-disk-cache.hh
+++ b/src/libstore/nar-info-disk-cache.hh
@@ -1,4 +1,5 @@
#pragma once
+///@file
#include "ref.hh"
#include "nar-info.hh"
diff --git a/src/libstore/nar-info.hh b/src/libstore/nar-info.hh
index 01683ec73..3cae8e659 100644
--- a/src/libstore/nar-info.hh
+++ b/src/libstore/nar-info.hh
@@ -1,4 +1,5 @@
#pragma once
+///@file
#include "types.hh"
#include "hash.hh"
diff --git a/src/libstore/optimise-store.cc b/src/libstore/optimise-store.cc
index 4d2781180..4a79cf4a1 100644
--- a/src/libstore/optimise-store.cc
+++ b/src/libstore/optimise-store.cc
@@ -55,7 +55,7 @@ LocalStore::InodeHash LocalStore::loadInodeHash()
}
if (errno) throw SysError("reading directory '%1%'", linksDir);
- printMsg(lvlTalkative, format("loaded %1% hash inodes") % inodeHash.size());
+ printMsg(lvlTalkative, "loaded %1% hash inodes", inodeHash.size());
return inodeHash;
}
@@ -73,7 +73,7 @@ Strings LocalStore::readDirectoryIgnoringInodes(const Path & path, const InodeHa
checkInterrupt();
if (inodeHash.count(dirent->d_ino)) {
- debug(format("'%1%' is already linked") % dirent->d_name);
+ debug("'%1%' is already linked", dirent->d_name);
continue;
}
@@ -102,7 +102,7 @@ void LocalStore::optimisePath_(Activity * act, OptimiseStats & stats,
if (std::regex_search(path, std::regex("\\.app/Contents/.+$")))
{
- debug(format("'%1%' is not allowed to be linked in macOS") % path);
+ debug("'%1%' is not allowed to be linked in macOS", path);
return;
}
#endif
@@ -146,7 +146,7 @@ void LocalStore::optimisePath_(Activity * act, OptimiseStats & stats,
contents of the symlink (i.e. the result of readlink()), not
the contents of the target (which may not even exist). */
Hash hash = hashPath(htSHA256, path).first;
- debug(format("'%1%' has hash '%2%'") % path % hash.to_string(Base32, true));
+ debug("'%1%' has hash '%2%'", path, hash.to_string(Base32, true));
/* Check if this is a known hash. */
Path linkPath = linksDir + "/" + hash.to_string(Base32, false);
@@ -196,11 +196,11 @@ void LocalStore::optimisePath_(Activity * act, OptimiseStats & stats,
auto stLink = lstat(linkPath);
if (st.st_ino == stLink.st_ino) {
- debug(format("'%1%' is already linked to '%2%'") % path % linkPath);
+ debug("'%1%' is already linked to '%2%'", path, linkPath);
return;
}
- printMsg(lvlTalkative, format("linking '%1%' to '%2%'") % path % linkPath);
+ printMsg(lvlTalkative, "linking '%1%' to '%2%'", path, linkPath);
/* Make the containing directory writable, but only if it's not
the store itself (we don't want or need to mess with its
@@ -213,8 +213,7 @@ void LocalStore::optimisePath_(Activity * act, OptimiseStats & stats,
its timestamp back to 0. */
MakeReadOnly makeReadOnly(mustToggle ? dirOfPath : "");
- Path tempLink = (format("%1%/.tmp-link-%2%-%3%")
- % realStoreDir % getpid() % random()).str();
+ Path tempLink = fmt("%1%/.tmp-link-%2%-%3%", realStoreDir, getpid(), random());
if (link(linkPath.c_str(), tempLink.c_str()) == -1) {
if (errno == EMLINK) {
@@ -222,7 +221,7 @@ void LocalStore::optimisePath_(Activity * act, OptimiseStats & stats,
systems). This is likely to happen with empty files.
Just shrug and ignore. */
if (st.st_size)
- printInfo(format("'%1%' has maximum number of links") % linkPath);
+ printInfo("'%1%' has maximum number of links", linkPath);
return;
}
throw SysError("cannot link '%1%' to '%2%'", tempLink, linkPath);
diff --git a/src/libstore/outputs-spec.hh b/src/libstore/outputs-spec.hh
index 46bc35ebc..5a726fe90 100644
--- a/src/libstore/outputs-spec.hh
+++ b/src/libstore/outputs-spec.hh
@@ -1,4 +1,5 @@
#pragma once
+///@file
#include <cassert>
#include <optional>
@@ -9,6 +10,9 @@
namespace nix {
+/**
+ * A non-empty set of outputs, specified by name
+ */
struct OutputNames : std::set<std::string> {
using std::set<std::string>::set;
@@ -18,6 +22,9 @@ struct OutputNames : std::set<std::string> {
: std::set<std::string>(s)
{ assert(!empty()); }
+ /**
+ * Needs to be "inherited manually"
+ */
OutputNames(std::set<std::string> && s)
: std::set<std::string>(s)
{ assert(!empty()); }
@@ -28,6 +35,9 @@ struct OutputNames : std::set<std::string> {
OutputNames() = delete;
};
+/**
+ * The set of all outputs, without needing to name them explicitly
+ */
struct AllOutputs : std::monostate { };
typedef std::variant<AllOutputs, OutputNames> _OutputsSpecRaw;
@@ -36,7 +46,9 @@ struct OutputsSpec : _OutputsSpecRaw {
using Raw = _OutputsSpecRaw;
using Raw::Raw;
- /* Force choosing a variant */
+ /**
+ * Force choosing a variant
+ */
OutputsSpec() = delete;
using Names = OutputNames;
@@ -52,14 +64,20 @@ struct OutputsSpec : _OutputsSpecRaw {
bool contains(const std::string & output) const;
- /* Create a new OutputsSpec which is the union of this and that. */
+ /**
+ * Create a new OutputsSpec which is the union of this and that.
+ */
OutputsSpec union_(const OutputsSpec & that) const;
- /* Whether this OutputsSpec is a subset of that. */
+ /**
+ * Whether this OutputsSpec is a subset of that.
+ */
bool isSubsetOf(const OutputsSpec & outputs) const;
- /* Parse a string of the form 'output1,...outputN' or
- '*', returning the outputs spec. */
+ /**
+ * Parse a string of the form 'output1,...outputN' or '*', returning
+ * the outputs spec.
+ */
static OutputsSpec parse(std::string_view s);
static std::optional<OutputsSpec> parseOpt(std::string_view s);
@@ -81,8 +99,10 @@ struct ExtendedOutputsSpec : _ExtendedOutputsSpecRaw {
return static_cast<const Raw &>(*this);
}
- /* Parse a string of the form 'prefix^output1,...outputN' or
- 'prefix^*', returning the prefix and the extended outputs spec. */
+ /**
+ * Parse a string of the form 'prefix^output1,...outputN' or
+ * 'prefix^*', returning the prefix and the extended outputs spec.
+ */
static std::pair<std::string_view, ExtendedOutputsSpec> parse(std::string_view s);
static std::optional<std::pair<std::string_view, ExtendedOutputsSpec>> parseOpt(std::string_view s);
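
Going by the declarations and doc comments above, the parser accepts either `*` (all outputs) or a non-empty comma-separated list of output names. A toy re-implementation of that documented grammar, to make the shape of the input concrete; it is not the real `OutputsSpec::parse`:

```cpp
#include <optional>
#include <set>
#include <string>
#include <string_view>
#include <variant>

// "*" means all outputs; otherwise a non-empty, comma-separated name list.
using ToySpec = std::variant<std::monostate /* all */, std::set<std::string>>;

std::optional<ToySpec> parseOutputsSpec(std::string_view s)
{
    if (s == "*") return ToySpec{std::monostate{}};
    std::set<std::string> names;
    size_t pos = 0;
    while (pos <= s.size()) {
        size_t comma = s.find(',', pos);
        auto name = s.substr(pos, comma == s.npos ? s.npos : comma - pos);
        if (name.empty()) return std::nullopt;   // reject "", "a,,b", trailing comma
        names.emplace(name);
        if (comma == s.npos) break;
        pos = comma + 1;
    }
    return ToySpec{names};
}
```
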
diff --git a/src/libstore/parsed-derivations.hh b/src/libstore/parsed-derivations.hh
index bfb3857c0..71085a604 100644
--- a/src/libstore/parsed-derivations.hh
+++ b/src/libstore/parsed-derivations.hh
@@ -1,4 +1,5 @@
#pragma once
+///@file
#include "derivations.hh"
#include "store-api.hh"
diff --git a/src/libstore/path-info.hh b/src/libstore/path-info.hh
index a7fcbd232..b28bf751c 100644
--- a/src/libstore/path-info.hh
+++ b/src/libstore/path-info.hh
@@ -1,4 +1,5 @@
#pragma once
+///@file
#include "crypto.hh"
#include "path.hh"
diff --git a/src/libstore/path-regex.hh b/src/libstore/path-regex.hh
index 6893c3876..4f8dc4c1f 100644
--- a/src/libstore/path-regex.hh
+++ b/src/libstore/path-regex.hh
@@ -1,4 +1,5 @@
#pragma once
+///@file
namespace nix {
diff --git a/src/libstore/path-with-outputs.hh b/src/libstore/path-with-outputs.hh
index 5d25656a5..a845b0e5f 100644
--- a/src/libstore/path-with-outputs.hh
+++ b/src/libstore/path-with-outputs.hh
@@ -1,4 +1,5 @@
#pragma once
+///@file
#include "path.hh"
#include "derived-path.hh"
diff --git a/src/libstore/path.hh b/src/libstore/path.hh
index 1e5579b90..4ca6747b3 100644
--- a/src/libstore/path.hh
+++ b/src/libstore/path.hh
@@ -1,4 +1,5 @@
#pragma once
+///@file
#include <string_view>
@@ -8,13 +9,22 @@ namespace nix {
struct Hash;
+/**
+ * \ref StorePath "Store path" is the fundamental reference type of Nix.
+ * A store path refers to a store object.
+ *
+ * See glossary.html#gloss-store-path for more information on a
+ * conceptual level.
+ */
class StorePath
{
std::string baseName;
public:
- /* Size of the hash part of store paths, in base-32 characters. */
+ /**
+ * Size of the hash part of store paths, in base-32 characters.
+ */
constexpr static size_t HashLen = 32; // i.e. 160 bits
constexpr static size_t MaxPathLen = 211;
@@ -45,8 +55,9 @@ public:
return baseName != other.baseName;
}
- /* Check whether a file name ends with the extension for
- derivations. */
+ /**
+ * Check whether a file name ends with the extension for derivations.
+ */
bool isDerivation() const;
std::string_view name() const
@@ -67,7 +78,10 @@ public:
typedef std::set<StorePath> StorePathSet;
typedef std::vector<StorePath> StorePaths;
-/* Extension of derivations in the Nix store. */
+/**
+ * The file extension of \ref Derivation derivations when serialized
+ * into store objects.
+ */
const std::string drvExtension = ".drv";
}
diff --git a/src/libstore/pathlocks.cc b/src/libstore/pathlocks.cc
index 42023cd0a..adc763e6a 100644
--- a/src/libstore/pathlocks.cc
+++ b/src/libstore/pathlocks.cc
@@ -96,7 +96,7 @@ bool PathLocks::lockPaths(const PathSet & paths,
checkInterrupt();
Path lockPath = path + ".lock";
- debug(format("locking path '%1%'") % path);
+ debug("locking path '%1%'", path);
AutoCloseFD fd;
@@ -118,7 +118,7 @@ bool PathLocks::lockPaths(const PathSet & paths,
}
}
- debug(format("lock acquired on '%1%'") % lockPath);
+ debug("lock acquired on '%1%'", lockPath);
/* Check that the lock file hasn't become stale (i.e.,
hasn't been unlinked). */
@@ -130,7 +130,7 @@ bool PathLocks::lockPaths(const PathSet & paths,
a lock on a deleted file. This means that other
processes may create and acquire a lock on
`lockPath', and proceed. So we must retry. */
- debug(format("open lock file '%1%' has become stale") % lockPath);
+ debug("open lock file '%1%' has become stale", lockPath);
else
break;
}
@@ -163,7 +163,7 @@ void PathLocks::unlock()
"error (ignored): cannot close lock file on '%1%'",
i.second);
- debug(format("lock released on '%1%'") % i.second);
+ debug("lock released on '%1%'", i.second);
}
fds.clear();
diff --git a/src/libstore/pathlocks.hh b/src/libstore/pathlocks.hh
index 5e3a734b4..ba8f2749b 100644
--- a/src/libstore/pathlocks.hh
+++ b/src/libstore/pathlocks.hh
@@ -1,4 +1,5 @@
#pragma once
+///@file
#include "util.hh"
diff --git a/src/libstore/profiles.cc b/src/libstore/profiles.cc
index c551c5f3e..ba5c8583f 100644
--- a/src/libstore/profiles.cc
+++ b/src/libstore/profiles.cc
@@ -64,7 +64,7 @@ std::pair<Generations, std::optional<GenerationNumber>> findGenerations(Path pro
static void makeName(const Path & profile, GenerationNumber num,
Path & outLink)
{
- Path prefix = (format("%1%-%2%") % profile % num).str();
+ Path prefix = fmt("%1%-%2%", profile, num);
outLink = prefix + "-link";
}
@@ -269,7 +269,7 @@ void switchGeneration(
void lockProfile(PathLocks & lock, const Path & profile)
{
- lock.lockPaths({profile}, (format("waiting for lock on profile '%1%'") % profile).str());
+ lock.lockPaths({profile}, fmt("waiting for lock on profile '%1%'", profile));
lock.setDeletion(true);
}
@@ -282,28 +282,48 @@ std::string optimisticLockProfile(const Path & profile)
Path profilesDir()
{
- auto profileRoot = createNixStateDir() + "/profiles";
+ auto profileRoot =
+ (getuid() == 0)
+ ? rootProfilesDir()
+ : createNixStateDir() + "/profiles";
createDirs(profileRoot);
return profileRoot;
}
+Path rootProfilesDir()
+{
+ return settings.nixStateDir + "/profiles/per-user/root";
+}
+
Path getDefaultProfile()
{
Path profileLink = settings.useXDGBaseDirectories ? createNixStateDir() + "/profile" : getHome() + "/.nix-profile";
try {
- auto profile =
- getuid() == 0
- ? settings.nixStateDir + "/profiles/default"
- : profilesDir() + "/profile";
+ auto profile = profilesDir() + "/profile";
if (!pathExists(profileLink)) {
replaceSymlink(profile, profileLink);
}
+ // Backwards compatibility measure: Make root's profile available as
+ // `.../default` as it's what NixOS and most of the init scripts expect
+ Path globalProfileLink = settings.nixStateDir + "/profiles/default";
+ if (getuid() == 0 && !pathExists(globalProfileLink)) {
+ replaceSymlink(profile, globalProfileLink);
+ }
return absPath(readLink(profileLink), dirOf(profileLink));
} catch (Error &) {
return profileLink;
}
}
+Path defaultChannelsDir()
+{
+ return profilesDir() + "/channels";
+}
+
+Path rootChannelsDir()
+{
+ return rootProfilesDir() + "/channels";
+}
}
diff --git a/src/libstore/profiles.hh b/src/libstore/profiles.hh
index fbf95b850..6a5965390 100644
--- a/src/libstore/profiles.hh
+++ b/src/libstore/profiles.hh
@@ -1,4 +1,5 @@
#pragma once
+///@file
#include "types.hh"
#include "pathlocks.hh"
@@ -68,13 +69,32 @@ void lockProfile(PathLocks & lock, const Path & profile);
rebuilt. */
std::string optimisticLockProfile(const Path & profile);
-/* Creates and returns the path to a directory suitable for storing the user’s
- profiles. */
+/**
+ * Create and return the path to a directory suitable for storing the user’s
+ * profiles.
+ */
Path profilesDir();
-/* Resolve the default profile (~/.nix-profile by default, $XDG_STATE_HOME/
- nix/profile if XDG Base Directory Support is enabled), and create if doesn't
- exist */
+/**
+ * Return the path to the profile directory for root (but don't try creating it)
+ */
+Path rootProfilesDir();
+
+/**
+ * Create and return the path to the directory used for storing the user's channels
+ */
+Path defaultChannelsDir();
+
+/**
+ * Return the path to the channel directory for root (but don't try creating it)
+ */
+Path rootChannelsDir();
+
+/**
+ * Resolve the default profile (~/.nix-profile by default,
+ * $XDG_STATE_HOME/nix/profile if XDG Base Directory Support is enabled),
+ * and create if doesn't exist
+ */
Path getDefaultProfile();
}
diff --git a/src/libstore/realisation.hh b/src/libstore/realisation.hh
index 48d0283de..a18cf2aa8 100644
--- a/src/libstore/realisation.hh
+++ b/src/libstore/realisation.hh
@@ -1,4 +1,5 @@
#pragma once
+///@file
#include <variant>
diff --git a/src/libstore/references.cc b/src/libstore/references.cc
index 3bb297fc8..345f4528b 100644
--- a/src/libstore/references.cc
+++ b/src/libstore/references.cc
@@ -39,8 +39,7 @@ static void search(
if (!match) continue;
std::string ref(s.substr(i, refLength));
if (hashes.erase(ref)) {
- debug(format("found reference to '%1%' at offset '%2%'")
- % ref % i);
+ debug("found reference to '%1%' at offset '%2%'", ref, i);
seen.insert(ref);
}
++i;
diff --git a/src/libstore/references.hh b/src/libstore/references.hh
index 6f381f96c..52d71b333 100644
--- a/src/libstore/references.hh
+++ b/src/libstore/references.hh
@@ -1,4 +1,5 @@
#pragma once
+///@file
#include "hash.hh"
#include "path.hh"
diff --git a/src/libstore/remote-fs-accessor.hh b/src/libstore/remote-fs-accessor.hh
index 99f5544ef..e2673b6f6 100644
--- a/src/libstore/remote-fs-accessor.hh
+++ b/src/libstore/remote-fs-accessor.hh
@@ -1,4 +1,5 @@
#pragma once
+///@file
#include "fs-accessor.hh"
#include "ref.hh"
diff --git a/src/libstore/remote-store.cc b/src/libstore/remote-store.cc
index d1296627a..d24d83117 100644
--- a/src/libstore/remote-store.cc
+++ b/src/libstore/remote-store.cc
@@ -265,7 +265,7 @@ void RemoteStore::setOptions(Connection & conn)
overrides.erase(settings.buildCores.name);
overrides.erase(settings.useSubstitutes.name);
overrides.erase(loggerSettings.showTrace.name);
- overrides.erase(settings.experimentalFeatures.name);
+ overrides.erase(experimentalFeatureSettings.experimentalFeatures.name);
overrides.erase(settings.pluginFiles.name);
conn.to << overrides.size();
for (auto & i : overrides)
@@ -876,7 +876,7 @@ std::vector<BuildResult> RemoteStore::buildPathsWithResults(
"the derivation '%s' doesn't have an output named '%s'",
printStorePath(bfd.drvPath), output);
auto outputId = DrvOutput{ *outputHash, output };
- if (settings.isExperimentalFeatureEnabled(Xp::CaDerivations)) {
+ if (experimentalFeatureSettings.isEnabled(Xp::CaDerivations)) {
auto realisation =
queryRealisation(outputId);
if (!realisation)
diff --git a/src/libstore/remote-store.hh b/src/libstore/remote-store.hh
index 11d089cd2..f5f45f853 100644
--- a/src/libstore/remote-store.hh
+++ b/src/libstore/remote-store.hh
@@ -1,4 +1,5 @@
#pragma once
+///@file
#include <limits>
#include <string>
@@ -22,11 +23,13 @@ struct RemoteStoreConfig : virtual StoreConfig
{
using StoreConfig::StoreConfig;
- const Setting<int> maxConnections{(StoreConfig*) this, 1,
- "max-connections", "maximum number of concurrent connections to the Nix daemon"};
+ const Setting<int> maxConnections{(StoreConfig*) this, 1, "max-connections",
+ "Maximum number of concurrent connections to the Nix daemon."};
- const Setting<unsigned int> maxConnectionAge{(StoreConfig*) this, std::numeric_limits<unsigned int>::max(),
- "max-connection-age", "number of seconds to reuse a connection"};
+ const Setting<unsigned int> maxConnectionAge{(StoreConfig*) this,
+ std::numeric_limits<unsigned int>::max(),
+ "max-connection-age",
+ "Maximum age of a connection before it is closed."};
};
/* FIXME: RemoteStore is a misnomer - should be something like
@@ -38,8 +41,6 @@ class RemoteStore : public virtual RemoteStoreConfig,
{
public:
- virtual bool sameMachine() = 0;
-
RemoteStore(const Params & params);
/* Implementations of abstract store API methods. */
diff --git a/src/libstore/repair-flag.hh b/src/libstore/repair-flag.hh
index a13cda312..f412d6a20 100644
--- a/src/libstore/repair-flag.hh
+++ b/src/libstore/repair-flag.hh
@@ -1,4 +1,5 @@
#pragma once
+///@file
namespace nix {
diff --git a/src/libstore/s3-binary-cache-store.cc b/src/libstore/s3-binary-cache-store.cc
index 8d76eee99..ac82147ee 100644
--- a/src/libstore/s3-binary-cache-store.cc
+++ b/src/libstore/s3-binary-cache-store.cc
@@ -40,12 +40,12 @@ struct S3Error : public Error
/* Helper: given an Outcome<R, E>, return R in case of success, or
throw an exception in case of an error. */
template<typename R, typename E>
-R && checkAws(const FormatOrString & fs, Aws::Utils::Outcome<R, E> && outcome)
+R && checkAws(std::string_view s, Aws::Utils::Outcome<R, E> && outcome)
{
if (!outcome.IsSuccess())
throw S3Error(
outcome.GetError().GetErrorType(),
- fs.s + ": " + outcome.GetError().GetMessage());
+ s + ": " + outcome.GetError().GetMessage());
return outcome.GetResultWithOwnership();
}
@@ -192,19 +192,72 @@ S3BinaryCacheStore::S3BinaryCacheStore(const Params & params)
struct S3BinaryCacheStoreConfig : virtual BinaryCacheStoreConfig
{
using BinaryCacheStoreConfig::BinaryCacheStoreConfig;
- const Setting<std::string> profile{(StoreConfig*) this, "", "profile", "The name of the AWS configuration profile to use."};
- const Setting<std::string> region{(StoreConfig*) this, Aws::Region::US_EAST_1, "region", {"aws-region"}};
- const Setting<std::string> scheme{(StoreConfig*) this, "", "scheme", "The scheme to use for S3 requests, https by default."};
- const Setting<std::string> endpoint{(StoreConfig*) this, "", "endpoint", "An optional override of the endpoint to use when talking to S3."};
- const Setting<std::string> narinfoCompression{(StoreConfig*) this, "", "narinfo-compression", "compression method for .narinfo files"};
- const Setting<std::string> lsCompression{(StoreConfig*) this, "", "ls-compression", "compression method for .ls files"};
- const Setting<std::string> logCompression{(StoreConfig*) this, "", "log-compression", "compression method for log/* files"};
+
+ const Setting<std::string> profile{(StoreConfig*) this, "", "profile",
+ R"(
+ The name of the AWS configuration profile to use. By default
+ Nix will use the `default` profile.
+ )"};
+
+ const Setting<std::string> region{(StoreConfig*) this, Aws::Region::US_EAST_1, "region",
+ R"(
+ The region of the S3 bucket. If your bucket is not in
+ `us-east-1`, you should always explicitly specify the region
+ parameter.
+ )"};
+
+ const Setting<std::string> scheme{(StoreConfig*) this, "", "scheme",
+ R"(
+ The scheme used for S3 requests, `https` (default) or `http`. This
+ option allows you to disable HTTPS for binary caches which don't
+ support it.
+
+ > **Note**
+ >
+ > HTTPS should be used if the cache might contain sensitive
+ > information.
+ )"};
+
+ const Setting<std::string> endpoint{(StoreConfig*) this, "", "endpoint",
+ R"(
+ The URL of the endpoint of an S3-compatible service such as MinIO.
+ Do not specify this setting if you're using Amazon S3.
+
+ > **Note**
+ >
+ > This endpoint must support HTTPS and will use path-based
+ > addressing instead of virtual host based addressing.
+ )"};
+
+ const Setting<std::string> narinfoCompression{(StoreConfig*) this, "", "narinfo-compression",
+ "Compression method for `.narinfo` files."};
+
+ const Setting<std::string> lsCompression{(StoreConfig*) this, "", "ls-compression",
+ "Compression method for `.ls` files."};
+
+ const Setting<std::string> logCompression{(StoreConfig*) this, "", "log-compression",
+ R"(
+ Compression method for `log/*` files. It is recommended to
+ use a compression method supported by most web browsers
+ (e.g. `brotli`).
+ )"};
+
const Setting<bool> multipartUpload{
- (StoreConfig*) this, false, "multipart-upload", "whether to use multi-part uploads"};
+ (StoreConfig*) this, false, "multipart-upload",
+ "Whether to use multi-part uploads."};
+
const Setting<uint64_t> bufferSize{
- (StoreConfig*) this, 5 * 1024 * 1024, "buffer-size", "size (in bytes) of each part in multi-part uploads"};
+ (StoreConfig*) this, 5 * 1024 * 1024, "buffer-size",
+ "Size (in bytes) of each part in multi-part uploads."};
const std::string name() override { return "S3 Binary Cache Store"; }
+
+ std::string doc() override
+ {
+ return
+ #include "s3-binary-cache-store.md"
+ ;
+ }
};
struct S3BinaryCacheStoreImpl : virtual S3BinaryCacheStoreConfig, public virtual S3BinaryCacheStore
@@ -430,9 +483,9 @@ struct S3BinaryCacheStoreImpl : virtual S3BinaryCacheStoreConfig, public virtual
std::string marker;
do {
- debug(format("listing bucket 's3://%s' from key '%s'...") % bucketName % marker);
+ debug("listing bucket 's3://%s' from key '%s'...", bucketName, marker);
- auto res = checkAws(format("AWS error listing bucket '%s'") % bucketName,
+ auto res = checkAws(fmt("AWS error listing bucket '%s'", bucketName),
s3Helper.client->ListObjects(
Aws::S3::Model::ListObjectsRequest()
.WithBucket(bucketName)
@@ -441,8 +494,8 @@ struct S3BinaryCacheStoreImpl : virtual S3BinaryCacheStoreConfig, public virtual
auto & contents = res.GetContents();
- debug(format("got %d keys, next marker '%s'")
- % contents.size() % res.GetNextMarker());
+ debug("got %d keys, next marker '%s'",
+ contents.size(), res.GetNextMarker());
for (auto object : contents) {
auto & key = object.GetKey();
diff --git a/src/libstore/s3-binary-cache-store.hh b/src/libstore/s3-binary-cache-store.hh
index bce828b11..c62ea5147 100644
--- a/src/libstore/s3-binary-cache-store.hh
+++ b/src/libstore/s3-binary-cache-store.hh
@@ -1,4 +1,5 @@
#pragma once
+///@file
#include "binary-cache-store.hh"
diff --git a/src/libstore/s3-binary-cache-store.md b/src/libstore/s3-binary-cache-store.md
new file mode 100644
index 000000000..70fe0eb09
--- /dev/null
+++ b/src/libstore/s3-binary-cache-store.md
@@ -0,0 +1,8 @@
+R"(
+
+**Store URL format**: `s3://`*bucket-name*
+
+This store allows reading and writing a binary cache stored in an AWS
+S3 bucket.
+
+)"
diff --git a/src/libstore/s3.hh b/src/libstore/s3.hh
index cdb3e5908..f0aeb3bed 100644
--- a/src/libstore/s3.hh
+++ b/src/libstore/s3.hh
@@ -1,4 +1,5 @@
#pragma once
+///@file
#if ENABLE_S3
diff --git a/src/libstore/serve-protocol.hh b/src/libstore/serve-protocol.hh
index 3f76baa82..553fd3a09 100644
--- a/src/libstore/serve-protocol.hh
+++ b/src/libstore/serve-protocol.hh
@@ -1,4 +1,5 @@
#pragma once
+///@file
namespace nix {
diff --git a/src/libstore/sqlite.hh b/src/libstore/sqlite.hh
index 1853731a2..86410f998 100644
--- a/src/libstore/sqlite.hh
+++ b/src/libstore/sqlite.hh
@@ -1,4 +1,5 @@
#pragma once
+///@file
#include <functional>
#include <string>
diff --git a/src/libstore/ssh-store-config.hh b/src/libstore/ssh-store-config.hh
new file mode 100644
index 000000000..c27a5d00f
--- /dev/null
+++ b/src/libstore/ssh-store-config.hh
@@ -0,0 +1,29 @@
+#pragma once
+///@file
+
+#include "store-api.hh"
+
+namespace nix {
+
+struct CommonSSHStoreConfig : virtual StoreConfig
+{
+ using StoreConfig::StoreConfig;
+
+ const Setting<Path> sshKey{(StoreConfig*) this, "", "ssh-key",
+ "Path to the SSH private key used to authenticate to the remote machine."};
+
+ const Setting<std::string> sshPublicHostKey{(StoreConfig*) this, "", "base64-ssh-public-host-key",
+ "The public host key of the remote machine."};
+
+ const Setting<bool> compress{(StoreConfig*) this, false, "compress",
+ "Whether to enable SSH compression."};
+
+ const Setting<std::string> remoteStore{(StoreConfig*) this, "", "remote-store",
+ R"(
+ [Store URL](@docroot@/command-ref/new-cli/nix3-help-stores.md#store-url-format)
+ to be used on the remote machine. The default is `auto`
+ (i.e. use the Nix daemon or `/nix/store` directly).
+ )"};
+};
+
+}
diff --git a/src/libstore/ssh-store.cc b/src/libstore/ssh-store.cc
index a1d4daafd..962221ad2 100644
--- a/src/libstore/ssh-store.cc
+++ b/src/libstore/ssh-store.cc
@@ -1,3 +1,4 @@
+#include "ssh-store-config.hh"
#include "store-api.hh"
#include "remote-store.hh"
#include "remote-fs-accessor.hh"
@@ -8,17 +9,22 @@
namespace nix {
-struct SSHStoreConfig : virtual RemoteStoreConfig
+struct SSHStoreConfig : virtual RemoteStoreConfig, virtual CommonSSHStoreConfig
{
using RemoteStoreConfig::RemoteStoreConfig;
+ using CommonSSHStoreConfig::CommonSSHStoreConfig;
- const Setting<Path> sshKey{(StoreConfig*) this, "", "ssh-key", "path to an SSH private key"};
- const Setting<std::string> sshPublicHostKey{(StoreConfig*) this, "", "base64-ssh-public-host-key", "The public half of the host's SSH key"};
- const Setting<bool> compress{(StoreConfig*) this, false, "compress", "whether to compress the connection"};
- const Setting<Path> remoteProgram{(StoreConfig*) this, "nix-daemon", "remote-program", "path to the nix-daemon executable on the remote system"};
- const Setting<std::string> remoteStore{(StoreConfig*) this, "", "remote-store", "URI of the store on the remote system"};
+ const Setting<Path> remoteProgram{(StoreConfig*) this, "nix-daemon", "remote-program",
+ "Path to the `nix-daemon` executable on the remote machine."};
- const std::string name() override { return "SSH Store"; }
+ const std::string name() override { return "Experimental SSH Store"; }
+
+ std::string doc() override
+ {
+ return
+ #include "ssh-store.md"
+ ;
+ }
};
class SSHStore : public virtual SSHStoreConfig, public virtual RemoteStore
@@ -28,6 +34,7 @@ public:
SSHStore(const std::string & scheme, const std::string & host, const Params & params)
: StoreConfig(params)
, RemoteStoreConfig(params)
+ , CommonSSHStoreConfig(params)
, SSHStoreConfig(params)
, Store(params)
, RemoteStore(params)
@@ -49,9 +56,6 @@ public:
return *uriSchemes().begin() + "://" + host;
}
- bool sameMachine() override
- { return false; }
-
// FIXME extend daemon protocol, move implementation to RemoteStore
std::optional<std::string> getBuildLogExact(const StorePath & path) override
{ unsupported("getBuildLogExact"); }
diff --git a/src/libstore/ssh-store.md b/src/libstore/ssh-store.md
new file mode 100644
index 000000000..881537e71
--- /dev/null
+++ b/src/libstore/ssh-store.md
@@ -0,0 +1,8 @@
+R"(
+
+**Store URL format**: `ssh-ng://[username@]hostname`
+
+Experimental store type that allows full access to a Nix store on a
+remote machine.
+
+)"
diff --git a/src/libstore/ssh.cc b/src/libstore/ssh.cc
index 69bfe3418..6f6deda51 100644
--- a/src/libstore/ssh.cc
+++ b/src/libstore/ssh.cc
@@ -1,4 +1,5 @@
#include "ssh.hh"
+#include "finally.hh"
namespace nix {
@@ -35,6 +36,9 @@ void SSHMaster::addCommonSSHOpts(Strings & args)
}
if (compress)
args.push_back("-C");
+
+ args.push_back("-oPermitLocalCommand=yes");
+ args.push_back("-oLocalCommand=echo started");
}
std::unique_ptr<SSHMaster::Connection> SSHMaster::startCommand(const std::string & command)
@@ -49,6 +53,11 @@ std::unique_ptr<SSHMaster::Connection> SSHMaster::startCommand(const std::string
ProcessOptions options;
options.dieWithParent = false;
+ if (!fakeSSH && !useMaster) {
+ logger->pause();
+ }
+ Finally cleanup = [&]() { logger->resume(); };
+
conn->sshPid = startProcess([&]() {
restoreProcessContext();
@@ -86,6 +95,18 @@ std::unique_ptr<SSHMaster::Connection> SSHMaster::startCommand(const std::string
in.readSide = -1;
out.writeSide = -1;
+ // Wait for the SSH connection to be established,
+ // so that we don't overwrite the password prompt with our progress bar.
+ if (!fakeSSH && !useMaster) {
+ std::string reply;
+ try {
+ reply = readLine(out.readSide.get());
+ } catch (EndOfFile & e) { }
+
+ if (reply != "started")
+ throw Error("failed to start SSH connection to '%s'", host);
+ }
+
conn->out = std::move(out.readSide);
conn->in = std::move(in.writeSide);
@@ -109,6 +130,9 @@ Path SSHMaster::startMaster()
ProcessOptions options;
options.dieWithParent = false;
+ logger->pause();
+ Finally cleanup = [&]() { logger->resume(); };
+
state->sshMaster = startProcess([&]() {
restoreProcessContext();
@@ -117,11 +141,7 @@ Path SSHMaster::startMaster()
if (dup2(out.writeSide.get(), STDOUT_FILENO) == -1)
throw SysError("duping over stdout");
- Strings args =
- { "ssh", host.c_str(), "-M", "-N", "-S", state->socketPath
- , "-o", "LocalCommand=echo started"
- , "-o", "PermitLocalCommand=yes"
- };
+ Strings args = { "ssh", host.c_str(), "-M", "-N", "-S", state->socketPath };
if (verbosity >= lvlChatty)
args.push_back("-v");
addCommonSSHOpts(args);
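
The interactive-SSH handling above combines two ideas: the progress bar is paused while ssh may prompt on the terminal (with `Finally` guaranteeing it is resumed on every exit path), and `LocalCommand=echo started` gives the client a sentinel line to wait for before redrawing. A minimal sketch of the pause/resume idiom, assuming only `Logger::pause()`/`resume()` and `Finally` from finally.hh; the helper name is hypothetical:

    #include "finally.hh"
    #include "logging.hh"

    namespace nix {

    // Hypothetical helper illustrating the idiom used in SSHMaster::startCommand():
    // suspend the progress bar while a child process may write to the terminal,
    // and resume it even if `body` throws.
    template<typename F>
    auto withPausedProgressBar(F body)
    {
        logger->pause();
        Finally resume([]() { logger->resume(); });
        return body();
    }

    }
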
diff --git a/src/libstore/ssh.hh b/src/libstore/ssh.hh
index dabbcedda..c86a8a986 100644
--- a/src/libstore/ssh.hh
+++ b/src/libstore/ssh.hh
@@ -1,4 +1,5 @@
#pragma once
+///@file
#include "util.hh"
#include "sync.hh"
diff --git a/src/libstore/store-api.cc b/src/libstore/store-api.cc
index 601efa1cc..8fc3ed46a 100644
--- a/src/libstore/store-api.cc
+++ b/src/libstore/store-api.cc
@@ -445,10 +445,10 @@ StringSet StoreConfig::getDefaultSystemFeatures()
{
auto res = settings.systemFeatures.get();
- if (settings.isExperimentalFeatureEnabled(Xp::CaDerivations))
+ if (experimentalFeatureSettings.isEnabled(Xp::CaDerivations))
res.insert("ca-derivations");
- if (settings.isExperimentalFeatureEnabled(Xp::RecursiveNix))
+ if (experimentalFeatureSettings.isEnabled(Xp::RecursiveNix))
res.insert("recursive-nix");
return res;
@@ -507,6 +507,54 @@ StorePathSet Store::queryDerivationOutputs(const StorePath & path)
return outputPaths;
}
+
+void Store::querySubstitutablePathInfos(const StorePathCAMap & paths, SubstitutablePathInfos & infos)
+{
+ if (!settings.useSubstitutes) return;
+ for (auto & sub : getDefaultSubstituters()) {
+ for (auto & path : paths) {
+ if (infos.count(path.first))
+ // Choose first succeeding substituter.
+ continue;
+
+ auto subPath(path.first);
+
+ // Recompute store path so that we can use a different store root.
+ if (path.second) {
+ subPath = makeFixedOutputPathFromCA(path.first.name(), *path.second);
+ if (sub->storeDir == storeDir)
+ assert(subPath == path.first);
+ if (subPath != path.first)
+ debug("replaced path '%s' with '%s' for substituter '%s'", printStorePath(path.first), sub->printStorePath(subPath), sub->getUri());
+ } else if (sub->storeDir != storeDir) continue;
+
+ debug("checking substituter '%s' for path '%s'", sub->getUri(), sub->printStorePath(subPath));
+ try {
+ auto info = sub->queryPathInfo(subPath);
+
+ if (sub->storeDir != storeDir && !(info->isContentAddressed(*sub) && info->references.empty()))
+ continue;
+
+ auto narInfo = std::dynamic_pointer_cast<const NarInfo>(
+ std::shared_ptr<const ValidPathInfo>(info));
+ infos.insert_or_assign(path.first, SubstitutablePathInfo{
+ info->deriver,
+ info->references,
+ narInfo ? narInfo->fileSize : 0,
+ info->narSize});
+ } catch (InvalidPath &) {
+ } catch (SubstituterDisabled &) {
+ } catch (Error & e) {
+ if (settings.tryFallback)
+ logError(e.info());
+ else
+ throw;
+ }
+ }
+ }
+}
+
+
bool Store::isValidPath(const StorePath & storePath)
{
{
@@ -790,13 +838,13 @@ std::string Store::makeValidityRegistration(const StorePathSet & paths,
if (showHash) {
s += info->narHash.to_string(Base16, false) + "\n";
- s += (format("%1%\n") % info->narSize).str();
+ s += fmt("%1%\n", info->narSize);
}
auto deriver = showDerivers && info->deriver ? printStorePath(*info->deriver) : "";
s += deriver + "\n";
- s += (format("%1%\n") % info->references.size()).str();
+ s += fmt("%1%\n", info->references.size());
for (auto & j : info->references)
s += printStorePath(j) + "\n";
@@ -855,6 +903,7 @@ json Store::pathInfoToJSON(const StorePathSet & storePaths,
auto info = queryPathInfo(storePath);
jsonPath["path"] = printStorePath(info->path);
+ jsonPath["valid"] = true;
jsonPath["narHash"] = info->narHash.to_string(hashBase, true);
jsonPath["narSize"] = info->narSize;
@@ -1016,7 +1065,7 @@ std::map<StorePath, StorePath> copyPaths(
for (auto & path : paths) {
storePaths.insert(path.path());
if (auto realisation = std::get_if<Realisation>(&path.raw)) {
- settings.requireExperimentalFeature(Xp::CaDerivations);
+ experimentalFeatureSettings.require(Xp::CaDerivations);
toplevelRealisations.insert(*realisation);
}
}
@@ -1100,6 +1149,9 @@ std::map<StorePath, StorePath> copyPaths(
return storePathForDst;
};
+ // total is shared between the copies, each of which runs in its own thread
+ std::atomic<uint64_t> total = 0;
+
for (auto & missingPath : sortedMissing) {
auto info = srcStore.queryPathInfo(missingPath);
@@ -1120,7 +1172,13 @@ std::map<StorePath, StorePath> copyPaths(
{storePathS, srcUri, dstUri});
PushActivity pact(act.id);
- srcStore.narFromPath(missingPath, sink);
+ LambdaSink progressSink([&](std::string_view data) {
+ total += data.size();
+ act.progress(total, info->narSize);
+ });
+ TeeSink tee { sink, progressSink };
+
+ srcStore.narFromPath(missingPath, tee);
});
pathsToCopy.push_back(std::pair{infoForDst, std::move(source)});
}
@@ -1241,7 +1299,7 @@ std::optional<StorePath> Store::getBuildDerivationPath(const StorePath & path)
}
}
- if (!settings.isExperimentalFeatureEnabled(Xp::CaDerivations) || !isValidPath(path))
+ if (!experimentalFeatureSettings.isEnabled(Xp::CaDerivations) || !isValidPath(path))
return path;
auto drv = readDerivation(path);
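
For context, a minimal sketch of calling the new default `querySubstitutablePathInfos()`; the store path below is a placeholder (real hash parts come from an actual store):

    #include "store-api.hh"
    #include <iostream>

    using namespace nix;

    // Sketch: ask the configured substituters about one (placeholder) path and
    // print the reported download size for each substitutable path.
    void substituteInfoExample()
    {
        auto store = openStore();

        StorePathCAMap query;
        query.insert_or_assign(
            store->parseStorePath("/nix/store/xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx-hello-2.12"),
            std::nullopt);

        SubstitutablePathInfos infos;
        store->querySubstitutablePathInfos(query, infos);

        for (auto & [path, info] : infos)
            std::cout << store->printStorePath(path)
                      << " downloadSize=" << info.downloadSize << "\n";
    }
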
diff --git a/src/libstore/store-api.hh b/src/libstore/store-api.hh
index 4d8db3596..4d1047380 100644
--- a/src/libstore/store-api.hh
+++ b/src/libstore/store-api.hh
@@ -1,4 +1,5 @@
#pragma once
+///@file
#include "nar-info.hh"
#include "realisation.hh"
@@ -55,7 +56,10 @@ namespace nix {
*/
MakeError(SubstError, Error);
-MakeError(BuildError, Error); // denotes a permanent build failure
+/**
+ * denotes a permanent build failure
+ */
+MakeError(BuildError, Error);
MakeError(InvalidPath, Error);
MakeError(Unsupported, Error);
MakeError(SubstituteGone, Error);
@@ -78,7 +82,9 @@ enum CheckSigsFlag : bool { NoCheckSigs = false, CheckSigs = true };
enum SubstituteFlag : bool { NoSubstitute = false, Substitute = true };
enum AllowInvalidFlag : bool { DisallowInvalid = false, AllowInvalid = true };
-/* Magic header of exportPath() output (obsolete). */
+/**
+ * Magic header of exportPath() output (obsolete).
+ */
const uint32_t exportMagic = 0x4558494e;
@@ -101,17 +107,41 @@ struct StoreConfig : public Config
virtual const std::string name() = 0;
+ virtual std::string doc()
+ {
+ return "";
+ }
+
const PathSetting storeDir_{this, false, settings.nixStore,
- "store", "path to the Nix store"};
+ "store",
+ R"(
+ Logical location of the Nix store, usually
+ `/nix/store`. Note that you can only copy store paths
+ between stores if they have the same `store` setting.
+ )"};
const Path storeDir = storeDir_;
- const Setting<int> pathInfoCacheSize{this, 65536, "path-info-cache-size", "size of the in-memory store path information cache"};
+ const Setting<int> pathInfoCacheSize{this, 65536, "path-info-cache-size",
+ "Size of the in-memory store path metadata cache."};
- const Setting<bool> isTrusted{this, false, "trusted", "whether paths from this store can be used as substitutes even when they lack trusted signatures"};
+ const Setting<bool> isTrusted{this, false, "trusted",
+ R"(
+ Whether paths from this store can be used as substitutes
+ even if they are not signed by a key listed in the
+ [`trusted-public-keys`](@docroot@/command-ref/conf-file.md#conf-trusted-public-keys)
+ setting.
+ )"};
- Setting<int> priority{this, 0, "priority", "priority of this substituter (lower value means higher priority)"};
+ Setting<int> priority{this, 0, "priority",
+ R"(
+ Priority of this store when used as a substituter. A lower value means a higher priority.
+ )"};
- Setting<bool> wantMassQuery{this, false, "want-mass-query", "whether this substituter can be queried efficiently for path validity"};
+ Setting<bool> wantMassQuery{this, false, "want-mass-query",
+ R"(
+ Whether this store (when used as a substituter) can be
+ queried efficiently for path validity.
+ )"};
Setting<StringSet> systemFeatures{this, getDefaultSystemFeatures(),
"system-features",
@@ -125,23 +155,30 @@ public:
typedef std::map<std::string, std::string> Params;
-
-
protected:
struct PathInfoCacheValue {
- // Time of cache entry creation or update
+ /**
+ * Time of cache entry creation or update
+ */
std::chrono::time_point<std::chrono::steady_clock> time_point = std::chrono::steady_clock::now();
- // Null if missing
+ /**
+ * Null if missing
+ */
std::shared_ptr<const ValidPathInfo> value;
- // Whether the value is valid as a cache entry. The path may not exist.
+ /**
+ * Whether the value is valid as a cache entry. The path may not
+ * exist.
+ */
bool isKnownNow();
- // Past tense, because a path can only be assumed to exists when
- // isKnownNow() && didExist()
+ /**
+ * Past tense, because a path can only be assumed to exist when
+ * isKnownNow() && didExist()
+ */
inline bool didExist() {
return value != nullptr;
}
@@ -175,35 +212,53 @@ public:
std::string printStorePath(const StorePath & path) const;
- // FIXME: remove
+ /**
+ * Deprecated
+ *
+ * \todo remove
+ */
StorePathSet parseStorePathSet(const PathSet & paths) const;
PathSet printStorePathSet(const StorePathSet & path) const;
- /* Display a set of paths in human-readable form (i.e., between quotes
- and separated by commas). */
+ /**
+ * Display a set of paths in human-readable form (i.e., between quotes
+ * and separated by commas).
+ */
std::string showPaths(const StorePathSet & paths);
- /* Return true if ‘path’ is in the Nix store (but not the Nix
- store itself). */
+ /**
+ * @return true if ‘path’ is in the Nix store (but not the Nix
+ * store itself).
+ */
bool isInStore(PathView path) const;
- /* Return true if ‘path’ is a store path, i.e. a direct child of
- the Nix store. */
+ /**
+ * @return true if ‘path’ is a store path, i.e. a direct child of the
+ * Nix store.
+ */
bool isStorePath(std::string_view path) const;
- /* Split a path like /nix/store/<hash>-<name>/<bla> into
- /nix/store/<hash>-<name> and /<bla>. */
+ /**
+ * Split a path like /nix/store/<hash>-<name>/<bla> into
+ * /nix/store/<hash>-<name> and /<bla>.
+ */
std::pair<StorePath, Path> toStorePath(PathView path) const;
- /* Follow symlinks until we end up with a path in the Nix store. */
+ /**
+ * Follow symlinks until we end up with a path in the Nix store.
+ */
Path followLinksToStore(std::string_view path) const;
- /* Same as followLinksToStore(), but apply toStorePath() to the
- result. */
+ /**
+ * Same as followLinksToStore(), but apply toStorePath() to the
+ * result.
+ */
StorePath followLinksToStorePath(std::string_view path) const;
- /* Constructs a unique store path name. */
+ /**
+ * Constructs a unique store path name.
+ */
StorePath makeStorePath(std::string_view type,
std::string_view hash, std::string_view name) const;
StorePath makeStorePath(std::string_view type,
@@ -224,33 +279,40 @@ public:
const StorePathSet & references = {},
bool hasSelfReference = false) const;
- /* This is the preparatory part of addToStore(); it computes the
- store path to which srcPath is to be copied. Returns the store
- path and the cryptographic hash of the contents of srcPath. */
+ /**
+ * Preparatory part of addToStore().
+ *
+ * @return the store path to which srcPath is to be copied
+ * and the cryptographic hash of the contents of srcPath.
+ */
std::pair<StorePath, Hash> computeStorePathForPath(std::string_view name,
const Path & srcPath, FileIngestionMethod method = FileIngestionMethod::Recursive,
HashType hashAlgo = htSHA256, PathFilter & filter = defaultPathFilter) const;
- /* Preparatory part of addTextToStore().
-
- !!! Computation of the path should take the references given to
- addTextToStore() into account, otherwise we have a (relatively
- minor) security hole: a caller can register a source file with
- bogus references. If there are too many references, the path may
- not be garbage collected when it has to be (not really a problem,
- the caller could create a root anyway), or it may be garbage
- collected when it shouldn't be (more serious).
-
- Hashing the references would solve this (bogus references would
- simply yield a different store path, so other users wouldn't be
- affected), but it has some backwards compatibility issues (the
- hashing scheme changes), so I'm not doing that for now. */
+ /**
+ * Preparatory part of addTextToStore().
+ *
+ * !!! Computation of the path should take the references given to
+ * addTextToStore() into account, otherwise we have a (relatively
+ * minor) security hole: a caller can register a source file with
+ * bogus references. If there are too many references, the path may
+ * not be garbage collected when it has to be (not really a problem,
+ * the caller could create a root anyway), or it may be garbage
+ * collected when it shouldn't be (more serious).
+ *
+ * Hashing the references would solve this (bogus references would
+ * simply yield a different store path, so other users wouldn't be
+ * affected), but it has some backwards compatibility issues (the
+ * hashing scheme changes), so I'm not doing that for now.
+ */
StorePath computeStorePathForText(
std::string_view name,
std::string_view s,
const StorePathSet & references) const;
- /* Check whether a path is valid. */
+ /**
+ * Check whether a path is valid.
+ */
bool isValidPath(const StorePath & path);
protected:
@@ -259,53 +321,68 @@ protected:
public:
- /* If requested, substitute missing paths. This
- implements nix-copy-closure's --use-substitutes
- flag. */
+ /**
+ * If requested, substitute missing paths. This
+ * implements nix-copy-closure's --use-substitutes
+ * flag.
+ */
void substitutePaths(const StorePathSet & paths);
- /* Query which of the given paths is valid. Optionally, try to
- substitute missing paths. */
+ /**
+ * Query which of the given paths is valid. Optionally, try to
+ * substitute missing paths.
+ */
virtual StorePathSet queryValidPaths(const StorePathSet & paths,
SubstituteFlag maybeSubstitute = NoSubstitute);
- /* Query the set of all valid paths. Note that for some store
- backends, the name part of store paths may be replaced by 'x'
- (i.e. you'll get /nix/store/<hash>-x rather than
- /nix/store/<hash>-<name>). Use queryPathInfo() to obtain the
- full store path. FIXME: should return a set of
- std::variant<StorePath, HashPart> to get rid of this hack. */
+ /**
+ * Query the set of all valid paths. Note that for some store
+ * backends, the name part of store paths may be replaced by 'x'
+ * (i.e. you'll get /nix/store/<hash>-x rather than
+ * /nix/store/<hash>-<name>). Use queryPathInfo() to obtain the
+ * full store path. FIXME: should return a set of
+ * std::variant<StorePath, HashPart> to get rid of this hack.
+ */
virtual StorePathSet queryAllValidPaths()
{ unsupported("queryAllValidPaths"); }
constexpr static const char * MissingName = "x";
- /* Query information about a valid path. It is permitted to omit
- the name part of the store path. */
+ /**
+ * Query information about a valid path. It is permitted to omit
+ * the name part of the store path.
+ */
ref<const ValidPathInfo> queryPathInfo(const StorePath & path);
- /* Asynchronous version of queryPathInfo(). */
+ /**
+ * Asynchronous version of queryPathInfo().
+ */
void queryPathInfo(const StorePath & path,
Callback<ref<const ValidPathInfo>> callback) noexcept;
- /* Query the information about a realisation. */
+ /**
+ * Query the information about a realisation.
+ */
std::shared_ptr<const Realisation> queryRealisation(const DrvOutput &);
- /* Asynchronous version of queryRealisation(). */
+ /**
+ * Asynchronous version of queryRealisation().
+ */
void queryRealisation(const DrvOutput &,
Callback<std::shared_ptr<const Realisation>> callback) noexcept;
- /* Check whether the given valid path info is sufficiently attested, by
- either being signed by a trusted public key or content-addressed, in
- order to be included in the given store.
-
- These same checks would be performed in addToStore, but this allows an
- earlier failure in the case where dependencies need to be added too, but
- the addToStore wouldn't fail until those dependencies are added. Also,
- we don't really want to add the dependencies listed in a nar info we
- don't trust anyyways.
- */
+ /**
+ * Check whether the given valid path info is sufficiently attested, by
+ * either being signed by a trusted public key or content-addressed, in
+ * order to be included in the given store.
+ *
+ * These same checks would be performed in addToStore, but this allows an
+ * earlier failure in the case where dependencies need to be added too, but
+ * the addToStore wouldn't fail until those dependencies are added. Also,
+ * we don't really want to add the dependencies listed in a nar info we
+ * don't trust anyway.
+ */
virtual bool pathInfoIsUntrusted(const ValidPathInfo &)
{
return true;
@@ -325,53 +402,77 @@ protected:
public:
- /* Queries the set of incoming FS references for a store path.
- The result is not cleared. */
+ /**
+ * Queries the set of incoming FS references for a store path.
+ * The result is not cleared.
+ */
virtual void queryReferrers(const StorePath & path, StorePathSet & referrers)
{ unsupported("queryReferrers"); }
- /* Return all currently valid derivations that have `path' as an
- output. (Note that the result of `queryDeriver()' is the
- derivation that was actually used to produce `path', which may
- not exist anymore.) */
+ /**
+ * @return all currently valid derivations that have `path' as an
+ * output.
+ *
+ * (Note that the result of `queryDeriver()' is the derivation that
+ * was actually used to produce `path', which may not exist
+ * anymore.)
+ */
virtual StorePathSet queryValidDerivers(const StorePath & path) { return {}; };
- /* Query the outputs of the derivation denoted by `path'. */
+ /**
+ * Query the outputs of the derivation denoted by `path'.
+ */
virtual StorePathSet queryDerivationOutputs(const StorePath & path);
- /* Query the mapping outputName => outputPath for the given derivation. All
- outputs are mentioned so ones mising the mapping are mapped to
- `std::nullopt`. */
+ /**
+ * Query the mapping outputName => outputPath for the given
+ * derivation. All outputs are mentioned, so ones missing the mapping
+ * are mapped to `std::nullopt`.
+ */
virtual std::map<std::string, std::optional<StorePath>> queryPartialDerivationOutputMap(const StorePath & path);
- /* Query the mapping outputName=>outputPath for the given derivation.
- Assume every output has a mapping and throw an exception otherwise. */
+ /**
+ * Query the mapping outputName=>outputPath for the given derivation.
+ * Assume every output has a mapping and throw an exception otherwise.
+ */
OutputPathMap queryDerivationOutputMap(const StorePath & path);
- /* Query the full store path given the hash part of a valid store
- path, or empty if the path doesn't exist. */
+ /**
+ * Query the full store path given the hash part of a valid store
+ * path, or empty if the path doesn't exist.
+ */
virtual std::optional<StorePath> queryPathFromHashPart(const std::string & hashPart) = 0;
- /* Query which of the given paths have substitutes. */
+ /**
+ * Query which of the given paths have substitutes.
+ */
virtual StorePathSet querySubstitutablePaths(const StorePathSet & paths) { return {}; };
- /* Query substitute info (i.e. references, derivers and download
- sizes) of a map of paths to their optional ca values. The info
- of the first succeeding substituter for each path will be
- returned. If a path does not have substitute info, it's omitted
- from the resulting ‘infos’ map. */
+ /**
+ * Query substitute info (i.e. references, derivers and download
+ * sizes) of a map of paths to their optional ca values. The info of
+ * the first succeeding substituter for each path will be returned.
+ * If a path does not have substitute info, it's omitted from the
+ * resulting ‘infos’ map.
+ */
virtual void querySubstitutablePathInfos(const StorePathCAMap & paths,
- SubstitutablePathInfos & infos) { return; };
+ SubstitutablePathInfos & infos);
- /* Import a path into the store. */
+ /**
+ * Import a path into the store.
+ */
virtual void addToStore(const ValidPathInfo & info, Source & narSource,
RepairFlag repair = NoRepair, CheckSigsFlag checkSigs = CheckSigs) = 0;
- // A list of paths infos along with a source providing the content of the
- // associated store path
+ /**
+ * A list of path infos along with a source providing the content
+ * of the associated store path
+ */
using PathsSource = std::vector<std::pair<ValidPathInfo, std::unique_ptr<Source>>>;
- /* Import multiple paths into the store. */
+ /**
+ * Import multiple paths into the store.
+ */
virtual void addMultipleToStore(
Source & source,
RepairFlag repair = NoRepair,
@@ -383,10 +484,14 @@ public:
RepairFlag repair = NoRepair,
CheckSigsFlag checkSigs = CheckSigs);
- /* Copy the contents of a path to the store and register the
- validity the resulting path. The resulting path is returned.
- The function object `filter' can be used to exclude files (see
- libutil/archive.hh). */
+ /**
+ * Copy the contents of a path to the store and register the
+ * validity of the resulting path.
+ *
+ * @return The resulting store path.
+ * @param filter This function can be used to exclude files (see
+ * libutil/archive.hh).
+ */
virtual StorePath addToStore(
std::string_view name,
const Path & srcPath,
@@ -396,26 +501,33 @@ public:
RepairFlag repair = NoRepair,
const StorePathSet & references = StorePathSet());
- /* Copy the contents of a path to the store and register the
- validity the resulting path, using a constant amount of
- memory. */
+ /**
+ * Copy the contents of a path to the store and register the
+ * validity of the resulting path, using a constant amount of
+ * memory.
+ */
ValidPathInfo addToStoreSlow(std::string_view name, const Path & srcPath,
FileIngestionMethod method = FileIngestionMethod::Recursive, HashType hashAlgo = htSHA256,
std::optional<Hash> expectedCAHash = {});
- /* Like addToStore(), but the contents of the path are contained
- in `dump', which is either a NAR serialisation (if recursive ==
- true) or simply the contents of a regular file (if recursive ==
- false).
- `dump` may be drained */
- // FIXME: remove?
+ /**
+ * Like addToStore(), but the contents of the path are contained
+ * in `dump', which is either a NAR serialisation (if recursive ==
+ * true) or simply the contents of a regular file (if recursive ==
+ * false).
+ * Note that `dump` may be drained.
+ *
+ * \todo remove?
+ */
virtual StorePath addToStoreFromDump(Source & dump, std::string_view name,
FileIngestionMethod method = FileIngestionMethod::Recursive, HashType hashAlgo = htSHA256, RepairFlag repair = NoRepair,
const StorePathSet & references = StorePathSet())
{ unsupported("addToStoreFromDump"); }
- /* Like addToStore, but the contents written to the output path is
- a regular file containing the given string. */
+ /**
+ * Like addToStore, but the contents written to the output path is a
+ * regular file containing the given string.
+ */
virtual StorePath addTextToStore(
std::string_view name,
std::string_view s,
@@ -436,140 +548,180 @@ public:
virtual void registerDrvOutput(const Realisation & output, CheckSigsFlag checkSigs)
{ return registerDrvOutput(output); }
- /* Write a NAR dump of a store path. */
+ /**
+ * Write a NAR dump of a store path.
+ */
virtual void narFromPath(const StorePath & path, Sink & sink) = 0;
- /* For each path, if it's a derivation, build it. Building a
- derivation means ensuring that the output paths are valid. If
- they are already valid, this is a no-op. Otherwise, validity
- can be reached in two ways. First, if the output paths is
- substitutable, then build the path that way. Second, the
- output paths can be created by running the builder, after
- recursively building any sub-derivations. For inputs that are
- not derivations, substitute them. */
+ /**
+ * For each path, if it's a derivation, build it. Building a
+ * derivation means ensuring that the output paths are valid. If
+ * they are already valid, this is a no-op. Otherwise, validity
+ * can be reached in two ways. First, if the output paths are
+ * substitutable, then build the paths that way. Second, the
+ * output paths can be created by running the builder, after
+ * recursively building any sub-derivations. For inputs that are
+ * not derivations, substitute them.
+ */
virtual void buildPaths(
const std::vector<DerivedPath> & paths,
BuildMode buildMode = bmNormal,
std::shared_ptr<Store> evalStore = nullptr);
- /* Like `buildPaths()`, but return a vector of `BuildResult`s
- corresponding to each element in `paths`. Note that in case of
- a build/substitution error, this function won't throw an
- exception, but return a `BuildResult` containing an error
- message. */
+ /**
+ * Like buildPaths(), but return a vector of \ref BuildResult
+ * BuildResults corresponding to each element in paths. Note that in
+ * case of a build/substitution error, this function won't throw an
+ * exception, but return a BuildResult containing an error message.
+ */
virtual std::vector<BuildResult> buildPathsWithResults(
const std::vector<DerivedPath> & paths,
BuildMode buildMode = bmNormal,
std::shared_ptr<Store> evalStore = nullptr);
- /* Build a single non-materialized derivation (i.e. not from an
- on-disk .drv file).
-
- ‘drvPath’ is used to deduplicate worker goals so it is imperative that
- is correct. That said, it doesn't literally need to be store path that
- would be calculated from writing this derivation to the store: it is OK
- if it instead is that of a Derivation which would resolve to this (by
- taking the outputs of it's input derivations and adding them as input
- sources) such that the build time referenceable-paths are the same.
-
- In the input-addressed case, we usually *do* use an "original"
- unresolved derivations's path, as that is what will be used in the
- `buildPaths` case. Also, the input-addressed output paths are verified
- only by that contents of that specific unresolved derivation, so it is
- nice to keep that information around so if the original derivation is
- ever obtained later, it can be verified whether the trusted user in fact
- used the proper output path.
-
- In the content-addressed case, we want to always use the
- resolved drv path calculated from the provided derivation. This serves
- two purposes:
-
- - It keeps the operation trustless, by ruling out a maliciously
- invalid drv path corresponding to a non-resolution-equivalent
- derivation.
-
- - For the floating case in particular, it ensures that the derivation
- to output mapping respects the resolution equivalence relation, so
- one cannot choose different resolution-equivalent derivations to
- subvert dependency coherence (i.e. the property that one doesn't end
- up with multiple different versions of dependencies without
- explicitly choosing to allow it).
- */
+ /**
+ * Build a single non-materialized derivation (i.e. not from an
+ * on-disk .drv file).
+ *
+ * @param drvPath This is used to deduplicate worker goals so it is
+ * imperative that it is correct. That said, it doesn't literally need
+ * to be the store path that would be calculated from writing this
+ * derivation to the store: it is OK if it instead is that of a
+ * Derivation which would resolve to this (by taking the outputs of
+ * its input derivations and adding them as input sources) such
+ * that the build-time referenceable paths are the same.
+ *
+ * In the input-addressed case, we usually *do* use an "original"
+ * unresolved derivation's path, as that is what will be used in the
+ * buildPaths case. Also, the input-addressed output paths are verified
+ * only by the contents of that specific unresolved derivation, so it is
+ * nice to keep that information around so that, if the original derivation is
+ * ever obtained later, it can be verified whether the trusted user in fact
+ * used the proper output path.
+ *
+ * In the content-addressed case, we want to always use the resolved
+ * drv path calculated from the provided derivation. This serves two
+ * purposes:
+ *
+ * - It keeps the operation trustless, by ruling out a maliciously
+ * invalid drv path corresponding to a non-resolution-equivalent
+ * derivation.
+ *
+ * - For the floating case in particular, it ensures that the derivation
+ * to output mapping respects the resolution equivalence relation, so
+ * one cannot choose different resolution-equivalent derivations to
+ * subvert dependency coherence (i.e. the property that one doesn't end
+ * up with multiple different versions of dependencies without
+ * explicitly choosing to allow it).
+ */
virtual BuildResult buildDerivation(const StorePath & drvPath, const BasicDerivation & drv,
BuildMode buildMode = bmNormal);
- /* Ensure that a path is valid. If it is not currently valid, it
- may be made valid by running a substitute (if defined for the
- path). */
+ /**
+ * Ensure that a path is valid. If it is not currently valid, it
+ * may be made valid by running a substitute (if defined for the
+ * path).
+ */
virtual void ensurePath(const StorePath & path);
- /* Add a store path as a temporary root of the garbage collector.
- The root disappears as soon as we exit. */
+ /**
+ * Add a store path as a temporary root of the garbage collector.
+ * The root disappears as soon as we exit.
+ */
virtual void addTempRoot(const StorePath & path)
{ debug("not creating temporary root, store doesn't support GC"); }
- /* Return a string representing information about the path that
- can be loaded into the database using `nix-store --load-db' or
- `nix-store --register-validity'. */
+ /**
+ * @return a string representing information about the path that
+ * can be loaded into the database using `nix-store --load-db' or
+ * `nix-store --register-validity'.
+ */
std::string makeValidityRegistration(const StorePathSet & paths,
bool showDerivers, bool showHash);
- /* Write a JSON representation of store path metadata, such as the
- hash and the references. If ‘includeImpureInfo’ is true,
- variable elements such as the registration time are
- included. If ‘showClosureSize’ is true, the closure size of
- each path is included. */
+ /**
+ * Write a JSON representation of store path metadata, such as the
+ * hash and the references.
+ *
+ * @param includeImpureInfo If true, variable elements such as the
+ * registration time are included.
+ *
+ * @param showClosureSize If true, the closure size of each path is
+ * included.
+ */
nlohmann::json pathInfoToJSON(const StorePathSet & storePaths,
bool includeImpureInfo, bool showClosureSize,
Base hashBase = Base32,
AllowInvalidFlag allowInvalid = DisallowInvalid);
- /* Return the size of the closure of the specified path, that is,
- the sum of the size of the NAR serialisation of each path in
- the closure. */
+ /**
+ * @return the size of the closure of the specified path, that is,
+ * the sum of the size of the NAR serialisation of each path in the
+ * closure.
+ */
std::pair<uint64_t, uint64_t> getClosureSize(const StorePath & storePath);
- /* Optimise the disk space usage of the Nix store by hard-linking files
- with the same contents. */
+ /**
+ * Optimise the disk space usage of the Nix store by hard-linking files
+ * with the same contents.
+ */
virtual void optimiseStore() { };
- /* Check the integrity of the Nix store. Returns true if errors
- remain. */
+ /**
+ * Check the integrity of the Nix store.
+ *
+ * @return true if errors remain.
+ */
virtual bool verifyStore(bool checkContents, RepairFlag repair = NoRepair) { return false; };
- /* Return an object to access files in the Nix store. */
+ /**
+ * @return An object to access files in the Nix store.
+ */
virtual ref<FSAccessor> getFSAccessor()
{ unsupported("getFSAccessor"); }
- /* Repair the contents of the given path by redownloading it using
- a substituter (if available). */
+ /**
+ * Repair the contents of the given path by redownloading it using
+ * a substituter (if available).
+ */
virtual void repairPath(const StorePath & path)
{ unsupported("repairPath"); }
- /* Add signatures to the specified store path. The signatures are
- not verified. */
+ /**
+ * Add signatures to the specified store path. The signatures are
+ * not verified.
+ */
virtual void addSignatures(const StorePath & storePath, const StringSet & sigs)
{ unsupported("addSignatures"); }
/* Utility functions. */
- /* Read a derivation, after ensuring its existence through
- ensurePath(). */
+ /**
+ * Read a derivation, after ensuring its existence through
+ * ensurePath().
+ */
Derivation derivationFromPath(const StorePath & drvPath);
- /* Read a derivation (which must already be valid). */
+ /**
+ * Read a derivation (which must already be valid).
+ */
Derivation readDerivation(const StorePath & drvPath);
- /* Read a derivation from a potentially invalid path. */
+ /**
+ * Read a derivation from a potentially invalid path.
+ */
Derivation readInvalidDerivation(const StorePath & drvPath);
- /* Place in `out' the set of all store paths in the file system
- closure of `storePath'; that is, all paths than can be directly
- or indirectly reached from it. `out' is not cleared. If
- `flipDirection' is true, the set of paths that can reach
- `storePath' is returned; that is, the closures under the
- `referrers' relation instead of the `references' relation is
- returned. */
+ /**
+ * @param [out] out Place in here the set of all store paths in the
+ * file system closure of `storePath'; that is, all paths that can
+ * be directly or indirectly reached from it. `out' is not cleared.
+ *
+ * @param flipDirection If true, the set of paths that can reach
+ * `storePath' is returned; that is, the closure under the
+ * `referrers' relation instead of the `references' relation is
+ * returned.
+ */
virtual void computeFSClosure(const StorePathSet & paths,
StorePathSet & out, bool flipDirection = false,
bool includeOutputs = false, bool includeDerivers = false);
@@ -578,27 +730,34 @@ public:
StorePathSet & out, bool flipDirection = false,
bool includeOutputs = false, bool includeDerivers = false);
- /* Given a set of paths that are to be built, return the set of
- derivations that will be built, and the set of output paths
- that will be substituted. */
+ /**
+ * Given a set of paths that are to be built, return the set of
+ * derivations that will be built, and the set of output paths that
+ * will be substituted.
+ */
virtual void queryMissing(const std::vector<DerivedPath> & targets,
StorePathSet & willBuild, StorePathSet & willSubstitute, StorePathSet & unknown,
uint64_t & downloadSize, uint64_t & narSize);
- /* Sort a set of paths topologically under the references
- relation. If p refers to q, then p precedes q in this list. */
+ /**
+ * Sort a set of paths topologically under the references
+ * relation. If p refers to q, then p precedes q in this list.
+ */
StorePaths topoSortPaths(const StorePathSet & paths);
- /* Export multiple paths in the format expected by ‘nix-store
- --import’. */
+ /**
+ * Export multiple paths in the format expected by ‘nix-store
+ * --import’.
+ */
void exportPaths(const StorePathSet & paths, Sink & sink);
void exportPath(const StorePath & path, Sink & sink);
- /* Import a sequence of NAR dumps created by exportPaths() into
- the Nix store. Optionally, the contents of the NARs are
- preloaded into the specified FS accessor to speed up subsequent
- access. */
+ /**
+ * Import a sequence of NAR dumps created by exportPaths() into the
+ * Nix store. Optionally, the contents of the NARs are preloaded
+ * into the specified FS accessor to speed up subsequent access.
+ */
StorePaths importPaths(Source & source, CheckSigsFlag checkSigs = CheckSigs);
struct Stats
@@ -620,8 +779,9 @@ public:
const Stats & getStats();
- /* Computes the full closure of of a set of store-paths for e.g.
- derivations that need this information for `exportReferencesGraph`.
+ /**
+ * Computes the full closure of a set of store paths, e.g. for
+ * derivations that need this information for `exportReferencesGraph`.
*/
StorePathSet exportReferences(const StorePathSet & storePaths, const StorePathSet & inputPaths);
@@ -632,18 +792,24 @@ public:
*/
std::optional<StorePath> getBuildDerivationPath(const StorePath &);
- /* Hack to allow long-running processes like hydra-queue-runner to
- occasionally flush their path info cache. */
+ /**
+ * Hack to allow long-running processes like hydra-queue-runner to
+ * occasionally flush their path info cache.
+ */
void clearPathInfoCache()
{
state.lock()->pathInfoCache.clear();
}
- /* Establish a connection to the store, for store types that have
- a notion of connection. Otherwise this is a no-op. */
+ /**
+ * Establish a connection to the store, for store types that have
+ * a notion of connection. Otherwise this is a no-op.
+ */
virtual void connect() { };
- /* Get the protocol version of this store or it's connection. */
+ /**
+ * Get the protocol version of this store or its connection.
+ */
virtual unsigned int getProtocol()
{
return 0;
@@ -659,7 +825,7 @@ public:
return toRealPath(printStorePath(storePath));
}
- /*
+ /**
* Synchronises the options of the client with those of the daemon
* (a no-op when there’s no daemon)
*/
@@ -671,7 +837,13 @@ protected:
Stats stats;
- /* Unsupported methods. */
+ /**
+ * Helper for methods that a given store does not support: it is used in
+ * default definitions for virtual methods that are meant to be overridden.
+ *
+ * \todo Using this should be a last resort. It is better to make
+ * the method pure virtual and/or move it to a subclass.
+ */
[[noreturn]] void unsupported(const std::string & op)
{
throw Unsupported("operation '%s' is not supported by store '%s'", op, getUri());
@@ -680,7 +852,9 @@ protected:
};
-/* Copy a path from one store to another. */
+/**
+ * Copy a path from one store to another.
+ */
void copyStorePath(
Store & srcStore,
Store & dstStore,
@@ -689,12 +863,14 @@ void copyStorePath(
CheckSigsFlag checkSigs = CheckSigs);
-/* Copy store paths from one store to another. The paths may be copied
- in parallel. They are copied in a topologically sorted order (i.e.
- if A is a reference of B, then A is copied before B), but the set
- of store paths is not automatically closed; use copyClosure() for
- that. Returns a map of what each path was copied to the dstStore
- as. */
+/**
+ * Copy store paths from one store to another. The paths may be copied
+ * in parallel. They are copied in a topologically sorted order (i.e. if
+ * A is a reference of B, then A is copied before B), but the set of
+ * store paths is not automatically closed; use copyClosure() for that.
+ *
+ * @return a map of what each path was copied to the dstStore as.
+ */
std::map<StorePath, StorePath> copyPaths(
Store & srcStore, Store & dstStore,
const RealisedPath::Set &,
@@ -709,7 +885,9 @@ std::map<StorePath, StorePath> copyPaths(
CheckSigsFlag checkSigs = CheckSigs,
SubstituteFlag substitute = NoSubstitute);
-/* Copy the closure of `paths` from `srcStore` to `dstStore`. */
+/**
+ * Copy the closure of `paths` from `srcStore` to `dstStore`.
+ */
void copyClosure(
Store & srcStore, Store & dstStore,
const RealisedPath::Set & paths,
@@ -724,52 +902,61 @@ void copyClosure(
CheckSigsFlag checkSigs = CheckSigs,
SubstituteFlag substitute = NoSubstitute);
-/* Remove the temporary roots file for this process. Any temporary
- root becomes garbage after this point unless it has been registered
- as a (permanent) root. */
+/**
+ * Remove the temporary roots file for this process. Any temporary
+ * root becomes garbage after this point unless it has been registered
+ * as a (permanent) root.
+ */
void removeTempRoots();
-/* Resolve the derived path completely, failing if any derivation output
- is unknown. */
+/**
+ * Resolve the derived path completely, failing if any derivation output
+ * is unknown.
+ */
OutputPathMap resolveDerivedPath(Store &, const DerivedPath::Built &, Store * evalStore = nullptr);
-/* Return a Store object to access the Nix store denoted by
- ‘uri’ (slight misnomer...). Supported values are:
-
- * ‘local’: The Nix store in /nix/store and database in
- /nix/var/nix/db, accessed directly.
-
- * ‘daemon’: The Nix store accessed via a Unix domain socket
- connection to nix-daemon.
-
- * ‘unix://<path>’: The Nix store accessed via a Unix domain socket
- connection to nix-daemon, with the socket located at <path>.
-
- * ‘auto’ or ‘’: Equivalent to ‘local’ or ‘daemon’ depending on
- whether the user has write access to the local Nix
- store/database.
-
- * ‘file://<path>’: A binary cache stored in <path>.
-
- * ‘https://<path>’: A binary cache accessed via HTTP.
-
- * ‘s3://<path>’: A writable binary cache stored on Amazon's Simple
- Storage Service.
-
- * ‘ssh://[user@]<host>’: A remote Nix store accessed by running
- ‘nix-store --serve’ via SSH.
-
- You can pass parameters to the store implementation by appending
- ‘?key=value&key=value&...’ to the URI.
-*/
+/**
+ * @return a Store object to access the Nix store denoted by
+ * ‘uri’ (slight misnomer...).
+ *
+ * @param uri Supported values are:
+ *
+ * - ‘local’: The Nix store in /nix/store and database in
+ * /nix/var/nix/db, accessed directly.
+ *
+ * - ‘daemon’: The Nix store accessed via a Unix domain socket
+ * connection to nix-daemon.
+ *
+ * - ‘unix://<path>’: The Nix store accessed via a Unix domain socket
+ * connection to nix-daemon, with the socket located at <path>.
+ *
+ * - ‘auto’ or ‘’: Equivalent to ‘local’ or ‘daemon’ depending on
+ * whether the user has write access to the local Nix
+ * store/database.
+ *
+ * - ‘file://<path>’: A binary cache stored in <path>.
+ *
+ * - ‘https://<path>’: A binary cache accessed via HTTP.
+ *
+ * - ‘s3://<path>’: A writable binary cache stored on Amazon's Simple
+ * Storage Service.
+ *
+ * - ‘ssh://[user@]<host>’: A remote Nix store accessed by running
+ * ‘nix-store --serve’ via SSH.
+ *
+ * You can pass parameters to the store implementation by appending
+ * ‘?key=value&key=value&...’ to the URI.
+ */
ref<Store> openStore(const std::string & uri = settings.storeUri.get(),
const Store::Params & extraParams = Store::Params());
-/* Return the default substituter stores, defined by the
- ‘substituters’ option and various legacy options. */
+/**
+ * @return the default substituter stores, defined by the
+ * ‘substituters’ option and various legacy options.
+ */
std::list<ref<Store>> getDefaultSubstituters();
struct StoreFactory
@@ -812,8 +999,10 @@ struct RegisterStoreImplementation
};
-/* Display a set of paths in human-readable form (i.e., between quotes
- and separated by commas). */
+/**
+ * Display a set of paths in human-readable form (i.e., between quotes
+ * and separated by commas).
+ */
std::string showPaths(const PathSet & paths);
@@ -822,7 +1011,9 @@ std::optional<ValidPathInfo> decodeValidPathInfo(
std::istream & str,
std::optional<HashResult> hashGiven = std::nullopt);
-/* Split URI into protocol+hierarchy part and its parameter set. */
+/**
+ * Split URI into protocol+hierarchy part and its parameter set.
+ */
std::pair<std::string, Store::Params> splitUriAndParams(const std::string & uri);
std::optional<ContentAddress> getDerivationCA(const BasicDerivation & drv);
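
A minimal usage sketch tying the pieces above together: open a store from one of the documented URIs and query path metadata. The URI and store path below are placeholders:

    #include "store-api.hh"
    #include <iostream>

    using namespace nix;

    // Sketch: open the store denoted by a URI (here the local daemon) and print
    // the NAR size recorded for a (placeholder) store path.
    void storeInfoExample()
    {
        ref<Store> store = openStore("daemon");

        StorePath path = store->parseStorePath(
            "/nix/store/xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx-hello-2.12");

        ref<const ValidPathInfo> info = store->queryPathInfo(path);
        std::cout << store->printStorePath(path)
                  << " narSize=" << info->narSize << "\n";
    }
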
diff --git a/src/libstore/store-cast.hh b/src/libstore/store-cast.hh
index ff62fc359..2473e72c5 100644
--- a/src/libstore/store-cast.hh
+++ b/src/libstore/store-cast.hh
@@ -1,9 +1,17 @@
#pragma once
+///@file
#include "store-api.hh"
namespace nix {
+/**
+ * Helper to try downcasting a Store, with a nice error message if it fails.
+ *
+ * This is basically an alternative to the user-facing part of
+ * Store::unsupported that allows us to still have a nice message but
+ * better interface design.
+ */
template<typename T>
T & require(Store & store)
{
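
A minimal sketch of how `require()` is meant to be used at call sites, assuming the `GcStore` interface from gc-store.hh (illustrative; any mixin store interface works the same way):

    #include "store-api.hh"
    #include "store-cast.hh"
    #include "gc-store.hh"

    using namespace nix;

    // Sketch: obtain the garbage-collection interface of a store, or fail with a
    // readable "operation not supported" error for stores that lack it.
    void gcExample(Store & store)
    {
        auto & gcStore = require<GcStore>(store);

        GCOptions options;
        options.action = GCOptions::gcDeleteDead;

        GCResults results;
        gcStore.collectGarbage(options, results);
    }
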
diff --git a/src/libstore/tests/derived-path.hh b/src/libstore/tests/derived-path.hh
index 3bc812440..506f3ccb1 100644
--- a/src/libstore/tests/derived-path.hh
+++ b/src/libstore/tests/derived-path.hh
@@ -1,4 +1,5 @@
#pragma once
+///@file
#include <rapidcheck/gen/Arbitrary.h>
diff --git a/src/libstore/tests/libstore.hh b/src/libstore/tests/libstore.hh
index 05397659b..ef93457b5 100644
--- a/src/libstore/tests/libstore.hh
+++ b/src/libstore/tests/libstore.hh
@@ -1,3 +1,6 @@
+#pragma once
+///@file
+
#include <gtest/gtest.h>
#include <gmock/gmock.h>
diff --git a/src/libstore/tests/outputs-spec.hh b/src/libstore/tests/outputs-spec.hh
index 2d455c817..ded331b33 100644
--- a/src/libstore/tests/outputs-spec.hh
+++ b/src/libstore/tests/outputs-spec.hh
@@ -1,4 +1,5 @@
#pragma once
+///@file
#include <rapidcheck/gen/Arbitrary.h>
diff --git a/src/libstore/tests/path.hh b/src/libstore/tests/path.hh
index d7f1a8988..21cb62310 100644
--- a/src/libstore/tests/path.hh
+++ b/src/libstore/tests/path.hh
@@ -1,4 +1,5 @@
#pragma once
+///@file
#include <rapidcheck/gen/Arbitrary.h>
diff --git a/src/libstore/uds-remote-store.cc b/src/libstore/uds-remote-store.cc
index 5c38323cd..0fb7c38e9 100644
--- a/src/libstore/uds-remote-store.cc
+++ b/src/libstore/uds-remote-store.cc
@@ -26,9 +26,9 @@ UDSRemoteStore::UDSRemoteStore(const Params & params)
UDSRemoteStore::UDSRemoteStore(
- const std::string scheme,
- std::string socket_path,
- const Params & params)
+ const std::string scheme,
+ std::string socket_path,
+ const Params & params)
: UDSRemoteStore(params)
{
path.emplace(socket_path);
diff --git a/src/libstore/uds-remote-store.hh b/src/libstore/uds-remote-store.hh
index f8dfcca70..bd1dcb67c 100644
--- a/src/libstore/uds-remote-store.hh
+++ b/src/libstore/uds-remote-store.hh
@@ -1,4 +1,5 @@
#pragma once
+///@file
#include "remote-store.hh"
#include "local-fs-store.hh"
@@ -15,6 +16,13 @@ struct UDSRemoteStoreConfig : virtual LocalFSStoreConfig, virtual RemoteStoreCon
}
const std::string name() override { return "Local Daemon Store"; }
+
+ std::string doc() override
+ {
+ return
+ #include "uds-remote-store.md"
+ ;
+ }
};
class UDSRemoteStore : public virtual UDSRemoteStoreConfig, public virtual LocalFSStore, public virtual RemoteStore
@@ -29,9 +37,6 @@ public:
static std::set<std::string> uriSchemes()
{ return {"unix"}; }
- bool sameMachine() override
- { return true; }
-
ref<FSAccessor> getFSAccessor() override
{ return LocalFSStore::getFSAccessor(); }
diff --git a/src/libstore/uds-remote-store.md b/src/libstore/uds-remote-store.md
new file mode 100644
index 000000000..8df0bd6ff
--- /dev/null
+++ b/src/libstore/uds-remote-store.md
@@ -0,0 +1,9 @@
+R"(
+
+**Store URL format**: `daemon`, `unix://`*path*
+
+This store type accesses a Nix store by talking to a Nix daemon
+listening on the Unix domain socket *path*. The store pseudo-URL
+`daemon` is equivalent to `unix:///nix/var/nix/daemon-socket/socket`.
+
+)"
diff --git a/src/libstore/worker-protocol.hh b/src/libstore/worker-protocol.hh
index 87088a3ac..697dd2b8c 100644
--- a/src/libstore/worker-protocol.hh
+++ b/src/libstore/worker-protocol.hh
@@ -1,4 +1,5 @@
#pragma once
+///@file
#include "store-api.hh"
#include "serialise.hh"
@@ -14,6 +15,10 @@ namespace nix {
#define GET_PROTOCOL_MINOR(x) ((x) & 0x00ff)
+/**
+ * Enumeration of all the request types for the "worker protocol", used
+ * by unix:// and ssh-ng:// stores.
+ */
typedef enum {
wopIsValidPath = 1,
wopHasSubstitutes = 3,
@@ -74,7 +79,12 @@ typedef enum {
class Store;
struct Source;
-/* To guide overloading */
+/**
+ * Used to guide overloading
+ *
+ * See https://en.cppreference.com/w/cpp/language/adl for the broader
+ * concept of what is going on here.
+ */
template<typename T>
struct Phantom {};
@@ -103,18 +113,19 @@ MAKE_WORKER_PROTO(X_, Y_);
#undef X_
#undef Y_
-/* These use the empty string for the null case, relying on the fact
- that the underlying types never serialize to the empty string.
-
- We do this instead of a generic std::optional<T> instance because
- ordinal tags (0 or 1, here) are a bit of a compatability hazard. For
- the same reason, we don't have a std::variant<T..> instances (ordinal
- tags 0...n).
-
- We could the generic instances and then these as specializations for
- compatability, but that's proven a bit finnicky, and also makes the
- worker protocol harder to implement in other languages where such
- specializations may not be allowed.
+/**
+ * These use the empty string for the null case, relying on the fact
+ * that the underlying types never serialize to the empty string.
+ *
+ * We do this instead of a generic std::optional<T> instance because
+ * ordinal tags (0 or 1, here) are a bit of a compatibility hazard. For
+ * the same reason, we don't have std::variant<T..> instances (ordinal
+ * tags 0...n).
+ *
+ * We could have the generic instances and then these as specializations for
+ * compatibility, but that's proven a bit finicky, and also makes the
+ * worker protocol harder to implement in other languages where such
+ * specializations may not be allowed.
*/
MAKE_WORKER_PROTO(, std::optional<StorePath>);
MAKE_WORKER_PROTO(, std::optional<ContentAddress>);
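
The `Phantom` parameter exists because overloads cannot be selected on return type alone; the dummy argument carries the type. A generic illustration of the idiom, deliberately not using the real worker-protocol read/write signatures (the names below are invented):

    #include <istream>
    #include <string>

    // Illustrative only: a phantom type parameter turns "which T do I want to
    // read?" into ordinary overload resolution, mirroring the Phantom<T> trick
    // used by the worker protocol.
    template<typename T>
    struct Tag {};

    inline std::string readValue(std::istream & in, Tag<std::string>)
    {
        std::string s;
        in >> s;
        return s;
    }

    inline unsigned int readValue(std::istream & in, Tag<unsigned int>)
    {
        unsigned int n;
        in >> n;
        return n;
    }

    // Call site: readValue(in, Tag<std::string>{}) picks the string overload.
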
diff --git a/src/libutil/abstract-setting-to-json.hh b/src/libutil/abstract-setting-to-json.hh
index 2d82b54e7..7b6c3fcb5 100644
--- a/src/libutil/abstract-setting-to-json.hh
+++ b/src/libutil/abstract-setting-to-json.hh
@@ -1,4 +1,5 @@
#pragma once
+///@file
#include <nlohmann/json.hpp>
#include "config.hh"
diff --git a/src/libutil/ansicolor.hh b/src/libutil/ansicolor.hh
index 38305e71c..54721649c 100644
--- a/src/libutil/ansicolor.hh
+++ b/src/libutil/ansicolor.hh
@@ -1,4 +1,5 @@
#pragma once
+///@file
namespace nix {
diff --git a/src/libutil/archive.cc b/src/libutil/archive.cc
index 0e2b9d12c..268a798d9 100644
--- a/src/libutil/archive.cc
+++ b/src/libutil/archive.cc
@@ -87,7 +87,7 @@ static time_t dump(const Path & path, Sink & sink, PathFilter & filter)
std::string name(i.name);
size_t pos = i.name.find(caseHackSuffix);
if (pos != std::string::npos) {
- debug(format("removing case hack suffix from '%1%'") % (path + "/" + i.name));
+ debug("removing case hack suffix from '%1%'", path + "/" + i.name);
name.erase(pos);
}
if (!unhacked.emplace(name, i.name).second)
@@ -262,7 +262,7 @@ static void parse(ParseSink & sink, Source & source, const Path & path)
if (archiveSettings.useCaseHack) {
auto i = names.find(name);
if (i != names.end()) {
- debug(format("case collision between '%1%' and '%2%'") % i->first % name);
+ debug("case collision between '%1%' and '%2%'", i->first, name);
name += caseHackSuffix;
name += std::to_string(++i->second);
} else
diff --git a/src/libutil/archive.hh b/src/libutil/archive.hh
index e42dea540..60e33dd40 100644
--- a/src/libutil/archive.hh
+++ b/src/libutil/archive.hh
@@ -1,4 +1,5 @@
#pragma once
+///@file
#include "types.hh"
#include "serialise.hh"
@@ -7,54 +8,71 @@
namespace nix {
-/* dumpPath creates a Nix archive of the specified path. The format
- is as follows:
-
- IF path points to a REGULAR FILE:
- dump(path) = attrs(
- [ ("type", "regular")
- , ("contents", contents(path))
- ])
-
- IF path points to a DIRECTORY:
- dump(path) = attrs(
- [ ("type", "directory")
- , ("entries", concat(map(f, sort(entries(path)))))
- ])
- where f(fn) = attrs(
- [ ("name", fn)
- , ("file", dump(path + "/" + fn))
- ])
-
- where:
-
- attrs(as) = concat(map(attr, as)) + encN(0)
- attrs((a, b)) = encS(a) + encS(b)
-
- encS(s) = encN(len(s)) + s + (padding until next 64-bit boundary)
-
- encN(n) = 64-bit little-endian encoding of n.
-
- contents(path) = the contents of a regular file.
-
- sort(strings) = lexicographic sort by 8-bit value (strcmp).
-
- entries(path) = the entries of a directory, without `.' and
- `..'.
-
- `+' denotes string concatenation. */
-
-
+/**
+ * dumpPath creates a Nix archive of the specified path.
+ *
+ * @param path the file system data to dump. Dumping is recursive so if
+ * this is a directory we dump it and all its children.
+ *
+ * @param [out] sink The serialised archive is fed into this sink.
+ *
+ * @param filter Can be used to skip certain files.
+ *
+ * The format is as follows:
+ *
+ * IF path points to a REGULAR FILE:
+ * dump(path) = attrs(
+ * [ ("type", "regular")
+ * , ("contents", contents(path))
+ * ])
+ *
+ * IF path points to a DIRECTORY:
+ * dump(path) = attrs(
+ * [ ("type", "directory")
+ * , ("entries", concat(map(f, sort(entries(path)))))
+ * ])
+ * where f(fn) = attrs(
+ * [ ("name", fn)
+ * , ("file", dump(path + "/" + fn))
+ * ])
+ *
+ * where:
+ *
+ * attrs(as) = concat(map(attr, as)) + encN(0)
+ * attrs((a, b)) = encS(a) + encS(b)
+ *
+ * encS(s) = encN(len(s)) + s + (padding until next 64-bit boundary)
+ *
+ * encN(n) = 64-bit little-endian encoding of n.
+ *
+ * contents(path) = the contents of a regular file.
+ *
+ * sort(strings) = lexicographic sort by 8-bit value (strcmp).
+ *
+ * entries(path) = the entries of a directory, without `.' and
+ * `..'.
+ *
+ * `+' denotes string concatenation.
+ */
void dumpPath(const Path & path, Sink & sink,
PathFilter & filter = defaultPathFilter);
-/* Same as `void dumpPath()`, but returns the last modified date of the path */
+/**
+ * Same as dumpPath(), but returns the last modified date of the path.
+ */
time_t dumpPathAndGetMtime(const Path & path, Sink & sink,
PathFilter & filter = defaultPathFilter);
+/**
+ * Dump an archive containing a single regular file with the given contents.
+ *
+ * @param s Contents of the file.
+ */
void dumpString(std::string_view s, Sink & sink);
-/* FIXME: fix this API, it sucks. */
+/**
+ * \todo Fix this API, it sucks.
+ */
struct ParseSink
{
virtual void createDirectory(const Path & path) { };
@@ -68,8 +86,10 @@ struct ParseSink
virtual void createSymlink(const Path & path, const std::string & target) { };
};
-/* If the NAR archive contains a single file at top-level, then save
- the contents of the file to `s'. Otherwise barf. */
+/**
+ * If the NAR archive contains a single file at top-level, then save
+ * the contents of the file to `s'. Otherwise barf.
+ */
struct RetrieveRegularNARSink : ParseSink
{
bool regular = true;
@@ -97,7 +117,9 @@ void parseDump(ParseSink & sink, Source & source);
void restorePath(const Path & path, Source & source);
-/* Read a NAR from 'source' and write it to 'sink'. */
+/**
+ * Read a NAR from 'source' and write it to 'sink'.
+ */
void copyNAR(Source & source, Sink & sink);
void copyPath(const Path & from, const Path & to);
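
A minimal sketch of producing a NAR in memory with `dumpPath()` and the `StringSink` from serialise.hh; the path is a placeholder:

    #include "archive.hh"
    #include "serialise.hh"
    #include <iostream>

    using namespace nix;

    // Sketch: serialise a file or directory tree into an in-memory NAR following
    // the format documented above, then report the archive size.
    void narExample()
    {
        StringSink sink;
        dumpPath("/some/path", sink);     // placeholder path

        // sink.s now holds the complete NAR serialisation.
        std::cout << "NAR size: " << sink.s.size() << " bytes\n";
    }
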
diff --git a/src/libutil/args.cc b/src/libutil/args.cc
index 35686a8aa..081dbeb28 100644
--- a/src/libutil/args.cc
+++ b/src/libutil/args.cc
@@ -52,7 +52,7 @@ std::shared_ptr<Completions> completions;
std::string completionMarker = "___COMPLETE___";
-std::optional<std::string> needsCompletion(std::string_view s)
+static std::optional<std::string> needsCompletion(std::string_view s)
{
if (!completions) return {};
auto i = s.find(completionMarker);
@@ -120,6 +120,12 @@ void Args::parseCmdline(const Strings & _cmdline)
if (!argsSeen)
initialFlagsProcessed();
+
+ /* Now that we are done parsing, make sure that any experimental
+ * feature required by the flags is enabled */
+ for (auto & f : flagExperimentalFeatures)
+ experimentalFeatureSettings.require(f);
+
}
bool Args::processFlag(Strings::iterator & pos, Strings::iterator end)
@@ -128,12 +134,18 @@ bool Args::processFlag(Strings::iterator & pos, Strings::iterator end)
auto process = [&](const std::string & name, const Flag & flag) -> bool {
++pos;
+
+ if (auto & f = flag.experimentalFeature)
+ flagExperimentalFeatures.insert(*f);
+
std::vector<std::string> args;
bool anyCompleted = false;
for (size_t n = 0 ; n < flag.handler.arity; ++n) {
if (pos == end) {
if (flag.handler.arity == ArityAny || anyCompleted) break;
- throw UsageError("flag '%s' requires %d argument(s)", name, flag.handler.arity);
+ throw UsageError(
+ "flag '%s' requires %d argument(s), but only %d were given",
+ name, flag.handler.arity, n);
}
if (auto prefix = needsCompletion(*pos)) {
anyCompleted = true;
@@ -152,7 +164,11 @@ bool Args::processFlag(Strings::iterator & pos, Strings::iterator end)
for (auto & [name, flag] : longFlags) {
if (!hiddenCategories.count(flag->category)
&& hasPrefix(name, std::string(*prefix, 2)))
+ {
+ if (auto & f = flag->experimentalFeature)
+ flagExperimentalFeatures.insert(*f);
completions->add("--" + name, flag->description);
+ }
}
return false;
}
@@ -172,7 +188,8 @@ bool Args::processFlag(Strings::iterator & pos, Strings::iterator end)
if (prefix == "-") {
completions->add("--");
for (auto & [flagName, flag] : shortFlags)
- completions->add(std::string("-") + flagName, flag->description);
+ if (experimentalFeatureSettings.isEnabled(flag->experimentalFeature))
+ completions->add(std::string("-") + flagName, flag->description);
}
}
@@ -230,6 +247,11 @@ nlohmann::json Args::toJSON()
j["arity"] = flag->handler.arity;
if (!flag->labels.empty())
j["labels"] = flag->labels;
+ // TODO With C++23 use `std::optional::transform`
+ if (auto & xp = flag->experimentalFeature)
+ j["experimental-feature"] = showExperimentalFeature(*xp);
+ else
+ j["experimental-feature"] = nullptr;
flags[name] = std::move(j);
}
@@ -326,6 +348,11 @@ Strings argvToStrings(int argc, char * * argv)
return args;
}
+std::optional<ExperimentalFeature> Command::experimentalFeature ()
+{
+ return { Xp::NixCommand };
+}
+
MultiCommand::MultiCommand(const Commands & commands_)
: commands(commands_)
{
@@ -389,6 +416,11 @@ nlohmann::json MultiCommand::toJSON()
cat["id"] = command->category();
cat["description"] = trim(categories[command->category()]);
j["category"] = std::move(cat);
+ // TODO With C++23 use `std::optional::transform`
+ if (auto xp = command->experimentalFeature())
+ cat["experimental-feature"] = showExperimentalFeature(*xp);
+ else
+ cat["experimental-feature"] = nullptr;
cmds[name] = std::move(j);
}
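To make the ordering constraint concrete: a flag gated on an experimental feature no longer fails immediately, because the features demanded by flags are collected during parsing and only checked once the whole command line has been processed. The following standalone sketch shows the same deferred check with made-up flag and feature names; it does not use the real `Args`/`Flag` API.

```cpp
#include <optional>
#include <set>
#include <stdexcept>
#include <string>
#include <vector>

// Toy flag description: a name plus an optionally required experimental feature.
struct ToyFlag {
    std::string name;
    std::optional<std::string> experimentalFeature;
};

int main()
{
    std::set<std::string> enabled;   // filled by --extra-experimental-features
    std::set<std::string> required;  // collected while parsing flags

    // The gated flag appears *before* the flag that enables the feature.
    std::vector<std::string> argv =
        {"--gated-flag", "--extra-experimental-features", "my-feature"};
    std::vector<ToyFlag> flags = {{"--gated-flag", "my-feature"}};

    for (size_t i = 0; i < argv.size(); ++i) {
        for (auto & f : flags)
            if (argv[i] == f.name && f.experimentalFeature)
                required.insert(*f.experimentalFeature);   // remember, don't fail yet
        if (argv[i] == "--extra-experimental-features" && i + 1 < argv.size())
            enabled.insert(argv[++i]);
    }

    // Validate only after the whole command line has been parsed.
    for (auto & f : required)
        if (!enabled.count(f))
            throw std::runtime_error("experimental feature '" + f + "' is disabled");
}
```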
diff --git a/src/libutil/args.hh b/src/libutil/args.hh
index 84866f12b..d90129796 100644
--- a/src/libutil/args.hh
+++ b/src/libutil/args.hh
@@ -1,4 +1,5 @@
#pragma once
+///@file
#include <iostream>
#include <map>
@@ -18,16 +19,22 @@ class Args
{
public:
- /* Parse the command line, throwing a UsageError if something goes
- wrong. */
+ /**
+ * Parse the command line, throwing a UsageError if something goes
+ * wrong.
+ */
void parseCmdline(const Strings & cmdline);
- /* Return a short one-line description of the command. */
+ /**
+ * Return a short one-line description of the command.
+ */
virtual std::string description() { return ""; }
virtual bool forceImpureByDefault() { return false; }
- /* Return documentation about this command, in Markdown format. */
+ /**
+ * Return documentation about this command, in Markdown format.
+ */
virtual std::string doc() { return ""; }
protected:
@@ -117,6 +124,8 @@ protected:
Handler handler;
std::function<void(size_t, std::string_view)> completer;
+ std::optional<ExperimentalFeature> experimentalFeature;
+
static Flag mkHashTypeFlag(std::string && longName, HashType * ht);
static Flag mkHashTypeOptFlag(std::string && longName, std::optional<HashType> * oht);
};
@@ -144,13 +153,17 @@ protected:
std::set<std::string> hiddenCategories;
- /* Called after all command line flags before the first non-flag
- argument (if any) have been processed. */
+ /**
+ * Called after all command line flags before the first non-flag
+ * argument (if any) have been processed.
+ */
virtual void initialFlagsProcessed() {}
- /* Called after the command line has been processed if we need to generate
- completions. Useful for commands that need to know the whole command line
- in order to know what completions to generate. */
+ /**
+ * Called after the command line has been processed if we need to generate
+ * completions. Useful for commands that need to know the whole command line
+ * in order to know what completions to generate.
+ */
virtual void completionHook() { }
public:
@@ -164,7 +177,9 @@ public:
expectedArgs.emplace_back(std::move(arg));
}
- /* Expect a string argument. */
+ /**
+ * Expect a string argument.
+ */
void expectArg(const std::string & label, std::string * dest, bool optional = false)
{
expectArgs({
@@ -174,7 +189,9 @@ public:
});
}
- /* Expect 0 or more arguments. */
+ /**
+ * Expect 0 or more arguments.
+ */
void expectArgs(const std::string & label, std::vector<std::string> * dest)
{
expectArgs({
@@ -188,30 +205,48 @@ public:
friend class MultiCommand;
MultiCommand * parent = nullptr;
+
+private:
+
+ /**
+ * Experimental features needed when parsing args. These are checked
+ * after flag parsing is completed in order to support enabling
+ * experimental features coming after the flag that needs the
+ * experimental feature.
+ */
+ std::set<ExperimentalFeature> flagExperimentalFeatures;
};
-/* A command is an argument parser that can be executed by calling its
- run() method. */
+/**
+ * A command is an argument parser that can be executed by calling its
+ * run() method.
+ */
struct Command : virtual public Args
{
friend class MultiCommand;
virtual ~Command() { }
- virtual void prepare() { };
+ /**
+ * Entry point to the command
+ */
virtual void run() = 0;
typedef int Category;
static constexpr Category catDefault = 0;
+ virtual std::optional<ExperimentalFeature> experimentalFeature ();
+
virtual Category category() { return catDefault; }
};
typedef std::map<std::string, std::function<ref<Command>()>> Commands;
-/* An argument parser that supports multiple subcommands,
- i.e. ‘<command> <subcommand>’. */
+/**
+ * An argument parser that supports multiple subcommands,
+ * i.e. ‘<command> <subcommand>’.
+ */
class MultiCommand : virtual public Args
{
public:
@@ -219,7 +254,9 @@ public:
std::map<Command::Category, std::string> categories;
- // Selected command, if any.
+ /**
+ * Selected command, if any.
+ */
std::optional<std::pair<std::string, ref<Command>>> command;
MultiCommand(const Commands & commands);
@@ -254,8 +291,6 @@ enum CompletionType {
};
extern CompletionType completionType;
-std::optional<std::string> needsCompletion(std::string_view s);
-
void completePath(size_t, std::string_view prefix);
void completeDir(size_t, std::string_view prefix);
diff --git a/src/libutil/callback.hh b/src/libutil/callback.hh
index ef31794be..3710d1239 100644
--- a/src/libutil/callback.hh
+++ b/src/libutil/callback.hh
@@ -1,13 +1,16 @@
#pragma once
+///@file
#include <future>
#include <functional>
namespace nix {
-/* A callback is a wrapper around a lambda that accepts a valid of
- type T or an exception. (We abuse std::future<T> to pass the value or
- exception.) */
+/**
+ * A callback is a wrapper around a lambda that accepts a valid of
+ * type T or an exception. (We abuse std::future<T> to pass the value or
+ * exception.)
+ */
template<typename T>
class Callback
{
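The "value or exception" convention described above can be illustrated without the real `nix::Callback` class: a `std::promise`/`std::future` pair carries either outcome, and the receiving lambda rethrows on `get()`. A minimal sketch, with all names made up:

```cpp
#include <functional>
#include <future>
#include <iostream>
#include <stdexcept>
#include <string>

// Toy stand-in for the pattern: the callback receives a future holding a value or an exception.
template<typename T>
using ToyCallback = std::function<void(std::future<T>)>;

static void produce(ToyCallback<std::string> done, bool fail)
{
    std::promise<std::string> p;
    if (fail) p.set_exception(std::make_exception_ptr(std::runtime_error("boom")));
    else p.set_value("result");
    done(p.get_future());
}

int main()
{
    produce([](std::future<std::string> f) {
        try { std::cout << f.get() << "\n"; }  // get() rethrows if an exception was stored
        catch (std::exception & e) { std::cout << "failed: " << e.what() << "\n"; }
    }, false);
}
```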
diff --git a/src/libutil/canon-path.cc b/src/libutil/canon-path.cc
index b132b4262..ddf6db6d1 100644
--- a/src/libutil/canon-path.cc
+++ b/src/libutil/canon-path.cc
@@ -100,4 +100,30 @@ std::ostream & operator << (std::ostream & stream, const CanonPath & path)
return stream;
}
+std::string CanonPath::makeRelative(const CanonPath & path) const
+{
+ auto p1 = begin();
+ auto p2 = path.begin();
+
+ for (; p1 != end() && p2 != path.end() && *p1 == *p2; ++p1, ++p2) ;
+
+ if (p1 == end() && p2 == path.end())
+ return ".";
+ else if (p1 == end())
+ return std::string(p2.remaining);
+ else {
+ std::string res;
+ while (p1 != end()) {
+ ++p1;
+ if (!res.empty()) res += '/';
+ res += "..";
+ }
+ if (p2 != path.end()) {
+ if (!res.empty()) res += '/';
+ res += p2.remaining;
+ }
+ return res;
+ }
+}
+
}
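A few expected results for the new `makeRelative()`, following the algorithm above and the contract documented in `canon-path.hh` (`CanonPath(this.makeRelative(x), this) == x`). This is only a sketch and assumes `canon-path.hh` is on the include path:

```cpp
#include <cassert>
#include "canon-path.hh"

int main()
{
    using nix::CanonPath;
    assert(CanonPath("/foo/bar").makeRelative(CanonPath("/foo/bar"))     == ".");
    assert(CanonPath("/foo/bar").makeRelative(CanonPath("/foo/bar/baz")) == "baz");
    assert(CanonPath("/foo/bar").makeRelative(CanonPath("/foo/quux"))    == "../quux");
    assert(CanonPath("/foo/bar/baz").makeRelative(CanonPath("/foo"))     == "../..");
}
```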
diff --git a/src/libutil/canon-path.hh b/src/libutil/canon-path.hh
index 9d5984584..76e48c4f2 100644
--- a/src/libutil/canon-path.hh
+++ b/src/libutil/canon-path.hh
@@ -1,4 +1,5 @@
#pragma once
+///@file
#include <string>
#include <optional>
@@ -8,28 +9,31 @@
namespace nix {
-/* A canonical representation of a path. It ensures the following:
-
- - It always starts with a slash.
-
- - It never ends with a slash, except if the path is "/".
-
- - A slash is never followed by a slash (i.e. no empty components).
-
- - There are no components equal to '.' or '..'.
-
- Note that the path does not need to correspond to an actually
- existing path, and there is no guarantee that symlinks are
- resolved.
-*/
+/**
+ * A canonical representation of a path. It ensures the following:
+ *
+ * - It always starts with a slash.
+ *
+ * - It never ends with a slash, except if the path is "/".
+ *
+ * - A slash is never followed by a slash (i.e. no empty components).
+ *
+ * - There are no components equal to '.' or '..'.
+ *
+ * Note that the path does not need to correspond to an actually
+ * existing path, and there is no guarantee that symlinks are
+ * resolved.
+ */
class CanonPath
{
std::string path;
public:
- /* Construct a canon path from a non-canonical path. Any '.', '..'
- or empty components are removed. */
+ /**
+ * Construct a canon path from a non-canonical path. Any '.', '..'
+ * or empty components are removed.
+ */
CanonPath(std::string_view raw);
explicit CanonPath(const char * raw)
@@ -44,9 +48,11 @@ public:
static CanonPath root;
- /* If `raw` starts with a slash, return
- `CanonPath(raw)`. Otherwise return a `CanonPath` representing
- `root + "/" + raw`. */
+ /**
+ * If `raw` starts with a slash, return
+ * `CanonPath(raw)`. Otherwise return a `CanonPath` representing
+ * `root + "/" + raw`.
+ */
CanonPath(std::string_view raw, const CanonPath & root);
bool isRoot() const
@@ -58,8 +64,10 @@ public:
const std::string & abs() const
{ return path; }
- /* Like abs(), but return an empty string if this path is
- '/'. Thus the returned string never ends in a slash. */
+ /**
+ * Like abs(), but return an empty string if this path is
+ * '/'. Thus the returned string never ends in a slash.
+ */
const std::string & absOrEmpty() const
{
const static std::string epsilon;
@@ -85,6 +93,9 @@ public:
bool operator != (const Iterator & x) const
{ return remaining.data() != x.remaining.data(); }
+ bool operator == (const Iterator & x) const
+ { return !(*this != x); }
+
const std::string_view operator * () const
{ return remaining.substr(0, slash); }
@@ -104,7 +115,9 @@ public:
std::optional<CanonPath> parent() const;
- /* Remove the last component. Panics if this path is the root. */
+ /**
+ * Remove the last component. Panics if this path is the root.
+ */
void pop();
std::optional<std::string_view> dirOf() const
@@ -125,10 +138,12 @@ public:
bool operator != (const CanonPath & x) const
{ return path != x.path; }
- /* Compare paths lexicographically except that path separators
- are sorted before any other character. That is, in the sorted order
- a directory is always followed directly by its children. For
- instance, 'foo' < 'foo/bar' < 'foo!'. */
+ /**
+ * Compare paths lexicographically except that path separators
+ * are sorted before any other character. That is, in the sorted order
+ * a directory is always followed directly by its children. For
+ * instance, 'foo' < 'foo/bar' < 'foo!'.
+ */
bool operator < (const CanonPath & x) const
{
auto i = path.begin();
@@ -144,28 +159,42 @@ public:
return i == path.end() && j != x.path.end();
}
- /* Return true if `this` is equal to `parent` or a child of
- `parent`. */
+ /**
+ * Return true if `this` is equal to `parent` or a child of
+ * `parent`.
+ */
bool isWithin(const CanonPath & parent) const;
CanonPath removePrefix(const CanonPath & prefix) const;
- /* Append another path to this one. */
+ /**
+ * Append another path to this one.
+ */
void extend(const CanonPath & x);
- /* Concatenate two paths. */
+ /**
+ * Concatenate two paths.
+ */
CanonPath operator + (const CanonPath & x) const;
- /* Add a path component to this one. It must not contain any slashes. */
+ /**
+ * Add a path component to this one. It must not contain any slashes.
+ */
void push(std::string_view c);
CanonPath operator + (std::string_view c) const;
- /* Check whether access to this path is allowed, which is the case
- if 1) `this` is within any of the `allowed` paths; or 2) any of
- the `allowed` paths are within `this`. (The latter condition
- ensures access to the parents of allowed paths.) */
+ /**
+ * Check whether access to this path is allowed, which is the case
+ * if 1) `this` is within any of the `allowed` paths; or 2) any of
+ * the `allowed` paths are within `this`. (The latter condition
+ * ensures access to the parents of allowed paths.)
+ */
bool isAllowed(const std::set<CanonPath> & allowed) const;
+
+ /** Return a string `x` such that `CanonPath(x, *this) == path`, i.e.
+ `path` expressed relative to `this`. */
+ std::string makeRelative(const CanonPath & path) const;
};
std::ostream & operator << (std::ostream & stream, const CanonPath & path);
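The ordering and access-check rules documented in this header can likewise be spelled out with a few assertions. Again a sketch, assuming the header is available:

```cpp
#include <cassert>
#include <set>
#include "canon-path.hh"

int main()
{
    using nix::CanonPath;

    // Separators sort before every other character, so a directory is
    // immediately followed by its children: 'foo' < 'foo/bar' < 'foo!'.
    assert(CanonPath("/foo")     < CanonPath("/foo/bar"));
    assert(CanonPath("/foo/bar") < CanonPath("/foo!"));

    std::set<CanonPath> allowed { CanonPath("/foo/bar") };
    assert(CanonPath("/foo/bar/baz").isAllowed(allowed)); // within an allowed path
    assert(CanonPath("/foo").isAllowed(allowed));         // parent of an allowed path
    assert(!CanonPath("/quux").isAllowed(allowed));
}
```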
diff --git a/src/libutil/cgroup.hh b/src/libutil/cgroup.hh
index d08c8ad29..574ae8e5b 100644
--- a/src/libutil/cgroup.hh
+++ b/src/libutil/cgroup.hh
@@ -1,4 +1,5 @@
#pragma once
+///@file
#if __linux__
@@ -18,10 +19,12 @@ struct CgroupStats
std::optional<std::chrono::microseconds> cpuUser, cpuSystem;
};
-/* Destroy the cgroup denoted by 'path'. The postcondition is that
- 'path' does not exist, and thus any processes in the cgroup have
- been killed. Also return statistics from the cgroup just before
- destruction. */
+/**
+ * Destroy the cgroup denoted by 'path'. The postcondition is that
+ * 'path' does not exist, and thus any processes in the cgroup have
+ * been killed. Also return statistics from the cgroup just before
+ * destruction.
+ */
CgroupStats destroyCgroup(const Path & cgroup);
}
diff --git a/src/libutil/chunked-vector.hh b/src/libutil/chunked-vector.hh
index 0a4f0b400..d914e2542 100644
--- a/src/libutil/chunked-vector.hh
+++ b/src/libutil/chunked-vector.hh
@@ -1,4 +1,5 @@
#pragma once
+///@file
#include <cstdint>
#include <cstdlib>
@@ -7,20 +8,24 @@
namespace nix {
-/* Provides an indexable container like vector<> with memory overhead
- guarantees like list<> by allocating storage in chunks of ChunkSize
- elements instead of using a contiguous memory allocation like vector<>
- does. Not using a single vector that is resized reduces memory overhead
- on large data sets by on average (growth factor)/2, mostly
- eliminates copies within the vector during resizing, and provides stable
- references to its elements. */
+/**
+ * Provides an indexable container like vector<> with memory overhead
+ * guarantees like list<> by allocating storage in chunks of ChunkSize
+ * elements instead of using a contiguous memory allocation like vector<>
+ * does. Not using a single vector that is resized reduces memory overhead
+ * on large data sets by on average (growth factor)/2, mostly
+ * eliminates copies within the vector during resizing, and provides stable
+ * references to its elements.
+ */
template<typename T, size_t ChunkSize>
class ChunkedVector {
private:
uint32_t size_ = 0;
std::vector<std::vector<T>> chunks;
- /* keep this out of the ::add hot path */
+ /**
+ * Keep this out of the ::add hot path
+ */
[[gnu::noinline]]
auto & addChunk()
{
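The stable-reference property described above comes from never resizing an existing chunk. A standalone toy version (not the real `nix::ChunkedVector` interface) shows the idea:

```cpp
#include <cstddef>
#include <vector>

// Toy chunked container: growing allocates a new chunk instead of reallocating,
// so references to previously added elements remain valid.
template<typename T, size_t ChunkSize>
class ToyChunkedVector {
    std::vector<std::vector<T>> chunks;
public:
    T & add(T value)
    {
        if (chunks.empty() || chunks.back().size() == ChunkSize) {
            chunks.emplace_back();
            chunks.back().reserve(ChunkSize); // one allocation per chunk, never resized
        }
        chunks.back().push_back(std::move(value));
        return chunks.back().back();
    }
};

int main()
{
    ToyChunkedVector<int, 1024> v;
    int & first = v.add(1);
    for (int i = 0; i < 100000; ++i) v.add(i);
    return first == 1 ? 0 : 1;                // 'first' is still a valid reference
}
```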
diff --git a/src/libutil/closure.hh b/src/libutil/closure.hh
index 779b9b2d5..16e3b93e4 100644
--- a/src/libutil/closure.hh
+++ b/src/libutil/closure.hh
@@ -1,3 +1,6 @@
+#pragma once
+///@file
+
#include <set>
#include <future>
#include "sync.hh"
diff --git a/src/libutil/comparator.hh b/src/libutil/comparator.hh
index eecd5b819..2b5424b3d 100644
--- a/src/libutil/comparator.hh
+++ b/src/libutil/comparator.hh
@@ -1,6 +1,8 @@
#pragma once
+///@file
-/* Awfull hacky generation of the comparison operators by doing a lexicographic
+/**
+ * Awful hacky generation of the comparison operators by doing a lexicographic
* comparison between the choosen fields.
*
* ```
diff --git a/src/libutil/compression.hh b/src/libutil/compression.hh
index c470b82a5..3892831c2 100644
--- a/src/libutil/compression.hh
+++ b/src/libutil/compression.hh
@@ -1,4 +1,5 @@
#pragma once
+///@file
#include "ref.hh"
#include "types.hh"
diff --git a/src/libutil/compute-levels.hh b/src/libutil/compute-levels.hh
index 8ded295f9..093e7a915 100644
--- a/src/libutil/compute-levels.hh
+++ b/src/libutil/compute-levels.hh
@@ -1,3 +1,6 @@
+#pragma once
+///@file
+
#include "types.hh"
namespace nix {
diff --git a/src/libutil/config.cc b/src/libutil/config.cc
index b349f2d80..8d63536d6 100644
--- a/src/libutil/config.cc
+++ b/src/libutil/config.cc
@@ -70,10 +70,17 @@ void AbstractConfig::reapplyUnknownSettings()
set(s.first, s.second);
}
+// Whether we should process the option. Excludes aliases, which are handled elsewhere, and disabled features.
+static bool applicable(const Config::SettingData & sd)
+{
+ return !sd.isAlias
+ && experimentalFeatureSettings.isEnabled(sd.setting->experimentalFeature);
+}
+
void Config::getSettings(std::map<std::string, SettingInfo> & res, bool overriddenOnly)
{
for (auto & opt : _settings)
- if (!opt.second.isAlias && (!overriddenOnly || opt.second.setting->overridden))
+ if (applicable(opt.second) && (!overriddenOnly || opt.second.setting->overridden))
res.emplace(opt.first, SettingInfo{opt.second.setting->to_string(), opt.second.setting->description});
}
@@ -147,9 +154,8 @@ nlohmann::json Config::toJSON()
{
auto res = nlohmann::json::object();
for (auto & s : _settings)
- if (!s.second.isAlias) {
+ if (applicable(s.second))
res.emplace(s.first, s.second.setting->toJSON());
- }
return res;
}
@@ -157,24 +163,31 @@ std::string Config::toKeyValue()
{
auto res = std::string();
for (auto & s : _settings)
- if (!s.second.isAlias) {
+ if (applicable(s.second))
res += fmt("%s = %s\n", s.first, s.second.setting->to_string());
- }
return res;
}
void Config::convertToArgs(Args & args, const std::string & category)
{
- for (auto & s : _settings)
+ for (auto & s : _settings) {
+ /* We do include args for settings gated on disabled
+ experimental-features. The args themselves, however, will also be
+ gated on any experimental feature the underlying setting requires. */
if (!s.second.isAlias)
s.second.setting->convertToArg(args, category);
+ }
}
AbstractSetting::AbstractSetting(
const std::string & name,
const std::string & description,
- const std::set<std::string> & aliases)
- : name(name), description(stripIndentation(description)), aliases(aliases)
+ const std::set<std::string> & aliases,
+ std::optional<ExperimentalFeature> experimentalFeature)
+ : name(name)
+ , description(stripIndentation(description))
+ , aliases(aliases)
+ , experimentalFeature(experimentalFeature)
{
}
@@ -210,6 +223,7 @@ void BaseSetting<T>::convertToArg(Args & args, const std::string & category)
.category = category,
.labels = {"value"},
.handler = {[this](std::string s) { overridden = true; set(s); }},
+ .experimentalFeature = experimentalFeature,
});
if (isAppendable())
@@ -219,6 +233,7 @@ void BaseSetting<T>::convertToArg(Args & args, const std::string & category)
.category = category,
.labels = {"value"},
.handler = {[this](std::string s) { overridden = true; set(s, true); }},
+ .experimentalFeature = experimentalFeature,
});
}
@@ -270,13 +285,15 @@ template<> void BaseSetting<bool>::convertToArg(Args & args, const std::string &
.longName = name,
.description = fmt("Enable the `%s` setting.", name),
.category = category,
- .handler = {[this]() { override(true); }}
+ .handler = {[this]() { override(true); }},
+ .experimentalFeature = experimentalFeature,
});
args.addFlag({
.longName = "no-" + name,
.description = fmt("Disable the `%s` setting.", name),
.category = category,
- .handler = {[this]() { override(false); }}
+ .handler = {[this]() { override(false); }},
+ .experimentalFeature = experimentalFeature,
});
}
@@ -444,4 +461,30 @@ GlobalConfig::Register::Register(Config * config)
configRegistrations->emplace_back(config);
}
+ExperimentalFeatureSettings experimentalFeatureSettings;
+
+static GlobalConfig::Register rSettings(&experimentalFeatureSettings);
+
+bool ExperimentalFeatureSettings::isEnabled(const ExperimentalFeature & feature) const
+{
+ auto & f = experimentalFeatures.get();
+ return std::find(f.begin(), f.end(), feature) != f.end();
+}
+
+void ExperimentalFeatureSettings::require(const ExperimentalFeature & feature) const
+{
+ if (!isEnabled(feature))
+ throw MissingExperimentalFeature(feature);
+}
+
+bool ExperimentalFeatureSettings::isEnabled(const std::optional<ExperimentalFeature> & feature) const
+{
+ return !feature || isEnabled(*feature);
+}
+
+void ExperimentalFeatureSettings::require(const std::optional<ExperimentalFeature> & feature) const
+{
+ if (feature) require(*feature);
+}
+
}
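The two `std::optional` overloads added above encode "no feature required" as `std::nullopt`, which always passes the check. A standalone sketch of the same rule, using plain strings instead of the `ExperimentalFeature` enum:

```cpp
#include <optional>
#include <set>
#include <stdexcept>
#include <string>

// Toy stand-in for ExperimentalFeatureSettings, illustrating the std::nullopt rule.
struct ToyFeatureSettings {
    std::set<std::string> enabled;

    bool isEnabled(const std::optional<std::string> & feature) const
    {
        return !feature || enabled.count(*feature); // no feature -> always enabled
    }

    void require(const std::optional<std::string> & feature) const
    {
        if (!isEnabled(feature))
            throw std::runtime_error("experimental feature '" + *feature + "' is disabled");
    }
};

int main()
{
    ToyFeatureSettings s{{"flakes"}};
    s.require(std::nullopt);      // nothing required, always fine
    s.require("flakes");          // enabled, fine
    s.require("ca-derivations");  // throws: not enabled
}
```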
diff --git a/src/libutil/config.hh b/src/libutil/config.hh
index 7ac43c854..3c1d70294 100644
--- a/src/libutil/config.hh
+++ b/src/libutil/config.hh
@@ -1,12 +1,14 @@
+#pragma once
+///@file
+
#include <cassert>
#include <map>
#include <set>
-#include "types.hh"
-
#include <nlohmann/json_fwd.hpp>
-#pragma once
+#include "types.hh"
+#include "experimental-features.hh"
namespace nix {
@@ -123,21 +125,21 @@ public:
void reapplyUnknownSettings();
};
-/* A class to simplify providing configuration settings. The typical
- use is to inherit Config and add Setting<T> members:
-
- class MyClass : private Config
- {
- Setting<int> foo{this, 123, "foo", "the number of foos to use"};
- Setting<std::string> bar{this, "blabla", "bar", "the name of the bar"};
-
- MyClass() : Config(readConfigFile("/etc/my-app.conf"))
- {
- std::cout << foo << "\n"; // will print 123 unless overridden
- }
- };
-*/
-
+/**
+ * A class to simplify providing configuration settings. The typical
+ * use is to inherit Config and add Setting<T> members:
+ *
+ * class MyClass : private Config
+ * {
+ * Setting<int> foo{this, 123, "foo", "the number of foos to use"};
+ * Setting<std::string> bar{this, "blabla", "bar", "the name of the bar"};
+ *
+ * MyClass() : Config(readConfigFile("/etc/my-app.conf"))
+ * {
+ * std::cout << foo << "\n"; // will print 123 unless overridden
+ * }
+ * };
+ */
class Config : public AbstractConfig
{
friend class AbstractSetting;
@@ -194,12 +196,15 @@ public:
bool overridden = false;
+ std::optional<ExperimentalFeature> experimentalFeature;
+
protected:
AbstractSetting(
const std::string & name,
const std::string & description,
- const std::set<std::string> & aliases);
+ const std::set<std::string> & aliases,
+ std::optional<ExperimentalFeature> experimentalFeature = std::nullopt);
virtual ~AbstractSetting()
{
@@ -224,7 +229,9 @@ protected:
bool isOverridden() const { return overridden; }
};
-/* A setting of type T. */
+/**
+ * A setting of type T.
+ */
template<typename T>
class BaseSetting : public AbstractSetting
{
@@ -240,8 +247,9 @@ public:
const bool documentDefault,
const std::string & name,
const std::string & description,
- const std::set<std::string> & aliases = {})
- : AbstractSetting(name, description, aliases)
+ const std::set<std::string> & aliases = {},
+ std::optional<ExperimentalFeature> experimentalFeature = std::nullopt)
+ : AbstractSetting(name, description, aliases, experimentalFeature)
, value(def)
, defaultValue(def)
, documentDefault(documentDefault)
@@ -296,8 +304,9 @@ public:
const std::string & name,
const std::string & description,
const std::set<std::string> & aliases = {},
- const bool documentDefault = true)
- : BaseSetting<T>(def, documentDefault, name, description, aliases)
+ const bool documentDefault = true,
+ std::optional<ExperimentalFeature> experimentalFeature = std::nullopt)
+ : BaseSetting<T>(def, documentDefault, name, description, aliases, experimentalFeature)
{
options->addSetting(this);
}
@@ -305,8 +314,10 @@ public:
void operator =(const T & v) { this->assign(v); }
};
-/* A special setting for Paths. These are automatically canonicalised
- (e.g. "/foo//bar/" becomes "/foo/bar"). */
+/**
+ * A special setting for Paths. These are automatically canonicalised
+ * (e.g. "/foo//bar/" becomes "/foo/bar").
+ */
class PathSetting : public BaseSetting<Path>
{
bool allowEmpty;
@@ -357,4 +368,37 @@ struct GlobalConfig : public AbstractConfig
extern GlobalConfig globalConfig;
+
+struct ExperimentalFeatureSettings : Config {
+
+ Setting<std::set<ExperimentalFeature>> experimentalFeatures{this, {}, "experimental-features",
+ "Experimental Nix features to enable."};
+
+ /**
+ * Check whether the given experimental feature is enabled.
+ */
+ bool isEnabled(const ExperimentalFeature &) const;
+
+ /**
+ * Require an experimental feature be enabled, throwing an error if it is
+ * not.
+ */
+ void require(const ExperimentalFeature &) const;
+
+ /**
+ * `std::nullopt` means no feature is required, so there is nothing that could be
+ * disabled, and the function returns true in that case.
+ */
+ bool isEnabled(const std::optional<ExperimentalFeature> &) const;
+
+ /**
+ * `std::nullopt` means no feature is required, so there is nothing that could be
+ * disabled, and the function does nothing in that case.
+ */
+ void require(const std::optional<ExperimentalFeature> &) const;
+};
+
+// FIXME: don't use a global variable.
+extern ExperimentalFeatureSettings experimentalFeatureSettings;
+
}
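With the new trailing constructor argument, an individual setting can be tied to an experimental feature, and the `--name`/`--no-name` flags generated for it carry the same gate. A hedged sketch of what such a declaration might look like; the settings class and setting name are made up, only the parameter order follows the `Setting` constructor above:

```cpp
#include "config.hh"

using namespace nix;

// Hypothetical settings class with one experimental-feature-gated setting (sketch only).
struct MySettings : Config
{
    Setting<bool> useFancyStuff{
        this, false, "use-fancy-stuff",
        "Whether to use the fancy new code path.",
        {},                                   // aliases
        true,                                 // documentDefault
        ExperimentalFeature::CaDerivations};  // required experimental feature
};
```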
diff --git a/src/libutil/error.cc b/src/libutil/error.cc
index e4f0d4677..c9d61942a 100644
--- a/src/libutil/error.cc
+++ b/src/libutil/error.cc
@@ -302,14 +302,14 @@ std::ostream & showErrorInfo(std::ostream & out, const ErrorInfo & einfo, bool s
if (!einfo.traces.empty()) {
size_t count = 0;
for (const auto & trace : einfo.traces) {
+ if (trace.hint.str().empty()) continue;
+ if (frameOnly && !trace.frame) continue;
+
if (!showTrace && count > 3) {
oss << "\n" << ANSI_WARNING "(stack trace truncated; use '--show-trace' to show the full trace)" ANSI_NORMAL << "\n";
break;
}
- if (trace.hint.str().empty()) continue;
- if (frameOnly && !trace.frame) continue;
-
count++;
frameOnly = trace.frame;
diff --git a/src/libutil/error.hh b/src/libutil/error.hh
index 0ebeaba61..eafc6a540 100644
--- a/src/libutil/error.hh
+++ b/src/libutil/error.hh
@@ -1,4 +1,5 @@
#pragma once
+///@file
#include "suggestions.hh"
#include "ref.hh"
@@ -54,20 +55,26 @@ typedef enum {
lvlVomit
} Verbosity;
-// the lines of code surrounding an error.
+/**
+ * The lines of code surrounding an error.
+ */
struct LinesOfCode {
std::optional<std::string> prevLineOfCode;
std::optional<std::string> errLineOfCode;
std::optional<std::string> nextLineOfCode;
};
-/* An abstract type that represents a location in a source file. */
+/**
+ * An abstract type that represents a location in a source file.
+ */
struct AbstractPos
{
uint32_t line = 0;
uint32_t column = 0;
- /* Return the contents of the source file. */
+ /**
+ * Return the contents of the source file.
+ */
virtual std::optional<std::string> getSource() const
{ return std::nullopt; };
@@ -104,8 +111,10 @@ struct ErrorInfo {
std::ostream & showErrorInfo(std::ostream & out, const ErrorInfo & einfo, bool showTrace);
-/* BaseError should generally not be caught, as it has Interrupted as
- a subclass. Catch Error instead. */
+/**
+ * BaseError should generally not be caught, as it has Interrupted as
+ * a subclass. Catch Error instead.
+ */
class BaseError : public std::exception
{
protected:
diff --git a/src/libutil/experimental-features.hh b/src/libutil/experimental-features.hh
index ac372e03e..5948ad7ad 100644
--- a/src/libutil/experimental-features.hh
+++ b/src/libutil/experimental-features.hh
@@ -1,4 +1,5 @@
#pragma once
+///@file
#include "comparator.hh"
#include "error.hh"
@@ -12,7 +13,7 @@ namespace nix {
*
* If you update this, don’t forget to also change the map defining their
* string representation in the corresponding `.cc` file.
- **/
+ */
enum struct ExperimentalFeature
{
CaDerivations,
diff --git a/src/libutil/filesystem.cc b/src/libutil/filesystem.cc
index 3a732cff8..56be76ecc 100644
--- a/src/libutil/filesystem.cc
+++ b/src/libutil/filesystem.cc
@@ -15,9 +15,9 @@ static Path tempName(Path tmpRoot, const Path & prefix, bool includePid,
{
tmpRoot = canonPath(tmpRoot.empty() ? getEnv("TMPDIR").value_or("/tmp") : tmpRoot, true);
if (includePid)
- return (format("%1%/%2%-%3%-%4%") % tmpRoot % prefix % getpid() % counter++).str();
+ return fmt("%1%/%2%-%3%-%4%", tmpRoot, prefix, getpid(), counter++);
else
- return (format("%1%/%2%-%3%") % tmpRoot % prefix % counter++).str();
+ return fmt("%1%/%2%-%3%", tmpRoot, prefix, counter++);
}
Path createTempDir(const Path & tmpRoot, const Path & prefix,
diff --git a/src/libutil/finally.hh b/src/libutil/finally.hh
index dee2e8d2f..db654301f 100644
--- a/src/libutil/finally.hh
+++ b/src/libutil/finally.hh
@@ -1,6 +1,9 @@
#pragma once
+///@file
-/* A trivial class to run a function at the end of a scope. */
+/**
+ * A trivial class to run a function at the end of a scope.
+ */
template<typename Fn>
class Finally
{
diff --git a/src/libutil/fmt.hh b/src/libutil/fmt.hh
index e879fd3b8..727255b45 100644
--- a/src/libutil/fmt.hh
+++ b/src/libutil/fmt.hh
@@ -1,4 +1,5 @@
#pragma once
+///@file
#include <boost/format.hpp>
#include <string>
@@ -8,30 +9,25 @@
namespace nix {
-/* Inherit some names from other namespaces for convenience. */
+/**
+ * Inherit some names from other namespaces for convenience.
+ */
using boost::format;
-/* A variadic template that does nothing. Useful to call a function
- for all variadic arguments but ignoring the result. */
+/**
+ * A variadic template that does nothing. Useful to call a function
+ * for all variadic arguments but ignoring the result.
+ */
struct nop { template<typename... T> nop(T...) {} };
-struct FormatOrString
-{
- std::string s;
- FormatOrString(std::string s) : s(std::move(s)) { };
- template<class F>
- FormatOrString(const F & f) : s(f.str()) { };
- FormatOrString(const char * s) : s(s) { };
-};
-
-
-/* A helper for formatting strings. ‘fmt(format, a_0, ..., a_n)’ is
- equivalent to ‘boost::format(format) % a_0 % ... %
- ... a_n’. However, ‘fmt(s)’ is equivalent to ‘s’ (so no %-expansion
- takes place). */
-
+/**
+ * A helper for formatting strings. ‘fmt(format, a_0, ..., a_n)’ is
+ * equivalent to ‘boost::format(format) % a_0 % ... % a_n’.
+ * However, ‘fmt(s)’ is equivalent to ‘s’ (so no %-expansion
+ * takes place).
+ */
template<class F>
inline void formatHelper(F & f)
{
@@ -53,11 +49,6 @@ inline std::string fmt(const char * s)
return s;
}
-inline std::string fmt(const FormatOrString & fs)
-{
- return fs.s;
-}
-
template<typename... Args>
inline std::string fmt(const std::string & fs, const Args & ... args)
{
diff --git a/src/libutil/git.hh b/src/libutil/git.hh
index cb13ef0e5..bf2b9a286 100644
--- a/src/libutil/git.hh
+++ b/src/libutil/git.hh
@@ -1,4 +1,5 @@
#pragma once
+///@file
#include <string>
#include <string_view>
@@ -8,21 +9,23 @@ namespace nix {
namespace git {
-// A line from the output of `git ls-remote --symref`.
-//
-// These can be of two kinds:
-//
-// - Symbolic references of the form
-//
-// ref: {target} {reference}
-//
-// where {target} is itself a reference and {reference} is optional
-//
-// - Object references of the form
-//
-// {target} {reference}
-//
-// where {target} is a commit id and {reference} is mandatory
+/**
+ * A line from the output of `git ls-remote --symref`.
+ *
+ * These can be of two kinds:
+ *
+ * - Symbolic references of the form
+ *
+ * ref: {target} {reference}
+ *
+ * where {target} is itself a reference and {reference} is optional
+ *
+ * - Object references of the form
+ *
+ * {target} {reference}
+ *
+ * where {target} is a commit id and {reference} is mandatory
+ */
struct LsRemoteRefLine {
enum struct Kind {
Symbolic,
diff --git a/src/libutil/hash.cc b/src/libutil/hash.cc
index d2fd0c15a..5735e4715 100644
--- a/src/libutil/hash.cc
+++ b/src/libutil/hash.cc
@@ -71,12 +71,13 @@ const std::string base16Chars = "0123456789abcdef";
static std::string printHash16(const Hash & hash)
{
- char buf[hash.hashSize * 2];
+ std::string buf;
+ buf.reserve(hash.hashSize * 2);
for (unsigned int i = 0; i < hash.hashSize; i++) {
- buf[i * 2] = base16Chars[hash.hash[i] >> 4];
- buf[i * 2 + 1] = base16Chars[hash.hash[i] & 0x0f];
+ buf.push_back(base16Chars[hash.hash[i] >> 4]);
+ buf.push_back(base16Chars[hash.hash[i] & 0x0f]);
}
- return std::string(buf, hash.hashSize * 2);
+ return buf;
}
@@ -130,7 +131,7 @@ std::string Hash::to_string(Base base, bool includeType) const
break;
case Base64:
case SRI:
- s += base64Encode(std::string((const char *) hash, hashSize));
+ s += base64Encode(std::string_view((const char *) hash, hashSize));
break;
}
return s;
@@ -403,7 +404,7 @@ HashType parseHashType(std::string_view s)
throw UsageError("unknown hash algorithm '%1%'", s);
}
-std::string printHashType(HashType ht)
+std::string_view printHashType(HashType ht)
{
switch (ht) {
case htMD5: return "md5";
diff --git a/src/libutil/hash.hh b/src/libutil/hash.hh
index 00f70a572..be1fdba2a 100644
--- a/src/libutil/hash.hh
+++ b/src/libutil/hash.hh
@@ -1,4 +1,5 @@
#pragma once
+///@file
#include "types.hh"
#include "serialise.hh"
@@ -33,62 +34,86 @@ struct Hash
HashType type;
- /* Create a zero-filled hash object. */
+ /**
+ * Create a zero-filled hash object.
+ */
Hash(HashType type);
- /* Parse the hash from a string representation in the format
- "[<type>:]<base16|base32|base64>" or "<type>-<base64>" (a
- Subresource Integrity hash expression). If the 'type' argument
- is not present, then the hash type must be specified in the
- string. */
+ /**
+ * Parse the hash from a string representation in the format
+ * "[<type>:]<base16|base32|base64>" or "<type>-<base64>" (a
+ * Subresource Integrity hash expression). If the 'type' argument
+ * is not present, then the hash type must be specified in the
+ * string.
+ */
static Hash parseAny(std::string_view s, std::optional<HashType> type);
- /* Parse a hash from a string representation like the above, except the
- type prefix is mandatory is there is no separate arguement. */
+ /**
+ * Parse a hash from a string representation like the above, except the
+ * type prefix is mandatory if there is no separate argument.
+ */
static Hash parseAnyPrefixed(std::string_view s);
- /* Parse a plain hash that musst not have any prefix indicating the type.
- The type is passed in to disambiguate. */
+ /**
+ * Parse a plain hash that must not have any prefix indicating the type.
+ * The type is passed in to disambiguate.
+ */
static Hash parseNonSRIUnprefixed(std::string_view s, HashType type);
static Hash parseSRI(std::string_view original);
private:
- /* The type must be provided, the string view must not include <type>
- prefix. `isSRI` helps disambigate the various base-* encodings. */
+ /**
+ * The type must be provided, the string view must not include <type>
+ * prefix. `isSRI` helps disambiguate the various base-* encodings.
+ */
Hash(std::string_view s, HashType type, bool isSRI);
public:
- /* Check whether two hash are equal. */
+ /**
+ * Check whether two hashes are equal.
+ */
bool operator == (const Hash & h2) const;
- /* Check whether two hash are not equal. */
+ /**
+ * Check whether two hashes are not equal.
+ */
bool operator != (const Hash & h2) const;
- /* For sorting. */
+ /**
+ * For sorting.
+ */
bool operator < (const Hash & h) const;
- /* Returns the length of a base-16 representation of this hash. */
+ /**
+ * Returns the length of a base-16 representation of this hash.
+ */
size_t base16Len() const
{
return hashSize * 2;
}
- /* Returns the length of a base-32 representation of this hash. */
+ /**
+ * Returns the length of a base-32 representation of this hash.
+ */
size_t base32Len() const
{
return (hashSize * 8 - 1) / 5 + 1;
}
- /* Returns the length of a base-64 representation of this hash. */
+ /**
+ * Returns the length of a base-64 representation of this hash.
+ */
size_t base64Len() const
{
return ((4 * hashSize / 3) + 3) & ~3;
}
- /* Return a string representation of the hash, in base-16, base-32
- or base-64. By default, this is prefixed by the hash type
- (e.g. "sha256:"). */
+ /**
+ * Return a string representation of the hash, in base-16, base-32
+ * or base-64. By default, this is prefixed by the hash type
+ * (e.g. "sha256:").
+ */
std::string to_string(Base base, bool includeType) const;
std::string gitRev() const
@@ -104,36 +129,54 @@ public:
static Hash dummy;
};
-/* Helper that defaults empty hashes to the 0 hash. */
+/**
+ * Helper that defaults empty hashes to the 0 hash.
+ */
Hash newHashAllowEmpty(std::string_view hashStr, std::optional<HashType> ht);
-/* Print a hash in base-16 if it's MD5, or base-32 otherwise. */
+/**
+ * Print a hash in base-16 if it's MD5, or base-32 otherwise.
+ */
std::string printHash16or32(const Hash & hash);
-/* Compute the hash of the given string. */
+/**
+ * Compute the hash of the given string.
+ */
Hash hashString(HashType ht, std::string_view s);
-/* Compute the hash of the given file. */
+/**
+ * Compute the hash of the given file.
+ */
Hash hashFile(HashType ht, const Path & path);
-/* Compute the hash of the given path. The hash is defined as
- (essentially) hashString(ht, dumpPath(path)). */
+/**
+ * Compute the hash of the given path. The hash is defined as
+ * (essentially) hashString(ht, dumpPath(path)).
+ */
typedef std::pair<Hash, uint64_t> HashResult;
HashResult hashPath(HashType ht, const Path & path,
PathFilter & filter = defaultPathFilter);
-/* Compress a hash to the specified number of bytes by cyclically
- XORing bytes together. */
+/**
+ * Compress a hash to the specified number of bytes by cyclically
+ * XORing bytes together.
+ */
Hash compressHash(const Hash & hash, unsigned int newSize);
-/* Parse a string representing a hash type. */
+/**
+ * Parse a string representing a hash type.
+ */
HashType parseHashType(std::string_view s);
-/* Will return nothing on parse error */
+/**
+ * Will return nothing on parse error
+ */
std::optional<HashType> parseHashTypeOpt(std::string_view s);
-/* And the reverse. */
-std::string printHashType(HashType ht);
+/**
+ * And the reverse.
+ */
+std::string_view printHashType(HashType ht);
union Ctx;
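The length helpers above are plain arithmetic over `hashSize`; for SHA-256 (32 bytes) they work out to 64, 52 and 44 characters respectively. A quick sketch, assuming `hash.hh` is on the include path:

```cpp
#include <cassert>
#include "hash.hh"

int main()
{
    nix::Hash h(nix::htSHA256);      // zero-filled 32-byte hash
    assert(h.base16Len() == 64);     // 32 * 2
    assert(h.base32Len() == 52);     // (32 * 8 - 1) / 5 + 1
    assert(h.base64Len() == 44);     // ((4 * 32 / 3) + 3) & ~3
}
```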
diff --git a/src/libutil/hilite.hh b/src/libutil/hilite.hh
index f8bdbfc55..2d5cf7c6f 100644
--- a/src/libutil/hilite.hh
+++ b/src/libutil/hilite.hh
@@ -1,4 +1,5 @@
#pragma once
+///@file
#include <regex>
#include <vector>
@@ -6,11 +7,13 @@
namespace nix {
-/* Highlight all the given matches in the given string `s` by wrapping
- them between `prefix` and `postfix`.
-
- If some matches overlap, then their union will be wrapped rather
- than the individual matches. */
+/**
+ * Highlight all the given matches in the given string `s` by wrapping
+ * them between `prefix` and `postfix`.
+ *
+ * If some matches overlap, then their union will be wrapped rather
+ * than the individual matches.
+ */
std::string hiliteMatches(
std::string_view s,
std::vector<std::smatch> matches,
diff --git a/src/libutil/json-impls.hh b/src/libutil/json-impls.hh
index bd75748ad..b26163a04 100644
--- a/src/libutil/json-impls.hh
+++ b/src/libutil/json-impls.hh
@@ -1,4 +1,5 @@
#pragma once
+///@file
#include "nlohmann/json_fwd.hpp"
diff --git a/src/libutil/json-utils.hh b/src/libutil/json-utils.hh
index b8a031227..eb00e954f 100644
--- a/src/libutil/json-utils.hh
+++ b/src/libutil/json-utils.hh
@@ -1,4 +1,5 @@
#pragma once
+///@file
#include <nlohmann/json.hpp>
diff --git a/src/libutil/logging.cc b/src/libutil/logging.cc
index 904ba6ebe..5a2dd99af 100644
--- a/src/libutil/logging.cc
+++ b/src/libutil/logging.cc
@@ -32,7 +32,8 @@ void Logger::warn(const std::string & msg)
void Logger::writeToStdout(std::string_view s)
{
- std::cout << s << "\n";
+ writeFull(STDOUT_FILENO, s);
+ writeFull(STDOUT_FILENO, "\n");
}
class SimpleLogger : public Logger
@@ -53,7 +54,7 @@ public:
return printBuildLogs;
}
- void log(Verbosity lvl, const FormatOrString & fs) override
+ void log(Verbosity lvl, std::string_view s) override
{
if (lvl > verbosity) return;
@@ -64,14 +65,15 @@ public:
switch (lvl) {
case lvlError: c = '3'; break;
case lvlWarn: c = '4'; break;
- case lvlInfo: c = '5'; break;
+ case lvlNotice: case lvlInfo: c = '5'; break;
case lvlTalkative: case lvlChatty: c = '6'; break;
- default: c = '7';
+ case lvlDebug: case lvlVomit: c = '7';
+ default: c = '7'; break; // should not happen, and missing enum case is reported by -Werror=switch-enum
}
prefix = std::string("<") + c + ">";
}
- writeToStderr(prefix + filterANSIEscapes(fs.s, !tty) + "\n");
+ writeToStderr(prefix + filterANSIEscapes(s, !tty) + "\n");
}
void logEI(const ErrorInfo & ei) override
@@ -84,7 +86,7 @@ public:
void startActivity(ActivityId act, Verbosity lvl, ActivityType type,
const std::string & s, const Fields & fields, ActivityId parent)
- override
+ override
{
if (lvl <= verbosity && !s.empty())
log(lvl, s + "...");
@@ -173,12 +175,12 @@ struct JSONLogger : Logger {
prevLogger.log(lvlError, "@nix " + json.dump(-1, ' ', false, nlohmann::json::error_handler_t::replace));
}
- void log(Verbosity lvl, const FormatOrString & fs) override
+ void log(Verbosity lvl, std::string_view s) override
{
nlohmann::json json;
json["action"] = "msg";
json["level"] = lvl;
- json["msg"] = fs.s;
+ json["msg"] = s;
write(json);
}
diff --git a/src/libutil/logging.hh b/src/libutil/logging.hh
index 4642c49f7..576068c22 100644
--- a/src/libutil/logging.hh
+++ b/src/libutil/logging.hh
@@ -1,4 +1,5 @@
#pragma once
+///@file
#include "types.hh"
#include "error.hh"
@@ -72,14 +73,17 @@ public:
virtual void stop() { };
+ virtual void pause() { };
+ virtual void resume() { };
+
// Whether the logger prints the whole build log
virtual bool isVerbose() { return false; }
- virtual void log(Verbosity lvl, const FormatOrString & fs) = 0;
+ virtual void log(Verbosity lvl, std::string_view s) = 0;
- void log(const FormatOrString & fs)
+ void log(std::string_view s)
{
- log(lvlInfo, fs);
+ log(lvlInfo, s);
}
virtual void logEI(const ErrorInfo & ei) = 0;
@@ -102,11 +106,9 @@ public:
virtual void writeToStdout(std::string_view s);
template<typename... Args>
- inline void cout(const std::string & fs, const Args & ... args)
+ inline void cout(const Args & ... args)
{
- boost::format f(fs);
- formatHelper(f, args...);
- writeToStdout(f.str());
+ writeToStdout(fmt(args...));
}
virtual std::optional<char> ask(std::string_view s)
@@ -216,7 +218,9 @@ extern Verbosity verbosity; /* suppress msgs > this */
#define debug(args...) printMsg(lvlDebug, args)
#define vomit(args...) printMsg(lvlVomit, args)
-/* if verbosity >= lvlWarn, print a message with a yellow 'warning:' prefix. */
+/**
+ * if verbosity >= lvlWarn, print a message with a yellow 'warning:' prefix.
+ */
template<typename... Args>
inline void warn(const std::string & fs, const Args & ... args)
{
diff --git a/src/libutil/lru-cache.hh b/src/libutil/lru-cache.hh
index 6ef4a3e06..0e19517ed 100644
--- a/src/libutil/lru-cache.hh
+++ b/src/libutil/lru-cache.hh
@@ -1,4 +1,5 @@
#pragma once
+///@file
#include <cassert>
#include <map>
@@ -7,7 +8,9 @@
namespace nix {
-/* A simple least-recently used cache. Not thread-safe. */
+/**
+ * A simple least-recently used cache. Not thread-safe.
+ */
template<typename Key, typename Value>
class LRUCache
{
@@ -31,7 +34,9 @@ public:
LRUCache(size_t capacity) : capacity(capacity) { }
- /* Insert or upsert an item in the cache. */
+ /**
+ * Insert or upsert an item in the cache.
+ */
void upsert(const Key & key, const Value & value)
{
if (capacity == 0) return;
@@ -39,7 +44,9 @@ public:
erase(key);
if (data.size() >= capacity) {
- /* Retire the oldest item. */
+ /**
+ * Retire the oldest item.
+ */
auto oldest = lru.begin();
data.erase(*oldest);
lru.erase(oldest);
@@ -63,14 +70,18 @@ public:
return true;
}
- /* Look up an item in the cache. If it exists, it becomes the most
- recently used item. */
+ /**
+ * Look up an item in the cache. If it exists, it becomes the most
+ * recently used item.
+ */
std::optional<Value> get(const Key & key)
{
auto i = data.find(key);
if (i == data.end()) return {};
- /* Move this item to the back of the LRU list. */
+ /**
+ * Move this item to the back of the LRU list.
+ */
lru.erase(i->second.first.it);
auto j = lru.insert(lru.end(), i);
i->second.first.it = j;
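Usage of the cache is as simple as the interface suggests. A short sketch with capacity 2, assuming `lru-cache.hh` is on the include path:

```cpp
#include <cassert>
#include <string>
#include "lru-cache.hh"

int main()
{
    nix::LRUCache<std::string, int> cache(2);
    cache.upsert("a", 1);
    cache.upsert("b", 2);
    assert(cache.get("a"));        // "a" becomes the most recently used entry
    cache.upsert("c", 3);          // evicts "b", the least recently used entry
    assert(!cache.get("b"));
    assert(*cache.get("c") == 3);
}
```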
diff --git a/src/libutil/monitor-fd.hh b/src/libutil/monitor-fd.hh
index 9518cf8aa..86d0115fc 100644
--- a/src/libutil/monitor-fd.hh
+++ b/src/libutil/monitor-fd.hh
@@ -1,4 +1,5 @@
#pragma once
+///@file
#include <thread>
#include <atomic>
diff --git a/src/libutil/namespaces.hh b/src/libutil/namespaces.hh
index e82379b9c..0b7eeb66c 100644
--- a/src/libutil/namespaces.hh
+++ b/src/libutil/namespaces.hh
@@ -1,4 +1,5 @@
#pragma once
+///@file
namespace nix {
diff --git a/src/libutil/pool.hh b/src/libutil/pool.hh
index d49067bb9..6247b6125 100644
--- a/src/libutil/pool.hh
+++ b/src/libutil/pool.hh
@@ -1,4 +1,5 @@
#pragma once
+///@file
#include <functional>
#include <limits>
@@ -11,33 +12,37 @@
namespace nix {
-/* This template class implements a simple pool manager of resources
- of some type R, such as database connections. It is used as
- follows:
-
- class Connection { ... };
-
- Pool<Connection> pool;
-
- {
- auto conn(pool.get());
- conn->exec("select ...");
- }
-
- Here, the Connection object referenced by ‘conn’ is automatically
- returned to the pool when ‘conn’ goes out of scope.
-*/
-
+/**
+ * This template class implements a simple pool manager of resources
+ * of some type R, such as database connections. It is used as
+ * follows:
+ *
+ * class Connection { ... };
+ *
+ * Pool<Connection> pool;
+ *
+ * {
+ * auto conn(pool.get());
+ * conn->exec("select ...");
+ * }
+ *
+ * Here, the Connection object referenced by ‘conn’ is automatically
+ * returned to the pool when ‘conn’ goes out of scope.
+ */
template <class R>
class Pool
{
public:
- /* A function that produces new instances of R on demand. */
+ /**
+ * A function that produces new instances of R on demand.
+ */
typedef std::function<ref<R>()> Factory;
- /* A function that checks whether an instance of R is still
- usable. Unusable instances are removed from the pool. */
+ /**
+ * A function that checks whether an instance of R is still
+ * usable. Unusable instances are removed from the pool.
+ */
typedef std::function<bool(const ref<R> &)> Validator;
private:
diff --git a/src/libutil/ref.hh b/src/libutil/ref.hh
index 7d38b059c..af5f8304c 100644
--- a/src/libutil/ref.hh
+++ b/src/libutil/ref.hh
@@ -1,4 +1,5 @@
#pragma once
+///@file
#include <memory>
#include <exception>
@@ -6,8 +7,10 @@
namespace nix {
-/* A simple non-nullable reference-counted pointer. Actually a wrapper
- around std::shared_ptr that prevents null constructions. */
+/**
+ * A simple non-nullable reference-counted pointer. Actually a wrapper
+ * around std::shared_ptr that prevents null constructions.
+ */
template<typename T>
class ref
{
diff --git a/src/libutil/regex-combinators.hh b/src/libutil/regex-combinators.hh
index 0b997b25a..87d6aa678 100644
--- a/src/libutil/regex-combinators.hh
+++ b/src/libutil/regex-combinators.hh
@@ -1,4 +1,5 @@
#pragma once
+///@file
#include <string_view>
diff --git a/src/libutil/serialise.cc b/src/libutil/serialise.cc
index c653db9d0..7476e6f6c 100644
--- a/src/libutil/serialise.cc
+++ b/src/libutil/serialise.cc
@@ -415,7 +415,7 @@ Error readError(Source & source)
auto msg = readString(source);
ErrorInfo info {
.level = level,
- .msg = hintformat(std::move(format("%s") % msg)),
+ .msg = hintformat(fmt("%s", msg)),
};
auto havePos = readNum<size_t>(source);
assert(havePos == 0);
@@ -424,7 +424,7 @@ Error readError(Source & source)
havePos = readNum<size_t>(source);
assert(havePos == 0);
info.traces.push_back(Trace {
- .hint = hintformat(std::move(format("%s") % readString(source)))
+ .hint = hintformat(fmt("%s", readString(source)))
});
}
return Error(std::move(info));
diff --git a/src/libutil/serialise.hh b/src/libutil/serialise.hh
index 7da5b07fd..2cf527023 100644
--- a/src/libutil/serialise.hh
+++ b/src/libutil/serialise.hh
@@ -1,4 +1,5 @@
#pragma once
+///@file
#include <memory>
@@ -10,7 +11,9 @@ namespace boost::context { struct stack_context; }
namespace nix {
-/* Abstract destination of binary data. */
+/**
+ * Abstract destination of binary data.
+ */
struct Sink
{
virtual ~Sink() { }
@@ -18,7 +21,9 @@ struct Sink
virtual bool good() { return true; }
};
-/* Just throws away data. */
+/**
+ * Just throws away data.
+ */
struct NullSink : Sink
{
void operator () (std::string_view data) override
@@ -32,8 +37,10 @@ struct FinishSink : virtual Sink
};
-/* A buffered abstract sink. Warning: a BufferedSink should not be
- used from multiple threads concurrently. */
+/**
+ * A buffered abstract sink. Warning: a BufferedSink should not be
+ * used from multiple threads concurrently.
+ */
struct BufferedSink : virtual Sink
{
size_t bufSize, bufPos;
@@ -50,19 +57,25 @@ struct BufferedSink : virtual Sink
};
-/* Abstract source of binary data. */
+/**
+ * Abstract source of binary data.
+ */
struct Source
{
virtual ~Source() { }
- /* Store exactly ‘len’ bytes in the buffer pointed to by ‘data’.
- It blocks until all the requested data is available, or throws
- an error if it is not going to be available. */
+ /**
+ * Store exactly ‘len’ bytes in the buffer pointed to by ‘data’.
+ * It blocks until all the requested data is available, or throws
+ * an error if it is not going to be available.
+ */
void operator () (char * data, size_t len);
- /* Store up to ‘len’ in the buffer pointed to by ‘data’, and
- return the number of bytes stored. It blocks until at least
- one byte is available. */
+ /**
+ * Store up to ‘len’ in the buffer pointed to by ‘data’, and
+ * return the number of bytes stored. It blocks until at least
+ * one byte is available.
+ */
virtual size_t read(char * data, size_t len) = 0;
virtual bool good() { return true; }
@@ -73,8 +86,10 @@ struct Source
};
-/* A buffered abstract source. Warning: a BufferedSource should not be
- used from multiple threads concurrently. */
+/**
+ * A buffered abstract source. Warning: a BufferedSource should not be
+ * used from multiple threads concurrently.
+ */
struct BufferedSource : Source
{
size_t bufSize, bufPosIn, bufPosOut;
@@ -88,12 +103,16 @@ struct BufferedSource : Source
bool hasData();
protected:
- /* Underlying read call, to be overridden. */
+ /**
+ * Underlying read call, to be overridden.
+ */
virtual size_t readUnbuffered(char * data, size_t len) = 0;
};
-/* A sink that writes data to a file descriptor. */
+/**
+ * A sink that writes data to a file descriptor.
+ */
struct FdSink : BufferedSink
{
int fd;
@@ -123,7 +142,9 @@ private:
};
-/* A source that reads data from a file descriptor. */
+/**
+ * A source that reads data from a file descriptor.
+ */
struct FdSource : BufferedSource
{
int fd;
@@ -149,7 +170,9 @@ private:
};
-/* A sink that writes data to a string. */
+/**
+ * A sink that writes data to a string.
+ */
struct StringSink : Sink
{
std::string s;
@@ -163,7 +186,9 @@ struct StringSink : Sink
};
-/* A source that reads data from a string. */
+/**
+ * A source that reads data from a string.
+ */
struct StringSource : Source
{
std::string_view s;
@@ -173,7 +198,9 @@ struct StringSource : Source
};
-/* A sink that writes all incoming data to two other sinks. */
+/**
+ * A sink that writes all incoming data to two other sinks.
+ */
struct TeeSink : Sink
{
Sink & sink1, & sink2;
@@ -186,7 +213,9 @@ struct TeeSink : Sink
};
-/* Adapter class of a Source that saves all data read to a sink. */
+/**
+ * Adapter class of a Source that saves all data read to a sink.
+ */
struct TeeSource : Source
{
Source & orig;
@@ -201,7 +230,9 @@ struct TeeSource : Source
}
};
-/* A reader that consumes the original Source until 'size'. */
+/**
+ * A reader that consumes the original Source until 'size'.
+ */
struct SizedSource : Source
{
Source & orig;
@@ -219,7 +250,9 @@ struct SizedSource : Source
return n;
}
- /* Consume the original source until no remain data is left to consume. */
+ /**
+ * Consume the original source until no data remains to be consumed.
+ */
size_t drainAll()
{
std::vector<char> buf(8192);
@@ -232,7 +265,9 @@ struct SizedSource : Source
}
};
-/* A sink that that just counts the number of bytes given to it */
+/**
+ * A sink that just counts the number of bytes given to it.
+ */
struct LengthSink : Sink
{
uint64_t length = 0;
@@ -243,7 +278,9 @@ struct LengthSink : Sink
}
};
-/* Convert a function into a sink. */
+/**
+ * Convert a function into a sink.
+ */
struct LambdaSink : Sink
{
typedef std::function<void(std::string_view data)> lambda_t;
@@ -259,7 +296,9 @@ struct LambdaSink : Sink
};
-/* Convert a function into a source. */
+/**
+ * Convert a function into a source.
+ */
struct LambdaSource : Source
{
typedef std::function<size_t(char *, size_t)> lambda_t;
@@ -274,8 +313,10 @@ struct LambdaSource : Source
}
};
-/* Chain two sources together so after the first is exhausted, the second is
- used */
+/**
+ * Chain two sources together so that after the first is exhausted, the
+ * second is used.
+ */
struct ChainSource : Source
{
Source & source1, & source2;
@@ -289,8 +330,10 @@ struct ChainSource : Source
std::unique_ptr<FinishSink> sourceToSink(std::function<void(Source &)> fun);
-/* Convert a function that feeds data into a Sink into a Source. The
- Source executes the function as a coroutine. */
+/**
+ * Convert a function that feeds data into a Sink into a Source. The
+ * Source executes the function as a coroutine.
+ */
std::unique_ptr<Source> sinkToSource(
std::function<void(Sink &)> fun,
std::function<void()> eof = []() {
@@ -376,7 +419,9 @@ Source & operator >> (Source & in, bool & b)
Error readError(Source & source);
-/* An adapter that converts a std::basic_istream into a source. */
+/**
+ * An adapter that converts a std::basic_istream into a source.
+ */
struct StreamToSourceAdapter : Source
{
std::shared_ptr<std::basic_istream<char>> istream;
@@ -399,13 +444,14 @@ struct StreamToSourceAdapter : Source
};
-/* A source that reads a distinct format of concatenated chunks back into its
- logical form, in order to guarantee a known state to the original stream,
- even in the event of errors.
-
- Use with FramedSink, which also allows the logical stream to be terminated
- in the event of an exception.
-*/
+/**
+ * A source that reads a distinct format of concatenated chunks back into its
+ * logical form, in order to guarantee a known state to the original stream,
+ * even in the event of errors.
+ *
+ * Use with FramedSink, which also allows the logical stream to be terminated
+ * in the event of an exception.
+ */
struct FramedSource : Source
{
Source & from;
@@ -450,11 +496,12 @@ struct FramedSource : Source
}
};
-/* Write as chunks in the format expected by FramedSource.
-
- The exception_ptr reference can be used to terminate the stream when you
- detect that an error has occurred on the remote end.
-*/
+/**
+ * Write as chunks in the format expected by FramedSource.
+ *
+ * The exception_ptr reference can be used to terminate the stream when you
+ * detect that an error has occurred on the remote end.
+ */
struct FramedSink : nix::BufferedSink
{
BufferedSink & to;
@@ -487,17 +534,20 @@ struct FramedSink : nix::BufferedSink
};
};
-/* Stack allocation strategy for sinkToSource.
- Mutable to avoid a boehm gc dependency in libutil.
-
- boost::context doesn't provide a virtual class, so we define our own.
+/**
+ * Stack allocation strategy for sinkToSource.
+ * Mutable to avoid a boehm gc dependency in libutil.
+ *
+ * boost::context doesn't provide a virtual class, so we define our own.
*/
struct StackAllocator {
virtual boost::context::stack_context allocate() = 0;
virtual void deallocate(boost::context::stack_context sctx) = 0;
- /* The stack allocator to use in sinkToSource and potentially elsewhere.
- It is reassigned by the initGC() method in libexpr. */
+ /**
+ * The stack allocator to use in sinkToSource and potentially elsewhere.
+ * It is reassigned by the initGC() method in libexpr.
+ */
static StackAllocator *defaultAllocator;
};
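The sink and source types above compose directly. A minimal round trip through the string-backed pair, as a sketch that assumes `serialise.hh` is on the include path:

```cpp
#include <cassert>
#include <string>
#include "serialise.hh"

int main()
{
    nix::StringSink sink;
    sink("hello, ");                   // Sink::operator()(std::string_view)
    sink("world");

    nix::StringSource source(sink.s);
    std::string buf(12, '\0');
    source(buf.data(), buf.size());    // reads exactly this many bytes or throws
    assert(buf == "hello, world");
}
```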
diff --git a/src/libutil/split.hh b/src/libutil/split.hh
index 87a23b13e..3b9b2b83b 100644
--- a/src/libutil/split.hh
+++ b/src/libutil/split.hh
@@ -1,4 +1,5 @@
#pragma once
+///@file
#include <optional>
#include <string_view>
@@ -7,10 +8,12 @@
namespace nix {
-// If `separator` is found, we return the portion of the string before the
-// separator, and modify the string argument to contain only the part after the
-// separator. Otherwise, we return `std::nullopt`, and we leave the argument
-// string alone.
+/**
+ * If `separator` is found, we return the portion of the string before the
+ * separator, and modify the string argument to contain only the part after the
+ * separator. Otherwise, we return `std::nullopt`, and we leave the argument
+ * string alone.
+ */
static inline std::optional<std::string_view> splitPrefixTo(std::string_view & string, char separator) {
auto sepInstance = string.find(separator);
diff --git a/src/libutil/suggestions.hh b/src/libutil/suggestions.hh
index d54dd8e31..9abf5ee5f 100644
--- a/src/libutil/suggestions.hh
+++ b/src/libutil/suggestions.hh
@@ -1,4 +1,5 @@
#pragma once
+///@file
#include "comparator.hh"
#include "types.hh"
@@ -13,7 +14,8 @@ int levenshteinDistance(std::string_view first, std::string_view second);
*/
class Suggestion {
public:
- int distance; // The smaller the better
+ /// The smaller the better
+ int distance;
std::string suggestion;
std::string to_string() const;
@@ -43,7 +45,9 @@ public:
std::ostream & operator<<(std::ostream & str, const Suggestion &);
std::ostream & operator<<(std::ostream & str, const Suggestions &);
-// Either a value of type `T`, or some suggestions
+/**
+ * Either a value of type `T`, or some suggestions
+ */
template<typename T>
class OrSuggestions {
public:
diff --git a/src/libutil/sync.hh b/src/libutil/sync.hh
index e1d591d77..47e4512b1 100644
--- a/src/libutil/sync.hh
+++ b/src/libutil/sync.hh
@@ -1,4 +1,5 @@
#pragma once
+///@file
#include <cstdlib>
#include <mutex>
@@ -7,22 +8,22 @@
namespace nix {
-/* This template class ensures synchronized access to a value of type
- T. It is used as follows:
-
- struct Data { int x; ... };
-
- Sync<Data> data;
-
- {
- auto data_(data.lock());
- data_->x = 123;
- }
-
- Here, "data" is automatically unlocked when "data_" goes out of
- scope.
-*/
-
+/**
+ * This template class ensures synchronized access to a value of type
+ * T. It is used as follows:
+ *
+ * struct Data { int x; ... };
+ *
+ * Sync<Data> data;
+ *
+ * {
+ * auto data_(data.lock());
+ * data_->x = 123;
+ * }
+ *
+ * Here, "data" is automatically unlocked when "data_" goes out of
+ * scope.
+ */
template<class T, class M = std::mutex>
class Sync
{
diff --git a/src/libutil/tarfile.hh b/src/libutil/tarfile.hh
index 4d9141fd4..24afb710a 100644
--- a/src/libutil/tarfile.hh
+++ b/src/libutil/tarfile.hh
@@ -1,3 +1,6 @@
+#pragma once
+///@file
+
#include "serialise.hh"
#include <archive.h>
@@ -14,7 +17,7 @@ struct TarArchive {
TarArchive(const Path & path);
- // disable copy constructor
+ /// disable copy constructor
TarArchive(const TarArchive &) = delete;
void close();
diff --git a/src/libutil/tests/canon-path.cc b/src/libutil/tests/canon-path.cc
index c1c5adadf..fc94ccc3d 100644
--- a/src/libutil/tests/canon-path.cc
+++ b/src/libutil/tests/canon-path.cc
@@ -107,15 +107,13 @@ namespace nix {
}
TEST(CanonPath, within) {
- {
- ASSERT_TRUE(CanonPath("foo").isWithin(CanonPath("foo")));
- ASSERT_FALSE(CanonPath("foo").isWithin(CanonPath("bar")));
- ASSERT_FALSE(CanonPath("foo").isWithin(CanonPath("fo")));
- ASSERT_TRUE(CanonPath("foo/bar").isWithin(CanonPath("foo")));
- ASSERT_FALSE(CanonPath("foo").isWithin(CanonPath("foo/bar")));
- ASSERT_TRUE(CanonPath("/foo/bar/default.nix").isWithin(CanonPath("/")));
- ASSERT_TRUE(CanonPath("/").isWithin(CanonPath("/")));
- }
+ ASSERT_TRUE(CanonPath("foo").isWithin(CanonPath("foo")));
+ ASSERT_FALSE(CanonPath("foo").isWithin(CanonPath("bar")));
+ ASSERT_FALSE(CanonPath("foo").isWithin(CanonPath("fo")));
+ ASSERT_TRUE(CanonPath("foo/bar").isWithin(CanonPath("foo")));
+ ASSERT_FALSE(CanonPath("foo").isWithin(CanonPath("foo/bar")));
+ ASSERT_TRUE(CanonPath("/foo/bar/default.nix").isWithin(CanonPath("/")));
+ ASSERT_TRUE(CanonPath("/").isWithin(CanonPath("/")));
}
TEST(CanonPath, sort) {
@@ -127,29 +125,38 @@ namespace nix {
}
TEST(CanonPath, allowed) {
- {
- std::set<CanonPath> allowed {
- CanonPath("foo/bar"),
- CanonPath("foo!"),
- CanonPath("xyzzy"),
- CanonPath("a/b/c"),
- };
-
- ASSERT_TRUE (CanonPath("foo/bar").isAllowed(allowed));
- ASSERT_TRUE (CanonPath("foo/bar/bla").isAllowed(allowed));
- ASSERT_TRUE (CanonPath("foo").isAllowed(allowed));
- ASSERT_FALSE(CanonPath("bar").isAllowed(allowed));
- ASSERT_FALSE(CanonPath("bar/a").isAllowed(allowed));
- ASSERT_TRUE (CanonPath("a").isAllowed(allowed));
- ASSERT_TRUE (CanonPath("a/b").isAllowed(allowed));
- ASSERT_TRUE (CanonPath("a/b/c").isAllowed(allowed));
- ASSERT_TRUE (CanonPath("a/b/c/d").isAllowed(allowed));
- ASSERT_TRUE (CanonPath("a/b/c/d/e").isAllowed(allowed));
- ASSERT_FALSE(CanonPath("a/b/a").isAllowed(allowed));
- ASSERT_FALSE(CanonPath("a/b/d").isAllowed(allowed));
- ASSERT_FALSE(CanonPath("aaa").isAllowed(allowed));
- ASSERT_FALSE(CanonPath("zzz").isAllowed(allowed));
- ASSERT_TRUE (CanonPath("/").isAllowed(allowed));
- }
+ std::set<CanonPath> allowed {
+ CanonPath("foo/bar"),
+ CanonPath("foo!"),
+ CanonPath("xyzzy"),
+ CanonPath("a/b/c"),
+ };
+
+ ASSERT_TRUE (CanonPath("foo/bar").isAllowed(allowed));
+ ASSERT_TRUE (CanonPath("foo/bar/bla").isAllowed(allowed));
+ ASSERT_TRUE (CanonPath("foo").isAllowed(allowed));
+ ASSERT_FALSE(CanonPath("bar").isAllowed(allowed));
+ ASSERT_FALSE(CanonPath("bar/a").isAllowed(allowed));
+ ASSERT_TRUE (CanonPath("a").isAllowed(allowed));
+ ASSERT_TRUE (CanonPath("a/b").isAllowed(allowed));
+ ASSERT_TRUE (CanonPath("a/b/c").isAllowed(allowed));
+ ASSERT_TRUE (CanonPath("a/b/c/d").isAllowed(allowed));
+ ASSERT_TRUE (CanonPath("a/b/c/d/e").isAllowed(allowed));
+ ASSERT_FALSE(CanonPath("a/b/a").isAllowed(allowed));
+ ASSERT_FALSE(CanonPath("a/b/d").isAllowed(allowed));
+ ASSERT_FALSE(CanonPath("aaa").isAllowed(allowed));
+ ASSERT_FALSE(CanonPath("zzz").isAllowed(allowed));
+ ASSERT_TRUE (CanonPath("/").isAllowed(allowed));
+ }
+
+ TEST(CanonPath, makeRelative) {
+ CanonPath d("/foo/bar");
+ ASSERT_EQ(d.makeRelative(CanonPath("/foo/bar")), ".");
+ ASSERT_EQ(d.makeRelative(CanonPath("/foo")), "..");
+ ASSERT_EQ(d.makeRelative(CanonPath("/")), "../..");
+ ASSERT_EQ(d.makeRelative(CanonPath("/foo/bar/xyzzy")), "xyzzy");
+ ASSERT_EQ(d.makeRelative(CanonPath("/foo/bar/xyzzy/bla")), "xyzzy/bla");
+ ASSERT_EQ(d.makeRelative(CanonPath("/foo/xyzzy/bla")), "../xyzzy/bla");
+ ASSERT_EQ(d.makeRelative(CanonPath("/xyzzy/bla")), "../../xyzzy/bla");
}
}
diff --git a/src/libutil/tests/hash.hh b/src/libutil/tests/hash.hh
index 9e9650e6e..1f9fa59ae 100644
--- a/src/libutil/tests/hash.hh
+++ b/src/libutil/tests/hash.hh
@@ -1,4 +1,5 @@
#pragma once
+///@file
#include <rapidcheck/gen/Arbitrary.h>
diff --git a/src/libutil/thread-pool.hh b/src/libutil/thread-pool.hh
index b22e0d162..14b32279c 100644
--- a/src/libutil/thread-pool.hh
+++ b/src/libutil/thread-pool.hh
@@ -1,4 +1,5 @@
#pragma once
+///@file
#include "sync.hh"
#include "util.hh"
@@ -13,8 +14,10 @@ namespace nix {
MakeError(ThreadPoolShutDown, Error);
-/* A simple thread pool that executes a queue of work items
- (lambdas). */
+/**
+ * A simple thread pool that executes a queue of work items
+ * (lambdas).
+ */
class ThreadPool
{
public:
@@ -23,19 +26,30 @@ public:
~ThreadPool();
- // FIXME: use std::packaged_task?
+ /**
+ * An individual work item.
+ *
+ * \todo use std::packaged_task?
+ */
typedef std::function<void()> work_t;
- /* Enqueue a function to be executed by the thread pool. */
+ /**
+ * Enqueue a function to be executed by the thread pool.
+ */
void enqueue(const work_t & t);
- /* Execute work items until the queue is empty. Note that work
- items are allowed to add new items to the queue; this is
- handled correctly. Queue processing stops prematurely if any
- work item throws an exception. This exception is propagated to
- the calling thread. If multiple work items throw an exception
- concurrently, only one item is propagated; the others are
- printed on stderr and otherwise ignored. */
+ /**
+ * Execute work items until the queue is empty.
+ *
+ * \note Note that work items are allowed to add new items to the
+ * queue; this is handled correctly.
+ *
+ * Queue processing stops prematurely if any work item throws an
+ * exception. This exception is propagated to the calling thread. If
+ * multiple work items throw an exception concurrently, only one
+ * item is propagated; the others are printed on stderr and
+ * otherwise ignored.
+ */
void process();
private:
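A minimal usage sketch of the interface documented above (the work items are illustrative):

    ThreadPool pool;
    pool.enqueue([]() { /* work item 1 */ });
    pool.enqueue([]() { /* work item 2; may enqueue further items itself */ });
    pool.process();  // blocks until the queue is empty; rethrows one failure, if any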
@@ -62,9 +76,11 @@ private:
void shutdown();
};
-/* Process in parallel a set of items of type T that have a partial
- ordering between them. Thus, any item is only processed after all
- its dependencies have been processed. */
+/**
+ * Process in parallel a set of items of type T that have a partial
+ * ordering between them. Thus, any item is only processed after all
+ * its dependencies have been processed.
+ */
template<typename T>
void processGraph(
ThreadPool & pool,
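A sketch of a processGraph() call, assuming the remaining parameters (cut off above) are the node set, a getEdges function and a per-node callback, as in the existing header:

    ThreadPool pool;
    std::set<std::string> nodes {"lib", "app"};
    processGraph<std::string>(pool, nodes,
        /* getEdges */ [](const std::string & n) -> std::set<std::string> {
            return n == "app" ? std::set<std::string>{"lib"} : std::set<std::string>{};
        },
        /* processNode */ [](const std::string & n) {
            // "lib" is guaranteed to be processed before "app"
        });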
diff --git a/src/libutil/topo-sort.hh b/src/libutil/topo-sort.hh
index 7418be5e0..a52811fbf 100644
--- a/src/libutil/topo-sort.hh
+++ b/src/libutil/topo-sort.hh
@@ -1,4 +1,5 @@
#pragma once
+///@file
#include "error.hh"
diff --git a/src/libutil/types.hh b/src/libutil/types.hh
index 6bcbd7e1d..c86f52175 100644
--- a/src/libutil/types.hh
+++ b/src/libutil/types.hh
@@ -1,4 +1,5 @@
#pragma once
+///@file
#include "ref.hh"
@@ -17,7 +18,9 @@ typedef std::set<std::string> StringSet;
typedef std::map<std::string, std::string> StringMap;
typedef std::map<std::string, std::string> StringPairs;
-/* Paths are just strings. */
+/**
+ * Paths are just strings.
+ */
typedef std::string Path;
typedef std::string_view PathView;
typedef std::list<Path> Paths;
@@ -25,15 +28,19 @@ typedef std::set<Path> PathSet;
typedef std::vector<std::pair<std::string, std::string>> Headers;
-/* Helper class to run code at startup. */
+/**
+ * Helper class to run code at startup.
+ */
template<typename T>
struct OnStartup
{
OnStartup(T && t) { t(); }
};
-/* Wrap bools to prevent string literals (i.e. 'char *') from being
- cast to a bool in Attr. */
+/**
+ * Wrap bools to prevent string literals (i.e. 'char *') from being
+ * cast to a bool in Attr.
+ */
template<typename T>
struct Explicit {
T t;
@@ -45,21 +52,25 @@ struct Explicit {
};
-/* This wants to be a little bit like rust's Cow type.
- Some parts of the evaluator benefit greatly from being able to reuse
- existing allocations for strings, but have to be able to also use
- newly allocated storage for values.
-
- We do not define implicit conversions, even with ref qualifiers,
- since those can easily become ambiguous to the reader and can degrade
- into copying behaviour we want to avoid. */
+/**
+ * This wants to be a little bit like Rust's Cow type.
+ * Some parts of the evaluator benefit greatly from being able to reuse
+ * existing allocations for strings, but have to be able to also use
+ * newly allocated storage for values.
+ *
+ * We do not define implicit conversions, even with ref qualifiers,
+ * since those can easily become ambiguous to the reader and can degrade
+ * into copying behaviour we want to avoid.
+ */
class BackedStringView {
private:
std::variant<std::string, std::string_view> data;
- /* Needed to introduce a temporary since operator-> must return
- a pointer. Without this we'd need to store the view object
- even when we already own a string. */
+ /**
+ * Needed to introduce a temporary since operator-> must return
+ * a pointer. Without this we'd need to store the view object
+ * even when we already own a string.
+ */
class Ptr {
private:
std::string_view view;
@@ -77,8 +88,10 @@ public:
BackedStringView(const BackedStringView &) = delete;
BackedStringView & operator=(const BackedStringView &) = delete;
- /* We only want move operations defined since the sole purpose of
- this type is to avoid copies. */
+ /**
+ * We only want move operations defined since the sole purpose of
+ * this type is to avoid copies.
+ */
BackedStringView(BackedStringView && other) = default;
BackedStringView & operator=(BackedStringView && other) = default;
diff --git a/src/libutil/url-parts.hh b/src/libutil/url-parts.hh
index d5e6a2736..98162b0f7 100644
--- a/src/libutil/url-parts.hh
+++ b/src/libutil/url-parts.hh
@@ -1,4 +1,5 @@
#pragma once
+///@file
#include <string>
#include <regex>
@@ -22,21 +23,22 @@ const static std::string segmentRegex = "(?:" + pcharRegex + "*)";
const static std::string absPathRegex = "(?:(?:/" + segmentRegex + ")*/?)";
const static std::string pathRegex = "(?:" + segmentRegex + "(?:/" + segmentRegex + ")*/?)";
-// A Git ref (i.e. branch or tag name).
-const static std::string refRegexS = "[a-zA-Z0-9][a-zA-Z0-9_.\\/-]*"; // FIXME: check
+/// A Git ref (i.e. branch or tag name).
+/// \todo check that this is correct.
+const static std::string refRegexS = "[a-zA-Z0-9@][a-zA-Z0-9_.\\/@-]*";
extern std::regex refRegex;
-// Instead of defining what a good Git Ref is, we define what a bad Git Ref is
-// This is because of the definition of a ref in refs.c in https://github.com/git/git
-// See tests/fetchGitRefs.sh for the full definition
+/// Instead of defining what a good Git ref is, we define what a bad Git ref is.
+/// This is because of the definition of a ref in refs.c in https://github.com/git/git.
+/// See tests/fetchGitRefs.sh for the full definition.
const static std::string badGitRefRegexS = "//|^[./]|/\\.|\\.\\.|[[:cntrl:][:space:]:?^~\[]|\\\\|\\*|\\.lock$|\\.lock/|@\\{|[/.]$|^@$|^$";
extern std::regex badGitRefRegex;
-// A Git revision (a SHA-1 commit hash).
+/// A Git revision (a SHA-1 commit hash).
const static std::string revRegexS = "[0-9a-fA-F]{40}";
extern std::regex revRegex;
-// A ref or revision, or a ref followed by a revision.
+/// A ref or revision, or a ref followed by a revision.
const static std::string refAndOrRevRegex = "(?:(" + revRegexS + ")|(?:(" + refRegexS + ")(?:/(" + revRegexS + "))?))";
const static std::string flakeIdRegexS = "[a-zA-Z][a-zA-Z0-9_-]*";
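A small sketch of how these regex strings are typically used; the compiled refRegex and badGitRefRegex objects are the extern declarations above, and the ref name is illustrative:

    // e.g. check a candidate Git ref before interpolating it into a flakeref
    bool plausible =
        std::regex_match("release-2.13", refRegex)
        && !std::regex_search("release-2.13", badGitRefRegex);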
diff --git a/src/libutil/url.hh b/src/libutil/url.hh
index ddd673d65..d2413ec0e 100644
--- a/src/libutil/url.hh
+++ b/src/libutil/url.hh
@@ -1,4 +1,5 @@
#pragma once
+///@file
#include "error.hh"
@@ -7,7 +8,8 @@ namespace nix {
struct ParsedURL
{
std::string url;
- std::string base; // URL without query/fragment
+ /// URL without query/fragment
+ std::string base;
std::string scheme;
std::optional<std::string> authority;
std::string path;
@@ -28,7 +30,7 @@ std::map<std::string, std::string> decodeQuery(const std::string & query);
ParsedURL parseURL(const std::string & url);
-/*
+/**
 * Although that’s not really standardized anywhere, a number of tools
 * use a scheme of the form 'x+y' in URLs, where y is the “transport layer”
* scheme, and x is the “application layer” scheme.
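A minimal sketch of parseURL() applied to an application+transport scheme of the kind described above (the URL is illustrative, not from this patch):

    ParsedURL u = parseURL("git+https://example.org/repo.git?ref=main");
    // u.scheme == "git+https"
    // u.base   == "git+https://example.org/repo.git"  (no query/fragment)
    // u.query  == {{"ref", "main"}}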
diff --git a/src/libutil/util.cc b/src/libutil/util.cc
index 885bae69c..843a10eab 100644
--- a/src/libutil/util.cc
+++ b/src/libutil/util.cc
@@ -54,6 +54,11 @@ std::optional<std::string> getEnv(const std::string & key)
return std::string(value);
}
+std::optional<std::string> getEnvNonEmpty(const std::string & key) {
+ auto value = getEnv(key);
+ if (value == "") return {};
+ return value;
+}
std::map<std::string, std::string> getEnv()
{
@@ -523,7 +528,7 @@ void deletePath(const Path & path)
void deletePath(const Path & path, uint64_t & bytesFreed)
{
- //Activity act(*logger, lvlDebug, format("recursively deleting path '%1%'") % path);
+ //Activity act(*logger, lvlDebug, "recursively deleting path '%1%'", path);
bytesFreed = 0;
_deletePath(path, bytesFreed);
}
@@ -1065,12 +1070,14 @@ static pid_t doFork(bool allowVfork, std::function<void()> fun)
}
+#if __linux__
static int childEntry(void * arg)
{
auto main = (std::function<void()> *) arg;
(*main)();
return 1;
}
+#endif
pid_t startProcess(std::function<void()> fun, const ProcessOptions & options)
@@ -1394,14 +1401,14 @@ std::string statusToString(int status)
{
if (!WIFEXITED(status) || WEXITSTATUS(status) != 0) {
if (WIFEXITED(status))
- return (format("failed with exit code %1%") % WEXITSTATUS(status)).str();
+ return fmt("failed with exit code %1%", WEXITSTATUS(status));
else if (WIFSIGNALED(status)) {
int sig = WTERMSIG(status);
#if HAVE_STRSIGNAL
const char * description = strsignal(sig);
- return (format("failed due to signal %1% (%2%)") % sig % description).str();
+ return fmt("failed due to signal %1% (%2%)", sig, description);
#else
- return (format("failed due to signal %1%") % sig).str();
+ return fmt("failed due to signal %1%", sig);
#endif
}
else
@@ -1470,7 +1477,7 @@ bool shouldANSI()
&& !getEnv("NO_COLOR").has_value();
}
-std::string filterANSIEscapes(const std::string & s, bool filterAll, unsigned int width)
+std::string filterANSIEscapes(std::string_view s, bool filterAll, unsigned int width)
{
std::string t, e;
size_t w = 0;
@@ -1961,7 +1968,7 @@ std::string showBytes(uint64_t bytes)
// FIXME: move to libstore/build
-void commonChildInit(Pipe & logPipe)
+void commonChildInit()
{
logger = makeSimpleLogger();
@@ -1975,10 +1982,6 @@ void commonChildInit(Pipe & logPipe)
if (setsid() == -1)
throw SysError("creating a new session");
- /* Dup the write side of the logger pipe into stderr. */
- if (dup2(logPipe.writeSide.get(), STDERR_FILENO) == -1)
- throw SysError("cannot pipe standard error into log file");
-
/* Dup stderr to stdout. */
if (dup2(STDERR_FILENO, STDOUT_FILENO) == -1)
throw SysError("cannot dup stderr into stdout");
diff --git a/src/libutil/util.hh b/src/libutil/util.hh
index b5625ecef..6c2706cc1 100644
--- a/src/libutil/util.hh
+++ b/src/libutil/util.hh
@@ -1,4 +1,5 @@
#pragma once
+///@file
#include "types.hh"
#include "error.hh"
@@ -39,6 +40,10 @@ extern const std::string nativeSystem;
/* Return an environment variable. */
std::optional<std::string> getEnv(const std::string & key);
+/* Return a non-empty environment variable. Returns std::nullopt if the
+   variable is unset or set to "". */
+std::optional<std::string> getEnvNonEmpty(const std::string & key);
+
/* Get the entire environment. */
std::map<std::string, std::string> getEnv();
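A small sketch of the difference from plain getEnv(), assuming FOO is exported but set to the empty string:

    // export FOO=""
    auto a = getEnv("FOO");                     // std::optional<std::string>("")
    auto b = getEnvNonEmpty("FOO");             // std::nullopt
    auto c = getEnvNonEmpty("PROBABLY_UNSET");  // also std::nullopt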
@@ -446,7 +451,6 @@ template<class C> Strings quoteStrings(const C & c)
return res;
}
-
/* Remove trailing whitespace from a string. FIXME: return
std::string_view. */
std::string chomp(std::string_view s);
@@ -569,7 +573,7 @@ bool shouldANSI();
some escape sequences (such as colour setting) are copied but not
included in the character count. Also, tabs are expanded to
spaces. */
-std::string filterANSIEscapes(const std::string & s,
+std::string filterANSIEscapes(std::string_view s,
bool filterAll = false,
unsigned int width = std::numeric_limits<unsigned int>::max());
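A minimal sketch of the documented behaviour, using an illustrative coloured string:

    std::string s = filterANSIEscapes("\x1b[31mhello world\x1b[0m", false, 5);
    // colour sequences are copied through but do not count towards the
    // 5-column limit, so the visible text is truncated to "hello"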
@@ -700,7 +704,7 @@ typedef std::function<bool(const Path & path)> PathFilter;
extern PathFilter defaultPathFilter;
/* Common initialisation performed in child processes. */
-void commonChildInit(Pipe & logPipe);
+void commonChildInit();
/* Create a Unix domain socket. */
AutoCloseFD createUnixDomainSocket();
diff --git a/src/libutil/xml-writer.hh b/src/libutil/xml-writer.hh
index 4c91adee6..74f53b7ca 100644
--- a/src/libutil/xml-writer.hh
+++ b/src/libutil/xml-writer.hh
@@ -1,4 +1,5 @@
#pragma once
+///@file
#include <iostream>
#include <string>
diff --git a/src/nix-build/nix-build.cc b/src/nix-build/nix-build.cc
index da76c2ace..bc7e7eb18 100644
--- a/src/nix-build/nix-build.cc
+++ b/src/nix-build/nix-build.cc
@@ -219,9 +219,9 @@ static void main_nix_build(int argc, char * * argv)
// read the shebang to understand which packages to read from. Since
// this is handled via nix-shell -p, we wrap our ruby script execution
// in ruby -e 'load' which ignores the shebangs.
- envCommand = (format("exec %1% %2% -e 'load(ARGV.shift)' -- %3% %4%") % execArgs % interpreter % shellEscape(script) % joined.str()).str();
+ envCommand = fmt("exec %1% %2% -e 'load(ARGV.shift)' -- %3% %4%", execArgs, interpreter, shellEscape(script), joined.str());
} else {
- envCommand = (format("exec %1% %2% %3% %4%") % execArgs % interpreter % shellEscape(script) % joined.str()).str();
+ envCommand = fmt("exec %1% %2% %3% %4%", execArgs, interpreter, shellEscape(script), joined.str());
}
}
@@ -440,7 +440,7 @@ static void main_nix_build(int argc, char * * argv)
shell = store->printStorePath(shellDrvOutputs.at("out").value()) + "/bin/bash";
}
- if (settings.isExperimentalFeatureEnabled(Xp::CaDerivations)) {
+ if (experimentalFeatureSettings.isEnabled(Xp::CaDerivations)) {
auto resolvedDrv = drv.tryResolve(*store);
assert(resolvedDrv && "Successfully resolved the derivation");
drv = *resolvedDrv;
diff --git a/src/nix-channel/nix-channel.cc b/src/nix-channel/nix-channel.cc
index 338a7d18e..740737ffe 100755
--- a/src/nix-channel/nix-channel.cc
+++ b/src/nix-channel/nix-channel.cc
@@ -168,7 +168,8 @@ static int main_nix_channel(int argc, char ** argv)
nixDefExpr = settings.useXDGBaseDirectories ? createNixStateDir() + "/defexpr" : home + "/.nix-defexpr";
// Figure out the name of the channels profile.
- profile = profilesDir() + "/channels";
+ profile = profilesDir() + "/channels";
+ createDirs(dirOf(profile));
enum {
cNone,
diff --git a/src/nix-collect-garbage/nix-collect-garbage.cc b/src/nix-collect-garbage/nix-collect-garbage.cc
index e413faffe..3cc57af4e 100644
--- a/src/nix-collect-garbage/nix-collect-garbage.cc
+++ b/src/nix-collect-garbage/nix-collect-garbage.cc
@@ -40,7 +40,7 @@ void removeOldGenerations(std::string dir)
throw;
}
if (link.find("link") != std::string::npos) {
- printInfo(format("removing old generations of profile %1%") % path);
+ printInfo("removing old generations of profile %s", path);
if (deleteOlderThan != "")
deleteGenerationsOlderThan(path, deleteOlderThan, dryRun);
else
@@ -77,8 +77,7 @@ static int main_nix_collect_garbage(int argc, char * * argv)
return true;
});
- auto profilesDir = settings.nixStateDir + "/profiles";
- if (removeOld) removeOldGenerations(profilesDir);
+ if (removeOld) removeOldGenerations(profilesDir());
// Run the actual garbage collector.
if (!dryRun) {
diff --git a/src/nix-copy-closure/nix-copy-closure.cc b/src/nix-copy-closure/nix-copy-closure.cc
index 841d50fd3..7f2bb93b6 100755
--- a/src/nix-copy-closure/nix-copy-closure.cc
+++ b/src/nix-copy-closure/nix-copy-closure.cc
@@ -22,7 +22,7 @@ static int main_nix_copy_closure(int argc, char ** argv)
printVersion("nix-copy-closure");
else if (*arg == "--gzip" || *arg == "--bzip2" || *arg == "--xz") {
if (*arg != "--gzip")
- printMsg(lvlError, format("Warning: '%1%' is not implemented, falling back to gzip") % *arg);
+ warn("'%1%' is not implemented, falling back to gzip", *arg);
gzip = true;
} else if (*arg == "--from")
toMode = false;
diff --git a/src/nix-env/nix-env.cc b/src/nix-env/nix-env.cc
index 0daf374de..f076ffdb0 100644
--- a/src/nix-env/nix-env.cc
+++ b/src/nix-env/nix-env.cc
@@ -500,7 +500,7 @@ static bool keep(DrvInfo & drv)
static void installDerivations(Globals & globals,
const Strings & args, const Path & profile)
{
- debug(format("installing derivations"));
+ debug("installing derivations");
/* Get the set of user environment elements to be installed. */
DrvInfos newElems, newElemsTmp;
@@ -579,7 +579,7 @@ typedef enum { utLt, utLeq, utEq, utAlways } UpgradeType;
static void upgradeDerivations(Globals & globals,
const Strings & args, UpgradeType upgradeType)
{
- debug(format("upgrading derivations"));
+ debug("upgrading derivations");
/* Upgrade works as follows: we take all currently installed
derivations, and for any derivation matching any selector, look
@@ -768,7 +768,7 @@ static void opSet(Globals & globals, Strings opFlags, Strings opArgs)
if (globals.dryRun) return;
globals.state->store->buildPaths(paths, globals.state->repair ? bmRepair : bmNormal);
- debug(format("switching to new user environment"));
+ debug("switching to new user environment");
Path generation = createGeneration(
ref<LocalFSStore>(store2),
globals.profile,
@@ -1093,7 +1093,7 @@ static void opQuery(Globals & globals, Strings opFlags, Strings opArgs)
try {
if (i.hasFailed()) continue;
- //Activity act(*logger, lvlDebug, format("outputting query result '%1%'") % i.attrPath);
+ //Activity act(*logger, lvlDebug, "outputting query result '%1%'", i.attrPath);
if (globals.prebuiltOnly &&
!validPaths.count(i.queryOutPath()) &&
@@ -1229,11 +1229,11 @@ static void opQuery(Globals & globals, Strings opFlags, Strings opArgs)
xml.writeEmptyElement("meta", attrs2);
} else if (v->type() == nInt) {
attrs2["type"] = "int";
- attrs2["value"] = (format("%1%") % v->integer).str();
+ attrs2["value"] = fmt("%1%", v->integer);
xml.writeEmptyElement("meta", attrs2);
} else if (v->type() == nFloat) {
attrs2["type"] = "float";
- attrs2["value"] = (format("%1%") % v->fpoint).str();
+ attrs2["value"] = fmt("%1%", v->fpoint);
xml.writeEmptyElement("meta", attrs2);
} else if (v->type() == nBool) {
attrs2["type"] = "bool";
@@ -1337,11 +1337,11 @@ static void opListGenerations(Globals & globals, Strings opFlags, Strings opArgs
for (auto & i : gens) {
tm t;
if (!localtime_r(&i.creationTime, &t)) throw Error("cannot convert time");
- cout << format("%|4| %|4|-%|02|-%|02| %|02|:%|02|:%|02| %||\n")
- % i.number
- % (t.tm_year + 1900) % (t.tm_mon + 1) % t.tm_mday
- % t.tm_hour % t.tm_min % t.tm_sec
- % (i.number == curGen ? "(current)" : "");
+ logger->cout("%|4| %|4|-%|02|-%|02| %|02|:%|02|:%|02| %||",
+ i.number,
+ t.tm_year + 1900, t.tm_mon + 1, t.tm_mday,
+ t.tm_hour, t.tm_min, t.tm_sec,
+ i.number == curGen ? "(current)" : "");
}
}
@@ -1387,6 +1387,8 @@ static int main_nix_env(int argc, char * * argv)
{
Strings opFlags, opArgs;
Operation op = 0;
+ std::string opName;
+ bool showHelp = false;
RepairFlag repair = NoRepair;
std::string file;
@@ -1403,11 +1405,11 @@ static int main_nix_env(int argc, char * * argv)
try {
createDirs(globals.instSource.nixExprPath);
replaceSymlink(
- fmt("%s/profiles/per-user/%s/channels", settings.nixStateDir, getUserName()),
+ defaultChannelsDir(),
globals.instSource.nixExprPath + "/channels");
if (getuid() != 0)
replaceSymlink(
- fmt("%s/profiles/per-user/root/channels", settings.nixStateDir),
+ rootChannelsDir(),
globals.instSource.nixExprPath + "/channels_root");
} catch (Error &) { }
}
@@ -1426,37 +1428,59 @@ static int main_nix_env(int argc, char * * argv)
Operation oldOp = op;
if (*arg == "--help")
- showManPage("nix-env");
+ showHelp = true;
else if (*arg == "--version")
op = opVersion;
- else if (*arg == "--install" || *arg == "-i")
+ else if (*arg == "--install" || *arg == "-i") {
op = opInstall;
+ opName = "-install";
+ }
else if (*arg == "--force-name") // undocumented flag for nix-install-package
globals.forceName = getArg(*arg, arg, end);
- else if (*arg == "--uninstall" || *arg == "-e")
+ else if (*arg == "--uninstall" || *arg == "-e") {
op = opUninstall;
- else if (*arg == "--upgrade" || *arg == "-u")
+ opName = "-uninstall";
+ }
+ else if (*arg == "--upgrade" || *arg == "-u") {
op = opUpgrade;
- else if (*arg == "--set-flag")
+ opName = "-upgrade";
+ }
+ else if (*arg == "--set-flag") {
op = opSetFlag;
- else if (*arg == "--set")
+ opName = arg->substr(1);
+ }
+ else if (*arg == "--set") {
op = opSet;
- else if (*arg == "--query" || *arg == "-q")
+ opName = arg->substr(1);
+ }
+ else if (*arg == "--query" || *arg == "-q") {
op = opQuery;
+ opName = "-query";
+ }
else if (*arg == "--profile" || *arg == "-p")
globals.profile = absPath(getArg(*arg, arg, end));
else if (*arg == "--file" || *arg == "-f")
file = getArg(*arg, arg, end);
- else if (*arg == "--switch-profile" || *arg == "-S")
+ else if (*arg == "--switch-profile" || *arg == "-S") {
op = opSwitchProfile;
- else if (*arg == "--switch-generation" || *arg == "-G")
+ opName = "-switch-profile";
+ }
+ else if (*arg == "--switch-generation" || *arg == "-G") {
op = opSwitchGeneration;
- else if (*arg == "--rollback")
+ opName = "-switch-generation";
+ }
+ else if (*arg == "--rollback") {
op = opRollback;
- else if (*arg == "--list-generations")
+ opName = arg->substr(1);
+ }
+ else if (*arg == "--list-generations") {
op = opListGenerations;
- else if (*arg == "--delete-generations")
+ opName = arg->substr(1);
+ }
+ else if (*arg == "--delete-generations") {
op = opDeleteGenerations;
+ opName = arg->substr(1);
+ }
else if (*arg == "--dry-run") {
printInfo("(dry run; not doing anything)");
globals.dryRun = true;
@@ -1485,6 +1509,7 @@ static int main_nix_env(int argc, char * * argv)
myArgs.parseCmdline(argvToStrings(argc, argv));
+ if (showHelp) showManPage("nix-env" + opName);
if (!op) throw UsageError("no operation specified");
auto store = openStore();
diff --git a/src/nix-env/user-env.cc b/src/nix-env/user-env.cc
index cad7f9c88..745e9e174 100644
--- a/src/nix-env/user-env.cc
+++ b/src/nix-env/user-env.cc
@@ -41,7 +41,7 @@ bool createUserEnv(EvalState & state, DrvInfos & elems,
if (auto drvPath = i.queryDrvPath())
drvsToBuild.push_back({*drvPath});
- debug(format("building user environment dependencies"));
+ debug("building user environment dependencies");
state.store->buildPaths(
toDerivedPaths(drvsToBuild),
state.repair ? bmRepair : bmNormal);
@@ -159,7 +159,7 @@ bool createUserEnv(EvalState & state, DrvInfos & elems,
return false;
}
- debug(format("switching to new user environment"));
+ debug("switching to new user environment");
Path generation = createGeneration(ref<LocalFSStore>(store2), profile, topLevelOut);
switchLink(profile, generation);
}
diff --git a/src/nix-env/user-env.hh b/src/nix-env/user-env.hh
index 10646f713..af45d2d85 100644
--- a/src/nix-env/user-env.hh
+++ b/src/nix-env/user-env.hh
@@ -1,4 +1,5 @@
#pragma once
+///@file
#include "get-drvs.hh"
diff --git a/src/nix-store/dotgraph.hh b/src/nix-store/dotgraph.hh
index 73b8d06b9..4fd944080 100644
--- a/src/nix-store/dotgraph.hh
+++ b/src/nix-store/dotgraph.hh
@@ -1,4 +1,5 @@
#pragma once
+///@file
#include "store-api.hh"
diff --git a/src/nix-store/graphml.cc b/src/nix-store/graphml.cc
index 425d61e53..439557658 100644
--- a/src/nix-store/graphml.cc
+++ b/src/nix-store/graphml.cc
@@ -57,7 +57,7 @@ void printGraphML(ref<Store> store, StorePathSet && roots)
<< "<graphml xmlns='http://graphml.graphdrawing.org/xmlns'\n"
<< " xmlns:xsi='http://www.w3.org/2001/XMLSchema-instance'\n"
<< " xsi:schemaLocation='http://graphml.graphdrawing.org/xmlns/1.0/graphml.xsd'>\n"
- << "<key id='narSize' for='node' attr.name='narSize' attr.type='int'/>"
+ << "<key id='narSize' for='node' attr.name='narSize' attr.type='long'/>"
<< "<key id='name' for='node' attr.name='name' attr.type='string'/>"
<< "<key id='type' for='node' attr.name='type' attr.type='string'/>"
<< "<graph id='G' edgedefault='directed'>\n";
diff --git a/src/nix-store/graphml.hh b/src/nix-store/graphml.hh
index 78be8a367..bd3a4a37c 100644
--- a/src/nix-store/graphml.hh
+++ b/src/nix-store/graphml.hh
@@ -1,4 +1,5 @@
#pragma once
+///@file
#include "store-api.hh"
diff --git a/src/nix-store/nix-store.cc b/src/nix-store/nix-store.cc
index 3bbefedbe..7035e6a7b 100644
--- a/src/nix-store/nix-store.cc
+++ b/src/nix-store/nix-store.cc
@@ -72,11 +72,13 @@ static PathSet realisePath(StorePathWithOutputs path, bool build = true)
Derivation drv = store->derivationFromPath(path.path);
rootNr++;
+ /* FIXME: Encode this empty special case explicitly in the type. */
if (path.outputs.empty())
for (auto & i : drv.outputs) path.outputs.insert(i.first);
PathSet outputs;
for (auto & j : path.outputs) {
+ /* Match outputs of a store path with outputs of the derivation that produces it. */
DerivationOutputs::iterator i = drv.outputs.find(j);
if (i == drv.outputs.end())
throw Error("derivation '%s' does not have an output named '%s'",
@@ -141,6 +143,7 @@ static void opRealise(Strings opFlags, Strings opArgs)
toDerivedPaths(paths),
willBuild, willSubstitute, unknown, downloadSize, narSize);
+ /* Filter out unknown paths from `paths`. */
if (ignoreUnknown) {
std::vector<StorePathWithOutputs> paths2;
for (auto & i : paths)
@@ -274,17 +277,17 @@ static void printTree(const StorePath & path,
static void opQuery(Strings opFlags, Strings opArgs)
{
enum QueryType
- { qDefault, qOutputs, qRequisites, qReferences, qReferrers
+ { qOutputs, qRequisites, qReferences, qReferrers
, qReferrersClosure, qDeriver, qBinding, qHash, qSize
, qTree, qGraph, qGraphML, qResolve, qRoots };
- QueryType query = qDefault;
+ std::optional<QueryType> query;
bool useOutput = false;
bool includeOutputs = false;
bool forceRealise = false;
std::string bindingName;
for (auto & i : opFlags) {
- QueryType prev = query;
+ std::optional<QueryType> prev = query;
if (i == "--outputs") query = qOutputs;
else if (i == "--requisites" || i == "-R") query = qRequisites;
else if (i == "--references") query = qReferences;
@@ -309,15 +312,15 @@ static void opQuery(Strings opFlags, Strings opArgs)
else if (i == "--force-realise" || i == "--force-realize" || i == "-f") forceRealise = true;
else if (i == "--include-outputs") includeOutputs = true;
else throw UsageError("unknown flag '%1%'", i);
- if (prev != qDefault && prev != query)
+ if (prev && prev != query)
throw UsageError("query type '%1%' conflicts with earlier flag", i);
}
- if (query == qDefault) query = qOutputs;
+ if (!query) query = qOutputs;
RunPager pager;
- switch (query) {
+ switch (*query) {
case qOutputs: {
for (auto & i : opArgs) {
@@ -457,7 +460,7 @@ static void opPrintEnv(Strings opFlags, Strings opArgs)
/* Print each environment variable in the derivation in a format
* that can be sourced by the shell. */
for (auto & i : drv.env)
- cout << format("export %1%; %1%=%2%\n") % i.first % shellEscape(i.second);
+ logger->cout("export %1%; %1%=%2%\n", i.first, shellEscape(i.second));
/* Also output the arguments. This doesn't preserve whitespace in
arguments. */
@@ -1020,64 +1023,109 @@ static int main_nix_store(int argc, char * * argv)
{
Strings opFlags, opArgs;
Operation op = 0;
+ bool readFromStdIn = false;
+ std::string opName;
+ bool showHelp = false;
parseCmdLine(argc, argv, [&](Strings::iterator & arg, const Strings::iterator & end) {
Operation oldOp = op;
if (*arg == "--help")
- showManPage("nix-store");
+ showHelp = true;
else if (*arg == "--version")
op = opVersion;
- else if (*arg == "--realise" || *arg == "--realize" || *arg == "-r")
+ else if (*arg == "--realise" || *arg == "--realize" || *arg == "-r") {
op = opRealise;
- else if (*arg == "--add" || *arg == "-A")
+ opName = "-realise";
+ }
+ else if (*arg == "--add" || *arg == "-A"){
op = opAdd;
- else if (*arg == "--add-fixed")
+ opName = "-add";
+ }
+ else if (*arg == "--add-fixed") {
op = opAddFixed;
+ opName = arg->substr(1);
+ }
else if (*arg == "--print-fixed-path")
op = opPrintFixedPath;
- else if (*arg == "--delete")
+ else if (*arg == "--delete") {
op = opDelete;
- else if (*arg == "--query" || *arg == "-q")
+ opName = arg->substr(1);
+ }
+ else if (*arg == "--query" || *arg == "-q") {
op = opQuery;
- else if (*arg == "--print-env")
+ opName = "-query";
+ }
+ else if (*arg == "--print-env") {
op = opPrintEnv;
- else if (*arg == "--read-log" || *arg == "-l")
+ opName = arg->substr(1);
+ }
+ else if (*arg == "--read-log" || *arg == "-l") {
op = opReadLog;
- else if (*arg == "--dump-db")
+ opName = "-read-log";
+ }
+ else if (*arg == "--dump-db") {
op = opDumpDB;
- else if (*arg == "--load-db")
+ opName = arg->substr(1);
+ }
+ else if (*arg == "--load-db") {
op = opLoadDB;
+ opName = arg->substr(1);
+ }
else if (*arg == "--register-validity")
op = opRegisterValidity;
else if (*arg == "--check-validity")
op = opCheckValidity;
- else if (*arg == "--gc")
+ else if (*arg == "--gc") {
op = opGC;
- else if (*arg == "--dump")
+ opName = arg->substr(1);
+ }
+ else if (*arg == "--dump") {
op = opDump;
- else if (*arg == "--restore")
+ opName = arg->substr(1);
+ }
+ else if (*arg == "--restore") {
op = opRestore;
- else if (*arg == "--export")
+ opName = arg->substr(1);
+ }
+ else if (*arg == "--export") {
op = opExport;
- else if (*arg == "--import")
+ opName = arg->substr(1);
+ }
+ else if (*arg == "--import") {
op = opImport;
+ opName = arg->substr(1);
+ }
else if (*arg == "--init")
op = opInit;
- else if (*arg == "--verify")
+ else if (*arg == "--verify") {
op = opVerify;
- else if (*arg == "--verify-path")
+ opName = arg->substr(1);
+ }
+ else if (*arg == "--verify-path") {
op = opVerifyPath;
- else if (*arg == "--repair-path")
+ opName = arg->substr(1);
+ }
+ else if (*arg == "--repair-path") {
op = opRepairPath;
- else if (*arg == "--optimise" || *arg == "--optimize")
+ opName = arg->substr(1);
+ }
+ else if (*arg == "--optimise" || *arg == "--optimize") {
op = opOptimise;
- else if (*arg == "--serve")
+ opName = "-optimise";
+ }
+ else if (*arg == "--serve") {
op = opServe;
- else if (*arg == "--generate-binary-cache-key")
+ opName = arg->substr(1);
+ }
+ else if (*arg == "--generate-binary-cache-key") {
op = opGenerateBinaryCacheKey;
+ opName = arg->substr(1);
+ }
else if (*arg == "--add-root")
gcRoot = absPath(getArg(*arg, arg, end));
+ else if (*arg == "--stdin" && !isatty(STDIN_FILENO))
+ readFromStdIn = true;
else if (*arg == "--indirect")
;
else if (*arg == "--no-output")
@@ -1090,12 +1138,20 @@ static int main_nix_store(int argc, char * * argv)
else
opArgs.push_back(*arg);
+ if (readFromStdIn && op != opImport && op != opRestore && op != opServe) {
+ std::string word;
+ while (std::cin >> word) {
+ opArgs.emplace_back(std::move(word));
+ };
+ }
+
if (oldOp && oldOp != op)
throw UsageError("only one operation may be specified");
return true;
});
+ if (showHelp) showManPage("nix-store" + opName);
if (!op) throw UsageError("no operation specified");
if (op != opDump && op != opRestore) /* !!! hack */
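A usage sketch (shell) of the new `--stdin` flag handled above; the file name is illustrative:

    # pass store paths on standard input instead of the command line
    nix-store --query --references --stdin < paths.txt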
diff --git a/src/nix/app.cc b/src/nix/app.cc
index 5cd65136f..fd4569bb4 100644
--- a/src/nix/app.cc
+++ b/src/nix/app.cc
@@ -1,5 +1,6 @@
#include "installables.hh"
#include "installable-derived-path.hh"
+#include "installable-value.hh"
#include "store-api.hh"
#include "eval-inline.hh"
#include "eval-cache.hh"
@@ -40,7 +41,7 @@ std::string resolveString(
return rewriteStrings(toResolve, rewrites);
}
-UnresolvedApp Installable::toApp(EvalState & state)
+UnresolvedApp InstallableValue::toApp(EvalState & state)
{
auto cursor = getCursor(state);
auto attrPath = cursor->getAttrPath();
@@ -119,11 +120,11 @@ App UnresolvedApp::resolve(ref<Store> evalStore, ref<Store> store)
{
auto res = unresolved;
- std::vector<std::shared_ptr<Installable>> installableContext;
+ Installables installableContext;
for (auto & ctxElt : unresolved.context)
installableContext.push_back(
- std::make_shared<InstallableDerivedPath>(store, DerivedPath { ctxElt }));
+ make_ref<InstallableDerivedPath>(store, DerivedPath { ctxElt }));
auto builtContext = Installable::build(evalStore, store, Realise::Outputs, installableContext);
res.program = resolveString(*store, unresolved.program, builtContext);
diff --git a/src/nix/build.cc b/src/nix/build.cc
index 12b22d999..4e133e288 100644
--- a/src/nix/build.cc
+++ b/src/nix/build.cc
@@ -1,4 +1,3 @@
-#include "eval.hh"
#include "command.hh"
#include "common-args.hh"
#include "shared.hh"
@@ -41,6 +40,29 @@ nlohmann::json builtPathsWithResultToJSON(const std::vector<BuiltPathWithResult>
return res;
}
+// TODO deduplicate with other code also setting such out links.
+static void createOutLinks(const Path& outLink, const std::vector<BuiltPathWithResult>& buildables, LocalFSStore& store2)
+{
+ for (const auto & [_i, buildable] : enumerate(buildables)) {
+ auto i = _i;
+ std::visit(overloaded {
+ [&](const BuiltPath::Opaque & bo) {
+ std::string symlink = outLink;
+ if (i) symlink += fmt("-%d", i);
+ store2.addPermRoot(bo.path, absPath(symlink));
+ },
+ [&](const BuiltPath::Built & bfd) {
+ for (auto & output : bfd.outputs) {
+ std::string symlink = outLink;
+ if (i) symlink += fmt("-%d", i);
+ if (output.first != "out") symlink += fmt("-%s", output.first);
+ store2.addPermRoot(output.second, absPath(symlink));
+ }
+ },
+ }, buildable.path.raw());
+ }
+}
+
struct CmdBuild : InstallablesCommand, MixDryRun, MixJSON, MixProfile
{
Path outLink = "result";
@@ -89,7 +111,7 @@ struct CmdBuild : InstallablesCommand, MixDryRun, MixJSON, MixProfile
;
}
- void run(ref<Store> store) override
+ void run(ref<Store> store, Installables && installables) override
{
if (dryRun) {
std::vector<DerivedPath> pathsToBuild;
@@ -115,35 +137,18 @@ struct CmdBuild : InstallablesCommand, MixDryRun, MixJSON, MixProfile
if (outLink != "")
if (auto store2 = store.dynamic_pointer_cast<LocalFSStore>())
- for (const auto & [_i, buildable] : enumerate(buildables)) {
- auto i = _i;
- std::visit(overloaded {
- [&](const BuiltPath::Opaque & bo) {
- std::string symlink = outLink;
- if (i) symlink += fmt("-%d", i);
- store2->addPermRoot(bo.path, absPath(symlink));
- },
- [&](const BuiltPath::Built & bfd) {
- for (auto & output : bfd.outputs) {
- std::string symlink = outLink;
- if (i) symlink += fmt("-%d", i);
- if (output.first != "out") symlink += fmt("-%s", output.first);
- store2->addPermRoot(output.second, absPath(symlink));
- }
- },
- }, buildable.path.raw());
- }
+ createOutLinks(outLink, buildables, *store2);
if (printOutputPaths) {
stopProgressBar();
for (auto & buildable : buildables) {
std::visit(overloaded {
[&](const BuiltPath::Opaque & bo) {
- std::cout << store->printStorePath(bo.path) << std::endl;
+ logger->cout(store->printStorePath(bo.path));
},
[&](const BuiltPath::Built & bfd) {
for (auto & output : bfd.outputs) {
- std::cout << store->printStorePath(output.second) << std::endl;
+ logger->cout(store->printStorePath(output.second));
}
},
}, buildable.path.raw());
diff --git a/src/nix/build.md b/src/nix/build.md
index 6a79f308c..ee414dc86 100644
--- a/src/nix/build.md
+++ b/src/nix/build.md
@@ -82,7 +82,7 @@ R""(
# Description
-`nix build` builds the specified *installables*. Installables that
+`nix build` builds the specified *installables*. [Installables](./nix.md#installables) that
resolve to derivations are built (or substituted if possible). Store
path installables are substituted.
diff --git a/src/nix/bundle.cc b/src/nix/bundle.cc
index dcf9a6f2d..57c355f0c 100644
--- a/src/nix/bundle.cc
+++ b/src/nix/bundle.cc
@@ -1,14 +1,15 @@
-#include "command.hh"
#include "installable-flake.hh"
+#include "command-installable-value.hh"
#include "common-args.hh"
#include "shared.hh"
#include "store-api.hh"
#include "local-fs-store.hh"
#include "fs-accessor.hh"
+#include "eval-inline.hh"
using namespace nix;
-struct CmdBundle : InstallableCommand
+struct CmdBundle : InstallableValueCommand
{
std::string bundler = "github:NixOS/bundlers";
std::optional<Path> outLink;
@@ -70,7 +71,7 @@ struct CmdBundle : InstallableCommand
return res;
}
- void run(ref<Store> store) override
+ void run(ref<Store> store, ref<InstallableValue> installable) override
{
auto evalState = getEvalState();
diff --git a/src/nix/bundle.md b/src/nix/bundle.md
index a18161a3c..89458aaaa 100644
--- a/src/nix/bundle.md
+++ b/src/nix/bundle.md
@@ -29,7 +29,7 @@ R""(
# Description
-`nix bundle`, by default, packs the closure of the *installable* into a single
+`nix bundle`, by default, packs the closure of the [*installable*](./nix.md#installables) into a single
self-extracting executable. See the [`bundlers`
homepage](https://github.com/NixOS/bundlers) for more details.
diff --git a/src/nix/cat.cc b/src/nix/cat.cc
index 6420a0f79..60aa66ce0 100644
--- a/src/nix/cat.cc
+++ b/src/nix/cat.cc
@@ -17,7 +17,7 @@ struct MixCat : virtual Args
if (st.type != FSAccessor::Type::tRegular)
throw Error("path '%1%' is not a regular file", path);
- std::cout << accessor->readFile(path);
+ writeFull(STDOUT_FILENO, accessor->readFile(path));
}
};
diff --git a/src/nix/copy.cc b/src/nix/copy.cc
index 8730a9a5c..151d28277 100644
--- a/src/nix/copy.cc
+++ b/src/nix/copy.cc
@@ -10,8 +10,6 @@ struct CmdCopy : virtual CopyCommand, virtual BuiltPathsCommand
SubstituteFlag substitute = NoSubstitute;
- using BuiltPathsCommand::run;
-
CmdCopy()
: BuiltPathsCommand(true)
{
diff --git a/src/nix/daemon.cc b/src/nix/daemon.cc
index a22bccba1..7e4a7ba86 100644
--- a/src/nix/daemon.cc
+++ b/src/nix/daemon.cc
@@ -249,9 +249,9 @@ static void daemonLoop()
if ((!trusted && !matchUser(user, group, allowedUsers)) || group == settings.buildUsersGroup)
throw Error("user '%1%' is not allowed to connect to the Nix daemon", user);
- printInfo(format((std::string) "accepted connection from pid %1%, user %2%" + (trusted ? " (trusted)" : ""))
- % (peer.pidKnown ? std::to_string(peer.pid) : "<unknown>")
- % (peer.uidKnown ? user : "<unknown>"));
+ printInfo((std::string) "accepted connection from pid %1%, user %2%" + (trusted ? " (trusted)" : ""),
+ peer.pidKnown ? std::to_string(peer.pid) : "<unknown>",
+ peer.uidKnown ? user : "<unknown>");
// Fork a child to handle the connection.
ProcessOptions options;
diff --git a/src/nix/describe-stores.cc b/src/nix/describe-stores.cc
deleted file mode 100644
index 1dd384c0e..000000000
--- a/src/nix/describe-stores.cc
+++ /dev/null
@@ -1,44 +0,0 @@
-#include "command.hh"
-#include "common-args.hh"
-#include "shared.hh"
-#include "store-api.hh"
-
-#include <nlohmann/json.hpp>
-
-using namespace nix;
-
-struct CmdDescribeStores : Command, MixJSON
-{
- std::string description() override
- {
- return "show registered store types and their available options";
- }
-
- Category category() override { return catUtility; }
-
- void run() override
- {
- auto res = nlohmann::json::object();
- for (auto & implem : *Implementations::registered) {
- auto storeConfig = implem.getConfig();
- auto storeName = storeConfig->name();
- res[storeName] = storeConfig->toJSON();
- }
- if (json) {
- std::cout << res;
- } else {
- for (auto & [storeName, storeConfig] : res.items()) {
- std::cout << "## " << storeName << std::endl << std::endl;
- for (auto & [optionName, optionDesc] : storeConfig.items()) {
- std::cout << "### " << optionName << std::endl << std::endl;
- std::cout << optionDesc["description"].get<std::string>() << std::endl;
- std::cout << "default: " << optionDesc["defaultValue"] << std::endl <<std::endl;
- if (!optionDesc["aliases"].empty())
- std::cout << "aliases: " << optionDesc["aliases"] << std::endl << std::endl;
- }
- }
- }
- }
-};
-
-static auto rDescribeStore = registerCommand<CmdDescribeStores>("describe-stores");
diff --git a/src/nix/develop.cc b/src/nix/develop.cc
index 9d07a7a85..9e2dcff61 100644
--- a/src/nix/develop.cc
+++ b/src/nix/develop.cc
@@ -1,6 +1,6 @@
#include "eval.hh"
-#include "command.hh"
#include "installable-flake.hh"
+#include "command-installable-value.hh"
#include "common-args.hh"
#include "shared.hh"
#include "store-api.hh"
@@ -208,7 +208,7 @@ static StorePath getDerivationEnvironment(ref<Store> store, ref<Store> evalStore
drv.name += "-env";
drv.env.emplace("name", drv.name);
drv.inputSrcs.insert(std::move(getEnvShPath));
- if (settings.isExperimentalFeatureEnabled(Xp::CaDerivations)) {
+ if (experimentalFeatureSettings.isEnabled(Xp::CaDerivations)) {
for (auto & output : drv.outputs) {
output.second = DerivationOutput::Deferred {},
drv.env[output.first] = hashPlaceholder(output.first);
@@ -252,7 +252,7 @@ static StorePath getDerivationEnvironment(ref<Store> store, ref<Store> evalStore
throw Error("get-env.sh failed to produce an environment");
}
-struct Common : InstallableCommand, MixProfile
+struct Common : InstallableValueCommand, MixProfile
{
std::set<std::string> ignoreVars{
"BASHOPTS",
@@ -313,7 +313,7 @@ struct Common : InstallableCommand, MixProfile
buildEnvironment.toBash(out, ignoreVars);
for (auto & var : savedVars)
- out << fmt("%s=\"$%s:$nix_saved_%s\"\n", var, var, var);
+ out << fmt("%s=\"$%s${nix_saved_%s:+:$nix_saved_%s}\"\n", var, var, var, var);
out << "export NIX_BUILD_TOP=\"$(mktemp -d -t nix-shell.XXXXXX)\"\n";
for (auto & i : {"TMP", "TMPDIR", "TEMP", "TEMPDIR"})
@@ -374,7 +374,7 @@ struct Common : InstallableCommand, MixProfile
return res;
}
- StorePath getShellOutPath(ref<Store> store)
+ StorePath getShellOutPath(ref<Store> store, ref<InstallableValue> installable)
{
auto path = installable->getStorePath();
if (path && hasSuffix(path->to_string(), "-env"))
@@ -392,9 +392,10 @@ struct Common : InstallableCommand, MixProfile
}
}
- std::pair<BuildEnvironment, std::string> getBuildEnvironment(ref<Store> store)
+ std::pair<BuildEnvironment, std::string>
+ getBuildEnvironment(ref<Store> store, ref<InstallableValue> installable)
{
- auto shellOutPath = getShellOutPath(store);
+ auto shellOutPath = getShellOutPath(store, installable);
auto strPath = store->printStorePath(shellOutPath);
@@ -480,9 +481,9 @@ struct CmdDevelop : Common, MixEnvironment
;
}
- void run(ref<Store> store) override
+ void run(ref<Store> store, ref<InstallableValue> installable) override
{
- auto [buildEnvironment, gcroot] = getBuildEnvironment(store);
+ auto [buildEnvironment, gcroot] = getBuildEnvironment(store, installable);
auto [rcFileFd, rcFilePath] = createTempFile("nix-shell");
@@ -537,7 +538,7 @@ struct CmdDevelop : Common, MixEnvironment
nixpkgsLockFlags.inputOverrides = {};
nixpkgsLockFlags.inputUpdates = {};
- auto bashInstallable = std::make_shared<InstallableFlake>(
+ auto bashInstallable = make_ref<InstallableFlake>(
this,
state,
installable->nixpkgsFlakeRef(),
@@ -573,7 +574,7 @@ struct CmdDevelop : Common, MixEnvironment
// Need to chdir since phases assume in flake directory
if (phase) {
// chdir if installable is a flake of type git+file or path
- auto installableFlake = std::dynamic_pointer_cast<InstallableFlake>(installable);
+ auto installableFlake = installable.dynamic_pointer_cast<InstallableFlake>();
if (installableFlake) {
auto sourcePath = installableFlake->getLockedFlake()->flake.resolvedRef.input.getSourcePath();
if (sourcePath) {
@@ -604,9 +605,9 @@ struct CmdPrintDevEnv : Common, MixJSON
Category category() override { return catUtility; }
- void run(ref<Store> store) override
+ void run(ref<Store> store, ref<InstallableValue> installable) override
{
- auto buildEnvironment = getBuildEnvironment(store).first;
+ auto buildEnvironment = getBuildEnvironment(store, installable).first;
stopProgressBar();
diff --git a/src/nix/develop.md b/src/nix/develop.md
index 4e8542d1b..c49b39669 100644
--- a/src/nix/develop.md
+++ b/src/nix/develop.md
@@ -76,7 +76,7 @@ R""(
`nix develop` starts a `bash` shell that provides an interactive build
environment nearly identical to what Nix would use to build
-*installable*. Inside this shell, environment variables and shell
+[*installable*](./nix.md#installables). Inside this shell, environment variables and shell
functions are set up so that you can interactively and incrementally
build your package.
diff --git a/src/nix/diff-closures.cc b/src/nix/diff-closures.cc
index 3489cc132..c7c37b66f 100644
--- a/src/nix/diff-closures.cc
+++ b/src/nix/diff-closures.cc
@@ -97,7 +97,7 @@ void printClosureDiff(
items.push_back(fmt("%s → %s", showVersions(removed), showVersions(added)));
if (showDelta)
items.push_back(fmt("%s%+.1f KiB" ANSI_NORMAL, sizeDelta > 0 ? ANSI_RED : ANSI_GREEN, sizeDelta / 1024.0));
- std::cout << fmt("%s%s: %s\n", indent, name, concatStringsSep(", ", items));
+ logger->cout("%s%s: %s", indent, name, concatStringsSep(", ", items));
}
}
}
diff --git a/src/nix/doctor.cc b/src/nix/doctor.cc
index ea87e3d87..354b03cf6 100644
--- a/src/nix/doctor.cc
+++ b/src/nix/doctor.cc
@@ -18,7 +18,7 @@ std::string formatProtocol(unsigned int proto)
if (proto) {
auto major = GET_PROTOCOL_MAJOR(proto) >> 8;
auto minor = GET_PROTOCOL_MINOR(proto);
- return (format("%1%.%2%") % major % minor).str();
+ return fmt("%1%.%2%", major, minor);
}
return "unknown";
}
@@ -39,6 +39,14 @@ struct CmdDoctor : StoreCommand
{
bool success = true;
+ /**
+ * This command is stable before the others, so it is not gated behind an experimental feature.
+ */
+ std::optional<ExperimentalFeature> experimentalFeature() override
+ {
+ return std::nullopt;
+ }
+
std::string description() override
{
return "check your system for potential problems and print a PASS or FAIL for each check";
diff --git a/src/nix/edit.cc b/src/nix/edit.cc
index dfe75fbdf..66629fab0 100644
--- a/src/nix/edit.cc
+++ b/src/nix/edit.cc
@@ -1,4 +1,4 @@
-#include "command.hh"
+#include "command-installable-value.hh"
#include "shared.hh"
#include "eval.hh"
#include "attr-path.hh"
@@ -9,7 +9,7 @@
using namespace nix;
-struct CmdEdit : InstallableCommand
+struct CmdEdit : InstallableValueCommand
{
std::string description() override
{
@@ -25,7 +25,7 @@ struct CmdEdit : InstallableCommand
Category category() override { return catSecondary; }
- void run(ref<Store> store) override
+ void run(ref<Store> store, ref<InstallableValue> installable) override
{
auto state = getEvalState();
diff --git a/src/nix/eval.cc b/src/nix/eval.cc
index a579213fd..43db5150c 100644
--- a/src/nix/eval.cc
+++ b/src/nix/eval.cc
@@ -1,4 +1,4 @@
-#include "command.hh"
+#include "command-installable-value.hh"
#include "common-args.hh"
#include "shared.hh"
#include "store-api.hh"
@@ -11,13 +11,13 @@
using namespace nix;
-struct CmdEval : MixJSON, InstallableCommand, MixReadOnlyOption
+struct CmdEval : MixJSON, InstallableValueCommand, MixReadOnlyOption
{
bool raw = false;
std::optional<std::string> apply;
std::optional<Path> writeTo;
- CmdEval() : InstallableCommand()
+ CmdEval() : InstallableValueCommand()
{
addFlag({
.longName = "raw",
@@ -54,7 +54,7 @@ struct CmdEval : MixJSON, InstallableCommand, MixReadOnlyOption
Category category() override { return catSecondary; }
- void run(ref<Store> store) override
+ void run(ref<Store> store, ref<InstallableValue> installable) override
{
if (raw && json)
throw UsageError("--raw and --json are mutually exclusive");
@@ -112,11 +112,11 @@ struct CmdEval : MixJSON, InstallableCommand, MixReadOnlyOption
else if (raw) {
stopProgressBar();
- std::cout << *state->coerceToString(noPos, *v, context, "while generating the eval command output");
+ writeFull(STDOUT_FILENO, *state->coerceToString(noPos, *v, context, "while generating the eval command output"));
}
else if (json) {
- std::cout << printValueAsJSON(*state, true, *v, pos, context, false).dump() << std::endl;
+ logger->cout("%s", printValueAsJSON(*state, true, *v, pos, context, false));
}
else {
diff --git a/src/nix/eval.md b/src/nix/eval.md
index 61334cde1..3b510737a 100644
--- a/src/nix/eval.md
+++ b/src/nix/eval.md
@@ -50,7 +50,7 @@ R""(
# Description
-This command evaluates the Nix expression *installable* and prints the
+This command evaluates the given Nix expression and prints the
result on standard output.
# Output format
diff --git a/src/nix/flake.cc b/src/nix/flake.cc
index 053a9c9e1..cd4ee5921 100644
--- a/src/nix/flake.cc
+++ b/src/nix/flake.cc
@@ -952,7 +952,7 @@ struct CmdFlakeArchive : FlakeCommand, MixJSON, MixDryRun
{"path", store->printStorePath(flake.flake.sourceInfo->storePath)},
{"inputs", traverse(*flake.lockFile.root)},
};
- std::cout << jsonRoot.dump() << std::endl;
+ logger->cout("%s", jsonRoot);
} else {
traverse(*flake.lockFile.root);
}
@@ -1026,36 +1026,43 @@ struct CmdFlakeShow : FlakeCommand, MixJSON
auto visitor2 = visitor.getAttr(attrName);
- if ((attrPathS[0] == "apps"
- || attrPathS[0] == "checks"
- || attrPathS[0] == "devShells"
- || attrPathS[0] == "legacyPackages"
- || attrPathS[0] == "packages")
- && (attrPathS.size() == 1 || attrPathS.size() == 2)) {
- for (const auto &subAttr : visitor2->getAttrs()) {
- if (hasContent(*visitor2, attrPath2, subAttr)) {
- return true;
+ try {
+ if ((attrPathS[0] == "apps"
+ || attrPathS[0] == "checks"
+ || attrPathS[0] == "devShells"
+ || attrPathS[0] == "legacyPackages"
+ || attrPathS[0] == "packages")
+ && (attrPathS.size() == 1 || attrPathS.size() == 2)) {
+ for (const auto &subAttr : visitor2->getAttrs()) {
+ if (hasContent(*visitor2, attrPath2, subAttr)) {
+ return true;
+ }
}
+ return false;
}
- return false;
- }
- if ((attrPathS.size() == 1)
- && (attrPathS[0] == "formatter"
- || attrPathS[0] == "nixosConfigurations"
- || attrPathS[0] == "nixosModules"
- || attrPathS[0] == "overlays"
- )) {
- for (const auto &subAttr : visitor2->getAttrs()) {
- if (hasContent(*visitor2, attrPath2, subAttr)) {
- return true;
+ if ((attrPathS.size() == 1)
+ && (attrPathS[0] == "formatter"
+ || attrPathS[0] == "nixosConfigurations"
+ || attrPathS[0] == "nixosModules"
+ || attrPathS[0] == "overlays"
+ )) {
+ for (const auto &subAttr : visitor2->getAttrs()) {
+ if (hasContent(*visitor2, attrPath2, subAttr)) {
+ return true;
+ }
}
+ return false;
}
- return false;
- }
- // If we don't recognize it, it's probably content
- return true;
+ // If we don't recognize it, it's probably content
+ return true;
+ } catch (EvalError & e) {
+ // Some attrs may contain errors, e.g. legacyPackages of
+ // nixpkgs. We still want to recurse into them instead of
+ // skipping them entirely.
+ return true;
+ }
};
std::function<nlohmann::json(
@@ -1328,8 +1335,7 @@ struct CmdFlake : NixMultiCommand
{
if (!command)
throw UsageError("'nix flake' requires a sub-command.");
- settings.requireExperimentalFeature(Xp::Flakes);
- command->second->prepare();
+ experimentalFeatureSettings.require(Xp::Flakes);
command->second->run();
}
};
diff --git a/src/nix/flake.md b/src/nix/flake.md
index 810e9ebea..d70f34eeb 100644
--- a/src/nix/flake.md
+++ b/src/nix/flake.md
@@ -54,7 +54,7 @@ output attribute). They are also allowed in the `inputs` attribute
of a flake, e.g.
```nix
-inputs.nixpkgs.url = github:NixOS/nixpkgs;
+inputs.nixpkgs.url = "github:NixOS/nixpkgs";
```
is equivalent to
@@ -221,11 +221,46 @@ Currently the `type` attribute can be one of the following:
commit hash (`rev`). Note that unlike Git, GitHub allows fetching by
commit hash without specifying a branch or tag.
+ You can also specify `host` as a parameter, to point to a custom GitHub
+ Enterprise server.
+
Some examples:
* `github:edolstra/dwarffs`
* `github:edolstra/dwarffs/unstable`
* `github:edolstra/dwarffs/d3f2baba8f425779026c6ec04021b2e927f61e31`
+ * `github:internal/project?host=company-github.example.org`
+
+* `gitlab`: Similar to `github`, is a more efficient way to fetch
+ GitLab repositories. The following attributes are required:
+
+ * `owner`: The owner of the repository.
+
+ * `repo`: The name of the repository.
+
+ Like `github`, these are downloaded as tarball archives.
+
+ The URL syntax for `gitlab` flakes is:
+
+ `gitlab:<owner>/<repo>(/<rev-or-ref>)?(\?<params>)?`
+
+ `<rev-or-ref>` works the same as `github`. Either a branch or tag name
+ (`ref`), or a commit hash (`rev`) can be specified.
+
+ Since GitLab allows for self-hosting, you can specify `host` as
+  a parameter, to point to an instance other than `gitlab.com`.
+
+ Some examples:
+
+ * `gitlab:veloren/veloren`
+ * `gitlab:veloren/veloren/master`
+ * `gitlab:veloren/veloren/80a4d7f13492d916e47d6195be23acae8001985a`
+ * `gitlab:openldap/openldap?host=git.openldap.org`
+
+ When accessing a project in a (nested) subgroup, make sure to URL-encode any
+ slashes, i.e. replace `/` with `%2F`:
+
+ * `gitlab:veloren%2Fdev/rfcs`
* `sourcehut`: Similar to `github`, is a more efficient way to fetch
SourceHut repositories. The following attributes are required:
@@ -275,14 +310,14 @@ Currently the `type` attribute can be one of the following:
# Flake format
As an example, here is a simple `flake.nix` that depends on the
-Nixpkgs flake and provides a single package (i.e. an installable
-derivation):
+Nixpkgs flake and provides a single package (i.e. an
+[installable](./nix.md#installables) derivation):
```nix
{
description = "A flake for building Hello World";
- inputs.nixpkgs.url = github:NixOS/nixpkgs/nixos-20.03;
+ inputs.nixpkgs.url = "github:NixOS/nixpkgs/nixos-20.03";
outputs = { self, nixpkgs }: {
@@ -317,6 +352,8 @@ The following attributes are supported in `flake.nix`:
also contains some metadata about the inputs. These are:
* `outPath`: The path in the Nix store of the flake's source tree.
+ This way, the attribute set can be passed to `import` as if it was a path,
+ as in the example above (`import nixpkgs`).
* `rev`: The commit hash of the flake's repository, if applicable.
@@ -374,7 +411,7 @@ inputs.nixpkgs = {
Alternatively, you can use the URL-like syntax:
```nix
-inputs.import-cargo.url = github:edolstra/import-cargo;
+inputs.import-cargo.url = "github:edolstra/import-cargo";
inputs.nixpkgs.url = "nixpkgs";
```
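
The `gitlab` input type documented in the hunks above can also be exercised directly from the command line. A minimal sketch, reusing the flake references from those examples and assuming network access and enabled flakes (output elided):

```console
# Resolve and show metadata for a flake hosted on gitlab.com.
$ nix flake metadata gitlab:veloren/veloren

# Same, but against a self-hosted GitLab instance via the `host` parameter.
$ nix flake metadata 'gitlab:openldap/openldap?host=git.openldap.org'
```
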
diff --git a/src/nix/fmt.cc b/src/nix/fmt.cc
index 6f6a4a632..c85eacded 100644
--- a/src/nix/fmt.cc
+++ b/src/nix/fmt.cc
@@ -1,4 +1,5 @@
#include "command.hh"
+#include "installable-value.hh"
#include "run.hh"
using namespace nix;
@@ -31,8 +32,9 @@ struct CmdFmt : SourceExprCommand {
auto evalState = getEvalState();
auto evalStore = getEvalStore();
- auto installable = parseInstallable(store, ".");
- auto app = installable->toApp(*evalState).resolve(evalStore, store);
+ auto installable_ = parseInstallable(store, ".");
+ auto & installable = InstallableValue::require(*installable_);
+ auto app = installable.toApp(*evalState).resolve(evalStore, store);
Strings programArgs{app.program};
diff --git a/src/nix/hash.cc b/src/nix/hash.cc
index 60d9593a7..9feca9345 100644
--- a/src/nix/hash.cc
+++ b/src/nix/hash.cc
@@ -151,7 +151,6 @@ struct CmdHash : NixMultiCommand
{
if (!command)
throw UsageError("'nix hash' requires a sub-command.");
- command->second->prepare();
command->second->run();
}
};
@@ -161,11 +160,11 @@ static auto rCmdHash = registerCommand<CmdHash>("hash");
/* Legacy nix-hash command. */
static int compatNixHash(int argc, char * * argv)
{
- HashType ht = htMD5;
+ std::optional<HashType> ht;
bool flat = false;
- bool base32 = false;
+ Base base = Base16;
bool truncate = false;
- enum { opHash, opTo32, opTo16 } op = opHash;
+ enum { opHash, opTo } op = opHash;
std::vector<std::string> ss;
parseCmdLine(argc, argv, [&](Strings::iterator & arg, const Strings::iterator & end) {
@@ -174,14 +173,31 @@ static int compatNixHash(int argc, char * * argv)
else if (*arg == "--version")
printVersion("nix-hash");
else if (*arg == "--flat") flat = true;
- else if (*arg == "--base32") base32 = true;
+ else if (*arg == "--base16") base = Base16;
+ else if (*arg == "--base32") base = Base32;
+ else if (*arg == "--base64") base = Base64;
+ else if (*arg == "--sri") base = SRI;
else if (*arg == "--truncate") truncate = true;
else if (*arg == "--type") {
std::string s = getArg(*arg, arg, end);
ht = parseHashType(s);
}
- else if (*arg == "--to-base16") op = opTo16;
- else if (*arg == "--to-base32") op = opTo32;
+ else if (*arg == "--to-base16") {
+ op = opTo;
+ base = Base16;
+ }
+ else if (*arg == "--to-base32") {
+ op = opTo;
+ base = Base32;
+ }
+ else if (*arg == "--to-base64") {
+ op = opTo;
+ base = Base64;
+ }
+ else if (*arg == "--to-sri") {
+ op = opTo;
+ base = SRI;
+ }
else if (*arg != "" && arg->at(0) == '-')
return false;
else
@@ -191,17 +207,18 @@ static int compatNixHash(int argc, char * * argv)
if (op == opHash) {
CmdHashBase cmd(flat ? FileIngestionMethod::Flat : FileIngestionMethod::Recursive);
- cmd.ht = ht;
- cmd.base = base32 ? Base32 : Base16;
+ if (!ht.has_value()) ht = htMD5;
+ cmd.ht = ht.value();
+ cmd.base = base;
cmd.truncate = truncate;
cmd.paths = ss;
cmd.run();
}
else {
- CmdToBase cmd(op == opTo32 ? Base32 : Base16);
+ CmdToBase cmd(base);
cmd.args = ss;
- cmd.ht = ht;
+ if (ht.has_value()) cmd.ht = ht;
cmd.run();
}
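
The reworked option parsing above adds `--base64`/`--sri` and `--to-base64`/`--to-sri` to the legacy `nix-hash` wrapper. A hedged sketch of how the new flags would be used; `./some-file` and `$someBase16Hash` are placeholders, not values from this change:

```console
# Hash a single file (flat) and print the result in SRI form.
$ nix-hash --flat --type sha256 --sri ./some-file

# Convert an existing base-16 hash string to base64.
$ nix-hash --type sha256 --to-base64 "$someBase16Hash"
```
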
diff --git a/src/nix/help-stores.md b/src/nix/help-stores.md
new file mode 100644
index 000000000..47ba9b94d
--- /dev/null
+++ b/src/nix/help-stores.md
@@ -0,0 +1,46 @@
+R"(
+
+Nix supports different types of stores. These are described below.
+
+## Store URL format
+
+Stores are specified using a URL-like syntax. For example, the command
+
+```console
+# nix path-info --store https://cache.nixos.org/ --json \
+ /nix/store/a7gvj343m05j2s32xcnwr35v31ynlypr-coreutils-9.1
+```
+
+fetches information about a store path in the HTTP binary cache
+located at https://cache.nixos.org/, which is a type of store.
+
+Store URLs can specify **store settings** using URL query strings,
+i.e. by appending `?name1=value1&name2=value2&...` to the URL. For
+instance,
+
+```
+--store ssh://machine.example.org?ssh-key=/path/to/my/key
+```
+
+tells Nix to access the store on a remote machine via the SSH
+protocol, using `/path/to/my/key` as the SSH private key. The
+supported settings for each store type are documented below.
+
+The special store URL `auto` causes Nix to automatically select a
+store as follows:
+
+* Use the [local store](#local-store) `/nix/store` if `/nix/var/nix`
+ is writable by the current user.
+
+* Otherwise, if `/nix/var/nix/daemon-socket/socket` exists, [connect
+ to the Nix daemon listening on that socket](#local-daemon-store).
+
+* Otherwise, on Linux only, use the [local chroot store](#local-store)
+ `~/.local/share/nix/root`, which will be created automatically if it
+ does not exist.
+
+* Otherwise, use the [local store](#local-store) `/nix/store`.
+
+@stores@
+
+)"
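
The `auto` selection logic described in this new page can be checked interactively. A minimal sketch, assuming `nix store ping` is available to report which store a `--store` URL resolves to; the chroot path is the one mentioned above:

```console
# Show which store the special URL `auto` selects for the current user.
$ nix store ping --store auto

# Point explicitly at the per-user chroot store described above.
$ nix store ping --store ~/.local/share/nix/root
```
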
diff --git a/src/nix/log.cc b/src/nix/log.cc
index a0598ca13..aaf829764 100644
--- a/src/nix/log.cc
+++ b/src/nix/log.cc
@@ -23,7 +23,7 @@ struct CmdLog : InstallableCommand
Category category() override { return catSecondary; }
- void run(ref<Store> store) override
+ void run(ref<Store> store, ref<Installable> installable) override
{
settings.readOnlyMode = true;
@@ -53,7 +53,7 @@ struct CmdLog : InstallableCommand
if (!log) continue;
stopProgressBar();
printInfo("got build log for '%s' from '%s'", installable->what(), logSub.getUri());
- std::cout << *log;
+ writeFull(STDOUT_FILENO, *log);
return;
}
diff --git a/src/nix/log.md b/src/nix/log.md
index 1c76226a3..01e9801df 100644
--- a/src/nix/log.md
+++ b/src/nix/log.md
@@ -22,8 +22,7 @@ R""(
# Description
-This command prints the log of a previous build of the derivation
-*installable* on standard output.
+This command prints the log of a previous build of the [*installable*](./nix.md#installables) on standard output.
Nix looks for build logs in two places:
diff --git a/src/nix/ls.cc b/src/nix/ls.cc
index e964b01b3..c990a303c 100644
--- a/src/nix/ls.cc
+++ b/src/nix/ls.cc
@@ -93,7 +93,7 @@ struct MixLs : virtual Args, MixJSON
if (json) {
if (showDirectory)
throw UsageError("'--directory' is useless with '--json'");
- std::cout << listNar(accessor, path, recursive);
+ logger->cout("%s", listNar(accessor, path, recursive));
} else
listText(accessor);
}
diff --git a/src/nix/main.cc b/src/nix/main.cc
index d3d2f5b16..4d4164333 100644
--- a/src/nix/main.cc
+++ b/src/nix/main.cc
@@ -54,17 +54,17 @@ static bool haveInternet()
std::string programPath;
-struct HelpRequested { };
-
struct NixArgs : virtual MultiCommand, virtual MixCommonArgs
{
bool useNet = true;
bool refresh = false;
+ bool helpRequested = false;
bool showVersion = false;
NixArgs() : MultiCommand(RegisterCommand::getCommandsFor({})), MixCommonArgs("nix")
{
categories.clear();
+ categories[catHelp] = "Help commands";
categories[Command::catDefault] = "Main commands";
categories[catSecondary] = "Infrequently used commands";
categories[catUtility] = "Utility/scripting commands";
@@ -74,7 +74,7 @@ struct NixArgs : virtual MultiCommand, virtual MixCommonArgs
.longName = "help",
.description = "Show usage information.",
.category = miscCategory,
- .handler = {[&]() { throw HelpRequested(); }},
+ .handler = {[this]() { this->helpRequested = true; }},
});
addFlag({
@@ -83,6 +83,7 @@ struct NixArgs : virtual MultiCommand, virtual MixCommonArgs
.description = "Print full build logs on standard error.",
.category = loggingCategory,
.handler = {[&]() { logger->setPrintBuildLogs(true); }},
+ .experimentalFeature = Xp::NixCommand,
});
addFlag({
@@ -98,6 +99,7 @@ struct NixArgs : virtual MultiCommand, virtual MixCommonArgs
.description = "Disable substituters and consider all previously downloaded files up-to-date.",
.category = miscCategory,
.handler = {[&]() { useNet = false; }},
+ .experimentalFeature = Xp::NixCommand,
});
addFlag({
@@ -105,6 +107,7 @@ struct NixArgs : virtual MultiCommand, virtual MixCommonArgs
.description = "Consider all previously downloaded files out-of-date.",
.category = miscCategory,
.handler = {[&]() { refresh = true; }},
+ .experimentalFeature = Xp::NixCommand,
});
}
@@ -164,11 +167,29 @@ struct NixArgs : virtual MultiCommand, virtual MixCommonArgs
{
commands = RegisterCommand::getCommandsFor({});
}
+
+ std::string dumpCli()
+ {
+ auto res = nlohmann::json::object();
+
+ res["args"] = toJSON();
+
+ auto stores = nlohmann::json::object();
+ for (auto & implem : *Implementations::registered) {
+ auto storeConfig = implem.getConfig();
+ auto storeName = storeConfig->name();
+ stores[storeName]["doc"] = storeConfig->doc();
+ stores[storeName]["settings"] = storeConfig->toJSON();
+ }
+ res["stores"] = std::move(stores);
+
+ return res.dump();
+ }
};
/* Render the help for the specified subcommand to stdout using
lowdown. */
-static void showHelp(std::vector<std::string> subcommand, MultiCommand & toplevel)
+static void showHelp(std::vector<std::string> subcommand, NixArgs & toplevel)
{
auto mdName = subcommand.empty() ? "nix" : fmt("nix3-%s", concatStringsSep("-", subcommand));
@@ -189,11 +210,11 @@ static void showHelp(std::vector<std::string> subcommand, MultiCommand & topleve
, "/"),
*vUtils);
- auto attrs = state.buildBindings(16);
- attrs.alloc("toplevel").mkString(toplevel.toJSON().dump());
+ auto vDump = state.allocValue();
+ vDump->mkString(toplevel.dumpCli());
auto vRes = state.allocValue();
- state.callFunction(*vGenerateManpage, state.allocValue()->mkAttrs(attrs), *vRes, noPos);
+ state.callFunction(*vGenerateManpage, *vDump, *vRes, noPos);
auto attr = vRes->attrs->get(state.symbols.create(mdName + ".md"));
if (!attr)
@@ -205,6 +226,14 @@ static void showHelp(std::vector<std::string> subcommand, MultiCommand & topleve
std::cout << renderMarkdownToTerminal(markdown) << "\n";
}
+static NixArgs & getNixArgs(Command & cmd)
+{
+ assert(cmd.parent);
+ MultiCommand * toplevel = cmd.parent;
+ while (toplevel->parent) toplevel = toplevel->parent;
+ return dynamic_cast<NixArgs &>(*toplevel);
+}
+
struct CmdHelp : Command
{
std::vector<std::string> subcommand;
@@ -229,17 +258,43 @@ struct CmdHelp : Command
;
}
+ Category category() override { return catHelp; }
+
void run() override
{
assert(parent);
MultiCommand * toplevel = parent;
while (toplevel->parent) toplevel = toplevel->parent;
- showHelp(subcommand, *toplevel);
+ showHelp(subcommand, getNixArgs(*this));
}
};
static auto rCmdHelp = registerCommand<CmdHelp>("help");
+struct CmdHelpStores : Command
+{
+ std::string description() override
+ {
+ return "show help about store types and their settings";
+ }
+
+ std::string doc() override
+ {
+ return
+ #include "help-stores.md"
+ ;
+ }
+
+ Category category() override { return catHelp; }
+
+ void run() override
+ {
+ showHelp({"help-stores"}, getNixArgs(*this));
+ }
+};
+
+static auto rCmdHelpStores = registerCommand<CmdHelpStores>("help-stores");
+
void mainWrapped(int argc, char * * argv)
{
savedArgv = argv;
@@ -291,13 +346,16 @@ void mainWrapped(int argc, char * * argv)
NixArgs args;
- if (argc == 2 && std::string(argv[1]) == "__dump-args") {
- std::cout << args.toJSON().dump() << "\n";
+ if (argc == 2 && std::string(argv[1]) == "__dump-cli") {
+ logger->cout(args.dumpCli());
return;
}
if (argc == 2 && std::string(argv[1]) == "__dump-builtins") {
- settings.experimentalFeatures = {Xp::Flakes, Xp::FetchClosure};
+ experimentalFeatureSettings.experimentalFeatures = {
+ Xp::Flakes,
+ Xp::FetchClosure,
+ };
evalSettings.pureEval = false;
EvalState state({}, openStore("dummy://"));
auto res = nlohmann::json::object();
@@ -312,7 +370,7 @@ void mainWrapped(int argc, char * * argv)
b["doc"] = trim(stripIndentation(primOp->doc));
res[state.symbols[builtin.name]] = std::move(b);
}
- std::cout << res.dump() << "\n";
+ logger->cout("%s", res);
return;
}
@@ -321,20 +379,24 @@ void mainWrapped(int argc, char * * argv)
if (completions) {
switch (completionType) {
case ctNormal:
- std::cout << "normal\n"; break;
+ logger->cout("normal"); break;
case ctFilenames:
- std::cout << "filenames\n"; break;
+ logger->cout("filenames"); break;
case ctAttrs:
- std::cout << "attrs\n"; break;
+ logger->cout("attrs"); break;
}
for (auto & s : *completions)
- std::cout << s.completion << "\t" << trim(s.description) << "\n";
+ logger->cout(s.completion + "\t" + trim(s.description));
}
});
try {
args.parseCmdline(argvToStrings(argc, argv));
- } catch (HelpRequested &) {
+ } catch (UsageError &) {
+ if (!args.helpRequested && !completions) throw;
+ }
+
+ if (args.helpRequested) {
std::vector<std::string> subcommand;
MultiCommand * command = &args;
while (command) {
@@ -346,8 +408,6 @@ void mainWrapped(int argc, char * * argv)
}
showHelp(subcommand, args);
return;
- } catch (UsageError &) {
- if (!completions) throw;
}
if (completions) {
@@ -363,10 +423,8 @@ void mainWrapped(int argc, char * * argv)
if (!args.command)
throw UsageError("no subcommand specified");
- if (args.command->first != "repl"
- && args.command->first != "doctor"
- && args.command->first != "upgrade-nix")
- settings.requireExperimentalFeature(Xp::NixCommand);
+ experimentalFeatureSettings.require(
+ args.command->second->experimentalFeature());
if (args.useNet && !haveInternet()) {
warn("you don't have Internet access; disabling some network-dependent features");
@@ -394,7 +452,6 @@ void mainWrapped(int argc, char * * argv)
if (args.command->second->forceImpureByDefault() && !evalSettings.pureEval.overridden) {
evalSettings.pureEval = false;
}
- args.command->second->prepare();
args.command->second->run();
}
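
The new `__dump-cli` output replaces `__dump-args` and additionally carries the registered store types whose documentation backs `nix help-stores`. A rough sketch of inspecting it, assuming `jq` is available; the JSON shape follows `dumpCli()` above:

```console
# Top-level keys: the CLI argument tree plus the store types.
$ nix __dump-cli | jq 'keys'

# List the registered store types.
$ nix __dump-cli | jq '.stores | keys'
```
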
diff --git a/src/nix/make-content-addressed.cc b/src/nix/make-content-addressed.cc
index d86b90fc7..d9c988a9f 100644
--- a/src/nix/make-content-addressed.cc
+++ b/src/nix/make-content-addressed.cc
@@ -28,7 +28,6 @@ struct CmdMakeContentAddressed : virtual CopyCommand, virtual StorePathsCommand,
;
}
- using StorePathsCommand::run;
void run(ref<Store> srcStore, StorePaths && storePaths) override
{
auto dstStore = dstUri.empty() ? openStore() : openStore(dstUri);
@@ -45,7 +44,7 @@ struct CmdMakeContentAddressed : virtual CopyCommand, virtual StorePathsCommand,
}
auto json = json::object();
json["rewrites"] = jsonRewrites;
- std::cout << json.dump();
+ logger->cout("%s", json);
} else {
for (auto & path : storePaths) {
auto i = remappings.find(path);
diff --git a/src/nix/make-content-addressed.md b/src/nix/make-content-addressed.md
index 32eecc880..b1f7da525 100644
--- a/src/nix/make-content-addressed.md
+++ b/src/nix/make-content-addressed.md
@@ -35,7 +35,9 @@ R""(
# Description
This command converts the closure of the store paths specified by
-*installables* to content-addressed form. Nix store paths are usually
+[*installables*](./nix.md#installables) to content-addressed form.
+
+Nix store paths are usually
*input-addressed*, meaning that the hash part of the store path is
computed from the contents of the derivation (i.e., the build-time
dependency graph). Input-addressed paths need to be signed by a
diff --git a/src/nix/nar.cc b/src/nix/nar.cc
index dbb043d9b..9815410cf 100644
--- a/src/nix/nar.cc
+++ b/src/nix/nar.cc
@@ -25,7 +25,6 @@ struct CmdNar : NixMultiCommand
{
if (!command)
throw UsageError("'nix nar' requires a sub-command.");
- command->second->prepare();
command->second->run();
}
};
diff --git a/src/nix/nix.md b/src/nix/nix.md
index db60c59ff..e1865b31c 100644
--- a/src/nix/nix.md
+++ b/src/nix/nix.md
@@ -48,102 +48,112 @@ manual](https://nixos.org/manual/nix/stable/).
# Installables
-Many `nix` subcommands operate on one or more *installables*. These are
-command line arguments that represent something that can be built in
-the Nix store. Here are the recognised types of installables:
-
-* **Flake output attributes**: `nixpkgs#hello`
-
- These have the form *flakeref*[`#`*attrpath*], where *flakeref* is a
- flake reference and *attrpath* is an optional attribute path. For
- more information on flakes, see [the `nix flake` manual
- page](./nix3-flake.md). Flake references are most commonly a flake
- identifier in the flake registry (e.g. `nixpkgs`), or a raw path
- (e.g. `/path/to/my-flake` or `.` or `../foo`), or a full URL
- (e.g. `github:nixos/nixpkgs` or `path:.`)
-
- When the flake reference is a raw path (a path without any URL
- scheme), it is interpreted as a `path:` or `git+file:` url in the following
- way:
-
- - If the path is within a Git repository, then the url will be of the form
- `git+file://[GIT_REPO_ROOT]?dir=[RELATIVE_FLAKE_DIR_PATH]`
- where `GIT_REPO_ROOT` is the path to the root of the git repository,
- and `RELATIVE_FLAKE_DIR_PATH` is the path (relative to the directory
- root) of the closest parent of the given path that contains a `flake.nix` within
- the git repository.
- If no such directory exists, then Nix will error-out.
-
- Note that the search will only include files indexed by git. In particular, files
- which are matched by `.gitignore` or have never been `git add`-ed will not be
- available in the flake. If this is undesirable, specify `path:<directory>` explicitly;
-
- For example, if `/foo/bar` is a git repository with the following structure:
- ```
- .
- └── baz
- ├── blah
- │  └── file.txt
- └── flake.nix
- ```
+Many `nix` subcommands operate on one or more *installables*.
+These are command line arguments that represent something that can be realised in the Nix store.
- Then `/foo/bar/baz/blah` will resolve to `git+file:///foo/bar?dir=baz`
+The following types of installable are supported by most commands:
- - If the supplied path is not a git repository, then the url will have the form
- `path:FLAKE_DIR_PATH` where `FLAKE_DIR_PATH` is the closest parent
- of the supplied path that contains a `flake.nix` file (within the same file-system).
- If no such directory exists, then Nix will error-out.
-
- For example, if `/foo/bar/flake.nix` exists, then `/foo/bar/baz/` will resolve to
- `path:/foo/bar`
+- [Flake output attribute](#flake-output-attribute)
+- [Store path](#store-path)
+- [Nix file](#nix-file), optionally qualified by an attribute path
+- [Nix expression](#nix-expression), optionally qualified by an attribute path
- If *attrpath* is omitted, Nix tries some default values; for most
- subcommands, the default is `packages.`*system*`.default`
- (e.g. `packages.x86_64-linux.default`), but some subcommands have
- other defaults. If *attrpath* *is* specified, *attrpath* is
- interpreted as relative to one or more prefixes; for most
- subcommands, these are `packages.`*system*,
- `legacyPackages.*system*` and the empty prefix. Thus, on
- `x86_64-linux` `nix build nixpkgs#hello` will try to build the
- attributes `packages.x86_64-linux.hello`,
- `legacyPackages.x86_64-linux.hello` and `hello`.
+For most commands, if no installable is specified, `.` is assumed.
+That is, Nix will operate on the default flake output attribute of the flake in the current directory.
-* **Store paths**: `/nix/store/v5sv61sszx301i0x6xysaqzla09nksnd-hello-2.10`
+### Flake output attribute
- These are paths inside the Nix store, or symlinks that resolve to a
- path in the Nix store.
+Example: `nixpkgs#hello`
-* **Store derivations**: `/nix/store/p7gp6lxdg32h4ka1q398wd9r2zkbbz2v-hello-2.10.drv`
+These have the form *flakeref*[`#`*attrpath*], where *flakeref* is a
+[flake reference](./nix3-flake.md#flake-references) and *attrpath* is an optional attribute path. For
+more information on flakes, see [the `nix flake` manual
+page](./nix3-flake.md). Flake references are most commonly a flake
+identifier in the flake registry (e.g. `nixpkgs`), or a raw path
+(e.g. `/path/to/my-flake` or `.` or `../foo`), or a full URL
+(e.g. `github:nixos/nixpkgs` or `path:.`)
- By default, if you pass a [store derivation] path to a `nix` subcommand, the command will operate on the [output path]s of the derivation.
+When the flake reference is a raw path (a path without any URL
+scheme), it is interpreted as a `path:` or `git+file:` url in the following
+way:
- [output path]: ../../glossary.md#gloss-output-path
+- If the path is within a Git repository, then the url will be of the form
+ `git+file://[GIT_REPO_ROOT]?dir=[RELATIVE_FLAKE_DIR_PATH]`
+ where `GIT_REPO_ROOT` is the path to the root of the git repository,
+ and `RELATIVE_FLAKE_DIR_PATH` is the path (relative to the directory
+ root) of the closest parent of the given path that contains a `flake.nix` within
+ the git repository.
+ If no such directory exists, then Nix will error-out.
- For example, `nix path-info` prints information about the output paths:
+ Note that the search will only include files indexed by git. In particular, files
+ which are matched by `.gitignore` or have never been `git add`-ed will not be
+ available in the flake. If this is undesirable, specify `path:<directory>` explicitly;
- ```console
- # nix path-info --json /nix/store/p7gp6lxdg32h4ka1q398wd9r2zkbbz2v-hello-2.10.drv
- [{"path":"/nix/store/v5sv61sszx301i0x6xysaqzla09nksnd-hello-2.10",…}]
+ For example, if `/foo/bar` is a git repository with the following structure:
+ ```
+ .
+ └── baz
+ ├── blah
+ │  └── file.txt
+ └── flake.nix
```
- If you want to operate on the store derivation itself, pass the
- `--derivation` flag.
+ Then `/foo/bar/baz/blah` will resolve to `git+file:///foo/bar?dir=baz`
+
+- If the supplied path is not a git repository, then the url will have the form
+ `path:FLAKE_DIR_PATH` where `FLAKE_DIR_PATH` is the closest parent
+ of the supplied path that contains a `flake.nix` file (within the same file-system).
+ If no such directory exists, then Nix will error-out.
+
+ For example, if `/foo/bar/flake.nix` exists, then `/foo/bar/baz/` will resolve to
+ `path:/foo/bar`
+
+If *attrpath* is omitted, Nix tries some default values; for most
+subcommands, the default is `packages.`*system*`.default`
+(e.g. `packages.x86_64-linux.default`), but some subcommands have
+other defaults. If *attrpath* *is* specified, *attrpath* is
+interpreted as relative to one or more prefixes; for most
+subcommands, these are `packages.`*system*,
+`legacyPackages.*system*` and the empty prefix. Thus, on
+`x86_64-linux` `nix build nixpkgs#hello` will try to build the
+attributes `packages.x86_64-linux.hello`,
+`legacyPackages.x86_64-linux.hello` and `hello`.
+
+### Store path
+
+Example: `/nix/store/v5sv61sszx301i0x6xysaqzla09nksnd-hello-2.10`
+
+These are paths inside the Nix store, or symlinks that resolve to a path in the Nix store.
-* **Nix attributes**: `--file /path/to/nixpkgs hello`
+A [store derivation] is also addressed by store path.
- When the `-f` / `--file` *path* option is given, installables are
- interpreted as attribute paths referencing a value returned by
- evaluating the Nix file *path*.
+Example: `/nix/store/p7gp6lxdg32h4ka1q398wd9r2zkbbz2v-hello-2.10.drv`
-* **Nix expressions**: `--expr '(import <nixpkgs> {}).hello.overrideDerivation (prev: { name = "my-hello"; })'`.
+If you want to refer to an output path of that store derivation, add the output name preceded by a caret (`^`).
- When the `--expr` option is given, all installables are interpreted
- as Nix expressions. You may need to specify `--impure` if the
- expression references impure inputs (such as `<nixpkgs>`).
+Example: `/nix/store/p7gp6lxdg32h4ka1q398wd9r2zkbbz2v-hello-2.10.drv^out`
-For most commands, if no installable is specified, the default is `.`,
-i.e. Nix will operate on the default flake output attribute of the
-flake in the current directory.
+All outputs can be referred to at once with the special syntax `^*`.
+
+Example: `/nix/store/p7gp6lxdg32h4ka1q398wd9r2zkbbz2v-hello-2.10.drv^*`
+
+### Nix file
+
+Example: `--file /path/to/nixpkgs hello`
+
+When the option `-f` / `--file` *path* \[*attrpath*...\] is given, installables are interpreted as the value of the expression in the Nix file at *path*.
+If attribute paths are provided, commands will operate on the corresponding values accessible at these paths.
+The Nix expression in that file, or any selected attribute, must evaluate to a derivation.
+
+### Nix expression
+
+Example: `--expr 'import <nixpkgs> {}' hello`
+
+When the option `--expr` *expression* \[*attrpath*...\] is given, installables are interpreted as the value of the Nix expression.
+If attribute paths are provided, commands will operate on the corresponding values accessible at these paths.
+The Nix expression, or any selected attribute, must evaluate to a derivation.
+
+You may need to specify `--impure` if the expression references impure inputs (such as `<nixpkgs>`).
## Derivation output selection
@@ -210,8 +220,7 @@ operate are determined as follows:
# Nix stores
-Most `nix` subcommands operate on a *Nix store*.
-
-TODO: list store types, options
+Most `nix` subcommands operate on a *Nix store*. These are documented
+in [`nix help-stores`](./nix3-help-stores.md).
)""
diff --git a/src/nix/path-info.md b/src/nix/path-info.md
index b30898ac0..6ad23a02e 100644
--- a/src/nix/path-info.md
+++ b/src/nix/path-info.md
@@ -80,7 +80,7 @@ R""(
# Description
This command shows information about the store paths produced by
-*installables*, or about all paths in the store if you pass `--all`.
+[*installables*](./nix.md#installables), or about all paths in the store if you pass `--all`.
By default, this command only prints the store paths. You can get
additional information by passing flags such as `--closure-size`,
diff --git a/src/nix/prefetch.cc b/src/nix/prefetch.cc
index fc3823406..51c8a3319 100644
--- a/src/nix/prefetch.cc
+++ b/src/nix/prefetch.cc
@@ -234,9 +234,9 @@ static int main_nix_prefetch_url(int argc, char * * argv)
if (!printPath)
printInfo("path is '%s'", store->printStorePath(storePath));
- std::cout << printHash16or32(hash) << std::endl;
+ logger->cout(printHash16or32(hash));
if (printPath)
- std::cout << store->printStorePath(storePath) << std::endl;
+ logger->cout(store->printStorePath(storePath));
return 0;
}
diff --git a/src/nix/print-dev-env.md b/src/nix/print-dev-env.md
index 2aad491de..a8ce9d36a 100644
--- a/src/nix/print-dev-env.md
+++ b/src/nix/print-dev-env.md
@@ -40,7 +40,7 @@ R""(
This command prints a shell script that can be sourced by `bash` and
that sets the variables and shell functions defined by the build
-process of *installable*. This allows you to get a similar build
+process of [*installable*](./nix.md#installables). This allows you to get a similar build
environment in your current shell rather than in a subshell (as with
`nix develop`).
diff --git a/src/nix/profile-install.md b/src/nix/profile-install.md
index aed414963..4c0f82c09 100644
--- a/src/nix/profile-install.md
+++ b/src/nix/profile-install.md
@@ -29,6 +29,6 @@ R""(
# Description
-This command adds *installables* to a Nix profile.
+This command adds [*installables*](./nix.md#installables) to a Nix profile.
)""
diff --git a/src/nix/profile.cc b/src/nix/profile.cc
index 711fbe2f0..88d4a3ce5 100644
--- a/src/nix/profile.cc
+++ b/src/nix/profile.cc
@@ -228,12 +228,12 @@ struct ProfileManifest
while (i != prevElems.end() || j != curElems.end()) {
if (j != curElems.end() && (i == prevElems.end() || i->describe() > j->describe())) {
- std::cout << fmt("%s%s: ∅ -> %s\n", indent, j->describe(), j->versions());
+ logger->cout("%s%s: ∅ -> %s", indent, j->describe(), j->versions());
changes = true;
++j;
}
else if (i != prevElems.end() && (j == curElems.end() || i->describe() < j->describe())) {
- std::cout << fmt("%s%s: %s -> ∅\n", indent, i->describe(), i->versions());
+ logger->cout("%s%s: %s -> ∅", indent, i->describe(), i->versions());
changes = true;
++i;
}
@@ -241,7 +241,7 @@ struct ProfileManifest
auto v1 = i->versions();
auto v2 = j->versions();
if (v1 != v2) {
- std::cout << fmt("%s%s: %s -> %s\n", indent, i->describe(), v1, v2);
+ logger->cout("%s%s: %s -> %s", indent, i->describe(), v1, v2);
changes = true;
}
++i;
@@ -250,17 +250,23 @@ struct ProfileManifest
}
if (!changes)
- std::cout << fmt("%sNo changes.\n", indent);
+ logger->cout("%sNo changes.", indent);
}
};
-static std::map<Installable *, std::pair<BuiltPaths, ExtraPathInfo>>
+static std::map<Installable *, std::pair<BuiltPaths, ref<ExtraPathInfo>>>
builtPathsPerInstallable(
- const std::vector<std::pair<std::shared_ptr<Installable>, BuiltPathWithResult>> & builtPaths)
+ const std::vector<std::pair<ref<Installable>, BuiltPathWithResult>> & builtPaths)
{
- std::map<Installable *, std::pair<BuiltPaths, ExtraPathInfo>> res;
+ std::map<Installable *, std::pair<BuiltPaths, ref<ExtraPathInfo>>> res;
for (auto & [installable, builtPath] : builtPaths) {
- auto & r = res[installable.get()];
+ auto & r = res.insert({
+ &*installable,
+ {
+ {},
+ make_ref<ExtraPathInfo>(),
+ }
+ }).first->second;
/* Note that there could be conflicting info
(e.g. meta.priority fields) if the installable returned
multiple derivations. So pick one arbitrarily. FIXME:
@@ -296,7 +302,7 @@ struct CmdProfileInstall : InstallablesCommand, MixDefaultProfile
;
}
- void run(ref<Store> store) override
+ void run(ref<Store> store, Installables && installables) override
{
ProfileManifest manifest(*getEvalState(), *profile);
@@ -307,14 +313,16 @@ struct CmdProfileInstall : InstallablesCommand, MixDefaultProfile
for (auto & installable : installables) {
ProfileElement element;
- auto & [res, info] = builtPaths[installable.get()];
+ auto iter = builtPaths.find(&*installable);
+ if (iter == builtPaths.end()) continue;
+ auto & [res, info] = iter->second;
- if (info.originalRef && info.resolvedRef && info.attrPath && info.extendedOutputsSpec) {
+ if (auto * info2 = dynamic_cast<ExtraPathInfoFlake *>(&*info)) {
element.source = ProfileElementSource {
- .originalRef = *info.originalRef,
- .resolvedRef = *info.resolvedRef,
- .attrPath = *info.attrPath,
- .outputs = *info.extendedOutputsSpec,
+ .originalRef = info2->flake.originalRef,
+ .resolvedRef = info2->flake.resolvedRef,
+ .attrPath = info2->value.attrPath,
+ .outputs = info2->value.extendedOutputsSpec,
};
}
@@ -323,7 +331,12 @@ struct CmdProfileInstall : InstallablesCommand, MixDefaultProfile
element.priority =
priority
? *priority
- : info.priority.value_or(defaultPriority);
+ : ({
+ auto * info2 = dynamic_cast<ExtraPathInfoValue *>(&*info);
+ info2
+ ? info2->value.priority.value_or(defaultPriority)
+ : defaultPriority;
+ });
element.updateStorePaths(getEvalStore(), store, res);
@@ -513,7 +526,7 @@ struct CmdProfileUpgrade : virtual SourceExprCommand, MixDefaultProfile, MixProf
auto matchers = getMatchers(store);
- std::vector<std::shared_ptr<Installable>> installables;
+ Installables installables;
std::vector<size_t> indices;
auto upgradedCount = 0;
@@ -529,7 +542,7 @@ struct CmdProfileUpgrade : virtual SourceExprCommand, MixDefaultProfile, MixProf
Activity act(*logger, lvlChatty, actUnknown,
fmt("checking '%s' for updates", element.source->attrPath));
- auto installable = std::make_shared<InstallableFlake>(
+ auto installable = make_ref<InstallableFlake>(
this,
getEvalState(),
FlakeRef(element.source->originalRef),
@@ -541,19 +554,20 @@ struct CmdProfileUpgrade : virtual SourceExprCommand, MixDefaultProfile, MixProf
auto derivedPaths = installable->toDerivedPaths();
if (derivedPaths.empty()) continue;
- auto & info = derivedPaths[0].info;
-
- assert(info.resolvedRef && info.attrPath);
+ auto * infop = dynamic_cast<ExtraPathInfoFlake *>(&*derivedPaths[0].info);
+ // `InstallableFlake` should use `ExtraPathInfoFlake`.
+ assert(infop);
+ auto & info = *infop;
- if (element.source->resolvedRef == info.resolvedRef) continue;
+ if (element.source->resolvedRef == info.flake.resolvedRef) continue;
printInfo("upgrading '%s' from flake '%s' to '%s'",
- element.source->attrPath, element.source->resolvedRef, *info.resolvedRef);
+ element.source->attrPath, element.source->resolvedRef, info.flake.resolvedRef);
element.source = ProfileElementSource {
.originalRef = installable->flakeRef,
- .resolvedRef = *info.resolvedRef,
- .attrPath = *info.attrPath,
+ .resolvedRef = info.flake.resolvedRef,
+ .attrPath = info.value.attrPath,
.outputs = installable->extendedOutputsSpec,
};
@@ -582,7 +596,10 @@ struct CmdProfileUpgrade : virtual SourceExprCommand, MixDefaultProfile, MixProf
for (size_t i = 0; i < installables.size(); ++i) {
auto & installable = installables.at(i);
auto & element = manifest.elements[indices.at(i)];
- element.updateStorePaths(getEvalStore(), store, builtPaths[installable.get()].first);
+ element.updateStorePaths(
+ getEvalStore(),
+ store,
+ builtPaths.find(&*installable)->second.first);
}
updateProfile(manifest.build(store));
@@ -640,9 +657,9 @@ struct CmdProfileDiffClosures : virtual StoreCommand, MixDefaultProfile
for (auto & gen : gens) {
if (prevGen) {
- if (!first) std::cout << "\n";
+ if (!first) logger->cout("");
first = false;
- std::cout << fmt("Version %d -> %d:\n", prevGen->number, gen.number);
+ logger->cout("Version %d -> %d:", prevGen->number, gen.number);
printClosureDiff(store,
store->followLinksToStorePath(prevGen->path),
store->followLinksToStorePath(gen.path),
@@ -678,10 +695,10 @@ struct CmdProfileHistory : virtual StoreCommand, EvalCommand, MixDefaultProfile
for (auto & gen : gens) {
ProfileManifest manifest(*getEvalState(), gen.path);
- if (!first) std::cout << "\n";
+ if (!first) logger->cout("");
first = false;
- std::cout << fmt("Version %s%d" ANSI_NORMAL " (%s)%s:\n",
+ logger->cout("Version %s%d" ANSI_NORMAL " (%s)%s:",
gen.number == curGen ? ANSI_GREEN : ANSI_BOLD,
gen.number,
std::put_time(std::gmtime(&gen.creationTime), "%Y-%m-%d"),
@@ -798,7 +815,6 @@ struct CmdProfile : NixMultiCommand
{
if (!command)
throw UsageError("'nix profile' requires a sub-command.");
- command->second->prepare();
command->second->run();
}
};
diff --git a/src/nix/profile.md b/src/nix/profile.md
index 273e02280..bf61ef4b9 100644
--- a/src/nix/profile.md
+++ b/src/nix/profile.md
@@ -12,7 +12,7 @@ them to be rolled back easily.
The default profile used by `nix profile` is `$HOME/.nix-profile`,
which, if it does not exist, is created as a symlink to
`/nix/var/nix/profiles/default` if Nix is invoked by the
-`root` user, or `/nix/var/nix/profiles/per-user/`*username* otherwise.
+`root` user, or `${XDG_STATE_HOME-$HOME/.local/state}/nix/profiles/profile` otherwise.
You can specify another profile location using `--profile` *path*.
@@ -24,11 +24,11 @@ the profile. In turn, *path*`-`*N* is a symlink to a path in the Nix
store. For example:
```console
-$ ls -l /nix/var/nix/profiles/per-user/alice/profile*
-lrwxrwxrwx 1 alice users 14 Nov 25 14:35 /nix/var/nix/profiles/per-user/alice/profile -> profile-7-link
-lrwxrwxrwx 1 alice users 51 Oct 28 16:18 /nix/var/nix/profiles/per-user/alice/profile-5-link -> /nix/store/q69xad13ghpf7ir87h0b2gd28lafjj1j-profile
-lrwxrwxrwx 1 alice users 51 Oct 29 13:20 /nix/var/nix/profiles/per-user/alice/profile-6-link -> /nix/store/6bvhpysd7vwz7k3b0pndn7ifi5xr32dg-profile
-lrwxrwxrwx 1 alice users 51 Nov 25 14:35 /nix/var/nix/profiles/per-user/alice/profile-7-link -> /nix/store/mp0x6xnsg0b8qhswy6riqvimai4gm677-profile
+$ ls -l ~alice/.local/state/nix/profiles/profile*
+lrwxrwxrwx 1 alice users 14 Nov 25 14:35 /home/alice/.local/state/nix/profiles/profile -> profile-7-link
+lrwxrwxrwx 1 alice users 51 Oct 28 16:18 /home/alice/.local/state/nix/profiles/profile-5-link -> /nix/store/q69xad13ghpf7ir87h0b2gd28lafjj1j-profile
+lrwxrwxrwx 1 alice users 51 Oct 29 13:20 /home/alice/.local/state/nix/profiles/profile-6-link -> /nix/store/6bvhpysd7vwz7k3b0pndn7ifi5xr32dg-profile
+lrwxrwxrwx 1 alice users 51 Nov 25 14:35 /home/alice/.local/state/nix/profiles/profile-7-link -> /nix/store/mp0x6xnsg0b8qhswy6riqvimai4gm677-profile
```
Each of these symlinks is a root for the Nix garbage collector.
@@ -38,20 +38,20 @@ profile is a tree of symlinks to the files of the installed packages,
e.g.
```console
-$ ll -R /nix/var/nix/profiles/per-user/eelco/profile-7-link/
-/nix/var/nix/profiles/per-user/eelco/profile-7-link/:
+$ ll -R ~eelco/.local/state/nix/profiles/profile-7-link/
+/home/eelco/.local/state/nix/profiles/profile-7-link/:
total 20
dr-xr-xr-x 2 root root 4096 Jan 1 1970 bin
-r--r--r-- 2 root root 1402 Jan 1 1970 manifest.json
dr-xr-xr-x 4 root root 4096 Jan 1 1970 share
-/nix/var/nix/profiles/per-user/eelco/profile-7-link/bin:
+/home/eelco/.local/state/nix/profiles/profile-7-link/bin:
total 20
lrwxrwxrwx 5 root root 79 Jan 1 1970 chromium -> /nix/store/ijm5k0zqisvkdwjkc77mb9qzb35xfi4m-chromium-86.0.4240.111/bin/chromium
lrwxrwxrwx 7 root root 87 Jan 1 1970 spotify -> /nix/store/w9182874m1bl56smps3m5zjj36jhp3rn-spotify-1.1.26.501.gbe11e53b-15/bin/spotify
lrwxrwxrwx 3 root root 79 Jan 1 1970 zoom-us -> /nix/store/wbhg2ga8f3h87s9h5k0slxk0m81m4cxl-zoom-us-5.3.469451.0927/bin/zoom-us
-/nix/var/nix/profiles/per-user/eelco/profile-7-link/share/applications:
+/home/eelco/.local/state/nix/profiles/profile-7-link/share/applications:
total 12
lrwxrwxrwx 4 root root 120 Jan 1 1970 chromium-browser.desktop -> /nix/store/4cf803y4vzfm3gyk3vzhzb2327v0kl8a-chromium-unwrapped-86.0.4240.111/share/applications/chromium-browser.desktop
lrwxrwxrwx 7 root root 110 Jan 1 1970 spotify.desktop -> /nix/store/w9182874m1bl56smps3m5zjj36jhp3rn-spotify-1.1.26.501.gbe11e53b-15/share/applications/spotify.desktop
diff --git a/src/nix/realisation.cc b/src/nix/realisation.cc
index c9a7157cd..e19e93219 100644
--- a/src/nix/realisation.cc
+++ b/src/nix/realisation.cc
@@ -21,7 +21,6 @@ struct CmdRealisation : virtual NixMultiCommand
{
if (!command)
throw UsageError("'nix realisation' requires a sub-command.");
- command->second->prepare();
command->second->run();
}
};
@@ -46,7 +45,7 @@ struct CmdRealisationInfo : BuiltPathsCommand, MixJSON
void run(ref<Store> store, BuiltPaths && paths) override
{
- settings.requireExperimentalFeature(Xp::CaDerivations);
+ experimentalFeatureSettings.require(Xp::CaDerivations);
RealisedPath::Set realisations;
for (auto & builtPath : paths) {
@@ -65,18 +64,16 @@ struct CmdRealisationInfo : BuiltPathsCommand, MixJSON
res.push_back(currentPath);
}
- std::cout << res.dump();
+ logger->cout("%s", res);
}
else {
for (auto & path : realisations) {
if (auto realisation = std::get_if<Realisation>(&path.raw)) {
- std::cout <<
- realisation->id.to_string() << " " <<
- store->printStorePath(realisation->outPath);
+ logger->cout("%s %s",
+ realisation->id.to_string(),
+ store->printStorePath(realisation->outPath));
} else
- std::cout << store->printStorePath(path.path());
-
- std::cout << std::endl;
+ logger->cout("%s", store->printStorePath(path.path()));
}
}
}
diff --git a/src/nix/registry.cc b/src/nix/registry.cc
index b5bdfba95..cb94bbd31 100644
--- a/src/nix/registry.cc
+++ b/src/nix/registry.cc
@@ -224,10 +224,9 @@ struct CmdRegistry : virtual NixMultiCommand
void run() override
{
- settings.requireExperimentalFeature(Xp::Flakes);
+ experimentalFeatureSettings.require(Xp::Flakes);
if (!command)
throw UsageError("'nix registry' requires a sub-command.");
- command->second->prepare();
command->second->run();
}
};
diff --git a/src/nix/repl.cc b/src/nix/repl.cc
index 679bdea77..bb14f3f99 100644
--- a/src/nix/repl.cc
+++ b/src/nix/repl.cc
@@ -1,28 +1,23 @@
#include "eval.hh"
#include "globals.hh"
#include "command.hh"
+#include "installable-value.hh"
#include "repl.hh"
namespace nix {
-struct CmdRepl : InstallablesCommand
+struct CmdRepl : RawInstallablesCommand
{
CmdRepl() {
evalSettings.pureEval = false;
}
- void prepare() override
+ /**
+ * This command is stable before the others
+ */
+ std::optional<ExperimentalFeature> experimentalFeature() override
{
- if (!settings.isExperimentalFeatureEnabled(Xp::ReplFlake) && !(file) && this->_installables.size() >= 1) {
- warn("future versions of Nix will require using `--file` to load a file");
- if (this->_installables.size() > 1)
- warn("more than one input file is not currently supported");
- auto filePath = this->_installables[0].data();
- file = std::optional(filePath);
- _installables.front() = _installables.back();
- _installables.pop_back();
- }
- installables = InstallablesCommand::load();
+ return std::nullopt;
}
std::vector<std::string> files;
@@ -32,11 +27,6 @@ struct CmdRepl : InstallablesCommand
return {""};
}
- bool useDefaultInstallables() override
- {
- return file.has_value() or expr.has_value();
- }
-
bool forceImpureByDefault() override
{
return true;
@@ -54,17 +44,34 @@ struct CmdRepl : InstallablesCommand
;
}
- void run(ref<Store> store) override
+ void applyDefaultInstallables(std::vector<std::string> & rawInstallables) override
+ {
+ if (!experimentalFeatureSettings.isEnabled(Xp::ReplFlake) && !(file) && rawInstallables.size() >= 1) {
+ warn("future versions of Nix will require using `--file` to load a file");
+ if (rawInstallables.size() > 1)
+ warn("more than one input file is not currently supported");
+ auto filePath = rawInstallables[0].data();
+ file = std::optional(filePath);
+ rawInstallables.front() = rawInstallables.back();
+ rawInstallables.pop_back();
+ }
+ if (rawInstallables.empty() && (file.has_value() || expr.has_value())) {
+ rawInstallables.push_back(".");
+ }
+ }
+
+ void run(ref<Store> store, std::vector<std::string> && rawInstallables) override
{
auto state = getEvalState();
auto getValues = [&]()->AbstractNixRepl::AnnotatedValues{
- auto installables = load();
+ auto installables = parseInstallables(store, rawInstallables);
AbstractNixRepl::AnnotatedValues values;
- for (auto & installable: installables){
- auto what = installable->what();
+ for (auto & installable_: installables){
+ auto & installable = InstallableValue::require(*installable_);
+ auto what = installable.what();
if (file){
- auto [val, pos] = installable->toValue(*state);
- auto what = installable->what();
+ auto [val, pos] = installable.toValue(*state);
+ auto what = installable.what();
state->forceValue(*val, pos);
auto autoArgs = getAutoArgs(*state);
auto valPost = state->allocValue();
@@ -72,7 +79,7 @@ struct CmdRepl : InstallablesCommand
state->forceValue(*valPost, pos);
values.push_back( {valPost, what });
} else {
- auto [val, pos] = installable->toValue(*state);
+ auto [val, pos] = installable.toValue(*state);
values.push_back( {val, what} );
}
}
diff --git a/src/nix/run.cc b/src/nix/run.cc
index 6fca68047..1baf299ab 100644
--- a/src/nix/run.cc
+++ b/src/nix/run.cc
@@ -1,5 +1,5 @@
#include "run.hh"
-#include "command.hh"
+#include "command-installable-value.hh"
#include "common-args.hh"
#include "shared.hh"
#include "store-api.hh"
@@ -97,7 +97,7 @@ struct CmdShell : InstallablesCommand, MixEnvironment
;
}
- void run(ref<Store> store) override
+ void run(ref<Store> store, Installables && installables) override
{
auto outPaths = Installable::toStorePaths(getEvalStore(), store, Realise::Outputs, OperateOn::Output, installables);
@@ -137,7 +137,7 @@ struct CmdShell : InstallablesCommand, MixEnvironment
static auto rCmdShell = registerCommand<CmdShell>("shell");
-struct CmdRun : InstallableCommand
+struct CmdRun : InstallableValueCommand
{
using InstallableCommand::run;
@@ -183,7 +183,7 @@ struct CmdRun : InstallableCommand
return res;
}
- void run(ref<Store> store) override
+ void run(ref<Store> store, ref<InstallableValue> installable) override
{
auto state = getEvalState();
diff --git a/src/nix/run.hh b/src/nix/run.hh
index fed360158..97ddef19b 100644
--- a/src/nix/run.hh
+++ b/src/nix/run.hh
@@ -1,4 +1,5 @@
#pragma once
+///@file
#include "store-api.hh"
diff --git a/src/nix/run.md b/src/nix/run.md
index a0f362076..250ea65aa 100644
--- a/src/nix/run.md
+++ b/src/nix/run.md
@@ -35,7 +35,7 @@ R""(
# Description
-`nix run` builds and runs *installable*, which must evaluate to an
+`nix run` builds and runs [*installable*](./nix.md#installables), which must evaluate to an
*app* or a regular Nix derivation.
If *installable* evaluates to an *app* (see below), it executes the
diff --git a/src/nix/search.cc b/src/nix/search.cc
index 4fa1e7837..c92ed1663 100644
--- a/src/nix/search.cc
+++ b/src/nix/search.cc
@@ -1,4 +1,4 @@
-#include "command.hh"
+#include "command-installable-value.hh"
#include "globals.hh"
#include "eval.hh"
#include "eval-inline.hh"
@@ -22,7 +22,7 @@ std::string wrap(std::string prefix, std::string s)
return concatStrings(prefix, s, ANSI_NORMAL);
}
-struct CmdSearch : InstallableCommand, MixJSON
+struct CmdSearch : InstallableValueCommand, MixJSON
{
std::vector<std::string> res;
std::vector<std::string> excludeRes;
@@ -61,7 +61,7 @@ struct CmdSearch : InstallableCommand, MixJSON
};
}
- void run(ref<Store> store) override
+ void run(ref<Store> store, ref<InstallableValue> installable) override
{
settings.readOnlyMode = true;
evalSettings.enableImportFromDerivation.setDefault(false);
@@ -196,9 +196,8 @@ struct CmdSearch : InstallableCommand, MixJSON
for (auto & cursor : installable->getCursors(*state))
visit(*cursor, cursor->getAttrPath(), true);
- if (json) {
- std::cout << jsonOut->dump() << std::endl;
- }
+ if (json)
+ logger->cout("%s", *jsonOut);
if (!json && !results)
throw Error("no results for the given search term(s)!");
diff --git a/src/nix/search.md b/src/nix/search.md
index 5a5b5ae05..4caa90654 100644
--- a/src/nix/search.md
+++ b/src/nix/search.md
@@ -62,10 +62,10 @@ R""(
# Description
-`nix search` searches *installable* (which must be evaluatable, e.g. a
-flake) for packages whose name or description matches all of the
+`nix search` searches [*installable*](./nix.md#installables) (which can be evaluated, that is, a
+flake or Nix expression, but not a store path or store derivation path) for packages whose name or description matches all of the
 regular expressions *regex*. For each matching package, it prints the
-full attribute name (from the root of the installable), the version
+full attribute name (from the root of the [installable](./nix.md#installables)), the version
and the `meta.description` field, highlighting the substrings that
were matched by the regular expressions. If no regular expressions are
specified, all packages are shown.
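
A minimal sketch of the invocation described above; the flake reference and regexes are examples only:

```console
# Search the nixpkgs flake for packages matching both regexes.
$ nix search nixpkgs 'blender' '3d'
```
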
diff --git a/src/nix/shell.md b/src/nix/shell.md
index 9fa1031f5..13a389103 100644
--- a/src/nix/shell.md
+++ b/src/nix/shell.md
@@ -48,7 +48,7 @@ R""(
# Description
`nix shell` runs a command in an environment in which the `$PATH` variable
-provides the specified *installables*. If no command is specified, it starts the
+provides the specified [*installables*](./nix.md#installables). If no command is specified, it starts the
default shell of your user account specified by `$SHELL`.
)""
diff --git a/src/nix/show-derivation.cc b/src/nix/show-derivation.cc
index d1a516cad..4a406ae08 100644
--- a/src/nix/show-derivation.cc
+++ b/src/nix/show-derivation.cc
@@ -39,7 +39,7 @@ struct CmdShowDerivation : InstallablesCommand
Category category() override { return catUtility; }
- void run(ref<Store> store) override
+ void run(ref<Store> store, Installables && installables) override
{
auto drvPaths = Installable::toDerivations(store, installables, true);
@@ -57,7 +57,7 @@ struct CmdShowDerivation : InstallablesCommand
jsonRoot[store->printStorePath(drvPath)] =
store->readDerivation(drvPath).toJSON(*store);
}
- std::cout << jsonRoot.dump(2) << std::endl;
+ logger->cout(jsonRoot.dump(2));
}
};
diff --git a/src/nix/show-derivation.md b/src/nix/show-derivation.md
index 2cd93aa62..1d37c6f5a 100644
--- a/src/nix/show-derivation.md
+++ b/src/nix/show-derivation.md
@@ -39,7 +39,7 @@ R""(
# Description
This command prints on standard output a JSON representation of the
-[store derivation]s to which *installables* evaluate. Store derivations
+[store derivation]s to which [*installables*](./nix.md#installables) evaluate. Store derivations
are used internally by Nix. They are store paths with extension `.drv`
that represent the build-time dependency graph to which a Nix
expression evaluates.
diff --git a/src/nix/sigs.cc b/src/nix/sigs.cc
index 3d659d6d2..45cd2e1a6 100644
--- a/src/nix/sigs.cc
+++ b/src/nix/sigs.cc
@@ -45,7 +45,7 @@ struct CmdCopySigs : StorePathsCommand
//logger->setExpected(doneLabel, storePaths.size());
auto doPath = [&](const Path & storePathS) {
- //Activity act(*logger, lvlInfo, format("getting signatures for '%s'") % storePath);
+ //Activity act(*logger, lvlInfo, "getting signatures for '%s'", storePath);
checkInterrupt();
@@ -173,7 +173,7 @@ struct CmdKeyGenerateSecret : Command
if (!keyName)
throw UsageError("required argument '--key-name' is missing");
- std::cout << SecretKey::generate(*keyName).to_string();
+ writeFull(STDOUT_FILENO, SecretKey::generate(*keyName).to_string());
}
};
@@ -194,7 +194,7 @@ struct CmdKeyConvertSecretToPublic : Command
void run() override
{
SecretKey secretKey(drainFD(STDIN_FILENO));
- std::cout << secretKey.toPublicKey().to_string();
+ writeFull(STDOUT_FILENO, secretKey.toPublicKey().to_string());
}
};
@@ -219,7 +219,6 @@ struct CmdKey : NixMultiCommand
{
if (!command)
throw UsageError("'nix key' requires a sub-command.");
- command->second->prepare();
command->second->run();
}
};
diff --git a/src/nix/store-copy-log.cc b/src/nix/store-copy-log.cc
index d5fab5f2f..a6e8aeff7 100644
--- a/src/nix/store-copy-log.cc
+++ b/src/nix/store-copy-log.cc
@@ -24,9 +24,7 @@ struct CmdCopyLog : virtual CopyCommand, virtual InstallablesCommand
;
}
- Category category() override { return catUtility; }
-
- void run(ref<Store> srcStore) override
+ void run(ref<Store> srcStore, Installables && installables) override
{
auto & srcLogStore = require<LogStore>(*srcStore);
diff --git a/src/nix/store-delete.cc b/src/nix/store-delete.cc
index ca43f1530..6719227df 100644
--- a/src/nix/store-delete.cc
+++ b/src/nix/store-delete.cc
@@ -32,7 +32,7 @@ struct CmdStoreDelete : StorePathsCommand
;
}
- void run(ref<Store> store, std::vector<StorePath> && storePaths) override
+ void run(ref<Store> store, StorePaths && storePaths) override
{
auto & gcStore = require<GcStore>(*store);
diff --git a/src/nix/store-delete.md b/src/nix/store-delete.md
index db535f87c..431bc5f5e 100644
--- a/src/nix/store-delete.md
+++ b/src/nix/store-delete.md
@@ -10,7 +10,7 @@ R""(
# Description
-This command deletes the store paths specified by *installables*. ,
+This command deletes the store paths specified by [*installables*](./nix.md#installables),
but only if it is safe to do so; that is, when the path is not
reachable from a root of the garbage collector. This means that you
can only delete paths that would also be deleted by `nix store
diff --git a/src/nix/store-dump-path.md b/src/nix/store-dump-path.md
index 4ef563526..56e2174b6 100644
--- a/src/nix/store-dump-path.md
+++ b/src/nix/store-dump-path.md
@@ -18,6 +18,6 @@ R""(
# Description
This command generates a NAR file containing the serialisation of the
-store path *installable*. The NAR is written to standard output.
+store path [*installable*](./nix.md#installables). The NAR is written to standard output.
)""
diff --git a/src/nix/store-repair.cc b/src/nix/store-repair.cc
index 8fcb3639a..895e39685 100644
--- a/src/nix/store-repair.cc
+++ b/src/nix/store-repair.cc
@@ -17,7 +17,7 @@ struct CmdStoreRepair : StorePathsCommand
;
}
- void run(ref<Store> store, std::vector<StorePath> && storePaths) override
+ void run(ref<Store> store, StorePaths && storePaths) override
{
for (auto & path : storePaths)
store->repairPath(path);
diff --git a/src/nix/store-repair.md b/src/nix/store-repair.md
index 92d2205a9..180c577ac 100644
--- a/src/nix/store-repair.md
+++ b/src/nix/store-repair.md
@@ -17,7 +17,7 @@ R""(
# Description
This command attempts to "repair" the store paths specified by
-*installables* by redownloading them using the available
+[*installables*](./nix.md#installables) by redownloading them using the available
substituters. If no substitutes are available, then repair is not
possible.
diff --git a/src/nix/store.cc b/src/nix/store.cc
index 44e53c7c7..2879e03b3 100644
--- a/src/nix/store.cc
+++ b/src/nix/store.cc
@@ -18,7 +18,6 @@ struct CmdStore : virtual NixMultiCommand
{
if (!command)
throw UsageError("'nix store' requires a sub-command.");
- command->second->prepare();
command->second->run();
}
};
diff --git a/src/nix/upgrade-nix.cc b/src/nix/upgrade-nix.cc
index 17796d6b8..2295d86d0 100644
--- a/src/nix/upgrade-nix.cc
+++ b/src/nix/upgrade-nix.cc
@@ -32,6 +32,14 @@ struct CmdUpgradeNix : MixDryRun, StoreCommand
});
}
+ /**
+ * This command is stable before the others
+ */
+ std::optional<ExperimentalFeature> experimentalFeature() override
+ {
+ return std::nullopt;
+ }
+
std::string description() override
{
return "upgrade Nix to the stable version declared in Nixpkgs";
diff --git a/src/nix/upgrade-nix.md b/src/nix/upgrade-nix.md
index 084c80ba2..08757aebd 100644
--- a/src/nix/upgrade-nix.md
+++ b/src/nix/upgrade-nix.md
@@ -11,7 +11,7 @@ R""(
* Upgrade Nix in a specific profile:
```console
- # nix upgrade-nix -p /nix/var/nix/profiles/per-user/alice/profile
+ # nix upgrade-nix -p ~alice/.local/state/nix/profiles/profile
```
# Description
diff --git a/src/nix/verify.md b/src/nix/verify.md
index 1c43792e7..cc1122c02 100644
--- a/src/nix/verify.md
+++ b/src/nix/verify.md
@@ -24,7 +24,7 @@ R""(
# Description
-This command verifies the integrity of the store paths *installables*,
+This command verifies the integrity of the store paths [*installables*](./nix.md#installables),
or, if `--all` is given, the entire Nix store. For each path, it
checks that
diff --git a/src/resolve-system-dependencies/resolve-system-dependencies.cc b/src/resolve-system-dependencies/resolve-system-dependencies.cc
index c6023eb03..4ea268d24 100644
--- a/src/resolve-system-dependencies/resolve-system-dependencies.cc
+++ b/src/resolve-system-dependencies/resolve-system-dependencies.cc
@@ -157,13 +157,9 @@ int main(int argc, char ** argv)
uname(&_uname);
- auto cacheParentDir = (format("%1%/dependency-maps") % settings.nixStateDir).str();
+ auto cacheParentDir = fmt("%1%/dependency-maps", settings.nixStateDir);
- cacheDir = (format("%1%/%2%-%3%-%4%")
- % cacheParentDir
- % _uname.machine
- % _uname.sysname
- % _uname.release).str();
+ cacheDir = fmt("%1%/%2%-%3%-%4%", cacheParentDir, _uname.machine, _uname.sysname, _uname.release);
mkdir(cacheParentDir.c_str(), 0755);
mkdir(cacheDir.c_str(), 0755);
diff --git a/tests/binary-cache.sh b/tests/binary-cache.sh
index b38db8a15..7c64a115c 100644
--- a/tests/binary-cache.sh
+++ b/tests/binary-cache.sh
@@ -15,11 +15,11 @@ outPath=$(nix-build dependencies.nix --no-out-link)
nix copy --to file://$cacheDir $outPath
# Test copying build logs to the binary cache.
-nix log --store file://$cacheDir $outPath 2>&1 | grep 'is not available'
+expect 1 nix log --store file://$cacheDir $outPath 2>&1 | grep 'is not available'
nix store copy-log --to file://$cacheDir $outPath
nix log --store file://$cacheDir $outPath | grep FOO
rm -rf $TEST_ROOT/var/log/nix
-nix log $outPath 2>&1 | grep 'is not available'
+expect 1 nix log $outPath 2>&1 | grep 'is not available'
nix log --substituters file://$cacheDir $outPath | grep FOO
# Test copying build logs from the binary cache.
@@ -78,8 +78,8 @@ mv $nar $nar.good
mkdir -p $TEST_ROOT/empty
nix-store --dump $TEST_ROOT/empty | xz > $nar
-nix-build --substituters "file://$cacheDir" --no-require-sigs dependencies.nix -o $TEST_ROOT/result 2>&1 | tee $TEST_ROOT/log
-grep -q "hash mismatch" $TEST_ROOT/log
+expect 1 nix-build --substituters "file://$cacheDir" --no-require-sigs dependencies.nix -o $TEST_ROOT/result 2>&1 | tee $TEST_ROOT/log
+grepQuiet "hash mismatch" $TEST_ROOT/log
mv $nar.good $nar
@@ -126,9 +126,9 @@ clearStore
rm -v $(grep -l "StorePath:.*dependencies-input-2" $cacheDir/*.narinfo)
nix-build --substituters "file://$cacheDir" --no-require-sigs dependencies.nix -o $TEST_ROOT/result 2>&1 | tee $TEST_ROOT/log
-grep -q "copying path.*input-0" $TEST_ROOT/log
-grep -q "copying path.*input-2" $TEST_ROOT/log
-grep -q "copying path.*top" $TEST_ROOT/log
+grepQuiet "copying path.*input-0" $TEST_ROOT/log
+grepQuiet "copying path.*input-2" $TEST_ROOT/log
+grepQuiet "copying path.*top" $TEST_ROOT/log
# Idem, but without cached .narinfo.
@@ -136,11 +136,11 @@ clearStore
clearCacheCache
nix-build --substituters "file://$cacheDir" --no-require-sigs dependencies.nix -o $TEST_ROOT/result 2>&1 | tee $TEST_ROOT/log
-grep -q "don't know how to build" $TEST_ROOT/log
-grep -q "building.*input-1" $TEST_ROOT/log
-grep -q "building.*input-2" $TEST_ROOT/log
-grep -q "copying path.*input-0" $TEST_ROOT/log
-grep -q "copying path.*top" $TEST_ROOT/log
+grepQuiet "don't know how to build" $TEST_ROOT/log
+grepQuiet "building.*input-1" $TEST_ROOT/log
+grepQuiet "building.*input-2" $TEST_ROOT/log
+grepQuiet "copying path.*input-0" $TEST_ROOT/log
+grepQuiet "copying path.*top" $TEST_ROOT/log
# Create a signed binary cache.
diff --git a/tests/build-delete.sh b/tests/build-delete.sh
index 636681f64..9c56b00e8 100644
--- a/tests/build-delete.sh
+++ b/tests/build-delete.sh
@@ -2,8 +2,6 @@ source common.sh
clearStore
-set -o pipefail
-
# https://github.com/NixOS/nix/issues/6572
issue_6572_independent_outputs() {
nix build -f multiple-outputs.nix --json independent --no-link > $TEST_ROOT/independent.json
diff --git a/tests/build-dry.sh b/tests/build-dry.sh
index 5f29239dc..6d1754af5 100644
--- a/tests/build-dry.sh
+++ b/tests/build-dry.sh
@@ -54,7 +54,7 @@ clearCache
RES=$(nix build -f dependencies.nix --dry-run --json)
-if [[ -z "$NIX_TESTS_CA_BY_DEFAULT" ]]; then
+if [[ -z "${NIX_TESTS_CA_BY_DEFAULT-}" ]]; then
echo "$RES" | jq '.[0] | [
(.drvPath | test("'$NIX_STORE_DIR'.*\\.drv")),
(.outputs.out | test("'$NIX_STORE_DIR'"))
diff --git a/tests/build-remote.sh b/tests/build-remote.sh
index 25a482003..78e12b477 100644
--- a/tests/build-remote.sh
+++ b/tests/build-remote.sh
@@ -1,5 +1,5 @@
-if ! canUseSandbox; then exit 99; fi
-if ! [[ $busybox =~ busybox ]]; then exit 99; fi
+requireSandboxSupport
+[[ $busybox =~ busybox ]] || skipTest "no busybox"
unset NIX_STORE_DIR
unset NIX_STATE_DIR
@@ -7,7 +7,7 @@ unset NIX_STATE_DIR
function join_by { local d=$1; shift; echo -n "$1"; shift; printf "%s" "${@/#/$d}"; }
EXTRA_SYSTEM_FEATURES=()
-if [[ -n "$CONTENT_ADDRESSED" ]]; then
+if [[ -n "${CONTENT_ADDRESSED-}" ]]; then
EXTRA_SYSTEM_FEATURES=("ca-derivations")
fi
@@ -42,25 +42,26 @@ testPrintOutPath=$(nix build -L -v -f $file --no-link --print-out-paths --max-jo
[[ $testPrintOutPath =~ store.*build-remote ]]
-set -o pipefail
-
# Ensure that input1 was built on store1 due to the required feature.
-nix path-info --store $TEST_ROOT/machine1 --all \
- | grep builder-build-remote-input-1.sh \
- | grep -v builder-build-remote-input-2.sh \
- | grep -v builder-build-remote-input-3.sh
+output=$(nix path-info --store $TEST_ROOT/machine1 --all)
+echo "$output" | grepQuiet builder-build-remote-input-1.sh
+echo "$output" | grepQuietInverse builder-build-remote-input-2.sh
+echo "$output" | grepQuietInverse builder-build-remote-input-3.sh
+unset output
# Ensure that input2 was built on store2 due to the required feature.
-nix path-info --store $TEST_ROOT/machine2 --all \
- | grep -v builder-build-remote-input-1.sh \
- | grep builder-build-remote-input-2.sh \
- | grep -v builder-build-remote-input-3.sh
+output=$(nix path-info --store $TEST_ROOT/machine2 --all)
+echo "$output" | grepQuietInverse builder-build-remote-input-1.sh
+echo "$output" | grepQuiet builder-build-remote-input-2.sh
+echo "$output" | grepQuietInverse builder-build-remote-input-3.sh
+unset output
# Ensure that input3 was built on store3 due to the required feature.
-nix path-info --store $TEST_ROOT/machine3 --all \
- | grep -v builder-build-remote-input-1.sh \
- | grep -v builder-build-remote-input-2.sh \
- | grep builder-build-remote-input-3.sh
+output=$(nix path-info --store $TEST_ROOT/machine3 --all)
+echo "$output" | grepQuietInverse builder-build-remote-input-1.sh
+echo "$output" | grepQuietInverse builder-build-remote-input-2.sh
+echo "$output" | grepQuiet builder-build-remote-input-3.sh
+unset output
for i in input1 input3; do
diff --git a/tests/build.sh b/tests/build.sh
index 2dfd43b65..b579fc374 100644
--- a/tests/build.sh
+++ b/tests/build.sh
@@ -2,8 +2,6 @@ source common.sh
clearStore
-set -o pipefail
-
# Make sure that 'nix build' returns all outputs by default.
nix build -f multiple-outputs.nix --json a b --no-link | jq --exit-status '
(.[0] |
diff --git a/tests/ca/build.sh b/tests/ca/build.sh
index cc225c6c8..98e1c5125 100644
--- a/tests/ca/build.sh
+++ b/tests/ca/build.sh
@@ -2,14 +2,14 @@
source common.sh
-drv=$(nix-instantiate --experimental-features ca-derivations ./content-addressed.nix -A rootCA --arg seed 1)
-nix --experimental-features 'nix-command ca-derivations' show-derivation "$drv" --arg seed 1
+drv=$(nix-instantiate ./content-addressed.nix -A rootCA --arg seed 1)
+nix show-derivation "$drv" --arg seed 1
buildAttr () {
local derivationPath=$1
local seedValue=$2
shift; shift
- local args=("--experimental-features" "ca-derivations" "./content-addressed.nix" "-A" "$derivationPath" --arg seed "$seedValue" "--no-out-link")
+ local args=("./content-addressed.nix" "-A" "$derivationPath" --arg seed "$seedValue" "--no-out-link")
args+=("$@")
nix-build "${args[@]}"
}
@@ -19,7 +19,7 @@ testRemoteCache () {
local outPath=$(buildAttr dependentNonCA 1)
nix copy --to file://$cacheDir $outPath
clearStore
- buildAttr dependentNonCA 1 --option substituters file://$cacheDir --no-require-sigs |& (! grep "building dependent-non-ca")
+ buildAttr dependentNonCA 1 --option substituters file://$cacheDir --no-require-sigs |& grepQuietInverse "building dependent-non-ca"
}
testDeterministicCA () {
@@ -46,17 +46,17 @@ testCutoff () {
}
testGC () {
- nix-instantiate --experimental-features ca-derivations ./content-addressed.nix -A rootCA --arg seed 5
- nix-collect-garbage --experimental-features ca-derivations --option keep-derivations true
+ nix-instantiate ./content-addressed.nix -A rootCA --arg seed 5
+ nix-collect-garbage --option keep-derivations true
clearStore
buildAttr rootCA 1 --out-link $TEST_ROOT/rootCA
- nix-collect-garbage --experimental-features ca-derivations
+ nix-collect-garbage
buildAttr rootCA 1 -j0
}
testNixCommand () {
clearStore
- nix build --experimental-features 'nix-command ca-derivations' --file ./content-addressed.nix --no-link
+ nix build --file ./content-addressed.nix --no-link
}
# Regression test for https://github.com/NixOS/nix/issues/4775
diff --git a/tests/check-refs.sh b/tests/check-refs.sh
index 65a72552a..2778e491d 100644
--- a/tests/check-refs.sh
+++ b/tests/check-refs.sh
@@ -8,14 +8,14 @@ dep=$(nix-build -o $RESULT check-refs.nix -A dep)
# test1 references dep, not itself.
test1=$(nix-build -o $RESULT check-refs.nix -A test1)
-(! nix-store -q --references $test1 | grep -q $test1)
-nix-store -q --references $test1 | grep -q $dep
+nix-store -q --references $test1 | grepQuietInverse $test1
+nix-store -q --references $test1 | grepQuiet $dep
# test2 references src, not itself nor dep.
test2=$(nix-build -o $RESULT check-refs.nix -A test2)
-(! nix-store -q --references $test2 | grep -q $test2)
-(! nix-store -q --references $test2 | grep -q $dep)
-nix-store -q --references $test2 | grep -q aux-ref
+nix-store -q --references $test2 | grepQuietInverse $test2
+nix-store -q --references $test2 | grepQuietInverse $dep
+nix-store -q --references $test2 | grepQuiet aux-ref
# test3 should fail (unallowed ref).
(! nix-build -o $RESULT check-refs.nix -A test3)
diff --git a/tests/check-reqs.sh b/tests/check-reqs.sh
index e9f65fc2a..856c94cec 100644
--- a/tests/check-reqs.sh
+++ b/tests/check-reqs.sh
@@ -8,8 +8,8 @@ nix-build -o $RESULT check-reqs.nix -A test1
(! nix-build -o $RESULT check-reqs.nix -A test2)
(! nix-build -o $RESULT check-reqs.nix -A test3)
-(! nix-build -o $RESULT check-reqs.nix -A test4) 2>&1 | grep -q 'check-reqs-dep1'
-(! nix-build -o $RESULT check-reqs.nix -A test4) 2>&1 | grep -q 'check-reqs-dep2'
+(! nix-build -o $RESULT check-reqs.nix -A test4) 2>&1 | grepQuiet 'check-reqs-dep1'
+(! nix-build -o $RESULT check-reqs.nix -A test4) 2>&1 | grepQuiet 'check-reqs-dep2'
(! nix-build -o $RESULT check-reqs.nix -A test5)
(! nix-build -o $RESULT check-reqs.nix -A test6)
diff --git a/tests/check.sh b/tests/check.sh
index e77c0405d..645b90222 100644
--- a/tests/check.sh
+++ b/tests/check.sh
@@ -37,7 +37,7 @@ checkBuildTempDirRemoved $TEST_ROOT/log
nix-build check.nix -A deterministic --argstr checkBuildId $checkBuildId \
--no-out-link --check --keep-failed 2> $TEST_ROOT/log
-if grep -q 'may not be deterministic' $TEST_ROOT/log; then false; fi
+if grepQuiet 'may not be deterministic' $TEST_ROOT/log; then false; fi
checkBuildTempDirRemoved $TEST_ROOT/log
nix-build check.nix -A nondeterministic --argstr checkBuildId $checkBuildId \
diff --git a/tests/common.sh b/tests/common.sh
index 68b90a85f..8941671d6 100644
--- a/tests/common.sh
+++ b/tests/common.sh
@@ -1,4 +1,4 @@
-set -e
+set -eu -o pipefail
if [[ -z "${COMMON_SH_SOURCED-}" ]]; then
diff --git a/tests/common/vars-and-functions.sh.in b/tests/common/vars-and-functions.sh.in
index 0deef4c1c..a9e6c802f 100644
--- a/tests/common/vars-and-functions.sh.in
+++ b/tests/common/vars-and-functions.sh.in
@@ -1,4 +1,4 @@
-set -e
+set -eu -o pipefail
if [[ -z "${COMMON_VARS_AND_FUNCTIONS_SH_SOURCED-}" ]]; then
@@ -152,31 +152,64 @@ isDaemonNewer () {
[[ $(nix eval --expr "builtins.compareVersions ''$daemonVersion'' ''$requiredVersion''") -ge 0 ]]
}
+skipTest () {
+ echo "$1, skipping this test..." >&2
+ exit 99
+}
+
requireDaemonNewerThan () {
- isDaemonNewer "$1" || exit 99
+ isDaemonNewer "$1" || skipTest "Daemon is too old"
}
canUseSandbox() {
- if [[ ! $_canUseSandbox ]]; then
- echo "Sandboxing not supported, skipping this test..."
- return 1
- fi
+ [[ ${_canUseSandbox-} ]]
+}
- return 0
+requireSandboxSupport () {
+ canUseSandbox || skipTest "Sandboxing not supported"
+}
+
+requireGit() {
+ [[ $(type -p git) ]] || skipTest "Git not installed"
}
fail() {
- echo "$1"
+ echo "$1" >&2
exit 1
}
+# Run a command, failing if it does not exit with the expected exit code.
+#
+# Has two advantages over the built-in `!`:
+#
+# 1. `!` conflates all non-0 codes. `expect` allows testing for an exact
+# code.
+#
+# 2. `!` unexpectedly negates `set -e`, and cannot be used on individual
+# pipeline stages with `set -o pipefail`. It only works on the entire
+# pipeline, which is useless if we want, say, a `nix ...` invocation to
+# *fail*, but a grep on the error message it outputs to *succeed*.
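+#
+# A hypothetical usage sketch (not an invocation from this test suite):
+#
+#   expect 1 nix build .#does-not-exist 2>&1 | grepQuiet 'error:'
+#
+# requires the `nix build` stage to exit with code 1 *and* the grep stage
+# to match, something a bare `!` under `set -o pipefail` cannot express.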
expect() {
local expected res
expected="$1"
shift
- "$@" || res="$?"
+ "$@" && res=0 || res="$?"
if [[ $res -ne $expected ]]; then
- echo "Expected '$expected' but got '$res' while running '$*'"
+ echo "Expected '$expected' but got '$res' while running '${*@Q}'" >&2
+ return 1
+ fi
+ return 0
+}
+
+# Better than just doing `expect ... 2>&1` because the "Expected..."
+# message below will *not* be redirected into the pipeline.
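+#
+# A hypothetical usage sketch:
+#
+#   expectStderr 1 nix eval --expr 'throw "nope"' | grepQuiet 'nope'
+#
+# Only the wrapped command's stderr is merged into stdout here, so a failed
+# expectation still reports its "Expected..." message on the script's stderr.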
+expectStderr() {
+ local expected res
+ expected="$1"
+ shift
+ "$@" 2>&1 && res=0 || res="$?"
+ if [[ $res -ne $expected ]]; then
+ echo "Expected '$expected' but got '$res' while running '${*@Q}'" >&2
return 1
fi
return 0
@@ -184,14 +217,13 @@ expect() {
needLocalStore() {
if [[ "$NIX_REMOTE" == "daemon" ]]; then
- echo "Can’t run through the daemon ($1), skipping this test..."
- return 99
+ skipTest "Can’t run through the daemon ($1)"
fi
}
# Just to make it easy to find which tests should be fixed
buggyNeedLocalStore() {
- needLocalStore
+ needLocalStore "$1"
}
enableFeatures() {
@@ -210,6 +242,35 @@ onError() {
done
}
+# `grep -v` doesn't work well for exit codes. We want `!(there exists a line l
+# such that l matches)`, but it gives us `there exists a line l such that
+# !(l matches)`.
+#
+# `!` normally doesn't work well with `set -e`, but when we wrap in a
+# function it *does*.
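+#
+# An illustrative sketch (not from this test suite):
+#
+#   ! grep forbidden log       # returns 1 when `forbidden` is present, but
+#                              # `set -e` does not abort the script
+#   grepInverse forbidden log  # same result, and `set -e` *does* abort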
+grepInverse() {
+ ! grep "$@"
+}
+
+# A shorthand, since `> /dev/null` is a bit noisy.
+#
+# `grep -q` would seem to do this, no function necessary, but it is a
+# bad fit with pipes and `set -o pipefail`: `-q` will exit after the
+# first match, and then subsequent writes will result in broken pipes.
+#
+# Note that reproducing the above is a bit tricky as it depends on
+# non-deterministic properties such as the timing between the match and
+# the closing of the pipe, the buffering of the pipe, and the speed of
+# the producer into the pipe. But rest assured we've seen it happen in
+# CI reliably.
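+#
+# An illustrative sketch, with `set -o pipefail` in effect:
+#
+#   seq 100000 | grep -q 1      # grep exits at the first match; seq may get
+#                               # SIGPIPE, failing the whole pipeline
+#   seq 100000 | grepQuiet 1    # grep drains its input, so seq always exits 0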
+grepQuiet() {
+ grep "$@" > /dev/null
+}
+
+# The previous two, combined
+grepQuietInverse() {
+ ! grep "$@" > /dev/null
+}
+
trap onError ERR
fi # COMMON_VARS_AND_FUNCTIONS_SH_SOURCED
diff --git a/tests/compute-levels.sh b/tests/compute-levels.sh
index e4322dfa1..de3da2ebd 100644
--- a/tests/compute-levels.sh
+++ b/tests/compute-levels.sh
@@ -3,5 +3,5 @@ source common.sh
if [[ $(uname -ms) = "Linux x86_64" ]]; then
# x86_64 CPUs must always support the baseline
# microarchitecture level.
- nix -vv --version | grep -q "x86_64-v1-linux"
+ nix -vv --version | grepQuiet "x86_64-v1-linux"
fi
diff --git a/tests/db-migration.sh b/tests/db-migration.sh
index 92dd4f3ba..44cd16bc0 100644
--- a/tests/db-migration.sh
+++ b/tests/db-migration.sh
@@ -1,18 +1,18 @@
# Test that we can successfully migrate from an older db schema
+source common.sh
+
# Only run this if we have an older Nix available
# XXX: This assumes that the `daemon` package is older than the `client` one
-if [[ -z "$NIX_DAEMON_PACKAGE" ]]; then
- exit 99
+if [[ -z "${NIX_DAEMON_PACKAGE-}" ]]; then
+ skipTest "not using the Nix daemon"
fi
-source common.sh
-
killDaemon
# Fill the db using the older Nix
PATH_WITH_NEW_NIX="$PATH"
-export PATH="$NIX_DAEMON_PACKAGE/bin:$PATH"
+export PATH="${NIX_DAEMON_PACKAGE}/bin:$PATH"
clearStore
nix-build simple.nix --no-out-link
nix-store --generate-binary-cache-key cache1.example.org $TEST_ROOT/sk1 $TEST_ROOT/pk1
diff --git a/tests/dependencies.sh b/tests/dependencies.sh
index 092950aa7..f9da0c6bc 100644
--- a/tests/dependencies.sh
+++ b/tests/dependencies.sh
@@ -36,10 +36,10 @@ deps=$(nix-store -quR "$drvPath")
echo "output closure contains $deps"
# The output path should be in the closure.
-echo "$deps" | grep -q "$outPath"
+echo "$deps" | grepQuiet "$outPath"
# Input-1 is not retained.
-if echo "$deps" | grep -q "dependencies-input-1"; then exit 1; fi
+if echo "$deps" | grepQuiet "dependencies-input-1"; then exit 1; fi
# Input-2 is retained.
input2OutPath=$(echo "$deps" | grep "dependencies-input-2")
@@ -49,4 +49,4 @@ nix-store -q --referrers-closure "$input2OutPath" | grep "$outPath"
# Check that the derivers are set properly.
test $(nix-store -q --deriver "$outPath") = "$drvPath"
-nix-store -q --deriver "$input2OutPath" | grep -q -- "-input-2.drv"
+nix-store -q --deriver "$input2OutPath" | grepQuiet -- "-input-2.drv"
diff --git a/tests/describe-stores.sh b/tests/describe-stores.sh
deleted file mode 100644
index 3fea61483..000000000
--- a/tests/describe-stores.sh
+++ /dev/null
@@ -1,8 +0,0 @@
-source common.sh
-
-# Query an arbitrary value in `nix describe-stores --json`'s output just to
-# check that it has the right structure
-[[ $(nix --experimental-features 'nix-command flakes' describe-stores --json | jq '.["SSH Store"]["compress"]["defaultValue"]') == false ]]
-
-# Ensure that the output of `nix describe-stores` isn't empty
-[[ -n $(nix --experimental-features 'nix-command flakes' describe-stores) ]]
diff --git a/tests/eval-store.sh b/tests/eval-store.sh
index 679da5741..8fc859730 100644
--- a/tests/eval-store.sh
+++ b/tests/eval-store.sh
@@ -2,7 +2,7 @@ source common.sh
# Using `--eval-store` with the daemon will eventually copy everything
# to the build store, invalidating most of the tests here
-needLocalStore
+needLocalStore "“--eval-store” doesn't achieve much with the daemon"
eval_store=$TEST_ROOT/eval-store
diff --git a/tests/experimental-features.sh b/tests/experimental-features.sh
new file mode 100644
index 000000000..a4d55f5f4
--- /dev/null
+++ b/tests/experimental-features.sh
@@ -0,0 +1,40 @@
+source common.sh
+
+# Without flakes, flake options should not show up
+# With flakes, flake options should show up
+
+function both_ways {
+ nix --experimental-features 'nix-command' "$@" | grepQuietInverse flake
+ nix --experimental-features 'nix-command flakes' "$@" | grepQuiet flake
+
+ # Also, the order should not matter
+ nix "$@" --experimental-features 'nix-command' | grepQuietInverse flake
+ nix "$@" --experimental-features 'nix-command flakes' | grepQuiet flake
+}
+
+# Simple case: the configuration affects the running command
+both_ways show-config
+
+# Skipping for now, because we actually *do* want these to show up in
+# the manual, just marked as experimental. Will re-enable once the manual
+# generation takes advantage of the JSON metadata for this.
+
+# both_ways store gc --help
+
+expect 1 nix --experimental-features 'nix-command' show-config --flake-registry 'https://no'
+nix --experimental-features 'nix-command flakes' show-config --flake-registry 'https://no'
+
+# Double check these are stable
+nix --experimental-features '' --help
+nix --experimental-features '' doctor --help
+nix --experimental-features '' repl --help
+nix --experimental-features '' upgrade-nix --help
+
+# These 3 arguments are currently given to all commands, which is wrong (as not
+# all commands care about them). To defer fixing that, we simply make them
+# require the nix-command experimental feature; it so happens that the commands
+# we wish to stabilize do not need them anyway.
+for arg in '--print-build-logs' '--offline' '--refresh'; do
+ nix --experimental-features 'nix-command' "$arg" --help
+ ! nix --experimental-features '' "$arg" --help
+done
diff --git a/tests/export-graph.sh b/tests/export-graph.sh
index 4954a6cbc..1f6232a40 100644
--- a/tests/export-graph.sh
+++ b/tests/export-graph.sh
@@ -4,7 +4,7 @@ clearStore
clearProfiles
checkRef() {
- nix-store -q --references $TEST_ROOT/result | grep -q "$1"'$' || fail "missing reference $1"
+ nix-store -q --references $TEST_ROOT/result | grepQuiet "$1"'$' || fail "missing reference $1"
}
# Test the export of the runtime dependency graph.
diff --git a/tests/fetchClosure.sh b/tests/fetchClosure.sh
index d88c55c3c..a207f647c 100644
--- a/tests/fetchClosure.sh
+++ b/tests/fetchClosure.sh
@@ -42,13 +42,13 @@ if [[ "$NIX_REMOTE" != "daemon" ]]; then
fi
# 'toPath' set to empty string should fail but print the expected path.
-nix eval -v --json --expr "
+expectStderr 1 nix eval -v --json --expr "
builtins.fetchClosure {
fromStore = \"file://$cacheDir\";
fromPath = $nonCaPath;
toPath = \"\";
}
-" 2>&1 | grep "error: rewriting.*$nonCaPath.*yielded.*$caPath"
+" | grep "error: rewriting.*$nonCaPath.*yielded.*$caPath"
# If fromPath is CA, then toPath isn't needed.
nix copy --to file://$cacheDir $caPath
diff --git a/tests/fetchGit.sh b/tests/fetchGit.sh
index a7a8df186..e2ccb0e97 100644
--- a/tests/fetchGit.sh
+++ b/tests/fetchGit.sh
@@ -1,9 +1,6 @@
source common.sh
-if [[ -z $(type -p git) ]]; then
- echo "Git not installed; skipping Git tests"
- exit 99
-fi
+requireGit
clearStore
diff --git a/tests/fetchGitRefs.sh b/tests/fetchGitRefs.sh
index 52926040b..d643fea04 100644
--- a/tests/fetchGitRefs.sh
+++ b/tests/fetchGitRefs.sh
@@ -1,9 +1,6 @@
source common.sh
-if [[ -z $(type -p git) ]]; then
- echo "Git not installed; skipping Git tests"
- exit 99
-fi
+requireGit
clearStore
@@ -56,7 +53,7 @@ invalid_ref() {
else
(! git check-ref-format --branch "$1" >/dev/null 2>&1)
fi
- nix --debug eval --raw --impure --expr "(builtins.fetchGit { url = $repo; ref = ''$1''; }).outPath" 2>&1 | grep 'invalid Git branch/tag name' >/dev/null
+ expect 1 nix --debug eval --raw --impure --expr "(builtins.fetchGit { url = $repo; ref = ''$1''; }).outPath" 2>&1 | grep 'invalid Git branch/tag name' >/dev/null
}
diff --git a/tests/fetchGitSubmodules.sh b/tests/fetchGitSubmodules.sh
index 08ccaa3cd..df81232e5 100644
--- a/tests/fetchGitSubmodules.sh
+++ b/tests/fetchGitSubmodules.sh
@@ -2,10 +2,7 @@ source common.sh
set -u
-if [[ -z $(type -p git) ]]; then
- echo "Git not installed; skipping Git submodule tests"
- exit 99
-fi
+requireGit
clearStore
diff --git a/tests/fetchMercurial.sh b/tests/fetchMercurial.sh
index 5c64ffd26..e6f8525c6 100644
--- a/tests/fetchMercurial.sh
+++ b/tests/fetchMercurial.sh
@@ -1,9 +1,6 @@
source common.sh
-if [[ -z $(type -p hg) ]]; then
- echo "Mercurial not installed; skipping Mercurial tests"
- exit 99
-fi
+[[ $(type -p hg) ]] || skipTest "Mercurial not installed"
clearStore
diff --git a/tests/fetchTree-file.sh b/tests/fetchTree-file.sh
index f0c530466..fe569cfb8 100644
--- a/tests/fetchTree-file.sh
+++ b/tests/fetchTree-file.sh
@@ -79,7 +79,7 @@ EOF
EOF
popd
- [[ -z "${NIX_DAEMON_PACKAGE}" ]] && return 0
+ [[ -z "${NIX_DAEMON_PACKAGE-}" ]] && return 0
# Ensure that a lockfile generated by the current Nix for tarball inputs
# can still be read by an older Nix
@@ -91,7 +91,7 @@ EOF
flake = false;
};
outputs = { self, tarball }: {
- foo = builtins.readFile "${tarball}/test_input_file";
+ foo = builtins.readFile "\${tarball}/test_input_file";
};
}
nix flake update
diff --git a/tests/fetchurl.sh b/tests/fetchurl.sh
index b41d8c4b7..8cd40c09f 100644
--- a/tests/fetchurl.sh
+++ b/tests/fetchurl.sh
@@ -62,7 +62,7 @@ hash=$(nix-hash --flat --type sha256 $nar)
outPath=$(nix-build -vvvvv --expr 'import <nix/fetchurl.nix>' --argstr url file://$nar --argstr sha256 $hash \
--arg unpack true --argstr name xyzzy --no-out-link)
-echo $outPath | grep -q 'xyzzy'
+echo $outPath | grepQuiet 'xyzzy'
test -x $outPath/fetchurl.sh
test -L $outPath/symlink
diff --git a/tests/flakes/build-paths.sh b/tests/flakes/build-paths.sh
index 08b4d1763..b399a066e 100644
--- a/tests/flakes/build-paths.sh
+++ b/tests/flakes/build-paths.sh
@@ -35,7 +35,7 @@ cat > $flake1Dir/flake.nix <<EOF
a6 = flake2.outPath;
# FIXME
- a7 = "${flake2}/config.nix";
+ a7 = "\${flake2}/config.nix";
# This is only allowed in impure mode.
a8 = builtins.storePath $dep;
diff --git a/tests/flakes/check.sh b/tests/flakes/check.sh
index 278ac7fa4..865ca61b4 100644
--- a/tests/flakes/check.sh
+++ b/tests/flakes/check.sh
@@ -73,5 +73,5 @@ cat > $flakeDir/flake.nix <<EOF
EOF
checkRes=$(nix flake check --keep-going $flakeDir 2>&1 && fail "nix flake check should have failed" || true)
-echo "$checkRes" | grep -q "packages.system-1.default"
-echo "$checkRes" | grep -q "packages.system-2.default"
+echo "$checkRes" | grepQuiet "packages.system-1.default"
+echo "$checkRes" | grepQuiet "packages.system-2.default"
diff --git a/tests/flakes/common.sh b/tests/flakes/common.sh
index 9d79080cd..427abcdde 100644
--- a/tests/flakes/common.sh
+++ b/tests/flakes/common.sh
@@ -2,13 +2,6 @@ source ../common.sh
registry=$TEST_ROOT/registry.json
-requireGit() {
- if [[ -z $(type -p git) ]]; then
- echo "Git not installed; skipping flake tests"
- exit 99
- fi
-}
-
writeSimpleFlake() {
local flakeDir="$1"
cat > $flakeDir/flake.nix <<EOF
@@ -66,7 +59,7 @@ EOF
createGitRepo() {
local repo="$1"
- local extraArgs="$2"
+ local extraArgs="${2-}"
rm -rf $repo $repo.tmp
mkdir -p $repo
diff --git a/tests/flakes/flake-in-submodule.sh b/tests/flakes/flake-in-submodule.sh
new file mode 100644
index 000000000..21a4b52de
--- /dev/null
+++ b/tests/flakes/flake-in-submodule.sh
@@ -0,0 +1,52 @@
+source common.sh
+
+# Tests that:
+# - flake.nix may reside inside of a git submodule
+# - the flake can access content outside of the submodule
+#
+# rootRepo
+# ├── root.nix
+# └── submodule
+# ├── flake.nix
+# └── sub.nix
+
+
+requireGit
+
+clearStore
+
+# Submodules can't be fetched locally by default.
+# See fetchGitSubmodules.sh
+export XDG_CONFIG_HOME=$TEST_HOME/.config
+git config --global protocol.file.allow always
+
+
+rootRepo=$TEST_ROOT/rootRepo
+subRepo=$TEST_ROOT/submodule
+
+
+createGitRepo $subRepo
+cat > $subRepo/flake.nix <<EOF
+{
+ outputs = { self }: {
+ sub = import ./sub.nix;
+ root = import ../root.nix;
+ };
+}
+EOF
+echo '"expression in submodule"' > $subRepo/sub.nix
+git -C $subRepo add flake.nix sub.nix
+git -C $subRepo commit -m Initial
+
+createGitRepo $rootRepo
+
+git -C $rootRepo submodule init
+git -C $rootRepo submodule add $subRepo submodule
+echo '"expression in root repo"' > $rootRepo/root.nix
+git -C $rootRepo add root.nix
+git -C $rootRepo commit -m "Add root.nix"
+
+# Flake can live inside a submodule and can be accessed via ?dir=submodule
+[[ $(nix eval --json git+file://$rootRepo\?submodules=1\&dir=submodule#sub ) = '"expression in submodule"' ]]
+# The flake can access content outside of the submodule
+[[ $(nix eval --json git+file://$rootRepo\?submodules=1\&dir=submodule#root ) = '"expression in root repo"' ]]
diff --git a/tests/flakes/flakes.sh b/tests/flakes/flakes.sh
index 07f1e6698..f2e216435 100644
--- a/tests/flakes/flakes.sh
+++ b/tests/flakes/flakes.sh
@@ -76,17 +76,17 @@ nix registry add --registry $registry nixpkgs flake1
# Test 'nix registry list'.
[[ $(nix registry list | wc -l) == 5 ]]
-nix registry list | grep -q '^global'
-nix registry list | grep -q -v '^user' # nothing in user registry
+nix registry list | grep '^global'
+nix registry list | grepInverse '^user' # nothing in user registry
# Test 'nix flake metadata'.
nix flake metadata flake1
-nix flake metadata flake1 | grep -q 'Locked URL:.*flake1.*'
+nix flake metadata flake1 | grepQuiet 'Locked URL:.*flake1.*'
# Test 'nix flake metadata' on a local flake.
-(cd $flake1Dir && nix flake metadata) | grep -q 'URL:.*flake1.*'
-(cd $flake1Dir && nix flake metadata .) | grep -q 'URL:.*flake1.*'
-nix flake metadata $flake1Dir | grep -q 'URL:.*flake1.*'
+(cd $flake1Dir && nix flake metadata) | grepQuiet 'URL:.*flake1.*'
+(cd $flake1Dir && nix flake metadata .) | grepQuiet 'URL:.*flake1.*'
+nix flake metadata $flake1Dir | grepQuiet 'URL:.*flake1.*'
# Test 'nix flake metadata --json'.
json=$(nix flake metadata flake1 --json | jq .)
@@ -96,7 +96,9 @@ json=$(nix flake metadata flake1 --json | jq .)
hash1=$(echo "$json" | jq -r .revision)
echo -n '# foo' >> $flake1Dir/flake.nix
+flake1OriginalCommit=$(git -C $flake1Dir rev-parse HEAD)
git -C $flake1Dir commit -a -m 'Foo'
+flake1NewCommit=$(git -C $flake1Dir rev-parse HEAD)
hash2=$(nix flake metadata flake1 --json --refresh | jq -r .revision)
[[ $hash1 != $hash2 ]]
@@ -134,11 +136,11 @@ nix build -o $TEST_ROOT/result flake2#bar --impure --no-write-lock-file
nix eval --expr "builtins.getFlake \"$flake2Dir\"" --impure
# Building a local flake with an unlocked dependency should fail with --no-update-lock-file.
-nix build -o $TEST_ROOT/result $flake2Dir#bar --no-update-lock-file 2>&1 | grep 'requires lock file changes'
+expect 1 nix build -o $TEST_ROOT/result $flake2Dir#bar --no-update-lock-file 2>&1 | grep 'requires lock file changes'
# But it should succeed without that flag.
nix build -o $TEST_ROOT/result $flake2Dir#bar --no-write-lock-file
-nix build -o $TEST_ROOT/result $flake2Dir#bar --no-update-lock-file 2>&1 | grep 'requires lock file changes'
+expect 1 nix build -o $TEST_ROOT/result $flake2Dir#bar --no-update-lock-file 2>&1 | grep 'requires lock file changes'
nix build -o $TEST_ROOT/result $flake2Dir#bar --commit-lock-file
[[ -e $flake2Dir/flake.lock ]]
[[ -z $(git -C $flake2Dir diff main || echo failed) ]]
@@ -196,10 +198,10 @@ git -C $flake3Dir add flake.lock
git -C $flake3Dir commit -m 'Add lockfile'
# Test whether registry caching works.
-nix registry list --flake-registry file://$registry | grep -q flake3
+nix registry list --flake-registry file://$registry | grepQuiet flake3
mv $registry $registry.tmp
nix store gc
-nix registry list --flake-registry file://$registry --refresh | grep -q flake3
+nix registry list --flake-registry file://$registry --refresh | grepQuiet flake3
mv $registry.tmp $registry
# Test whether flakes are registered as GC roots for offline use.
@@ -346,8 +348,8 @@ nix registry remove flake1
nix registry add user-flake1 git+file://$flake1Dir
nix registry add user-flake2 git+file://$flake2Dir
[[ $(nix --flake-registry "" registry list | wc -l) == 2 ]]
-nix --flake-registry "" registry list | grep -q -v '^global' # nothing in global registry
-nix --flake-registry "" registry list | grep -q '^user'
+nix --flake-registry "" registry list | grepQuietInverse '^global' # nothing in global registry
+nix --flake-registry "" registry list | grepQuiet '^user'
nix registry remove user-flake1
nix registry remove user-flake2
[[ $(nix registry list | wc -l) == 5 ]]
@@ -454,7 +456,7 @@ url=$(nix flake metadata --json file://$TEST_ROOT/flake.tar.gz | jq -r .url)
nix build -o $TEST_ROOT/result $url
# Building with an incorrect SRI hash should fail.
-nix build -o $TEST_ROOT/result "file://$TEST_ROOT/flake.tar.gz?narHash=sha256-qQ2Zz4DNHViCUrp6gTS7EE4+RMqFQtUfWF2UNUtJKS0=" 2>&1 | grep 'NAR hash mismatch'
+expectStderr 102 nix build -o $TEST_ROOT/result "file://$TEST_ROOT/flake.tar.gz?narHash=sha256-qQ2Zz4DNHViCUrp6gTS7EE4+RMqFQtUfWF2UNUtJKS0=" | grep 'NAR hash mismatch'
# Test --override-input.
git -C $flake3Dir reset --hard
@@ -491,3 +493,14 @@ nix store delete $(nix store add-path $badFlakeDir)
[[ $(nix-instantiate --eval flake:git+file://$flake3Dir -A x) = 123 ]]
[[ $(nix-instantiate -I flake3=flake:flake3 --eval '<flake3>' -A x) = 123 ]]
[[ $(NIX_PATH=flake3=flake:flake3 nix-instantiate --eval '<flake3>' -A x) = 123 ]]
+
+# Test alternate lockfile paths.
+nix flake lock $flake2Dir --output-lock-file $TEST_ROOT/flake2.lock
+cmp $flake2Dir/flake.lock $TEST_ROOT/flake2.lock >/dev/null # lockfiles should be identical, since we're referencing flake2's original one
+
+nix flake lock $flake2Dir --output-lock-file $TEST_ROOT/flake2-overridden.lock --override-input flake1 git+file://$flake1Dir?rev=$flake1OriginalCommit
+expectStderr 1 cmp $flake2Dir/flake.lock $TEST_ROOT/flake2-overridden.lock
+nix flake metadata $flake2Dir --reference-lock-file $TEST_ROOT/flake2-overridden.lock | grepQuiet $flake1OriginalCommit
+
+# reference-lock-file can only be used if allow-dirty is set.
+expectStderr 1 nix flake metadata $flake2Dir --no-allow-dirty --reference-lock-file $TEST_ROOT/flake2-overridden.lock
diff --git a/tests/flakes/follow-paths.sh b/tests/flakes/follow-paths.sh
index 19cc1bafa..fe9b51c65 100644
--- a/tests/flakes/follow-paths.sh
+++ b/tests/flakes/follow-paths.sh
@@ -128,7 +128,7 @@ EOF
git -C $flakeFollowsA add flake.nix
-nix flake lock $flakeFollowsA 2>&1 | grep 'points outside'
+expect 1 nix flake lock $flakeFollowsA 2>&1 | grep 'points outside'
# Non-existent follows should print a warning.
cat >$flakeFollowsA/flake.nix <<EOF
diff --git a/tests/flakes/mercurial.sh b/tests/flakes/mercurial.sh
index 2614006c8..0622c79b7 100644
--- a/tests/flakes/mercurial.sh
+++ b/tests/flakes/mercurial.sh
@@ -1,9 +1,6 @@
source ./common.sh
-if [[ -z $(type -p hg) ]]; then
- echo "Mercurial not installed; skipping"
- exit 99
-fi
+[[ $(type -p hg) ]] || skipTest "Mercurial not installed"
flake1Dir=$TEST_ROOT/flake-hg1
mkdir -p $flake1Dir
diff --git a/tests/flakes/show.sh b/tests/flakes/show.sh
index dd13264b9..a3d300552 100644
--- a/tests/flakes/show.sh
+++ b/tests/flakes/show.sh
@@ -64,3 +64,24 @@ in
assert show_output == { };
true
'
+
+# Test that attributes with errors are handled correctly.
+# nixpkgs.legacyPackages is a particularly prominent instance of this.
+cat >flake.nix <<EOF
+{
+ outputs = inputs: {
+ legacyPackages.$system = {
+ AAAAAASomeThingsFailToEvaluate = throw "nooo";
+ simple = import ./simple.nix;
+ };
+ };
+}
+EOF
+nix flake show --json --legacy --all-systems > show-output.json
+nix eval --impure --expr '
+let show_output = builtins.fromJSON (builtins.readFile ./show-output.json);
+in
+assert show_output.legacyPackages.${builtins.currentSystem}.AAAAAASomeThingsFailToEvaluate == { };
+assert show_output.legacyPackages.${builtins.currentSystem}.simple.name == "simple";
+true
+'
diff --git a/tests/fmt.sh b/tests/fmt.sh
index 254681ca2..3c1bd9989 100644
--- a/tests/fmt.sh
+++ b/tests/fmt.sh
@@ -1,7 +1,5 @@
source common.sh
-set -o pipefail
-
clearStore
rm -rf $TEST_HOME/.cache $TEST_HOME/.config $TEST_HOME/.local
diff --git a/tests/function-trace.sh b/tests/function-trace.sh
index b0d6c9d59..bd804bf18 100755
--- a/tests/function-trace.sh
+++ b/tests/function-trace.sh
@@ -10,17 +10,15 @@ expect_trace() {
--trace-function-calls \
--expr "$expr" 2>&1 \
| grep "function-trace" \
- | sed -e 's/ [0-9]*$//'
+ | sed -e 's/ [0-9]*$//' \
+ || true
)
echo -n "Tracing expression '$expr'"
- set +e
msg=$(diff -swB \
<(echo "$expect") \
<(echo "$actual")
- );
- result=$?
- set -e
+ ) && result=0 || result=$?
if [ $result -eq 0 ]; then
echo " ok."
else
@@ -67,5 +65,3 @@ expect_trace '1 2' "
function-trace entered «string»:1:1 at
function-trace exited «string»:1:1 at
"
-
-set -e
diff --git a/tests/gc-runtime.sh b/tests/gc-runtime.sh
index 6094959cb..dc1826a55 100644
--- a/tests/gc-runtime.sh
+++ b/tests/gc-runtime.sh
@@ -4,7 +4,7 @@ case $system in
*linux*)
;;
*)
- exit 99;
+ skipTest "Not running Linux";
esac
set -m # enable job control, needed for kill
diff --git a/tests/gc.sh b/tests/gc.sh
index ad09a8b39..98d6cb032 100644
--- a/tests/gc.sh
+++ b/tests/gc.sh
@@ -50,3 +50,20 @@ if test -e $outPath/foobar; then false; fi
# Check that the store is empty.
rmdir $NIX_STORE_DIR/.links
rmdir $NIX_STORE_DIR
+
+## Test `nix-collect-garbage -d`
+# `nix-env` doesn't work with CA derivations, so let's ignore that bit if we're
+# using them
+if [[ -z "${NIX_TESTS_CA_BY_DEFAULT:-}" ]]; then
+ clearProfiles
+ # Run two `nix-env` commands, should create two generations of
+ # the profile
+ nix-env -f ./user-envs.nix -i foo-1.0
+ nix-env -f ./user-envs.nix -i foo-2.0pre1
+ [[ $(nix-env --list-generations | wc -l) -eq 2 ]]
+
+ # Clear the profile history. There should be only one generation
+ # left
+ nix-collect-garbage -d
+ [[ $(nix-env --list-generations | wc -l) -eq 1 ]]
+fi
diff --git a/tests/hash.sh b/tests/hash.sh
index e5f75e2cf..34c1bb38a 100644
--- a/tests/hash.sh
+++ b/tests/hash.sh
@@ -2,13 +2,19 @@ source common.sh
try () {
printf "%s" "$2" > $TEST_ROOT/vector
- hash=$(nix hash file --base16 $EXTRA --type "$1" $TEST_ROOT/vector)
- if test "$hash" != "$3"; then
- echo "hash $1, expected $3, got $hash"
+ hash="$(nix-hash --flat ${FORMAT_FLAG-} --type "$1" "$TEST_ROOT/vector")"
+ if ! (( "${NO_TEST_CLASSIC-}" )) && test "$hash" != "$3"; then
+ echo "try nix-hash: hash $1, expected $3, got $hash"
+ exit 1
+ fi
+ hash="$(nix hash file ${FORMAT_FLAG-} --type "$1" "$TEST_ROOT/vector")"
+ if ! (( "${NO_TEST_NIX_COMMAND-}" )) && test "$hash" != "$3"; then
+ echo "try nix hash: hash $1, expected $3, got $hash"
exit 1
fi
}
+FORMAT_FLAG=--base16
try md5 "" "d41d8cd98f00b204e9800998ecf8427e"
try md5 "a" "0cc175b9c0f1b6a831c399e269772661"
try md5 "abc" "900150983cd24fb0d6963f7d28e17f72"
@@ -28,16 +34,24 @@ try sha256 "abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq" "248d6a61d
try sha512 "" "cf83e1357eefb8bdf1542850d66d8007d620e4050b5715dc83f4a921d36ce9ce47d0d13c5d85f2b0ff8318d2877eec2f63b931bd47417a81a538327af927da3e"
try sha512 "abc" "ddaf35a193617abacc417349ae20413112e6fa4e89a97ea20a9eeee64b55d39a2192992a274fc1a836ba3c23a3feebbd454d4423643ce80e2a9ac94fa54ca49f"
try sha512 "abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq" "204a8fc6dda82f0a0ced7beb8e08a41657c16ef468b228a8279be331a703c33596fd15c13b1b07f9aa1d3bea57789ca031ad85c7a71dd70354ec631238ca3445"
+unset FORMAT_FLAG
-EXTRA=--base32
+FORMAT_FLAG=--base32
try sha256 "abc" "1b8m03r63zqhnjf7l5wnldhh7c134ap5vpj0850ymkq1iyzicy5s"
-EXTRA=
+unset FORMAT_FLAG
-EXTRA=--sri
+FORMAT_FLAG=--sri
try sha512 "" "sha512-z4PhNX7vuL3xVChQ1m2AB9Yg5AULVxXcg/SpIdNs6c5H0NE8XYXysP+DGNKHfuwvY7kxvUdBeoGlODJ6+SfaPg=="
try sha512 "abc" "sha512-3a81oZNherrMQXNJriBBMRLm+k6JqX6iCp7u5ktV05ohkpkqJ0/BqDa6PCOj/uu9RU1EI2Q86A4qmslPpUyknw=="
try sha512 "abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq" "sha512-IEqPxt2oLwoM7XvrjgikFlfBbvRosiioJ5vjMacDwzWW/RXBOxsH+aodO+pXeJygMa2Fx6cd1wNU7GMSOMo0RQ=="
try sha256 "abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq" "sha256-JI1qYdIGOLjlwCaTDD5gOaM85Flk/yFn9uzt1BnbBsE="
+unset FORMAT_FLAG
+
+# nix-hash [--flat] defaults to the Base16 format
+NO_TEST_NIX_COMMAND=1 try sha512 "abc" "ddaf35a193617abacc417349ae20413112e6fa4e89a97ea20a9eeee64b55d39a2192992a274fc1a836ba3c23a3feebbd454d4423643ce80e2a9ac94fa54ca49f"
+
+# nix hash [file|path] defaults to the SRI format
+NO_TEST_CLASSIC=1 try sha512 "abc" "sha512-3a81oZNherrMQXNJriBBMRLm+k6JqX6iCp7u5ktV05ohkpkqJ0/BqDa6PCOj/uu9RU1EI2Q86A4qmslPpUyknw=="
try2 () {
hash=$(nix-hash --type "$1" $TEST_ROOT/hash-path)
@@ -69,12 +83,18 @@ try2 md5 "f78b733a68f5edbdf9413899339eaa4a"
# Conversion.
try3() {
+ h64=$(nix-hash --type "$1" --to-base64 "$2")
+ [ "$h64" = "$4" ]
h64=$(nix hash to-base64 --type "$1" "$2")
[ "$h64" = "$4" ]
+ sri=$(nix-hash --type "$1" --to-sri "$2")
+ [ "$sri" = "$1-$4" ]
sri=$(nix hash to-sri --type "$1" "$2")
[ "$sri" = "$1-$4" ]
h32=$(nix-hash --type "$1" --to-base32 "$2")
[ "$h32" = "$3" ]
+ h32=$(nix hash to-base32 --type "$1" "$2")
+ [ "$h32" = "$3" ]
h16=$(nix-hash --type "$1" --to-base16 "$h32")
[ "$h16" = "$2" ]
h16=$(nix hash to-base16 --type "$1" "$h64")
diff --git a/tests/impure-derivations.sh b/tests/impure-derivations.sh
index 23a193833..7595fdd35 100644
--- a/tests/impure-derivations.sh
+++ b/tests/impure-derivations.sh
@@ -5,8 +5,6 @@ requireDaemonNewerThan "2.8pre20220311"
enableFeatures "ca-derivations impure-derivations"
restartDaemon
-set -o pipefail
-
clearStore
# Basic test of impure derivations: building one a second time should not use the previous result.
diff --git a/tests/init.sh b/tests/init.sh
index fea659516..c420e8c9f 100755
--- a/tests/init.sh
+++ b/tests/init.sh
@@ -1,5 +1,3 @@
-set -eu -o pipefail
-
# Don't start the daemon
source common/vars-and-functions.sh
diff --git a/tests/install-darwin.sh b/tests/install-darwin.sh
index 7e44e54c4..ea2b75323 100755
--- a/tests/install-darwin.sh
+++ b/tests/install-darwin.sh
@@ -4,7 +4,7 @@ set -eux
cleanup() {
PLIST="/Library/LaunchDaemons/org.nixos.nix-daemon.plist"
- if sudo launchctl list | grep -q nix-daemon; then
+ if sudo launchctl list | grepQuiet nix-daemon; then
sudo launchctl unload "$PLIST"
fi
diff --git a/tests/installer/default.nix b/tests/installer/default.nix
index 31d83699d..49cfd2bcc 100644
--- a/tests/installer/default.nix
+++ b/tests/installer/default.nix
@@ -17,7 +17,7 @@ let
script = ''
tar -xf ./nix.tar.xz
mv ./nix-* nix
- ./nix/install --no-daemon
+ ./nix/install --no-daemon --no-channel-add
'';
};
@@ -30,6 +30,14 @@ let
};
};
+ mockChannel = pkgs:
+ pkgs.runCommandNoCC "mock-channel" {} ''
+ mkdir nixexprs
+ mkdir -p $out/channel
+ echo -n 'someContent' > nixexprs/someFile
+ tar cvf - nixexprs | bzip2 > $out/channel/nixexprs.tar.bz2
+ '';
+
disableSELinux = "sudo setenforce 0";
images = {
@@ -189,6 +197,11 @@ let
echo "Running installer..."
$ssh "set -eux; $installScript"
+ echo "Copying the mock channel"
+ # `scp -r` doesn't seem to work properly on some RHEL instances, so let's
+ # use a plain tarpipe instead
+ tar -C ${mockChannel pkgs} -c channel | ssh -p 20022 $ssh_opts vagrant@localhost tar x -f-
+
echo "Testing Nix installation..."
$ssh <<EOF
set -ex
@@ -204,6 +217,17 @@ let
out=\$(nix-build --no-substitute -E 'derivation { name = "foo"; system = "x86_64-linux"; builder = "/bin/sh"; args = ["-c" "echo foobar > \$out"]; }')
[[ \$(cat \$out) = foobar ]]
+
+ if pgrep nix-daemon; then
+ MAYBESUDO="sudo"
+ else
+ MAYBESUDO=""
+ fi
+
+
+ $MAYBESUDO \$(which nix-channel) --add file://\$HOME/channel myChannel
+ $MAYBESUDO \$(which nix-channel) --update
+ [[ \$(nix-instantiate --eval --expr 'builtins.readFile <myChannel/someFile>') = '"someContent"' ]]
EOF
echo "Done!"
diff --git a/tests/lang.sh b/tests/lang.sh
index 95e795e2e..8170cb39d 100644
--- a/tests/lang.sh
+++ b/tests/lang.sh
@@ -4,12 +4,19 @@ export TEST_VAR=foo # for eval-okay-getenv.nix
export NIX_REMOTE=dummy://
export NIX_STORE_DIR=/nix/store
-nix-instantiate --eval -E 'builtins.trace "Hello" 123' 2>&1 | grep -q Hello
+nix-instantiate --eval -E 'builtins.trace "Hello" 123' 2>&1 | grepQuiet Hello
+nix-instantiate --eval -E 'builtins.trace "Hello" 123' 2>/dev/null | grepQuiet 123
nix-instantiate --eval -E 'builtins.addErrorContext "Hello" 123' 2>&1
-nix-instantiate --trace-verbose --eval -E 'builtins.traceVerbose "Hello" 123' 2>&1 | grep -q Hello
-(! nix-instantiate --eval -E 'builtins.traceVerbose "Hello" 123' 2>&1 | grep -q Hello)
-(! nix-instantiate --show-trace --eval -E 'builtins.addErrorContext "Hello" 123' 2>&1 | grep -q Hello)
-nix-instantiate --show-trace --eval -E 'builtins.addErrorContext "Hello" (throw "Foo")' 2>&1 | grep -q Hello
+nix-instantiate --trace-verbose --eval -E 'builtins.traceVerbose "Hello" 123' 2>&1 | grepQuiet Hello
+nix-instantiate --eval -E 'builtins.traceVerbose "Hello" 123' 2>&1 | grepQuietInverse Hello
+nix-instantiate --show-trace --eval -E 'builtins.addErrorContext "Hello" 123' 2>&1 | grepQuietInverse Hello
+expectStderr 1 nix-instantiate --show-trace --eval -E 'builtins.addErrorContext "Hello" (throw "Foo")' | grepQuiet Hello
+
+nix-instantiate --eval -E 'let x = builtins.trace { x = x; } true; in x' \
+ 2>&1 | grepQuiet -E 'trace: { x = «potential infinite recursion»; }'
+
+nix-instantiate --eval -E 'let x = { repeating = x; tracing = builtins.trace x true; }; in x.tracing'\
+ 2>&1 | grepQuiet -F 'trace: { repeating = «repeated»; tracing = «potential infinite recursion»; }'
set +x
diff --git a/tests/linux-sandbox.sh b/tests/linux-sandbox.sh
index e62039567..5a2cf7abd 100644
--- a/tests/linux-sandbox.sh
+++ b/tests/linux-sandbox.sh
@@ -4,13 +4,13 @@ needLocalStore "the sandbox only runs on the builder side, so it makes no sense
clearStore
-if ! canUseSandbox; then exit 99; fi
+requireSandboxSupport
# Note: we need to bind-mount $SHELL into the chroot. Currently we
# only support the case where $SHELL is in the Nix store, because
# otherwise things get complicated (e.g. if it's in /bin, do we need
# /lib as well?).
-if [[ ! $SHELL =~ /nix/store ]]; then exit 99; fi
+if [[ ! $SHELL =~ /nix/store ]]; then skipTest "Shell is not from Nix store"; fi
chmod -R u+w $TEST_ROOT/store0 || true
rm -rf $TEST_ROOT/store0
@@ -35,8 +35,8 @@ nix-build dependencies.nix --no-out-link --check --sandbox-paths /nix/store
nix-build check.nix -A nondeterministic --sandbox-paths /nix/store --no-out-link
(! nix-build check.nix -A nondeterministic --sandbox-paths /nix/store --no-out-link --check -K 2> $TEST_ROOT/log)
-if grep -q 'error: renaming' $TEST_ROOT/log; then false; fi
-grep -q 'may not be deterministic' $TEST_ROOT/log
+if grepQuiet 'error: renaming' $TEST_ROOT/log; then false; fi
+grepQuiet 'may not be deterministic' $TEST_ROOT/log
# Test that sandboxed builds cannot write to /etc easily
(! nix-build -E 'with import ./config.nix; mkDerivation { name = "etc-write"; buildCommand = "echo > /etc/test"; }' --no-out-link --sandbox-paths /nix/store)
diff --git a/tests/local.mk b/tests/local.mk
index 4a620f18b..ccd76eeac 100644
--- a/tests/local.mk
+++ b/tests/local.mk
@@ -1,4 +1,5 @@
nix_tests = \
+ test-infra.sh \
init.sh \
flakes/flakes.sh \
flakes/run.sh \
@@ -12,10 +13,12 @@ nix_tests = \
flakes/unlocked-override.sh \
flakes/absolute-paths.sh \
flakes/build-paths.sh \
+ flakes/flake-in-submodule.sh \
ca/gc.sh \
gc.sh \
remote-store.sh \
lang.sh \
+ experimental-features.sh \
fetchMercurial.sh \
gc-auto.sh \
user-envs.sh \
@@ -113,7 +116,6 @@ nix_tests = \
db-migration.sh \
bash-profile.sh \
pass-as-file.sh \
- describe-stores.sh \
nix-profile.sh \
suggestions.sh \
store-ping.sh \
@@ -130,9 +132,9 @@ endif
install-tests += $(foreach x, $(nix_tests), tests/$(x))
-clean-files += $(d)/tests/common/vars-and-functions.sh $(d)/config.nix $(d)/ca/config.nix
+clean-files += $(d)/common/vars-and-functions.sh $(d)/config.nix $(d)/ca/config.nix
-test-deps += tests/common/vars-and-functions.sh tests/config.nix tests/ca/config.nix tests/plugins/libplugintest.$(SO_EXT)
+test-deps += tests/common/vars-and-functions.sh tests/config.nix tests/ca/config.nix
ifeq ($(BUILD_SHARED_LIBS), 1)
test-deps += tests/plugins/libplugintest.$(SO_EXT)
diff --git a/tests/misc.sh b/tests/misc.sh
index 2830856ae..60d58310e 100644
--- a/tests/misc.sh
+++ b/tests/misc.sh
@@ -3,17 +3,17 @@ source common.sh
# Tests miscellaneous commands.
# Do all commands have help?
-#nix-env --help | grep -q install
-#nix-store --help | grep -q realise
-#nix-instantiate --help | grep -q eval
-#nix-hash --help | grep -q base32
+#nix-env --help | grepQuiet install
+#nix-store --help | grepQuiet realise
+#nix-instantiate --help | grepQuiet eval
+#nix-hash --help | grepQuiet base32
# Can we ask for the version number?
nix-env --version | grep "$version"
# Usage errors.
-nix-env --foo 2>&1 | grep "no operation"
-nix-env -q --foo 2>&1 | grep "unknown flag"
+expect 1 nix-env --foo 2>&1 | grep "no operation"
+expect 1 nix-env -q --foo 2>&1 | grep "unknown flag"
# Eval Errors.
eval_arg_res=$(nix-instantiate --eval -E 'let a = {} // a; in a.foo' 2>&1 || true)
diff --git a/tests/multiple-outputs.sh b/tests/multiple-outputs.sh
index 66be6fa64..330600d08 100644
--- a/tests/multiple-outputs.sh
+++ b/tests/multiple-outputs.sh
@@ -19,8 +19,8 @@ echo "evaluating c..."
# outputs.
drvPath=$(nix-instantiate multiple-outputs.nix -A c)
#[ "$drvPath" = "$drvPath2" ]
-grep -q 'multiple-outputs-a.drv",\["first","second"\]' $drvPath
-grep -q 'multiple-outputs-b.drv",\["out"\]' $drvPath
+grepQuiet 'multiple-outputs-a.drv",\["first","second"\]' $drvPath
+grepQuiet 'multiple-outputs-b.drv",\["out"\]' $drvPath
# While we're at it, test the ‘unsafeDiscardOutputDependency’ primop.
outPath=$(nix-build multiple-outputs.nix -A d --no-out-link)
@@ -84,5 +84,5 @@ nix-store --gc --print-roots
rm -rf $NIX_STORE_DIR/.links
rmdir $NIX_STORE_DIR
-nix build -f multiple-outputs.nix invalid-output-name-1 2>&1 | grep 'contains illegal character'
-nix build -f multiple-outputs.nix invalid-output-name-2 2>&1 | grep 'contains illegal character'
+expect 1 nix build -f multiple-outputs.nix invalid-output-name-1 2>&1 | grep 'contains illegal character'
+expect 1 nix build -f multiple-outputs.nix invalid-output-name-2 2>&1 | grep 'contains illegal character'
diff --git a/tests/nar-access.sh b/tests/nar-access.sh
index dcc2e8a36..d487d58d2 100644
--- a/tests/nar-access.sh
+++ b/tests/nar-access.sh
@@ -46,8 +46,8 @@ diff -u \
<(echo '{"type":"regular","size":0}' | jq -S)
# Test missing files.
-nix store ls --json -R $storePath/xyzzy 2>&1 | grep 'does not exist in NAR'
-nix store ls $storePath/xyzzy 2>&1 | grep 'does not exist'
+expect 1 nix store ls --json -R $storePath/xyzzy 2>&1 | grep 'does not exist in NAR'
+expect 1 nix store ls $storePath/xyzzy 2>&1 | grep 'does not exist'
# Test failure to dump.
if nix-store --dump $storePath >/dev/full ; then
diff --git a/tests/nix-channel.sh b/tests/nix-channel.sh
index b64283f48..dbb3114f1 100644
--- a/tests/nix-channel.sh
+++ b/tests/nix-channel.sh
@@ -6,7 +6,7 @@ rm -f $TEST_HOME/.nix-channels $TEST_HOME/.nix-profile
# Test add/list/remove.
nix-channel --add http://foo/bar xyzzy
-nix-channel --list | grep -q http://foo/bar
+nix-channel --list | grepQuiet http://foo/bar
nix-channel --remove xyzzy
[ -e $TEST_HOME/.nix-channels ]
@@ -17,7 +17,7 @@ nix-channel --remove xyzzy
export NIX_CONFIG="use-xdg-base-directories = true"
nix-channel --add http://foo/bar xyzzy
-nix-channel --list | grep -q http://foo/bar
+nix-channel --list | grepQuiet http://foo/bar
nix-channel --remove xyzzy
unset NIX_CONFIG
@@ -41,8 +41,8 @@ nix-channel --update
# Do a query.
nix-env -qa \* --meta --xml --out-path > $TEST_ROOT/meta.xml
-grep -q 'meta.*description.*Random test package' $TEST_ROOT/meta.xml
-grep -q 'item.*attrPath="foo".*name="dependencies-top"' $TEST_ROOT/meta.xml
+grepQuiet 'meta.*description.*Random test package' $TEST_ROOT/meta.xml
+grepQuiet 'item.*attrPath="foo".*name="dependencies-top"' $TEST_ROOT/meta.xml
# Do an install.
nix-env -i dependencies-top
@@ -54,9 +54,9 @@ nix-channel --update
# Do a query.
nix-env -qa \* --meta --xml --out-path > $TEST_ROOT/meta.xml
-grep -q 'meta.*description.*Random test package' $TEST_ROOT/meta.xml
-grep -q 'item.*attrPath="bar".*name="dependencies-top"' $TEST_ROOT/meta.xml
-grep -q 'item.*attrPath="foo".*name="dependencies-top"' $TEST_ROOT/meta.xml
+grepQuiet 'meta.*description.*Random test package' $TEST_ROOT/meta.xml
+grepQuiet 'item.*attrPath="bar".*name="dependencies-top"' $TEST_ROOT/meta.xml
+grepQuiet 'item.*attrPath="foo".*name="dependencies-top"' $TEST_ROOT/meta.xml
# Do an install.
nix-env -i dependencies-top
diff --git a/tests/nix-shell.sh b/tests/nix-shell.sh
index f291c6f79..044b96d54 100644
--- a/tests/nix-shell.sh
+++ b/tests/nix-shell.sh
@@ -88,20 +88,43 @@ output=$($TEST_ROOT/spaced\ \\\'\"shell.shebang.rb abc ruby)
nix develop -f "$shellDotNix" shellDrv -c bash -c '[[ -n $stdenv ]]'
# Ensure `nix develop -c` preserves stdin
-echo foo | nix develop -f "$shellDotNix" shellDrv -c cat | grep -q foo
+echo foo | nix develop -f "$shellDotNix" shellDrv -c cat | grepQuiet foo
# Ensure `nix develop -c` actually executes the command if stdout isn't a terminal
-nix develop -f "$shellDotNix" shellDrv -c echo foo |& grep -q foo
+nix develop -f "$shellDotNix" shellDrv -c echo foo |& grepQuiet foo
# Test 'nix print-dev-env'.
-[[ $(nix print-dev-env -f "$shellDotNix" shellDrv --json | jq -r .variables.arr1.value[2]) = '3 4' ]]
-
-source <(nix print-dev-env -f "$shellDotNix" shellDrv)
-[[ -n $stdenv ]]
-[[ ${arr1[2]} = "3 4" ]]
-[[ ${arr2[1]} = $'\n' ]]
-[[ ${arr2[2]} = $'x\ny' ]]
-[[ $(fun) = blabla ]]
+
+nix print-dev-env -f "$shellDotNix" shellDrv > $TEST_ROOT/dev-env.sh
+nix print-dev-env -f "$shellDotNix" shellDrv --json > $TEST_ROOT/dev-env.json
+
+# Ensure `nix print-dev-env --json` contains variable assignments.
+[[ $(jq -r .variables.arr1.value[2] $TEST_ROOT/dev-env.json) = '3 4' ]]
+
+# Run tests involving `source <(nix print-dev-env)` in subshells to avoid modifying the current
+# environment.
+
+set +u # FIXME: Make print-dev-env `set -u` compliant (issue #7951)
+
+# Ensure `source <(nix print-dev-env)` modifies the environment.
+(
+ path=$PATH
+ source $TEST_ROOT/dev-env.sh
+ [[ -n $stdenv ]]
+ [[ ${arr1[2]} = "3 4" ]]
+ [[ ${arr2[1]} = $'\n' ]]
+ [[ ${arr2[2]} = $'x\ny' ]]
+ [[ $(fun) = blabla ]]
+ [[ $PATH = $(jq -r .variables.PATH.value $TEST_ROOT/dev-env.json):$path ]]
+)
+
+# Ensure `source <(nix print-dev-env)` handles the case when PATH is empty.
+(
+ path=$PATH
+ PATH=
+ source $TEST_ROOT/dev-env.sh
+ [[ $PATH = $(PATH=$path jq -r .variables.PATH.value $TEST_ROOT/dev-env.json) ]]
+)
# Test nix-shell with ellipsis and no `inNixShell` argument (for backwards compat with old nixpkgs)
cat >$TEST_ROOT/shell-ellipsis.nix <<EOF
diff --git a/tests/nixos/nix-copy.nix b/tests/nixos/nix-copy.nix
new file mode 100644
index 000000000..ee8b77100
--- /dev/null
+++ b/tests/nixos/nix-copy.nix
@@ -0,0 +1,85 @@
+# Test that ‘nix copy’ works over ssh.
+
+{ lib, config, nixpkgs, hostPkgs, ... }:
+
+let
+ pkgs = config.nodes.client.nixpkgs.pkgs;
+
+ pkgA = pkgs.cowsay;
+ pkgB = pkgs.wget;
+ pkgC = pkgs.hello;
+ pkgD = pkgs.tmux;
+
+in {
+ name = "nix-copy";
+
+ enableOCR = true;
+
+ nodes =
+ { client =
+ { config, lib, pkgs, ... }:
+ { virtualisation.writableStore = true;
+ virtualisation.additionalPaths = [ pkgA pkgD.drvPath ];
+ nix.settings.substituters = lib.mkForce [ ];
+ nix.settings.experimental-features = [ "nix-command" ];
+ services.getty.autologinUser = "root";
+ };
+
+ server =
+ { config, pkgs, ... }:
+ { services.openssh.enable = true;
+ services.openssh.permitRootLogin = "yes";
+ users.users.root.password = "foobar";
+ virtualisation.writableStore = true;
+ virtualisation.additionalPaths = [ pkgB pkgC ];
+ };
+ };
+
+ testScript = { nodes }: ''
+ # fmt: off
+ import subprocess
+
+ # Create an SSH key on the client.
+ subprocess.run([
+ "${pkgs.openssh}/bin/ssh-keygen", "-t", "ed25519", "-f", "key", "-N", ""
+ ], capture_output=True, check=True)
+
+ start_all()
+
+ server.wait_for_unit("sshd")
+ client.wait_for_unit("network.target")
+ client.wait_for_unit("getty@tty1.service")
+ client.wait_for_text("]#")
+
+ # Copy the closure of package A from the client to the server using password authentication,
+ # and check that all prompts are visible
+ server.fail("nix-store --check-validity ${pkgA}")
+ client.send_chars("nix copy --to ssh://server ${pkgA} >&2; echo done\n")
+ client.wait_for_text("continue connecting")
+ client.send_chars("yes\n")
+ client.wait_for_text("Password:")
+ client.send_chars("foobar\n")
+ client.wait_for_text("done")
+ server.succeed("nix-store --check-validity ${pkgA}")
+
+ client.copy_from_host("key", "/root/.ssh/id_ed25519")
+ client.succeed("chmod 600 /root/.ssh/id_ed25519")
+
+ # Install the SSH key on the server.
+ server.copy_from_host("key.pub", "/root/.ssh/authorized_keys")
+ server.succeed("systemctl restart sshd")
+ client.succeed(f"ssh -o StrictHostKeyChecking=no {server.name} 'echo hello world'")
+
+ # Copy the closure of package B from the server to the client, using ssh-ng.
+ client.fail("nix-store --check-validity ${pkgB}")
+ # Shouldn't download untrusted paths by default
+ client.fail("nix copy --from ssh-ng://server ${pkgB} >&2")
+ client.succeed("nix copy --no-check-sigs --from ssh-ng://server ${pkgB} >&2")
+ client.succeed("nix-store --check-validity ${pkgB}")
+
+ # Copy the derivation of package D from the client to the server.
+ server.fail("nix-store --check-validity ${pkgD.drvPath}")
+ client.succeed("nix copy --derivation --to ssh://server ${pkgD.drvPath} >&2")
+ server.succeed("nix-store --check-validity ${pkgD.drvPath}")
+ '';
+}
diff --git a/tests/plugins.sh b/tests/plugins.sh
index 6e278ad9d..baf71a362 100644
--- a/tests/plugins.sh
+++ b/tests/plugins.sh
@@ -1,10 +1,7 @@
source common.sh
-set -o pipefail
-
if [[ $BUILD_SHARED_LIBS != 1 ]]; then
- echo "plugins are not supported"
- exit 99
+ skipTest "Plugins are not supported"
fi
res=$(nix --option setting-set true --option plugin-files $PWD/plugins/libplugintest* eval --expr builtins.anotherNull)
diff --git a/tests/pure-eval.sh b/tests/pure-eval.sh
index b83ab8afe..5334bf28e 100644
--- a/tests/pure-eval.sh
+++ b/tests/pure-eval.sh
@@ -8,7 +8,7 @@ nix eval --expr 'assert 1 + 2 == 3; true'
missingImpureErrorMsg=$(! nix eval --expr 'builtins.readFile ./pure-eval.sh' 2>&1)
-echo "$missingImpureErrorMsg" | grep -q -- --impure || \
+echo "$missingImpureErrorMsg" | grepQuiet -- --impure || \
fail "The error message should mention the “--impure” flag to unblock users"
[[ $(nix eval --expr 'builtins.pathExists ./pure-eval.sh') == false ]] || \
diff --git a/tests/recursive.sh b/tests/recursive.sh
index 91518d67d..6335d44a5 100644
--- a/tests/recursive.sh
+++ b/tests/recursive.sh
@@ -4,7 +4,7 @@ sed -i 's/experimental-features .*/& recursive-nix/' "$NIX_CONF_DIR"/nix.conf
restartDaemon
# FIXME
-if [[ $(uname) != Linux ]]; then exit 99; fi
+if [[ $(uname) != Linux ]]; then skipTest "Not running Linux"; fi
clearStore
diff --git a/tests/repl.sh b/tests/repl.sh
index c555560cc..be8adb742 100644
--- a/tests/repl.sh
+++ b/tests/repl.sh
@@ -33,14 +33,14 @@ testRepl () {
nix repl "${nixArgs[@]}" <<< "$replCmds" || fail "nix repl does not work twice with the same inputs"
# simple.nix prints a PATH during build
- echo "$replOutput" | grep -qs 'PATH=' || fail "nix repl :log doesn't output logs"
+ echo "$replOutput" | grepQuiet -s 'PATH=' || fail "nix repl :log doesn't output logs"
local replOutput="$(nix repl "${nixArgs[@]}" <<< "$replFailingCmds" 2>&1)"
echo "$replOutput"
- echo "$replOutput" | grep -qs 'This should fail' \
+ echo "$replOutput" | grepQuiet -s 'This should fail' \
|| fail "nix repl :log doesn't output logs for a failed derivation"
local replOutput="$(nix repl --show-trace "${nixArgs[@]}" <<< "$replUndefinedVariable" 2>&1)"
echo "$replOutput"
- echo "$replOutput" | grep -qs "while evaluating the file" \
+ echo "$replOutput" | grepQuiet -s "while evaluating the file" \
|| fail "nix repl --show-trace doesn't show the trace"
nix repl "${nixArgs[@]}" --option pure-eval true 2>&1 <<< "builtins.currentSystem" \
@@ -58,7 +58,7 @@ testReplResponse () {
local commands="$1"; shift
local expectedResponse="$1"; shift
local response="$(nix repl "$@" <<< "$commands")"
- echo "$response" | grep -qs "$expectedResponse" \
+ echo "$response" | grepQuiet -s "$expectedResponse" \
|| fail "repl command set:
$commands
@@ -121,5 +121,5 @@ sed -i 's/beforeChange/afterChange/' flake/flake.nix
echo ":reload"
echo "changingThing"
) | nix repl ./flake --experimental-features 'flakes repl-flake')
-echo "$replResult" | grep -qs beforeChange
-echo "$replResult" | grep -qs afterChange
+echo "$replResult" | grepQuiet -s beforeChange
+echo "$replResult" | grepQuiet -s afterChange
diff --git a/tests/restricted.sh b/tests/restricted.sh
index 9bd16cf51..776893a56 100644
--- a/tests/restricted.sh
+++ b/tests/restricted.sh
@@ -48,4 +48,4 @@ output="$(nix eval --raw --restrict-eval -I "$traverseDir" \
--expr "builtins.readFile \"$traverseDir/$goUp$(pwd)/restricted-innocent\"" \
2>&1 || :)"
echo "$output" | grep "is forbidden"
-! echo "$output" | grep -F restricted-secret
+echo "$output" | grepInverse -F restricted-secret
diff --git a/tests/search.sh b/tests/search.sh
index 1a98f5b49..8742f8736 100644
--- a/tests/search.sh
+++ b/tests/search.sh
@@ -20,9 +20,9 @@ clearCache
## Search expressions
# Check that empty search string matches all
-nix search -f search.nix '' |grep -q foo
-nix search -f search.nix '' |grep -q bar
-nix search -f search.nix '' |grep -q hello
+nix search -f search.nix '' |grepQuiet foo
+nix search -f search.nix '' |grepQuiet bar
+nix search -f search.nix '' |grepQuiet hello
## Tests for multiple regex/match highlighting
diff --git a/tests/shell.sh b/tests/shell.sh
index 6a80e8385..d2f7cf14e 100644
--- a/tests/shell.sh
+++ b/tests/shell.sh
@@ -10,7 +10,7 @@ nix shell -f shell-hello.nix hello -c hello NixOS | grep 'Hello NixOS'
nix shell -f shell-hello.nix hello^dev -c hello2 | grep 'Hello2'
nix shell -f shell-hello.nix 'hello^*' -c hello2 | grep 'Hello2'
-if ! canUseSandbox; then exit 99; fi
+requireSandboxSupport
chmod -R u+w $TEST_ROOT/store0 || true
rm -rf $TEST_ROOT/store0
diff --git a/tests/tarball.sh b/tests/tarball.sh
index d5cab879c..5f39658c9 100644
--- a/tests/tarball.sh
+++ b/tests/tarball.sh
@@ -19,7 +19,7 @@ test_tarball() {
tarball=$TEST_ROOT/tarball.tar$ext
(cd $TEST_ROOT && tar cf - tarball) | $compressor > $tarball
- nix-env -f file://$tarball -qa --out-path | grep -q dependencies
+ nix-env -f file://$tarball -qa --out-path | grepQuiet dependencies
nix-build -o $TEST_ROOT/result file://$tarball
@@ -34,7 +34,7 @@ test_tarball() {
nix-build -o $TEST_ROOT/result -E "import (fetchTree { type = \"tarball\"; url = file://$tarball; narHash = \"$hash\"; })"
# Do not re-fetch paths already present
nix-build -o $TEST_ROOT/result -E "import (fetchTree { type = \"tarball\"; url = file:///does-not-exist/must-remain-unused/$tarball; narHash = \"$hash\"; })"
- nix-build -o $TEST_ROOT/result -E "import (fetchTree { type = \"tarball\"; url = file://$tarball; narHash = \"sha256-xdKv2pq/IiwLSnBBJXW8hNowI4MrdZfW+SYqDQs7Tzc=\"; })" 2>&1 | grep 'NAR hash mismatch in input'
+ expectStderr 102 nix-build -o $TEST_ROOT/result -E "import (fetchTree { type = \"tarball\"; url = file://$tarball; narHash = \"sha256-xdKv2pq/IiwLSnBBJXW8hNowI4MrdZfW+SYqDQs7Tzc=\"; })" | grep 'NAR hash mismatch in input'
nix-instantiate --strict --eval -E "!((import (fetchTree { type = \"tarball\"; url = file://$tarball; narHash = \"$hash\"; })) ? submodules)" >&2
nix-instantiate --strict --eval -E "!((import (fetchTree { type = \"tarball\"; url = file://$tarball; narHash = \"$hash\"; })) ? submodules)" 2>&1 | grep 'true'
diff --git a/tests/test-infra.sh b/tests/test-infra.sh
new file mode 100644
index 000000000..54ae120e7
--- /dev/null
+++ b/tests/test-infra.sh
@@ -0,0 +1,85 @@
+# Test the functions for testing themselves!
+# Also test some assumptions about how bash works that they rely on.
+source common.sh
+
+# `true` should exit with 0
+expect 0 true
+
+# `false` should exit with 1
+expect 1 false
+
+# `expect` will fail when we get it wrong
+expect 1 expect 0 false
+
+noisyTrue () {
+ echo YAY! >&2
+ true
+}
+
+noisyFalse () {
+ echo NAY! >&2
+ false
+}
+
+# These should redirect standard error to standard output
+expectStderr 0 noisyTrue | grepQuiet YAY
+expectStderr 1 noisyFalse | grepQuiet NAY
+
+# `set -o pipefail` is enabled
+
+pipefailure () {
+ # shellcheck disable=SC2216
+ true | false | true
+}
+expect 1 pipefailure
+unset pipefailure
+
+pipefailure () {
+ # shellcheck disable=SC2216
+ false | true | true
+}
+expect 1 pipefailure
+unset pipefailure
+
+commandSubstitutionPipeFailure () {
+ # shellcheck disable=SC2216
+ res=$(set -eu -o pipefail; false | true | echo 0)
+}
+expect 1 commandSubstitutionPipeFailure
+
+# `set -u` is enabled
+
+# Note the (...): it makes the function run in a subshell, because unbound
+# variable errors in the outer shell are *rightly* not recoverable.
+useUnbound () (
+ set -eu
+ # shellcheck disable=SC2154
+ echo "$thisVariableIsNotBound"
+)
+expect 1 useUnbound
+
+# A bare `!` unfortunately suppresses `set -e`, but its failure is still observable when wrapped in a function:
+# shellcheck disable=SC2251
+! true
+funBang () {
+ ! true
+}
+expect 1 funBang
+unset funBang
+
+# `grep -v -q` is not what we want for exit codes, but `grepInverse` is
+# Avoid `grep -v -q`. The following line proves the point, and if it fails,
+# we'll know that `grep` had a breaking change or `-v -q` may not be portable.
+{ echo foo; echo bar; } | grep -v -q foo
+{ echo foo; echo bar; } | expect 1 grepInverse foo
+
+# `grepQuiet` is quiet
+res=$(set -eu -o pipefail; echo foo | grepQuiet foo | wc -c)
+(( res == 0 ))
+unset res
+
+# `grepQuietInverse` is both quiet and inverted
+{ echo foo; echo bar; } | expect 1 grepQuietInverse foo
+res=$(set -eu -o pipefail; echo foo | expect 1 grepQuietInverse foo | wc -c)
+(( res == 0 ))
+unset res
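
The helpers exercised by this new test file are defined in the shared test library, which is not shown in this part of the diff. As a rough illustration of their behaviour (function names match the ones used above, but the bodies are simplified guesses, not the actual definitions):

    # Sketch only: simplified stand-ins for the helpers tested above.
    # The real definitions live in the shared test library and may differ.

    expect() {
        # Run a command and require that it exits with the given status.
        local expected res
        expected="$1"; shift
        "$@" && res=0 || res="$?"
        [[ "$res" -eq "$expected" ]]
    }

    expectStderr() {
        # Like expect, but with standard error redirected to standard output
        # so the message can be piped into grepQuiet and friends.
        local expected="$1"; shift
        expect "$expected" "$@" 2>&1
    }

    grepQuiet() {
        # Like `grep -q`, but reads all of its input, so the producer in the
        # pipeline is not killed early (which matters under `set -o pipefail`).
        grep "$@" > /dev/null
    }

    grepInverse() {
        # Succeed only if nothing matches; unlike `grep -v`, the exit code
        # reflects "no match anywhere" rather than "some line did not match".
        ! grep "$@"
    }

    grepQuietInverse() {
        # Both quiet and inverted.
        ! grep "$@" > /dev/null
    }

    skipTest() {
        # Abort the current test script with the exit code the test runner
        # treats as "skipped".
        echo "$1, skipping this test..." >&2
        exit 99
    }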
diff --git a/tests/timeout.sh b/tests/timeout.sh
index e3fb3ebcc..b179b79a2 100644
--- a/tests/timeout.sh
+++ b/tests/timeout.sh
@@ -5,17 +5,14 @@ source common.sh
# XXX: This shouldn’t be, but #4813 causes this test to fail
needLocalStore "see #4813"
-set +e
-messages=$(nix-build -Q timeout.nix -A infiniteLoop --timeout 2 2>&1)
-status=$?
-set -e
+messages=$(nix-build -Q timeout.nix -A infiniteLoop --timeout 2 2>&1) && status=0 || status=$?
if [ $status -ne 101 ]; then
echo "error: 'nix-store' exited with '$status'; should have exited 101"
exit 1
fi
-if ! echo "$messages" | grep -q "timed out"; then
+if echo "$messages" | grepQuietInverse "timed out"; then
echo "error: build may have failed for reasons other than timeout; output:"
echo "$messages" >&2
exit 1
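
The change above replaces a `set +e` / `set -e` bracket with a one-liner. As a small illustrative sketch (with a placeholder command), the idiom captures the exit status without ever turning errexit off:

    set -e
    # `maybe-fail` is a placeholder for any command whose status we want.
    out=$(maybe-fail 2>&1) && status=0 || status=$?
    # Because the assignment is part of an `&& ... || ...` compound, a failure
    # does not trigger errexit; $status holds the real exit code either way.
    echo "exited with $status"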
diff --git a/tests/user-envs-migration.sh b/tests/user-envs-migration.sh
index 467c28fbb..187372b16 100644
--- a/tests/user-envs-migration.sh
+++ b/tests/user-envs-migration.sh
@@ -4,7 +4,7 @@
source common.sh
if isDaemonNewer "2.4pre20211005"; then
- exit 99
+ skipTest "Daemon is too new"
fi
diff --git a/tests/user-envs.sh b/tests/user-envs.sh
index d63fe780a..d1260ba04 100644
--- a/tests/user-envs.sh
+++ b/tests/user-envs.sh
@@ -1,6 +1,6 @@
source common.sh
-if [ -z "$storeCleared" ]; then
+if [ -z "${storeCleared-}" ]; then
clearStore
fi
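
The `${storeCleared-}` form matters because the shared test setup now runs with `set -u` (as checked in test-infra.sh above); a minimal sketch of the difference:

    set -u
    unset storeCleared
    # "${storeCleared-}" expands to the empty string when the variable is unset,
    # so this check runs without error:
    if [ -z "${storeCleared-}" ]; then echo "unset or empty"; fi
    # "$storeCleared" (no default) would abort here with an "unbound variable" error.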
@@ -28,13 +28,13 @@ nix-env -f ./user-envs.nix -qa --json --out-path | jq -e '.[] | select(.name ==
] | all'
# Query descriptions.
-nix-env -f ./user-envs.nix -qa '*' --description | grep -q silly
+nix-env -f ./user-envs.nix -qa '*' --description | grepQuiet silly
rm -rf $HOME/.nix-defexpr
ln -s $(pwd)/user-envs.nix $HOME/.nix-defexpr
-nix-env -qa '*' --description | grep -q silly
+nix-env -qa '*' --description | grepQuiet silly
# Query the system.
-nix-env -qa '*' --system | grep -q $system
+nix-env -qa '*' --system | grepQuiet $system
# Install "foo-1.0".
nix-env -i foo-1.0
@@ -42,19 +42,19 @@ nix-env -i foo-1.0
# Query installed: should contain foo-1.0 now (which should be
# executable).
test "$(nix-env -q '*' | wc -l)" -eq 1
-nix-env -q '*' | grep -q foo-1.0
+nix-env -q '*' | grepQuiet foo-1.0
test "$($profiles/test/bin/foo)" = "foo-1.0"
# Test nix-env -qc to compare installed against available packages, and vice versa.
-nix-env -qc '*' | grep -q '< 2.0'
-nix-env -qac '*' | grep -q '> 1.0'
+nix-env -qc '*' | grepQuiet '< 2.0'
+nix-env -qac '*' | grepQuiet '> 1.0'
# Test the -b flag to filter out source-only packages.
[ "$(nix-env -qab | wc -l)" -eq 1 ]
# Test the -s flag to get package status.
-nix-env -qas | grep -q 'IP- foo-1.0'
-nix-env -qas | grep -q -- '--- bar-0.1'
+nix-env -qas | grepQuiet 'IP- foo-1.0'
+nix-env -qas | grepQuiet -- '--- bar-0.1'
# Disable foo.
nix-env --set-flag active false foo
@@ -74,15 +74,15 @@ nix-env -i foo-2.0pre1
# Query installed: should contain foo-2.0pre1 now.
test "$(nix-env -q '*' | wc -l)" -eq 1
-nix-env -q '*' | grep -q foo-2.0pre1
+nix-env -q '*' | grepQuiet foo-2.0pre1
test "$($profiles/test/bin/foo)" = "foo-2.0pre1"
# Upgrade "foo": should install foo-2.0.
-NIX_PATH=nixpkgs=./user-envs.nix:$NIX_PATH nix-env -f '<nixpkgs>' -u foo
+NIX_PATH=nixpkgs=./user-envs.nix:${NIX_PATH-} nix-env -f '<nixpkgs>' -u foo
# Query installed: should contain foo-2.0 now.
test "$(nix-env -q '*' | wc -l)" -eq 1
-nix-env -q '*' | grep -q foo-2.0
+nix-env -q '*' | grepQuiet foo-2.0
test "$($profiles/test/bin/foo)" = "foo-2.0"
# Store the path of foo-2.0.
@@ -94,20 +94,20 @@ nix-env -i bar-0.1
nix-env -e foo
# Query installed: should only contain bar-0.1 now.
-if nix-env -q '*' | grep -q foo; then false; fi
-nix-env -q '*' | grep -q bar
+if nix-env -q '*' | grepQuiet foo; then false; fi
+nix-env -q '*' | grepQuiet bar
# Rollback: should bring "foo" back.
oldGen="$(nix-store -q --resolve $profiles/test)"
nix-env --rollback
[ "$(nix-store -q --resolve $profiles/test)" != "$oldGen" ]
-nix-env -q '*' | grep -q foo-2.0
-nix-env -q '*' | grep -q bar
+nix-env -q '*' | grepQuiet foo-2.0
+nix-env -q '*' | grepQuiet bar
# Rollback again: should remove "bar".
nix-env --rollback
-nix-env -q '*' | grep -q foo-2.0
-if nix-env -q '*' | grep -q bar; then false; fi
+nix-env -q '*' | grepQuiet foo-2.0
+if nix-env -q '*' | grepQuiet bar; then false; fi
# Count generations.
nix-env --list-generations
@@ -129,7 +129,7 @@ nix-env --switch-generation 7
# Install foo-1.0, now using its store path.
nix-env -i "$outPath10"
-nix-env -q '*' | grep -q foo-1.0
+nix-env -q '*' | grepQuiet foo-1.0
nix-store -qR $profiles/test | grep "$outPath10"
nix-store -q --referrers-closure $profiles/test | grep "$(nix-store -q --resolve $profiles/test)"
[ "$(nix-store -q --deriver "$outPath10")" = $drvPath10 ]
@@ -137,12 +137,12 @@ nix-store -q --referrers-closure $profiles/test | grep "$(nix-store -q --resolve
# Uninstall foo-1.0, using a symlink to its store path.
ln -sfn $outPath10/bin/foo $TEST_ROOT/symlink
nix-env -e $TEST_ROOT/symlink
-if nix-env -q '*' | grep -q foo; then false; fi
-(! nix-store -qR $profiles/test | grep "$outPath10")
+if nix-env -q '*' | grepQuiet foo; then false; fi
+nix-store -qR $profiles/test | grepInverse "$outPath10"
# Install foo-1.0, now using a symlink to its store path.
nix-env -i $TEST_ROOT/symlink
-nix-env -q '*' | grep -q foo
+nix-env -q '*' | grepQuiet foo
# Delete all old generations.
nix-env --delete-generations old
@@ -160,7 +160,7 @@ test "$(nix-env -q '*' | wc -l)" -eq 0
# Installing "foo" should only install the newest foo.
nix-env -i foo
test "$(nix-env -q '*' | grep foo- | wc -l)" -eq 1
-nix-env -q '*' | grep -q foo-2.0
+nix-env -q '*' | grepQuiet foo-2.0
# On the other hand, this should install both (and should fail due to
# a collision).
@@ -171,8 +171,8 @@ nix-env -e '*'
nix-env -e '*'
nix-env -i '*'
test "$(nix-env -q '*' | wc -l)" -eq 2
-nix-env -q '*' | grep -q foo-2.0
-nix-env -q '*' | grep -q bar-0.1.1
+nix-env -q '*' | grepQuiet foo-2.0
+nix-env -q '*' | grepQuiet bar-0.1.1
# Test priorities: foo-0.1 has a lower priority than foo-1.0, so it
# should be possible to install both without a collision. Also test
diff --git a/tests/why-depends.sh b/tests/why-depends.sh
index a04d529b5..b35a0d1cf 100644
--- a/tests/why-depends.sh
+++ b/tests/why-depends.sh
@@ -16,9 +16,9 @@ FAST_WHY_DEPENDS_OUTPUT=$(nix why-depends ./toplevel ./dep)
PRECISE_WHY_DEPENDS_OUTPUT=$(nix why-depends ./toplevel ./dep --precise)
# Both outputs should show that `input-2` is in the dependency chain
-echo "$FAST_WHY_DEPENDS_OUTPUT" | grep -q input-2
-echo "$PRECISE_WHY_DEPENDS_OUTPUT" | grep -q input-2
+echo "$FAST_WHY_DEPENDS_OUTPUT" | grepQuiet input-2
+echo "$PRECISE_WHY_DEPENDS_OUTPUT" | grepQuiet input-2
-# But only the “precise” one should refere to `reference-to-input-2`
-echo "$FAST_WHY_DEPENDS_OUTPUT" | (! grep -q reference-to-input-2)
-echo "$PRECISE_WHY_DEPENDS_OUTPUT" | grep -q reference-to-input-2
+# But only the “precise” one should refer to `reference-to-input-2`
+echo "$FAST_WHY_DEPENDS_OUTPUT" | grepQuietInverse reference-to-input-2
+echo "$PRECISE_WHY_DEPENDS_OUTPUT" | grepQuiet reference-to-input-2